From 2f65b9e001eeef0f2494cb1d1cc958a91c74d190 Mon Sep 17 00:00:00 2001 From: Kieran Lane Date: Tue, 21 Oct 2025 13:35:55 +0100 Subject: [PATCH 01/72] Update `oidc_session_no_samesite` cookie to be `Secure` (#19079) --- changelog.d/19079.bugfix | 1 + synapse/handlers/oidc.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/19079.bugfix diff --git a/changelog.d/19079.bugfix b/changelog.d/19079.bugfix new file mode 100644 index 0000000000..a7d9800d1d --- /dev/null +++ b/changelog.d/19079.bugfix @@ -0,0 +1 @@ +Fix the `oidc_session_no_samesite` cookie to have the `Secure` attribute, so the only difference between it and the paired `oidc_session` cookie, is the configuration of the `SameSite` attribute as described in the comments / cookie names. Contributed by @kieranlane. \ No newline at end of file diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py index fc93c6b2a8..39505463bb 100644 --- a/synapse/handlers/oidc.py +++ b/synapse/handlers/oidc.py @@ -96,7 +96,7 @@ # Here we have the names of the cookies, and the options we use to set them. _SESSION_COOKIES = [ (b"oidc_session", b"HttpOnly; Secure; SameSite=None"), - (b"oidc_session_no_samesite", b"HttpOnly"), + (b"oidc_session_no_samesite", b"HttpOnly; Secure"), ] From 44279083402fb1a046216bfe716fc8689e80f21e Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 21 Oct 2025 14:17:53 +0100 Subject: [PATCH 02/72] newsfile --- changelog.d/19081.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/19081.misc diff --git a/changelog.d/19081.misc b/changelog.d/19081.misc new file mode 100644 index 0000000000..8518840fb6 --- /dev/null +++ b/changelog.d/19081.misc @@ -0,0 +1 @@ +Update the deprecated poetry development dependencies group name in `pyproject.toml`. 
\ No newline at end of file From 6c16734cf34f9229fc1f552bf8aa8b9f2e7a53ad Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 21 Oct 2025 14:18:40 +0100 Subject: [PATCH 03/72] Revert "newsfile" This reverts commit 44279083402fb1a046216bfe716fc8689e80f21e. This should not have been committed to `develop`. --- changelog.d/19081.misc | 1 - 1 file changed, 1 deletion(-) delete mode 100644 changelog.d/19081.misc diff --git a/changelog.d/19081.misc b/changelog.d/19081.misc deleted file mode 100644 index 8518840fb6..0000000000 --- a/changelog.d/19081.misc +++ /dev/null @@ -1 +0,0 @@ -Update the deprecated poetry development dependencies group name in `pyproject.toml`. \ No newline at end of file From ff242faad0ce3f6a53c365f1470f782aeee19963 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 21 Oct 2025 16:40:26 +0100 Subject: [PATCH 04/72] Don't exit the release script if there are uncommitted changes Instead, allow the user to fix them and retry. --- scripts-dev/release.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts-dev/release.py b/scripts-dev/release.py index 391881797e..fafa55c770 100755 --- a/scripts-dev/release.py +++ b/scripts-dev/release.py @@ -820,8 +820,10 @@ def get_repo_and_check_clean_checkout( raise click.ClickException( f"{path} is not a git repository (expecting a {name} repository)." ) - if repo.is_dirty(): - raise click.ClickException(f"Uncommitted changes exist in {path}.") + while repo.is_dirty(): + if not click.confirm(f"Uncommitted changes exist in {path}. Commit or stash them. 
Ready to continue?"): + raise click.ClickException("Aborted.") + return repo From cba3a814c63ad877c482df3bc75570e4a7d61ddb Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 22 Oct 2025 10:39:04 -0500 Subject: [PATCH 05/72] Fix lints on `develop` (#19092) Snuck in with https://github.com/element-hq/synapse/commit/ff242faad0ce3f6a53c365f1470f782aeee19963 --- changelog.d/19092.misc | 1 + scripts-dev/release.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog.d/19092.misc diff --git a/changelog.d/19092.misc b/changelog.d/19092.misc new file mode 100644 index 0000000000..c5060c1c8b --- /dev/null +++ b/changelog.d/19092.misc @@ -0,0 +1 @@ +Fix lints on main branch. diff --git a/scripts-dev/release.py b/scripts-dev/release.py index fafa55c770..c5c72156cc 100755 --- a/scripts-dev/release.py +++ b/scripts-dev/release.py @@ -821,7 +821,9 @@ def get_repo_and_check_clean_checkout( f"{path} is not a git repository (expecting a {name} repository)." ) while repo.is_dirty(): - if not click.confirm(f"Uncommitted changes exist in {path}. Commit or stash them. Ready to continue?"): + if not click.confirm( + f"Uncommitted changes exist in {path}. Commit or stash them. Ready to continue?" 
+ ): raise click.ClickException("Aborted.") return repo From fc244bb592aa481faf28214a2e2ce3bb4e95d990 Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Wed, 22 Oct 2025 17:48:19 -0400 Subject: [PATCH 06/72] Use type hinting generics in standard collections (#19046) aka PEP 585, added in Python 3.9 - https://peps.python.org/pep-0585/ - https://docs.astral.sh/ruff/rules/non-pep585-annotation/ --- build_rust.py | 4 +- changelog.d/19046.misc | 1 + contrib/graph/graph.py | 5 +- docker/configure_workers_and_start.py | 55 ++--- docker/start.py | 6 +- pyproject.toml | 6 + scripts-dev/build_debian_packages.py | 4 +- scripts-dev/check_locked_deps_have_sdists.py | 3 +- scripts-dev/check_pydantic_models.py | 28 +-- scripts-dev/check_schema_delta.py | 8 +- scripts-dev/federation_client.py | 8 +- scripts-dev/mypy_synapse_plugin.py | 10 +- scripts-dev/release.py | 4 +- scripts-dev/schema_versions.py | 6 +- stubs/sortedcontainers/sorteddict.pyi | 32 ++- stubs/sortedcontainers/sortedlist.pyi | 19 +- stubs/sortedcontainers/sortedset.pyi | 12 +- stubs/txredisapi.pyi | 8 +- synapse/__init__.py | 4 +- synapse/_scripts/generate_workers_map.py | 30 +-- synapse/_scripts/register_new_matrix_user.py | 6 +- synapse/_scripts/review_recent_signups.py | 11 +- synapse/_scripts/synapse_port_db.py | 51 ++-- synapse/api/auth/__init__.py | 6 +- synapse/api/auth/base.py | 6 +- synapse/api/auth/mas.py | 4 +- synapse/api/auth/msc3861_delegated.py | 14 +- synapse/api/errors.py | 16 +- synapse/api/filtering.py | 23 +- synapse/api/ratelimiting.py | 8 +- synapse/api/room_versions.py | 6 +- synapse/app/_base.py | 23 +- synapse/app/admin_cmd.py | 6 +- synapse/app/complement_fork_starter.py | 10 +- synapse/app/generic_worker.py | 5 +- synapse/app/homeserver.py | 10 +- synapse/app/phone_stats_home.py | 8 +- synapse/appservice/__init__.py | 16 +- synapse/appservice/api.py | 31 ++- synapse/appservice/scheduler.py | 26 +- synapse/config/__main__.py | 3 +- synapse/config/_base.py | 30 +-- 
synapse/config/_base.pyi | 30 +-- synapse/config/_util.py | 8 +- synapse/config/api.py | 4 +- synapse/config/appservice.py | 10 +- synapse/config/cache.py | 6 +- synapse/config/cas.py | 4 +- synapse/config/database.py | 4 +- synapse/config/key.py | 14 +- synapse/config/logger.py | 4 +- synapse/config/modules.py | 4 +- synapse/config/oembed.py | 12 +- synapse/config/oidc.py | 10 +- synapse/config/password_auth_providers.py | 4 +- synapse/config/ratelimiting.py | 8 +- synapse/config/registration.py | 4 +- synapse/config/repository.py | 12 +- synapse/config/retention.py | 4 +- synapse/config/saml2.py | 11 +- synapse/config/server.py | 28 +-- synapse/config/spam_checker.py | 4 +- synapse/config/sso.py | 6 +- synapse/config/tls.py | 4 +- synapse/config/tracer.py | 6 +- synapse/config/user_types.py | 6 +- synapse/config/workers.py | 36 +-- synapse/crypto/event_signing.py | 10 +- synapse/crypto/keyring.py | 72 +++--- synapse/event_auth.py | 12 +- synapse/events/__init__.py | 30 +-- synapse/events/auto_accept_invites.py | 4 +- synapse/events/builder.py | 12 +- synapse/events/presence_router.py | 25 +- synapse/events/snapshot.py | 26 +- synapse/events/utils.py | 26 +- synapse/events/validator.py | 6 +- synapse/federation/federation_base.py | 4 +- synapse/federation/federation_client.py | 59 +++-- synapse/federation/federation_server.py | 89 ++++--- synapse/federation/persistence.py | 4 +- synapse/federation/send_queue.py | 34 ++- synapse/federation/sender/__init__.py | 13 +- .../sender/per_destination_queue.py | 26 +- .../federation/sender/transaction_manager.py | 6 +- synapse/federation/transport/client.py | 43 ++-- .../federation/transport/server/__init__.py | 18 +- synapse/federation/transport/server/_base.py | 12 +- .../federation/transport/server/federation.py | 122 +++++----- synapse/federation/units.py | 12 +- synapse/handlers/account.py | 16 +- synapse/handlers/account_data.py | 6 +- synapse/handlers/account_validity.py | 6 +- synapse/handlers/admin.py | 12 +- 
synapse/handlers/appservice.py | 55 ++--- synapse/handlers/auth.py | 106 ++++----- synapse/handlers/cas.py | 10 +- synapse/handlers/delayed_events.py | 10 +- synapse/handlers/device.py | 56 ++--- synapse/handlers/devicemessage.py | 8 +- synapse/handlers/directory.py | 4 +- synapse/handlers/e2e_keys.py | 76 +++--- synapse/handlers/e2e_room_keys.py | 6 +- synapse/handlers/event_auth.py | 4 +- synapse/handlers/events.py | 4 +- synapse/handlers/federation.py | 48 ++-- synapse/handlers/federation_event.py | 24 +- synapse/handlers/identity.py | 8 +- synapse/handlers/initial_sync.py | 8 +- synapse/handlers/jwt.py | 4 +- synapse/handlers/message.py | 50 ++-- synapse/handlers/oidc.py | 29 ++- synapse/handlers/pagination.py | 14 +- synapse/handlers/presence.py | 101 ++++---- synapse/handlers/profile.py | 4 +- synapse/handlers/push_rules.py | 6 +- synapse/handlers/receipts.py | 14 +- synapse/handlers/register.py | 8 +- synapse/handlers/relations.py | 21 +- synapse/handlers/room.py | 59 +++-- synapse/handlers/room_list.py | 10 +- synapse/handlers/room_member.py | 72 +++--- synapse/handlers/room_member_worker.py | 14 +- synapse/handlers/room_summary.py | 32 +-- synapse/handlers/saml.py | 8 +- synapse/handlers/search.py | 34 +-- synapse/handlers/send_email.py | 4 +- synapse/handlers/sliding_sync/__init__.py | 32 +-- synapse/handlers/sliding_sync/extensions.py | 26 +- synapse/handlers/sliding_sync/room_lists.py | 108 ++++----- synapse/handlers/sso.py | 13 +- synapse/handlers/stats.py | 10 +- synapse/handlers/sync.py | 131 +++++----- synapse/handlers/typing.py | 24 +- synapse/handlers/ui_auth/checkers.py | 4 +- synapse/handlers/user_directory.py | 8 +- synapse/handlers/worker_lock.py | 15 +- synapse/http/additional_resource.py | 6 +- synapse/http/client.py | 21 +- .../federation/matrix_federation_agent.py | 4 +- synapse/http/federation/srv_resolver.py | 12 +- .../http/federation/well_known_resolver.py | 8 +- synapse/http/matrixfederationclient.py | 19 +- synapse/http/proxy.py | 8 
+- synapse/http/proxyagent.py | 12 +- synapse/http/replicationagent.py | 6 +- synapse/http/request_metrics.py | 8 +- synapse/http/server.py | 15 +- synapse/http/servlet.py | 33 ++- synapse/http/site.py | 6 +- synapse/logging/_remote.py | 6 +- synapse/logging/context.py | 10 +- synapse/logging/formatter.py | 6 +- synapse/logging/opentracing.py | 37 ++- synapse/media/_base.py | 10 +- synapse/media/filepath.py | 10 +- synapse/media/media_repository.py | 30 +-- synapse/media/media_storage.py | 9 +- synapse/media/oembed.py | 10 +- synapse/media/preview_html.py | 29 +-- synapse/media/thumbnailer.py | 24 +- synapse/media/url_previewer.py | 4 +- synapse/metrics/__init__.py | 24 +- synapse/metrics/background_process_metrics.py | 9 +- synapse/module_api/__init__.py | 43 ++-- .../callbacks/account_validity_callbacks.py | 10 +- .../callbacks/media_repository_callbacks.py | 16 +- .../callbacks/ratelimit_callbacks.py | 4 +- .../callbacks/spamchecker_callbacks.py | 88 ++++--- .../third_party_event_rules_callbacks.py | 32 +-- synapse/notifier.py | 44 ++-- synapse/push/__init__.py | 4 +- synapse/push/bulk_push_rule_evaluator.py | 30 +-- synapse/push/clientformat.py | 14 +- synapse/push/emailpusher.py | 6 +- synapse/push/httppusher.py | 8 +- synapse/push/mailer.py | 24 +- synapse/push/presentable_names.py | 6 +- synapse/push/push_tools.py | 5 +- synapse/push/push_types.py | 8 +- synapse/push/pusher.py | 6 +- synapse/push/pusherpool.py | 4 +- synapse/replication/http/_base.py | 10 +- synapse/replication/http/account_data.py | 14 +- .../replication/http/deactivate_account.py | 4 +- synapse/replication/http/delayed_events.py | 4 +- synapse/replication/http/devices.py | 20 +- synapse/replication/http/federation.py | 14 +- synapse/replication/http/login.py | 4 +- synapse/replication/http/membership.py | 16 +- synapse/replication/http/presence.py | 6 +- synapse/replication/http/push.py | 8 +- synapse/replication/http/register.py | 6 +- synapse/replication/http/send_events.py | 8 +- 
synapse/replication/http/state.py | 4 +- synapse/replication/http/streams.py | 4 +- synapse/replication/tcp/client.py | 10 +- synapse/replication/tcp/commands.py | 26 +- synapse/replication/tcp/handler.py | 32 ++- synapse/replication/tcp/protocol.py | 6 +- synapse/replication/tcp/redis.py | 10 +- synapse/replication/tcp/resource.py | 8 +- synapse/replication/tcp/streams/_base.py | 12 +- synapse/replication/tcp/streams/events.py | 16 +- synapse/replication/tcp/streams/federation.py | 6 +- synapse/rest/__init__.py | 8 +- synapse/rest/admin/__init__.py | 8 +- synapse/rest/admin/background_updates.py | 10 +- synapse/rest/admin/devices.py | 14 +- synapse/rest/admin/event_reports.py | 8 +- synapse/rest/admin/events.py | 4 +- synapse/rest/admin/experimental_features.py | 6 +- synapse/rest/admin/federation.py | 10 +- synapse/rest/admin/media.py | 28 +-- synapse/rest/admin/registration_tokens.py | 12 +- synapse/rest/admin/rooms.py | 40 ++-- synapse/rest/admin/scheduled_tasks.py | 4 +- synapse/rest/admin/server_notice_servlet.py | 8 +- synapse/rest/admin/statistics.py | 6 +- synapse/rest/admin/username_available.py | 4 +- synapse/rest/admin/users.py | 70 +++--- synapse/rest/client/_base.py | 6 +- synapse/rest/client/account.py | 32 +-- synapse/rest/client/account_data.py | 14 +- synapse/rest/client/account_validity.py | 4 +- synapse/rest/client/appservice_ping.py | 6 +- synapse/rest/client/auth_metadata.py | 6 +- synapse/rest/client/capabilities.py | 4 +- synapse/rest/client/delayed_events.py | 6 +- synapse/rest/client/devices.py | 28 +-- synapse/rest/client/directory.py | 20 +- synapse/rest/client/events.py | 8 +- synapse/rest/client/filter.py | 6 +- synapse/rest/client/initial_sync.py | 6 +- synapse/rest/client/keys.py | 22 +- synapse/rest/client/knock.py | 6 +- synapse/rest/client/login.py | 19 +- synapse/rest/client/login_token_request.py | 4 +- synapse/rest/client/logout.py | 6 +- synapse/rest/client/matrixrtc.py | 4 +- synapse/rest/client/mutual_rooms.py | 6 +- 
synapse/rest/client/notifications.py | 4 +- synapse/rest/client/openid.py | 4 +- synapse/rest/client/password_policy.py | 4 +- synapse/rest/client/presence.py | 6 +- synapse/rest/client/profile.py | 10 +- synapse/rest/client/push_rule.py | 18 +- synapse/rest/client/pusher.py | 6 +- synapse/rest/client/read_marker.py | 4 +- synapse/rest/client/receipts.py | 4 +- synapse/rest/client/register.py | 18 +- synapse/rest/client/relations.py | 6 +- synapse/rest/client/reporting.py | 8 +- synapse/rest/client/room.py | 84 +++---- synapse/rest/client/room_keys.py | 18 +- .../rest/client/room_upgrade_rest_servlet.py | 4 +- synapse/rest/client/sendtodevice.py | 6 +- synapse/rest/client/sync.py | 22 +- synapse/rest/client/tags.py | 8 +- synapse/rest/client/thirdparty.py | 14 +- synapse/rest/client/thread_subscriptions.py | 14 +- synapse/rest/client/transactions.py | 10 +- synapse/rest/client/user_directory.py | 4 +- synapse/rest/client/versions.py | 4 +- synapse/rest/client/voip.py | 4 +- synapse/rest/consent/consent_resource.py | 6 +- synapse/rest/key/v2/local_key_resource.py | 4 +- synapse/rest/key/v2/remote_key_resource.py | 16 +- synapse/rest/media/upload_resource.py | 6 +- .../synapse/client/federation_whitelist.py | 4 +- synapse/rest/synapse/client/jwks.py | 4 +- synapse/rest/synapse/client/password_reset.py | 6 +- synapse/rest/synapse/client/pick_username.py | 6 +- synapse/rest/synapse/client/rendezvous.py | 8 +- synapse/rest/synapse/mas/devices.py | 10 +- synapse/rest/synapse/mas/users.py | 18 +- synapse/rest/well_known.py | 4 +- synapse/server.py | 20 +- .../server_notices/consent_server_notices.py | 4 +- .../resource_limits_server_notices.py | 8 +- synapse/state/__init__.py | 28 +-- synapse/state/v1.py | 30 +-- synapse/state/v2.py | 86 ++++--- synapse/storage/_base.py | 4 +- synapse/storage/background_updates.py | 18 +- synapse/storage/controllers/persist_events.py | 59 +++-- synapse/storage/controllers/purge_events.py | 5 +- synapse/storage/controllers/state.py | 28 
+-- synapse/storage/controllers/stats.py | 4 +- synapse/storage/database.py | 202 ++++++++-------- synapse/storage/databases/__init__.py | 6 +- synapse/storage/databases/main/__init__.py | 22 +- .../storage/databases/main/account_data.py | 46 ++-- synapse/storage/databases/main/appservice.py | 20 +- synapse/storage/databases/main/cache.py | 14 +- synapse/storage/databases/main/client_ips.py | 37 ++- .../storage/databases/main/delayed_events.py | 18 +- synapse/storage/databases/main/deviceinbox.py | 54 ++--- synapse/storage/databases/main/devices.py | 126 +++++----- synapse/storage/databases/main/directory.py | 6 +- .../storage/databases/main/e2e_room_keys.py | 27 +-- .../storage/databases/main/end_to_end_keys.py | 134 +++++------ .../databases/main/event_federation.py | 225 +++++++++--------- .../databases/main/event_push_actions.py | 69 +++--- synapse/storage/databases/main/events.py | 160 ++++++------- .../databases/main/events_bg_updates.py | 64 ++--- .../main/events_forward_extremities.py | 8 +- .../storage/databases/main/events_worker.py | 150 ++++++------ .../databases/main/experimental_features.py | 8 +- synapse/storage/databases/main/filtering.py | 4 +- synapse/storage/databases/main/keys.py | 20 +- synapse/storage/databases/main/lock.py | 14 +- .../databases/main/media_repository.py | 40 ++-- synapse/storage/databases/main/metrics.py | 26 +- .../databases/main/monthly_active_users.py | 20 +- synapse/storage/databases/main/presence.py | 27 +-- synapse/storage/databases/main/profile.py | 10 +- .../storage/databases/main/purge_events.py | 10 +- synapse/storage/databases/main/push_rule.py | 35 ++- synapse/storage/databases/main/pusher.py | 29 +-- synapse/storage/databases/main/receipts.py | 68 +++--- .../storage/databases/main/registration.py | 68 +++--- synapse/storage/databases/main/relations.py | 77 +++--- synapse/storage/databases/main/room.py | 98 ++++---- synapse/storage/databases/main/roommember.py | 109 ++++----- 
synapse/storage/databases/main/search.py | 35 ++- synapse/storage/databases/main/signatures.py | 6 +- .../storage/databases/main/sliding_sync.py | 18 +- synapse/storage/databases/main/state.py | 31 +-- .../storage/databases/main/state_deltas.py | 14 +- synapse/storage/databases/main/stats.py | 47 ++-- synapse/storage/databases/main/stream.py | 118 +++++---- synapse/storage/databases/main/tags.py | 20 +- .../storage/databases/main/task_scheduler.py | 18 +- .../databases/main/thread_subscriptions.py | 15 +- .../storage/databases/main/transactions.py | 42 ++-- synapse/storage/databases/main/ui_auth.py | 16 +- .../storage/databases/main/user_directory.py | 51 ++-- .../databases/main/user_erasure_store.py | 4 +- synapse/storage/databases/state/bg_updates.py | 17 +- synapse/storage/databases/state/deletion.py | 10 +- synapse/storage/databases/state/store.py | 38 ++- synapse/storage/engines/postgres.py | 6 +- synapse/storage/engines/sqlite.py | 4 +- synapse/storage/prepare_database.py | 8 +- synapse/storage/roommember.py | 4 +- .../storage/schema/main/delta/30/as_users.py | 6 +- synapse/storage/types.py | 33 ++- synapse/storage/util/id_generators.py | 43 ++-- .../util/partial_state_events_tracker.py | 8 +- synapse/storage/util/sequence.py | 8 +- synapse/streams/__init__.py | 4 +- synapse/streams/events.py | 4 +- synapse/synapse_rust/acl.pyi | 4 +- synapse/synapse_rust/events.pyi | 4 +- synapse/synapse_rust/push.pyi | 8 +- synapse/synapse_rust/segmenter.pyi | 4 +- synapse/types/__init__.py | 43 ++-- synapse/types/handlers/__init__.py | 8 +- synapse/types/handlers/sliding_sync.py | 24 +- synapse/types/rest/client/__init__.py | 42 ++-- synapse/types/state.py | 42 ++-- synapse/util/__init__.py | 6 +- synapse/util/async_helpers.py | 72 +++--- synapse/util/batching_queue.py | 10 +- synapse/util/caches/__init__.py | 6 +- synapse/util/caches/deferred_cache.py | 15 +- synapse/util/caches/descriptors.py | 28 +-- synapse/util/caches/dictionary_cache.py | 15 +- 
synapse/util/caches/lrucache.py | 23 +- synapse/util/caches/response_cache.py | 3 +- synapse/util/caches/stream_change_cache.py | 14 +- synapse/util/caches/ttlcache.py | 6 +- synapse/util/clock.py | 6 +- synapse/util/daemonize.py | 4 +- synapse/util/distributor.py | 10 +- synapse/util/events.py | 6 +- synapse/util/gai_resolver.py | 11 +- synapse/util/httpresourcetree.py | 5 +- synapse/util/iterutils.py | 12 +- synapse/util/json.py | 3 +- synapse/util/linked_list.py | 6 +- synapse/util/manhole.py | 4 +- synapse/util/metrics.py | 8 +- synapse/util/module_loader.py | 4 +- synapse/util/patch_inline_callbacks.py | 6 +- synapse/util/ratelimitutils.py | 17 +- synapse/util/retryutils.py | 4 +- synapse/util/stringutils.py | 8 +- synapse/util/task_scheduler.py | 16 +- synapse/util/wheel_timer.py | 10 +- synapse/visibility.py | 27 +-- synmark/__main__.py | 4 +- tests/api/test_filtering.py | 11 +- tests/app/test_openid_listener.py | 5 +- tests/appservice/test_api.py | 6 +- tests/appservice/test_scheduler.py | 8 +- tests/config/utils.py | 3 +- tests/crypto/test_keyring.py | 30 +-- tests/events/test_auto_accept_invites.py | 8 +- tests/events/test_presence_router.py | 14 +- tests/events/test_utils.py | 4 +- tests/federation/test_federation_catch_up.py | 12 +- .../test_federation_out_of_band_membership.py | 8 +- tests/federation/test_federation_sender.py | 10 +- .../federation/transport/server/test__base.py | 9 +- tests/federation/transport/test_client.py | 4 +- tests/federation/transport/test_knocking.py | 6 +- tests/handlers/test_appservice.py | 10 +- tests/handlers/test_cas.py | 4 +- tests/handlers/test_directory.py | 8 +- tests/handlers/test_e2e_keys.py | 6 +- tests/handlers/test_message.py | 5 +- tests/handlers/test_oauth_delegation.py | 10 +- tests/handlers/test_oidc.py | 8 +- tests/handlers/test_password_providers.py | 10 +- tests/handlers/test_profile.py | 6 +- tests/handlers/test_receipts.py | 3 +- tests/handlers/test_register.py | 20 +- tests/handlers/test_room_list.py | 
6 +- tests/handlers/test_room_summary.py | 24 +- tests/handlers/test_saml.py | 8 +- tests/handlers/test_send_email.py | 14 +- tests/handlers/test_sliding_sync.py | 16 +- tests/handlers/test_sso.py | 4 +- tests/handlers/test_stats.py | 8 +- tests/handlers/test_sync.py | 6 +- tests/handlers/test_typing.py | 9 +- tests/handlers/test_user_directory.py | 4 +- tests/http/__init__.py | 7 +- .../test_matrix_federation_agent.py | 4 +- tests/http/federation/test_srv_resolver.py | 28 +-- tests/http/server/_base.py | 20 +- tests/http/test_client.py | 6 +- tests/http/test_matrixfederationclient.py | 4 +- tests/http/test_proxy.py | 3 +- tests/http/test_proxyagent.py | 4 +- tests/http/test_servlet.py | 6 +- tests/logging/test_remote_handler.py | 3 +- tests/media/test_media_storage.py | 36 +-- tests/metrics/test_metrics.py | 10 +- tests/module_api/test_api.py | 4 +- tests/push/test_email.py | 8 +- tests/push/test_http.py | 10 +- tests/push/test_presentable_names.py | 16 +- tests/push/test_push_rule_evaluator.py | 10 +- tests/replication/_base.py | 12 +- tests/replication/http/test__base.py | 5 +- tests/replication/storage/test_events.py | 10 +- tests/replication/tcp/streams/test_events.py | 8 +- tests/replication/test_multi_media_repo.py | 6 +- tests/rest/admin/test_admin.py | 4 +- tests/rest/admin/test_event_reports.py | 3 +- tests/rest/admin/test_federation.py | 10 +- tests/rest/admin/test_jwks.py | 3 +- tests/rest/admin/test_media.py | 3 +- tests/rest/admin/test_room.py | 4 +- tests/rest/admin/test_scheduled_tasks.py | 8 +- tests/rest/admin/test_server_notice.py | 4 +- tests/rest/admin/test_statistics.py | 8 +- tests/rest/admin/test_user.py | 18 +- .../test_extension_thread_subscriptions.py | 6 +- .../sliding_sync/test_extension_to_device.py | 3 +- .../sliding_sync/test_rooms_timeline.py | 10 +- .../client/sliding_sync/test_sliding_sync.py | 8 +- tests/rest/client/test_account.py | 14 +- tests/rest/client/test_auth.py | 20 +- tests/rest/client/test_delayed_events.py | 3 +- 
tests/rest/client/test_login.py | 37 ++- tests/rest/client/test_media.py | 38 +-- tests/rest/client/test_notifications.py | 4 +- tests/rest/client/test_profile.py | 4 +- tests/rest/client/test_redactions.py | 6 +- tests/rest/client/test_register.py | 10 +- tests/rest/client/test_relations.py | 22 +- tests/rest/client/test_rendezvous.py | 3 +- tests/rest/client/test_retention.py | 4 +- tests/rest/client/test_rooms.py | 30 +-- tests/rest/client/test_sync.py | 3 +- tests/rest/client/test_third_party_rules.py | 22 +- tests/rest/client/test_transactions.py | 8 +- tests/rest/client/utils.py | 32 ++- tests/rest/key/v2/test_remote_key_resource.py | 4 +- tests/rest/media/test_domain_blocking.py | 3 +- tests/rest/media/test_url_preview.py | 10 +- .../client/test_federation_whitelist.py | 3 +- tests/scripts/test_new_matrix_user.py | 14 +- tests/server.py | 29 +-- .../test_resource_limits_server_notices.py | 3 +- tests/state/test_v2.py | 36 ++- tests/state/test_v21.py | 16 +- .../databases/main/test_end_to_end_keys.py | 4 +- .../databases/main/test_events_worker.py | 10 +- tests/storage/databases/main/test_receipts.py | 6 +- tests/storage/test__base.py | 6 +- tests/storage/test_account_data.py | 6 +- tests/storage/test_appservice.py | 10 +- tests/storage/test_background_update.py | 6 +- tests/storage/test_client_ips.py | 22 +- tests/storage/test_database.py | 4 +- tests/storage/test_devices.py | 6 +- tests/storage/test_event_chain.py | 14 +- tests/storage/test_event_federation.py | 51 ++-- tests/storage/test_event_push_actions.py | 4 +- tests/storage/test_events.py | 8 +- tests/storage/test_events_bg_updates.py | 5 +- tests/storage/test_id_generators.py | 8 +- tests/storage/test_monthly_active_users.py | 6 +- tests/storage/test_redaction.py | 6 +- tests/storage/test_rollback_worker.py | 3 +- tests/storage/test_room_search.py | 3 +- tests/storage/test_roommember.py | 8 +- tests/storage/test_sliding_sync_tables.py | 16 +- tests/storage/test_state.py | 8 +- 
tests/storage/test_stream.py | 5 +- tests/storage/test_user_directory.py | 22 +- .../util/test_partial_state_events_tracker.py | 12 +- tests/test_event_auth.py | 12 +- tests/test_mau.py | 4 +- tests/test_server.py | 14 +- tests/test_state.py | 42 ++-- tests/test_types.py | 3 +- tests/test_utils/__init__.py | 4 +- tests/test_utils/event_injection.py | 8 +- tests/test_utils/html_parsers.py | 10 +- tests/test_utils/oidc.py | 16 +- tests/unittest.py | 28 +-- tests/util/caches/test_deferred_cache.py | 5 +- tests/util/caches/test_descriptors.py | 17 +- tests/util/test_async_helpers.py | 8 +- tests/util/test_batching_queue.py | 5 +- tests/util/test_expiring_cache.py | 3 +- tests/util/test_itertools.py | 34 +-- tests/util/test_linearizer.py | 4 +- tests/util/test_lrucache.py | 9 +- tests/util/test_mutable_overlay_mapping.py | 3 +- tests/util/test_rwlock.py | 12 +- tests/util/test_task_scheduler.py | 14 +- tests/utils.py | 10 +- 539 files changed, 4601 insertions(+), 5068 deletions(-) create mode 100644 changelog.d/19046.misc diff --git a/build_rust.py b/build_rust.py index 5c796af461..af7bd2fdc5 100644 --- a/build_rust.py +++ b/build_rust.py @@ -2,13 +2,13 @@ import itertools import os -from typing import Any, Dict +from typing import Any from packaging.specifiers import SpecifierSet from setuptools_rust import Binding, RustExtension -def build(setup_kwargs: Dict[str, Any]) -> None: +def build(setup_kwargs: dict[str, Any]) -> None: original_project_dir = os.path.dirname(os.path.realpath(__file__)) cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml") diff --git a/changelog.d/19046.misc b/changelog.d/19046.misc new file mode 100644 index 0000000000..4013804f7f --- /dev/null +++ b/changelog.d/19046.misc @@ -0,0 +1 @@ +Use type hinting generics in standard collections, as per PEP 585, added in Python 3.9. 
diff --git a/contrib/graph/graph.py b/contrib/graph/graph.py index 9d5f3c7f4f..2898bb3448 100644 --- a/contrib/graph/graph.py +++ b/contrib/graph/graph.py @@ -24,7 +24,6 @@ import html import json import urllib.request -from typing import List import pydot @@ -33,7 +32,7 @@ def make_name(pdu_id: str, origin: str) -> str: return f"{pdu_id}@{origin}" -def make_graph(pdus: List[dict], filename_prefix: str) -> None: +def make_graph(pdus: list[dict], filename_prefix: str) -> None: """ Generate a dot and SVG file for a graph of events in the room based on the topological ordering by querying a homeserver. @@ -127,7 +126,7 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None: graph.write_svg("%s.svg" % filename_prefix, prog="dot") -def get_pdus(host: str, room: str) -> List[dict]: +def get_pdus(host: str, room: str) -> list[dict]: transaction = json.loads( urllib.request.urlopen( f"http://{host}/_matrix/federation/v1/context/{room}/" diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index 6f25653bb7..2451d1f300 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -65,13 +65,10 @@ from pathlib import Path from typing import ( Any, - Dict, - List, Mapping, MutableMapping, NoReturn, Optional, - Set, SupportsIndex, ) @@ -96,7 +93,7 @@ # Watching /_matrix/media and related needs a "media" listener # Stream Writers require "client" and "replication" listeners because they # have to attach by instance_map to the master process and have client endpoints. 
-WORKERS_CONFIG: Dict[str, Dict[str, Any]] = { +WORKERS_CONFIG: dict[str, dict[str, Any]] = { "pusher": { "app": "synapse.app.generic_worker", "listener_resources": [], @@ -408,7 +405,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None: def add_worker_roles_to_shared_config( shared_config: dict, - worker_types_set: Set[str], + worker_types_set: set[str], worker_name: str, worker_port: int, ) -> None: @@ -471,9 +468,9 @@ def add_worker_roles_to_shared_config( def merge_worker_template_configs( - existing_dict: Optional[Dict[str, Any]], - to_be_merged_dict: Dict[str, Any], -) -> Dict[str, Any]: + existing_dict: Optional[dict[str, Any]], + to_be_merged_dict: dict[str, Any], +) -> dict[str, Any]: """When given an existing dict of worker template configuration consisting with both dicts and lists, merge new template data from WORKERS_CONFIG(or create) and return new dict. @@ -484,7 +481,7 @@ def merge_worker_template_configs( existing_dict. Returns: The newly merged together dict values. """ - new_dict: Dict[str, Any] = {} + new_dict: dict[str, Any] = {} if not existing_dict: # It doesn't exist yet, just use the new dict(but take a copy not a reference) new_dict = to_be_merged_dict.copy() @@ -509,8 +506,8 @@ def merge_worker_template_configs( def insert_worker_name_for_worker_config( - existing_dict: Dict[str, Any], worker_name: str -) -> Dict[str, Any]: + existing_dict: dict[str, Any], worker_name: str +) -> dict[str, Any]: """Insert a given worker name into the worker's configuration dict. Args: @@ -526,7 +523,7 @@ def insert_worker_name_for_worker_config( return dict_to_edit -def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]: +def apply_requested_multiplier_for_worker(worker_types: list[str]) -> list[str]: """ Apply multiplier(if found) by returning a new expanded list with some basic error checking. 
@@ -587,7 +584,7 @@ def is_sharding_allowed_for_worker_type(worker_type: str) -> bool: def split_and_strip_string( given_string: str, split_char: str, max_split: SupportsIndex = -1 -) -> List[str]: +) -> list[str]: """ Helper to split a string on split_char and strip whitespace from each end of each element. @@ -616,8 +613,8 @@ def generate_base_homeserver_config() -> None: def parse_worker_types( - requested_worker_types: List[str], -) -> Dict[str, Set[str]]: + requested_worker_types: list[str], +) -> dict[str, set[str]]: """Read the desired list of requested workers and prepare the data for use in generating worker config files while also checking for potential gotchas. @@ -633,14 +630,14 @@ def parse_worker_types( # A counter of worker_base_name -> int. Used for determining the name for a given # worker when generating its config file, as each worker's name is just # worker_base_name followed by instance number - worker_base_name_counter: Dict[str, int] = defaultdict(int) + worker_base_name_counter: dict[str, int] = defaultdict(int) # Similar to above, but more finely grained. This is used to determine we don't have # more than a single worker for cases where multiples would be bad(e.g. presence). - worker_type_shard_counter: Dict[str, int] = defaultdict(int) + worker_type_shard_counter: dict[str, int] = defaultdict(int) # The final result of all this processing - dict_to_return: Dict[str, Set[str]] = {} + dict_to_return: dict[str, set[str]] = {} # Handle any multipliers requested for given workers. multiple_processed_worker_types = apply_requested_multiplier_for_worker( @@ -684,7 +681,7 @@ def parse_worker_types( # Split the worker_type_string on "+", remove whitespace from ends then make # the list a set so it's deduplicated. 
- worker_types_set: Set[str] = set( + worker_types_set: set[str] = set( split_and_strip_string(worker_type_string, "+") ) @@ -743,7 +740,7 @@ def generate_worker_files( environ: Mapping[str, str], config_path: str, data_dir: str, - requested_worker_types: Dict[str, Set[str]], + requested_worker_types: dict[str, set[str]], ) -> None: """Read the desired workers(if any) that is passed in and generate shared homeserver, nginx and supervisord configs. @@ -764,7 +761,7 @@ def generate_worker_files( # First read the original config file and extract the listeners block. Then we'll # add another listener for replication. Later we'll write out the result to the # shared config file. - listeners: List[Any] + listeners: list[Any] if using_unix_sockets: listeners = [ { @@ -792,12 +789,12 @@ def generate_worker_files( # base shared worker jinja2 template. This config file will be passed to all # workers, included Synapse's main process. It is intended mainly for disabling # functionality when certain workers are spun up, and adding a replication listener. - shared_config: Dict[str, Any] = {"listeners": listeners} + shared_config: dict[str, Any] = {"listeners": listeners} # List of dicts that describe workers. # We pass this to the Supervisor template later to generate the appropriate # program blocks. - worker_descriptors: List[Dict[str, Any]] = [] + worker_descriptors: list[dict[str, Any]] = [] # Upstreams for load-balancing purposes. This dict takes the form of the worker # type to the ports of each worker. For example: @@ -805,14 +802,14 @@ def generate_worker_files( # worker_type: {1234, 1235, ...}} # } # and will be used to construct 'upstream' nginx directives. - nginx_upstreams: Dict[str, Set[int]] = {} + nginx_upstreams: dict[str, set[int]] = {} # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what # will be placed after the proxy_pass directive. 
The main benefit to representing # this data as a dict over a str is that we can easily deduplicate endpoints # across multiple instances of the same worker. The final rendering will be combined # with nginx_upstreams and placed in /etc/nginx/conf.d. - nginx_locations: Dict[str, str] = {} + nginx_locations: dict[str, str] = {} # Create the worker configuration directory if it doesn't already exist os.makedirs("/conf/workers", exist_ok=True) @@ -846,7 +843,7 @@ def generate_worker_files( # yaml config file for worker_name, worker_types_set in requested_worker_types.items(): # The collected and processed data will live here. - worker_config: Dict[str, Any] = {} + worker_config: dict[str, Any] = {} # Merge all worker config templates for this worker into a single config for worker_type in worker_types_set: @@ -1029,7 +1026,7 @@ def generate_worker_log_config( Returns: the path to the generated file """ # Check whether we should write worker logs to disk, in addition to the console - extra_log_template_args: Dict[str, Optional[str]] = {} + extra_log_template_args: dict[str, Optional[str]] = {} if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"): extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log" @@ -1053,7 +1050,7 @@ def generate_worker_log_config( return log_config_filepath -def main(args: List[str], environ: MutableMapping[str, str]) -> None: +def main(args: list[str], environ: MutableMapping[str, str]) -> None: parser = ArgumentParser() parser.add_argument( "--generate-only", @@ -1087,7 +1084,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None: if not worker_types_env: # No workers, just the main process worker_types = [] - requested_worker_types: Dict[str, Any] = {} + requested_worker_types: dict[str, Any] = {} else: # Split type names by comma, ignoring whitespace. 
worker_types = split_and_strip_string(worker_types_env, ",") diff --git a/docker/start.py b/docker/start.py index 0be9976a0c..daa041d463 100755 --- a/docker/start.py +++ b/docker/start.py @@ -6,7 +6,7 @@ import platform import subprocess import sys -from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional +from typing import Any, Mapping, MutableMapping, NoReturn, Optional import jinja2 @@ -69,7 +69,7 @@ def generate_config_from_template( ) # populate some params from data files (if they exist, else create new ones) - environ: Dict[str, Any] = dict(os_environ) + environ: dict[str, Any] = dict(os_environ) secrets = { "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET", "macaroon": "SYNAPSE_MACAROON_SECRET_KEY", @@ -200,7 +200,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> subprocess.run(args, check=True) -def main(args: List[str], environ: MutableMapping[str, str]) -> None: +def main(args: list[str], environ: MutableMapping[str, str]) -> None: mode = args[1] if len(args) > 1 else "run" # if we were given an explicit user to switch to, do so diff --git a/pyproject.toml b/pyproject.toml index ee7016b1d0..b0cb355c52 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,6 +78,12 @@ select = [ "LOG", # flake8-logging-format "G", + # pyupgrade + "UP006", +] +extend-safe-fixes = [ + # pyupgrade + "UP006" ] [tool.ruff.lint.isort] diff --git a/scripts-dev/build_debian_packages.py b/scripts-dev/build_debian_packages.py index 6150dc15a3..f94c5a37fc 100755 --- a/scripts-dev/build_debian_packages.py +++ b/scripts-dev/build_debian_packages.py @@ -18,7 +18,7 @@ import threading from concurrent.futures import ThreadPoolExecutor from types import FrameType -from typing import Collection, Optional, Sequence, Set +from typing import Collection, Optional, Sequence # These are expanded inside the dockerfile to be a fully qualified image name. # e.g. 
docker.io/library/debian:bullseye @@ -54,7 +54,7 @@ def __init__( ): self.redirect_stdout = redirect_stdout self._docker_build_args = tuple(docker_build_args or ()) - self.active_containers: Set[str] = set() + self.active_containers: set[str] = set() self._lock = threading.Lock() self._failed = False diff --git a/scripts-dev/check_locked_deps_have_sdists.py b/scripts-dev/check_locked_deps_have_sdists.py index cabe3b8de1..f035ecb644 100755 --- a/scripts-dev/check_locked_deps_have_sdists.py +++ b/scripts-dev/check_locked_deps_have_sdists.py @@ -21,7 +21,6 @@ # import sys from pathlib import Path -from typing import Dict, List import tomli @@ -33,7 +32,7 @@ def main() -> None: # Poetry 1.3+ lockfile format: # There's a `files` inline table in each [[package]] - packages_to_assets: Dict[str, List[Dict[str, str]]] = { + packages_to_assets: dict[str, list[dict[str, str]]] = { package["name"]: package["files"] for package in lockfile_content["package"] } diff --git a/scripts-dev/check_pydantic_models.py b/scripts-dev/check_pydantic_models.py index 26a473a61b..69c49e258d 100755 --- a/scripts-dev/check_pydantic_models.py +++ b/scripts-dev/check_pydantic_models.py @@ -47,11 +47,7 @@ from typing import ( Any, Callable, - Dict, Generator, - List, - Set, - Type, TypeVar, ) @@ -69,7 +65,7 @@ logger = logging.getLogger(__name__) -CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: List[Callable] = [ +CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: list[Callable] = [ constr, conbytes, conint, @@ -145,7 +141,7 @@ class PatchedBaseModel(PydanticBaseModel): """ @classmethod - def __init_subclass__(cls: Type[PydanticBaseModel], **kwargs: object): + def __init_subclass__(cls: type[PydanticBaseModel], **kwargs: object): for field in cls.__fields__.values(): # Note that field.type_ and field.outer_type are computed based on the # annotation type, see pydantic.fields.ModelField._type_analysis @@ -212,7 +208,7 @@ def lint() -> int: return os.EX_DATAERR if failures else os.EX_OK -def do_lint() -> 
Set[str]: +def do_lint() -> set[str]: """Try to import all of Synapse and see if we spot any Pydantic type coercions.""" failures = set() @@ -258,8 +254,8 @@ def run_test_snippet(source: str) -> None: # > Remember that at the module level, globals and locals are the same dictionary. # > If exec gets two separate objects as globals and locals, the code will be # > executed as if it were embedded in a class definition. - globals_: Dict[str, object] - locals_: Dict[str, object] + globals_: dict[str, object] + locals_: dict[str, object] globals_ = locals_ = {} exec(textwrap.dedent(source), globals_, locals_) @@ -394,10 +390,10 @@ class TestFieldTypeInspection(unittest.TestCase): ("bool"), ("Optional[str]",), ("Union[None, str]",), - ("List[str]",), - ("List[List[str]]",), - ("Dict[StrictStr, str]",), - ("Dict[str, StrictStr]",), + ("list[str]",), + ("list[list[str]]",), + ("dict[StrictStr, str]",), + ("dict[str, StrictStr]",), ("TypedDict('D', x=int)",), ] ) @@ -425,9 +421,9 @@ class C(BaseModel): ("constr(strict=True, min_length=10)",), ("Optional[StrictStr]",), ("Union[None, StrictStr]",), - ("List[StrictStr]",), - ("List[List[StrictStr]]",), - ("Dict[StrictStr, StrictStr]",), + ("list[StrictStr]",), + ("list[list[StrictStr]]",), + ("dict[StrictStr, StrictStr]",), ("TypedDict('D', x=StrictInt)",), ] ) diff --git a/scripts-dev/check_schema_delta.py b/scripts-dev/check_schema_delta.py index 454784c3ae..7b2dec25d4 100755 --- a/scripts-dev/check_schema_delta.py +++ b/scripts-dev/check_schema_delta.py @@ -5,7 +5,7 @@ # Also checks that schema deltas do not try and create or drop indices. 
import re -from typing import Any, Dict, List +from typing import Any import click import git @@ -48,16 +48,16 @@ def main(force_colors: bool) -> None: r = repo.git.show(f"origin/{DEVELOP_BRANCH}:synapse/storage/schema/__init__.py") - locals: Dict[str, Any] = {} + locals: dict[str, Any] = {} exec(r, locals) current_schema_version = locals["SCHEMA_VERSION"] - diffs: List[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None) + diffs: list[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None) # Get the schema version of the local file to check against current schema on develop with open("synapse/storage/schema/__init__.py") as file: local_schema = file.read() - new_locals: Dict[str, Any] = {} + new_locals: dict[str, Any] = {} exec(local_schema, new_locals) local_schema_version = new_locals["SCHEMA_VERSION"] diff --git a/scripts-dev/federation_client.py b/scripts-dev/federation_client.py index fb879ef555..db8655c1ce 100755 --- a/scripts-dev/federation_client.py +++ b/scripts-dev/federation_client.py @@ -43,7 +43,7 @@ import base64 import json import sys -from typing import Any, Dict, Mapping, Optional, Tuple, Union +from typing import Any, Mapping, Optional, Union from urllib import parse as urlparse import requests @@ -147,7 +147,7 @@ def request( s = requests.Session() s.mount("matrix-federation://", MatrixConnectionAdapter()) - headers: Dict[str, str] = { + headers: dict[str, str] = { "Authorization": authorization_headers[0], } @@ -303,7 +303,7 @@ def get_connection_with_tls_context( request: PreparedRequest, verify: Optional[Union[bool, str]], proxies: Optional[Mapping[str, str]] = None, - cert: Optional[Union[Tuple[str, str], str]] = None, + cert: Optional[Union[tuple[str, str], str]] = None, ) -> HTTPConnectionPool: # overrides the get_connection_with_tls_context() method in the base class parsed = urlparse.urlsplit(request.url) @@ -326,7 +326,7 @@ def get_connection_with_tls_context( ) @staticmethod - def _lookup(server_name: str) -> 
Tuple[str, int, str]: + def _lookup(server_name: str) -> tuple[str, int, str]: """ Do an SRV lookup on a server name and return the host:port to connect to Given the server_name (after any .well-known lookup), return the host, port and diff --git a/scripts-dev/mypy_synapse_plugin.py b/scripts-dev/mypy_synapse_plugin.py index 0b854cdba5..830c4ac4ab 100644 --- a/scripts-dev/mypy_synapse_plugin.py +++ b/scripts-dev/mypy_synapse_plugin.py @@ -24,7 +24,7 @@ """ import enum -from typing import Callable, Mapping, Optional, Tuple, Type, Union +from typing import Callable, Mapping, Optional, Union import attr import mypy.types @@ -184,8 +184,8 @@ class ArgLocation: # Unbound at this point because we don't know the mypy version yet. # This is set in the `plugin(...)` function below. -MypyPydanticPluginClass: Type[Plugin] -MypyZopePluginClass: Type[Plugin] +MypyPydanticPluginClass: type[Plugin] +MypyZopePluginClass: type[Plugin] class SynapsePlugin(Plugin): @@ -795,7 +795,7 @@ def check_is_cacheable( def is_cacheable( rt: mypy.types.Type, signature: CallableType, verbose: bool -) -> Tuple[bool, Optional[str]]: +) -> tuple[bool, Optional[str]]: """ Check if a particular type is cachable. 
@@ -905,7 +905,7 @@ def is_cacheable( return False, f"Don't know how to handle {type(rt).__qualname__} return type" -def plugin(version: str) -> Type[SynapsePlugin]: +def plugin(version: str) -> type[SynapsePlugin]: global MypyPydanticPluginClass, MypyZopePluginClass # This is the entry point of the plugin, and lets us deal with the fact # that the mypy plugin interface is *not* stable by looking at the version diff --git a/scripts-dev/release.py b/scripts-dev/release.py index c5c72156cc..16f1fc5f2a 100755 --- a/scripts-dev/release.py +++ b/scripts-dev/release.py @@ -32,7 +32,7 @@ import urllib.request from os import path from tempfile import TemporaryDirectory -from typing import Any, List, Match, Optional, Union +from typing import Any, Match, Optional, Union import attr import click @@ -884,7 +884,7 @@ class VersionSection: start_line: int end_line: Optional[int] = None # Is none if its the last entry - headings: List[VersionSection] = [] + headings: list[VersionSection] = [] for i, token in enumerate(tokens): # We look for level 1 headings (h1 tags). 
if token.type != "heading_open" or token.tag != "h1": diff --git a/scripts-dev/schema_versions.py b/scripts-dev/schema_versions.py index 5a79a43355..cec58e177f 100755 --- a/scripts-dev/schema_versions.py +++ b/scripts-dev/schema_versions.py @@ -38,7 +38,7 @@ import json import sys from collections import defaultdict -from typing import Any, Dict, Iterator, Optional, Tuple +from typing import Any, Iterator, Optional import git from packaging import version @@ -57,7 +57,7 @@ OLDEST_SHOWN_VERSION = version.parse("v1.0") -def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]: +def get_schema_versions(tag: git.Tag) -> tuple[Optional[int], Optional[int]]: """Get the schema and schema compat versions for a tag.""" schema_version = None schema_compat_version = None @@ -81,7 +81,7 @@ def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]: # SCHEMA_COMPAT_VERSION is sometimes across multiple lines, the easist # thing to do is exec the code. Luckily it has only ever existed in # a file which imports nothing else from Synapse. 
- locals: Dict[str, Any] = {} + locals: dict[str, Any] = {} exec(schema_file.data_stream.read().decode("utf-8"), {}, locals) schema_version = locals["SCHEMA_VERSION"] schema_compat_version = locals.get("SCHEMA_COMPAT_VERSION") diff --git a/stubs/sortedcontainers/sorteddict.pyi b/stubs/sortedcontainers/sorteddict.pyi index 81f581b034..a0be3e6349 100644 --- a/stubs/sortedcontainers/sorteddict.pyi +++ b/stubs/sortedcontainers/sorteddict.pyi @@ -7,18 +7,14 @@ from __future__ import annotations from typing import ( Any, Callable, - Dict, Hashable, ItemsView, Iterable, Iterator, KeysView, - List, Mapping, Optional, Sequence, - Tuple, - Type, TypeVar, Union, ValuesView, @@ -35,14 +31,14 @@ _VT_co = TypeVar("_VT_co", covariant=True) _SD = TypeVar("_SD", bound=SortedDict) _Key = Callable[[_T], Any] -class SortedDict(Dict[_KT, _VT]): +class SortedDict(dict[_KT, _VT]): @overload def __init__(self, **kwargs: _VT) -> None: ... @overload def __init__(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def __init__( - self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT + self, __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT ) -> None: ... @overload def __init__(self, __key: _Key[_KT], **kwargs: _VT) -> None: ... @@ -52,7 +48,7 @@ class SortedDict(Dict[_KT, _VT]): ) -> None: ... @overload def __init__( - self, __key: _Key[_KT], __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT + self, __key: _Key[_KT], __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT ) -> None: ... @property def key(self) -> Optional[_Key[_KT]]: ... @@ -84,8 +80,8 @@ class SortedDict(Dict[_KT, _VT]): def pop(self, key: _KT) -> _VT: ... @overload def pop(self, key: _KT, default: _T = ...) -> Union[_VT, _T]: ... - def popitem(self, index: int = ...) -> Tuple[_KT, _VT]: ... - def peekitem(self, index: int = ...) -> Tuple[_KT, _VT]: ... + def popitem(self, index: int = ...) -> tuple[_KT, _VT]: ... + def peekitem(self, index: int = ...) -> tuple[_KT, _VT]: ... 
def setdefault(self, key: _KT, default: Optional[_VT] = ...) -> _VT: ... # Mypy now reports the first overload as an error, because typeshed widened the type # of `__map` to its internal `_typeshed.SupportsKeysAndGetItem` type in @@ -102,9 +98,9 @@ class SortedDict(Dict[_KT, _VT]): # def update(self, **kwargs: _VT) -> None: ... def __reduce__( self, - ) -> Tuple[ - Type[SortedDict[_KT, _VT]], - Tuple[Callable[[_KT], Any], List[Tuple[_KT, _VT]]], + ) -> tuple[ + type[SortedDict[_KT, _VT]], + tuple[Callable[[_KT], Any], list[tuple[_KT, _VT]]], ]: ... def __repr__(self) -> str: ... def _check(self) -> None: ... @@ -121,20 +117,20 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]): @overload def __getitem__(self, index: int) -> _KT_co: ... @overload - def __getitem__(self, index: slice) -> List[_KT_co]: ... + def __getitem__(self, index: slice) -> list[_KT_co]: ... def __delitem__(self, index: Union[int, slice]) -> None: ... -class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]): - def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ... +class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[tuple[_KT_co, _VT_co]]): + def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... @overload - def __getitem__(self, index: int) -> Tuple[_KT_co, _VT_co]: ... + def __getitem__(self, index: int) -> tuple[_KT_co, _VT_co]: ... @overload - def __getitem__(self, index: slice) -> List[Tuple[_KT_co, _VT_co]]: ... + def __getitem__(self, index: slice) -> list[tuple[_KT_co, _VT_co]]: ... def __delitem__(self, index: Union[int, slice]) -> None: ... class SortedValuesView(ValuesView[_VT_co], Sequence[_VT_co]): @overload def __getitem__(self, index: int) -> _VT_co: ... @overload - def __getitem__(self, index: slice) -> List[_VT_co]: ... + def __getitem__(self, index: slice) -> list[_VT_co]: ... def __delitem__(self, index: Union[int, slice]) -> None: ... 
diff --git a/stubs/sortedcontainers/sortedlist.pyi b/stubs/sortedcontainers/sortedlist.pyi index 0e745c0a79..25ceb74cc9 100644 --- a/stubs/sortedcontainers/sortedlist.pyi +++ b/stubs/sortedcontainers/sortedlist.pyi @@ -9,12 +9,9 @@ from typing import ( Callable, Iterable, Iterator, - List, MutableSequence, Optional, Sequence, - Tuple, - Type, TypeVar, Union, overload, @@ -37,11 +34,11 @@ class SortedList(MutableSequence[_T]): ): ... # NB: currently mypy does not honour return type, see mypy #3307 @overload - def __new__(cls: Type[_SL], iterable: None, key: None) -> _SL: ... + def __new__(cls: type[_SL], iterable: None, key: None) -> _SL: ... @overload - def __new__(cls: Type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ... + def __new__(cls: type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ... @overload - def __new__(cls: Type[_SL], iterable: Iterable[_T], key: None) -> _SL: ... + def __new__(cls: type[_SL], iterable: Iterable[_T], key: None) -> _SL: ... @overload def __new__(cls, iterable: Iterable[_T], key: _Key[_T]) -> SortedKeyList[_T]: ... @property @@ -64,11 +61,11 @@ class SortedList(MutableSequence[_T]): @overload def __getitem__(self, index: int) -> _T: ... @overload - def __getitem__(self, index: slice) -> List[_T]: ... + def __getitem__(self, index: slice) -> list[_T]: ... @overload def _getitem(self, index: int) -> _T: ... @overload - def _getitem(self, index: slice) -> List[_T]: ... + def _getitem(self, index: slice) -> list[_T]: ... @overload def __setitem__(self, index: int, value: _T) -> None: ... @overload @@ -95,7 +92,7 @@ class SortedList(MutableSequence[_T]): self, minimum: Optional[int] = ..., maximum: Optional[int] = ..., - inclusive: Tuple[bool, bool] = ..., + inclusive: tuple[bool, bool] = ..., reverse: bool = ..., ) -> Iterator[_T]: ... def bisect_left(self, value: _T) -> int: ... 
@@ -151,14 +148,14 @@ class SortedKeyList(SortedList[_T]): self, minimum: Optional[int] = ..., maximum: Optional[int] = ..., - inclusive: Tuple[bool, bool] = ..., + inclusive: tuple[bool, bool] = ..., reverse: bool = ..., ) -> Iterator[_T]: ... def irange_key( self, min_key: Optional[Any] = ..., max_key: Optional[Any] = ..., - inclusive: Tuple[bool, bool] = ..., + inclusive: tuple[bool, bool] = ..., reserve: bool = ..., ) -> Iterator[_T]: ... def bisect_left(self, value: _T) -> int: ... diff --git a/stubs/sortedcontainers/sortedset.pyi b/stubs/sortedcontainers/sortedset.pyi index 6db11eacbe..a3593ca579 100644 --- a/stubs/sortedcontainers/sortedset.pyi +++ b/stubs/sortedcontainers/sortedset.pyi @@ -10,13 +10,9 @@ from typing import ( Hashable, Iterable, Iterator, - List, MutableSet, Optional, Sequence, - Set, - Tuple, - Type, TypeVar, Union, overload, @@ -37,7 +33,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]): ) -> None: ... @classmethod def _fromset( - cls, values: Set[_T], key: Optional[_Key[_T]] = ... + cls, values: set[_T], key: Optional[_Key[_T]] = ... ) -> SortedSet[_T]: ... @property def key(self) -> Optional[_Key[_T]]: ... @@ -45,7 +41,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]): @overload def __getitem__(self, index: int) -> _T: ... @overload - def __getitem__(self, index: slice) -> List[_T]: ... + def __getitem__(self, index: slice) -> list[_T]: ... def __delitem__(self, index: Union[int, slice]) -> None: ... def __eq__(self, other: Any) -> bool: ... def __ne__(self, other: Any) -> bool: ... @@ -94,7 +90,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]): def _update(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ... def __reduce__( self, - ) -> Tuple[Type[SortedSet[_T]], Set[_T], Callable[[_T], Any]]: ... + ) -> tuple[type[SortedSet[_T]], set[_T], Callable[[_T], Any]]: ... def __repr__(self) -> str: ... def _check(self) -> None: ... def bisect_left(self, value: _T) -> int: ... 
@@ -109,7 +105,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]): self, minimum: Optional[_T] = ..., maximum: Optional[_T] = ..., - inclusive: Tuple[bool, bool] = ..., + inclusive: tuple[bool, bool] = ..., reverse: bool = ..., ) -> Iterator[_T]: ... def index( diff --git a/stubs/txredisapi.pyi b/stubs/txredisapi.pyi index c9a4114b1e..d2539aa37d 100644 --- a/stubs/txredisapi.pyi +++ b/stubs/txredisapi.pyi @@ -15,7 +15,7 @@ """Contains *incomplete* type hints for txredisapi.""" -from typing import Any, List, Optional, Type, Union +from typing import Any, Optional, Union from twisted.internet import protocol from twisted.internet.defer import Deferred @@ -39,7 +39,7 @@ class RedisProtocol(protocol.Protocol): class SubscriberProtocol(RedisProtocol): def __init__(self, *args: object, **kwargs: object): ... password: Optional[str] - def subscribe(self, channels: Union[str, List[str]]) -> "Deferred[None]": ... + def subscribe(self, channels: Union[str, list[str]]) -> "Deferred[None]": ... def connectionMade(self) -> None: ... # type-ignore: twisted.internet.protocol.Protocol provides a default argument for # `reason`. txredisapi's LineReceiver Protocol doesn't. But that's fine: it's what's @@ -69,7 +69,7 @@ class UnixConnectionHandler(ConnectionHandler): ... 
class RedisFactory(protocol.ReconnectingClientFactory): continueTrying: bool handler: ConnectionHandler - pool: List[RedisProtocol] + pool: list[RedisProtocol] replyTimeout: Optional[int] def __init__( self, @@ -77,7 +77,7 @@ class RedisFactory(protocol.ReconnectingClientFactory): dbid: Optional[int], poolsize: int, isLazy: bool = False, - handler: Type = ConnectionHandler, + handler: type = ConnectionHandler, charset: str = "utf-8", password: Optional[str] = None, replyTimeout: Optional[int] = None, diff --git a/synapse/__init__.py b/synapse/__init__.py index 3bd1b3307e..d1c306b8f3 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -24,7 +24,7 @@ import os import sys -from typing import Any, Dict +from typing import Any from PIL import ImageFile @@ -70,7 +70,7 @@ from canonicaljson import register_preserialisation_callback from immutabledict import immutabledict - def _immutabledict_cb(d: immutabledict) -> Dict[str, Any]: + def _immutabledict_cb(d: immutabledict) -> dict[str, Any]: try: return d._dict except Exception: diff --git a/synapse/_scripts/generate_workers_map.py b/synapse/_scripts/generate_workers_map.py index f66c01040c..e669f6902d 100755 --- a/synapse/_scripts/generate_workers_map.py +++ b/synapse/_scripts/generate_workers_map.py @@ -25,7 +25,7 @@ import re from collections import defaultdict from dataclasses import dataclass -from typing import Dict, Iterable, Optional, Pattern, Set, Tuple +from typing import Iterable, Optional, Pattern import yaml @@ -81,7 +81,7 @@ class EnumerationResource(HttpServer): """ def __init__(self, is_worker: bool) -> None: - self.registrations: Dict[Tuple[str, str], EndpointDescription] = {} + self.registrations: dict[tuple[str, str], EndpointDescription] = {} self._is_worker = is_worker def register_paths( @@ -115,7 +115,7 @@ def register_paths( def get_registered_paths_for_hs( hs: HomeServer, -) -> Dict[Tuple[str, str], EndpointDescription]: +) -> dict[tuple[str, str], EndpointDescription]: """ Given a 
homeserver, get all registered endpoints and their descriptions. """ @@ -142,7 +142,7 @@ def get_registered_paths_for_hs( def get_registered_paths_for_default( worker_app: Optional[str], base_config: HomeServerConfig -) -> Dict[Tuple[str, str], EndpointDescription]: +) -> dict[tuple[str, str], EndpointDescription]: """ Given the name of a worker application and a base homeserver configuration, returns: @@ -168,9 +168,9 @@ def get_registered_paths_for_default( def elide_http_methods_if_unconflicting( - registrations: Dict[Tuple[str, str], EndpointDescription], - all_possible_registrations: Dict[Tuple[str, str], EndpointDescription], -) -> Dict[Tuple[str, str], EndpointDescription]: + registrations: dict[tuple[str, str], EndpointDescription], + all_possible_registrations: dict[tuple[str, str], EndpointDescription], +) -> dict[tuple[str, str], EndpointDescription]: """ Elides HTTP methods (by replacing them with `*`) if all possible registered methods can be handled by the worker whose registration map is `registrations`. @@ -180,13 +180,13 @@ def elide_http_methods_if_unconflicting( """ def paths_to_methods_dict( - methods_and_paths: Iterable[Tuple[str, str]], - ) -> Dict[str, Set[str]]: + methods_and_paths: Iterable[tuple[str, str]], + ) -> dict[str, set[str]]: """ Given (method, path) pairs, produces a dict from path to set of methods available at that path. 
""" - result: Dict[str, Set[str]] = {} + result: dict[str, set[str]] = {} for method, path in methods_and_paths: result.setdefault(path, set()).add(method) return result @@ -210,8 +210,8 @@ def paths_to_methods_dict( def simplify_path_regexes( - registrations: Dict[Tuple[str, str], EndpointDescription], -) -> Dict[Tuple[str, str], EndpointDescription]: + registrations: dict[tuple[str, str], EndpointDescription], +) -> dict[tuple[str, str], EndpointDescription]: """ Simplify all the path regexes for the dict of endpoint descriptions, so that we don't use the Python-specific regex extensions @@ -270,8 +270,8 @@ def main() -> None: # TODO SSO endpoints (pick_idp etc) NOT REGISTERED BY THIS SCRIPT - categories_to_methods_and_paths: Dict[ - Optional[str], Dict[Tuple[str, str], EndpointDescription] + categories_to_methods_and_paths: dict[ + Optional[str], dict[tuple[str, str], EndpointDescription] ] = defaultdict(dict) for (method, path), desc in elided_worker_paths.items(): @@ -283,7 +283,7 @@ def main() -> None: def print_category( category_name: Optional[str], - elided_worker_paths: Dict[Tuple[str, str], EndpointDescription], + elided_worker_paths: dict[tuple[str, str], EndpointDescription], ) -> None: """ Prints out a category, in documentation page style. 
diff --git a/synapse/_scripts/register_new_matrix_user.py b/synapse/_scripts/register_new_matrix_user.py index 4897fa94b0..3fe2f33e52 100644 --- a/synapse/_scripts/register_new_matrix_user.py +++ b/synapse/_scripts/register_new_matrix_user.py @@ -26,7 +26,7 @@ import hmac import logging import sys -from typing import Any, Callable, Dict, Optional +from typing import Any, Callable, Optional import requests import yaml @@ -262,7 +262,7 @@ def main() -> None: args = parser.parse_args() - config: Optional[Dict[str, Any]] = None + config: Optional[dict[str, Any]] = None if "config" in args and args.config: config = yaml.safe_load(args.config) @@ -350,7 +350,7 @@ def _read_file(file_path: Any, config_path: str) -> str: sys.exit(1) -def _find_client_listener(config: Dict[str, Any]) -> Optional[str]: +def _find_client_listener(config: dict[str, Any]) -> Optional[str]: # try to find a listener in the config. Returns a host:port pair for listener in config.get("listeners", []): if listener.get("type") != "http" or listener.get("tls", False): diff --git a/synapse/_scripts/review_recent_signups.py b/synapse/_scripts/review_recent_signups.py index 0ff7fae567..d760a84bf2 100644 --- a/synapse/_scripts/review_recent_signups.py +++ b/synapse/_scripts/review_recent_signups.py @@ -23,7 +23,6 @@ import sys import time from datetime import datetime -from typing import List import attr @@ -50,15 +49,15 @@ class ReviewConfig(RootConfig): class UserInfo: user_id: str creation_ts: int - emails: List[str] = attr.Factory(list) - private_rooms: List[str] = attr.Factory(list) - public_rooms: List[str] = attr.Factory(list) - ips: List[str] = attr.Factory(list) + emails: list[str] = attr.Factory(list) + private_rooms: list[str] = attr.Factory(list) + public_rooms: list[str] = attr.Factory(list) + ips: list[str] = attr.Factory(list) def get_recent_users( txn: LoggingTransaction, since_ms: int, exclude_app_service: bool -) -> List[UserInfo]: +) -> list[UserInfo]: """Fetches recently registered 
users and some info on them.""" sql = """ diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index 3c79919fea..e83c0de5a4 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -33,15 +33,10 @@ Any, Awaitable, Callable, - Dict, Generator, Iterable, - List, NoReturn, Optional, - Set, - Tuple, - Type, TypedDict, TypeVar, cast, @@ -244,7 +239,7 @@ # not the error then the script will show nothing outside of what's printed in the run # function. If both are defined, the script will print both the error and the stacktrace. end_error_exec_info: Optional[ - Tuple[Type[BaseException], BaseException, TracebackType] + tuple[type[BaseException], BaseException, TracebackType] ] = None R = TypeVar("R") @@ -281,8 +276,8 @@ class Store( def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]: return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs) - def execute_sql(self, sql: str, *args: object) -> Awaitable[List[Tuple]]: - def r(txn: LoggingTransaction) -> List[Tuple]: + def execute_sql(self, sql: str, *args: object) -> Awaitable[list[tuple]]: + def r(txn: LoggingTransaction) -> list[tuple]: txn.execute(sql, args) return txn.fetchall() @@ -292,8 +287,8 @@ def insert_many_txn( self, txn: LoggingTransaction, table: str, - headers: List[str], - rows: List[Tuple], + headers: list[str], + rows: list[tuple], override_system_value: bool = False, ) -> None: sql = "INSERT INTO %s (%s) %s VALUES (%s)" % ( @@ -330,7 +325,7 @@ def __init__(self, config: HomeServerConfig): class Porter: def __init__( self, - sqlite_config: Dict[str, Any], + sqlite_config: dict[str, Any], progress: "Progress", batch_size: int, hs: HomeServer, @@ -340,7 +335,7 @@ def __init__( self.batch_size = batch_size self.hs = hs - async def setup_table(self, table: str) -> Tuple[str, int, int, int, int]: + async def setup_table(self, table: str) -> tuple[str, int, int, int, int]: if table in APPEND_ONLY_TABLES: 
# It's safe to just carry on inserting. row = await self.postgres_store.db_pool.simple_select_one( @@ -403,10 +398,10 @@ def delete_all(txn: LoggingTransaction) -> None: return table, already_ported, total_to_port, forward_chunk, backward_chunk - async def get_table_constraints(self) -> Dict[str, Set[str]]: + async def get_table_constraints(self) -> dict[str, set[str]]: """Returns a map of tables that have foreign key constraints to tables they depend on.""" - def _get_constraints(txn: LoggingTransaction) -> Dict[str, Set[str]]: + def _get_constraints(txn: LoggingTransaction) -> dict[str, set[str]]: # We can pull the information about foreign key constraints out from # the postgres schema tables. sql = """ @@ -422,7 +417,7 @@ def _get_constraints(txn: LoggingTransaction) -> Dict[str, Set[str]]: """ txn.execute(sql) - results: Dict[str, Set[str]] = {} + results: dict[str, set[str]] = {} for table, foreign_table in txn: results.setdefault(table, set()).add(foreign_table) return results @@ -490,7 +485,7 @@ async def handle_table( def r( txn: LoggingTransaction, - ) -> Tuple[Optional[List[str]], List[Tuple], List[Tuple]]: + ) -> tuple[Optional[list[str]], list[tuple], list[tuple]]: forward_rows = [] backward_rows = [] if do_forward[0]: @@ -507,7 +502,7 @@ def r( if forward_rows or backward_rows: assert txn.description is not None - headers: Optional[List[str]] = [ + headers: Optional[list[str]] = [ column[0] for column in txn.description ] else: @@ -574,7 +569,7 @@ async def handle_search_table( while True: - def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]: + def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]: txn.execute(select, (forward_chunk, self.batch_size)) rows = txn.fetchall() assert txn.description is not None @@ -956,7 +951,7 @@ def alter_table(txn: LoggingTransaction) -> None: self.progress.set_state("Copying to postgres") constraints = await self.get_table_constraints() - tables_ported = set() # type: Set[str] + tables_ported = 
set() # type: set[str] while tables_to_port_info_map: # Pulls out all tables that are still to be ported and which @@ -995,8 +990,8 @@ def alter_table(txn: LoggingTransaction) -> None: reactor.stop() def _convert_rows( - self, table: str, headers: List[str], rows: List[Tuple] - ) -> List[Tuple]: + self, table: str, headers: list[str], rows: list[tuple] + ) -> list[tuple]: bool_col_names = BOOLEAN_COLUMNS.get(table, []) bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names] @@ -1030,7 +1025,7 @@ def conv(j: int, col: object) -> object: return outrows - async def _setup_sent_transactions(self) -> Tuple[int, int, int]: + async def _setup_sent_transactions(self) -> tuple[int, int, int]: # Only save things from the last day yesterday = int(time.time() * 1000) - 86400000 @@ -1042,7 +1037,7 @@ async def _setup_sent_transactions(self) -> Tuple[int, int, int]: ")" ) - def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]: + def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]: txn.execute(select) rows = txn.fetchall() assert txn.description is not None @@ -1112,14 +1107,14 @@ async def _get_remaining_count_to_port( self, table: str, forward_chunk: int, backward_chunk: int ) -> int: frows = cast( - List[Tuple[int]], + list[tuple[int]], await self.sqlite_store.execute_sql( "SELECT count(*) FROM %s WHERE rowid >= ?" % (table,), forward_chunk ), ) brows = cast( - List[Tuple[int]], + list[tuple[int]], await self.sqlite_store.execute_sql( "SELECT count(*) FROM %s WHERE rowid <= ?" 
% (table,), backward_chunk ), @@ -1136,7 +1131,7 @@ async def _get_already_ported_count(self, table: str) -> int: async def _get_total_count_to_port( self, table: str, forward_chunk: int, backward_chunk: int - ) -> Tuple[int, int]: + ) -> tuple[int, int]: remaining, done = await make_deferred_yieldable( defer.gatherResults( [ @@ -1221,7 +1216,7 @@ def _setup_events_stream_seqs_set_pos(txn: LoggingTransaction) -> None: async def _setup_sequence( self, sequence_name: str, - stream_id_tables: Iterable[Tuple[str, str]], + stream_id_tables: Iterable[tuple[str, str]], ) -> None: """Set a sequence to the correct value.""" current_stream_ids = [] @@ -1331,7 +1326,7 @@ class Progress: """Used to report progress of the port""" def __init__(self) -> None: - self.tables: Dict[str, TableProgress] = {} + self.tables: dict[str, TableProgress] = {} self.start_time = int(time.time()) diff --git a/synapse/api/auth/__init__.py b/synapse/api/auth/__init__.py index d253938329..cc0c0d4601 100644 --- a/synapse/api/auth/__init__.py +++ b/synapse/api/auth/__init__.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import TYPE_CHECKING, Optional, Protocol, Tuple +from typing import TYPE_CHECKING, Optional, Protocol from prometheus_client import Histogram @@ -51,7 +51,7 @@ async def check_user_in_room( room_id: str, requester: Requester, allow_departed_users: bool = False, - ) -> Tuple[str, Optional[str]]: + ) -> tuple[str, Optional[str]]: """Check if the user is in the room, or was at some point. Args: room_id: The room to check. @@ -190,7 +190,7 @@ def get_access_token_from_request(request: Request) -> str: async def check_user_in_room_or_world_readable( self, room_id: str, requester: Requester, allow_departed_users: bool = False - ) -> Tuple[str, Optional[str]]: + ) -> tuple[str, Optional[str]]: """Checks that the user is or was in the room or the room is world readable. If it isn't then an exception is raised. 
diff --git a/synapse/api/auth/base.py b/synapse/api/auth/base.py index fd7d761f7d..d5635e588f 100644 --- a/synapse/api/auth/base.py +++ b/synapse/api/auth/base.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from netaddr import IPAddress @@ -64,7 +64,7 @@ async def check_user_in_room( room_id: str, requester: Requester, allow_departed_users: bool = False, - ) -> Tuple[str, Optional[str]]: + ) -> tuple[str, Optional[str]]: """Check if the user is in the room, or was at some point. Args: room_id: The room to check. @@ -114,7 +114,7 @@ async def check_user_in_room( @trace async def check_user_in_room_or_world_readable( self, room_id: str, requester: Requester, allow_departed_users: bool = False - ) -> Tuple[str, Optional[str]]: + ) -> tuple[str, Optional[str]]: """Checks that the user is or was in the room or the room is world readable. If it isn't then an exception is raised. diff --git a/synapse/api/auth/mas.py b/synapse/api/auth/mas.py index baa6b27336..325d264161 100644 --- a/synapse/api/auth/mas.py +++ b/synapse/api/auth/mas.py @@ -13,7 +13,7 @@ # # import logging -from typing import TYPE_CHECKING, Optional, Set +from typing import TYPE_CHECKING, Optional from urllib.parse import urlencode from synapse._pydantic_compat import ( @@ -369,7 +369,7 @@ async def get_user_by_access_token( # We only allow a single device_id in the scope, so we find them all in the # scope list, and raise if there are more than one. The OIDC server should be # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope. 
- device_ids: Set[str] = set() + device_ids: set[str] = set() for tok in scope: if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX): device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX) :]) diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index b6adcc83dc..48b32aa04a 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -20,7 +20,7 @@ # import logging from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set +from typing import TYPE_CHECKING, Any, Callable, Optional from urllib.parse import urlencode from authlib.oauth2 import ClientAuth @@ -70,7 +70,7 @@ SCOPE_SYNAPSE_ADMIN = "urn:synapse:admin:*" -def scope_to_list(scope: str) -> List[str]: +def scope_to_list(scope: str) -> list[str]: """Convert a scope string to a list of scope tokens""" return scope.strip().split(" ") @@ -96,7 +96,7 @@ def is_active(self, now_ms: int) -> bool: absolute_expiry_ms = expires_in * 1000 + self.retrieved_at_ms return now_ms < absolute_expiry_ms - def get_scope_list(self) -> List[str]: + def get_scope_list(self) -> list[str]: value = self._inner.get("scope") if not isinstance(value, str): return [] @@ -264,7 +264,7 @@ async def account_management_url(self) -> Optional[str]: logger.warning("Failed to load metadata:", exc_info=True) return None - async def auth_metadata(self) -> Dict[str, Any]: + async def auth_metadata(self) -> dict[str, Any]: """ Returns the auth metadata dict """ @@ -303,7 +303,7 @@ async def _introspect_token( # By default, we shouldn't cache the result unless we know it's valid cache_context.should_cache = False introspection_endpoint = await self._introspection_endpoint() - raw_headers: Dict[str, str] = { + raw_headers: dict[str, str] = { "Content-Type": "application/x-www-form-urlencoded", "Accept": "application/json", # Tell MAS that we support reading the device ID as an explicit @@ -520,7 +520,7 @@ async def 
get_user_by_access_token( raise InvalidClientTokenError("Token is not active") # Let's look at the scope - scope: List[str] = introspection_result.get_scope_list() + scope: list[str] = introspection_result.get_scope_list() # Determine type of user based on presence of particular scopes has_user_scope = ( @@ -575,7 +575,7 @@ async def get_user_by_access_token( # We only allow a single device_id in the scope, so we find them all in the # scope list, and raise if there are more than one. The OIDC server should be # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope. - device_ids: Set[str] = set() + device_ids: set[str] = set() for tok in scope: if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX): device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX) :]) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index fb6721c0ee..f75b34ef69 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -26,7 +26,7 @@ import typing from enum import Enum from http import HTTPStatus -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union from twisted.web import http @@ -166,7 +166,7 @@ def __init__( self, code: Union[int, HTTPStatus], msg: str, - headers: Optional[Dict[str, str]] = None, + headers: Optional[dict[str, str]] = None, ): super().__init__("%d: %s" % (code, msg)) @@ -201,7 +201,7 @@ def __init__(self, location: bytes, http_code: int = http.FOUND): super().__init__(code=http_code, msg=msg) self.location = location - self.cookies: List[bytes] = [] + self.cookies: list[bytes] = [] class SynapseError(CodeMessageException): @@ -223,8 +223,8 @@ def __init__( code: int, msg: str, errcode: str = Codes.UNKNOWN, - additional_fields: Optional[Dict] = None, - headers: Optional[Dict[str, str]] = None, + additional_fields: Optional[dict] = None, + headers: Optional[dict[str, str]] = None, ): """Constructs a synapse error. 
@@ -236,7 +236,7 @@ def __init__( super().__init__(code, msg, headers) self.errcode = errcode if additional_fields is None: - self._additional_fields: Dict = {} + self._additional_fields: dict = {} else: self._additional_fields = dict(additional_fields) @@ -276,7 +276,7 @@ def __init__( code: int, msg: str, errcode: str = Codes.UNKNOWN, - additional_fields: Optional[Dict] = None, + additional_fields: Optional[dict] = None, ): super().__init__(code, msg, errcode, additional_fields) @@ -409,7 +409,7 @@ class OAuthInsufficientScopeError(SynapseError): def __init__( self, - required_scopes: List[str], + required_scopes: list[str], ): headers = { "WWW-Authenticate": 'Bearer error="insufficient_scope", scope="%s"' diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index 34dd12368a..e31bec1a00 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -26,12 +26,9 @@ Awaitable, Callable, Collection, - Dict, Iterable, - List, Mapping, Optional, - Set, TypeVar, Union, ) @@ -248,34 +245,34 @@ def unread_thread_notifications(self) -> bool: async def filter_presence( self, presence_states: Iterable[UserPresenceState] - ) -> List[UserPresenceState]: + ) -> list[UserPresenceState]: return await self._presence_filter.filter(presence_states) async def filter_global_account_data( self, events: Iterable[JsonDict] - ) -> List[JsonDict]: + ) -> list[JsonDict]: return await self._global_account_data_filter.filter(events) - async def filter_room_state(self, events: Iterable[EventBase]) -> List[EventBase]: + async def filter_room_state(self, events: Iterable[EventBase]) -> list[EventBase]: return await self._room_state_filter.filter( await self._room_filter.filter(events) ) async def filter_room_timeline( self, events: Iterable[EventBase] - ) -> List[EventBase]: + ) -> list[EventBase]: return await self._room_timeline_filter.filter( await self._room_filter.filter(events) ) - async def filter_room_ephemeral(self, events: Iterable[JsonDict]) -> List[JsonDict]: + 
async def filter_room_ephemeral(self, events: Iterable[JsonDict]) -> list[JsonDict]: return await self._room_ephemeral_filter.filter( await self._room_filter.filter(events) ) async def filter_room_account_data( self, events: Iterable[JsonDict] - ) -> List[JsonDict]: + ) -> list[JsonDict]: return await self._room_account_data_filter.filter( await self._room_filter.filter(events) ) @@ -440,7 +437,7 @@ def _check(self, event: FilterEvent) -> bool: return True - def _check_fields(self, field_matchers: Dict[str, Callable[[str], bool]]) -> bool: + def _check_fields(self, field_matchers: dict[str, Callable[[str], bool]]) -> bool: """Checks whether the filter matches the given event fields. Args: @@ -474,7 +471,7 @@ def _check_fields(self, field_matchers: Dict[str, Callable[[str], bool]]) -> boo # Otherwise, accept it. return True - def filter_rooms(self, room_ids: Iterable[str]) -> Set[str]: + def filter_rooms(self, room_ids: Iterable[str]) -> set[str]: """Apply the 'rooms' filter to a given list of rooms. Args: @@ -496,7 +493,7 @@ def filter_rooms(self, room_ids: Iterable[str]) -> Set[str]: async def _check_event_relations( self, events: Collection[FilterEvent] - ) -> List[FilterEvent]: + ) -> list[FilterEvent]: # The event IDs to check, mypy doesn't understand the isinstance check. 
event_ids = [event.event_id for event in events if isinstance(event, EventBase)] # type: ignore[attr-defined] event_ids_to_keep = set( @@ -511,7 +508,7 @@ async def _check_event_relations( if not isinstance(event, EventBase) or event.event_id in event_ids_to_keep ] - async def filter(self, events: Iterable[FilterEvent]) -> List[FilterEvent]: + async def filter(self, events: Iterable[FilterEvent]) -> list[FilterEvent]: result = [event for event in events if self._check(event)] if self.related_by_senders or self.related_by_rel_types: diff --git a/synapse/api/ratelimiting.py b/synapse/api/ratelimiting.py index 9d1c7801bc..1a43bdff23 100644 --- a/synapse/api/ratelimiting.py +++ b/synapse/api/ratelimiting.py @@ -20,7 +20,7 @@ # # -from typing import TYPE_CHECKING, Dict, Hashable, Optional, Tuple +from typing import TYPE_CHECKING, Hashable, Optional from synapse.api.errors import LimitExceededError from synapse.config.ratelimiting import RatelimitSettings @@ -92,7 +92,7 @@ def __init__( # * The number of tokens currently in the bucket, # * The time point when the bucket was last completely empty, and # * The rate_hz (leak rate) of this particular bucket. - self.actions: Dict[Hashable, Tuple[float, float, float]] = {} + self.actions: dict[Hashable, tuple[float, float, float]] = {} self.clock.looping_call(self._prune_message_counts, 60 * 1000) @@ -109,7 +109,7 @@ def _get_key( def _get_action_counts( self, key: Hashable, time_now_s: float - ) -> Tuple[float, float, float]: + ) -> tuple[float, float, float]: """Retrieve the action counts, with a fallback representing an empty bucket.""" return self.actions.get(key, (0.0, time_now_s, 0.0)) @@ -122,7 +122,7 @@ async def can_do_action( update: bool = True, n_actions: int = 1, _time_now_s: Optional[float] = None, - ) -> Tuple[bool, float]: + ) -> tuple[bool, float]: """Can the entity (e.g. user or IP address) perform the action? 
Checks if the user has ratelimiting disabled in the database by looking diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index 71ef5952c3..b6e76379f1 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -18,7 +18,7 @@ # # -from typing import Callable, Dict, Optional, Tuple +from typing import Callable, Optional import attr @@ -109,7 +109,7 @@ class RoomVersion: # is not enough to mark it "supported": the push rule evaluator also needs to # support the flag. Unknown flags are ignored by the evaluator, making conditions # fail if used. - msc3931_push_features: Tuple[str, ...] # values from PushRuleRoomFlag + msc3931_push_features: tuple[str, ...] # values from PushRuleRoomFlag # MSC3757: Restricting who can overwrite a state event msc3757_enabled: bool # MSC4289: Creator power enabled @@ -476,7 +476,7 @@ class RoomVersions: ) -KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = { +KNOWN_ROOM_VERSIONS: dict[str, RoomVersion] = { v.identifier: v for v in ( RoomVersions.V1, diff --git a/synapse/app/_base.py b/synapse/app/_base.py index e30151dfb4..1954dbc1a0 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -34,11 +34,8 @@ Any, Awaitable, Callable, - Dict, - List, NoReturn, Optional, - Tuple, cast, ) from wsgiref.simple_server import WSGIServer @@ -98,8 +95,8 @@ logger = logging.getLogger(__name__) -_instance_id_to_sighup_callbacks_map: Dict[ - str, List[Tuple[Callable[..., None], Tuple[object, ...], Dict[str, object]]] +_instance_id_to_sighup_callbacks_map: dict[ + str, list[tuple[Callable[..., None], tuple[object, ...], dict[str, object]]] ] = {} """ Map from homeserver instance_id to a list of callbacks. 
@@ -176,7 +173,7 @@ def start_worker_reactor( def start_reactor( appname: str, soft_file_limit: int, - gc_thresholds: Optional[Tuple[int, int, int]], + gc_thresholds: Optional[tuple[int, int, int]], pid_file: Optional[str], daemonize: bool, print_pidfile: bool, @@ -309,7 +306,7 @@ async def wrapper() -> None: def listen_metrics( bind_addresses: StrCollection, port: int -) -> List[Tuple[WSGIServer, Thread]]: +) -> list[tuple[WSGIServer, Thread]]: """ Start Prometheus metrics server. @@ -330,7 +327,7 @@ def listen_metrics( from synapse.metrics import RegistryProxy - servers: List[Tuple[WSGIServer, Thread]] = [] + servers: list[tuple[WSGIServer, Thread]] = [] for host in bind_addresses: logger.info("Starting metrics listener on %s:%d", host, port) server, thread = start_http_server_prometheus( @@ -345,7 +342,7 @@ def listen_manhole( port: int, manhole_settings: ManholeConfig, manhole_globals: dict, -) -> List[Port]: +) -> list[Port]: # twisted.conch.manhole 21.1.0 uses "int_from_bytes", which produces a confusing # warning. It's fixed by https://github.com/twisted/twisted/pull/1522), so # suppress the warning for now. 
@@ -370,7 +367,7 @@ def listen_tcp( factory: ServerFactory, reactor: IReactorTCP = reactor, backlog: int = 50, -) -> List[Port]: +) -> list[Port]: """ Create a TCP socket for a port and several addresses @@ -395,7 +392,7 @@ def listen_unix( factory: ServerFactory, reactor: IReactorUNIX = reactor, backlog: int = 50, -) -> List[Port]: +) -> list[Port]: """ Create a UNIX socket for a given path and 'mode' permission @@ -419,7 +416,7 @@ def listen_http( max_request_body_size: int, context_factory: Optional[IOpenSSLContextFactory], reactor: ISynapseReactor = reactor, -) -> List[Port]: +) -> list[Port]: """ Args: listener_config: TODO @@ -489,7 +486,7 @@ def listen_ssl( context_factory: IOpenSSLContextFactory, reactor: IReactorSSL = reactor, backlog: int = 50, -) -> List[Port]: +) -> list[Port]: """ Create an TLS-over-TCP socket for a port and several addresses diff --git a/synapse/app/admin_cmd.py b/synapse/app/admin_cmd.py index bafeb46971..b5b1edac0a 100644 --- a/synapse/app/admin_cmd.py +++ b/synapse/app/admin_cmd.py @@ -24,7 +24,7 @@ import os import sys import tempfile -from typing import List, Mapping, Optional, Sequence, Tuple +from typing import Mapping, Optional, Sequence from twisted.internet import defer, task @@ -150,7 +150,7 @@ def __init__(self, user_id: str, directory: Optional[str] = None): if list(os.listdir(self.base_directory)): raise Exception("Directory must be empty") - def write_events(self, room_id: str, events: List[EventBase]) -> None: + def write_events(self, room_id: str, events: list[EventBase]) -> None: room_directory = os.path.join(self.base_directory, "rooms", room_id) os.makedirs(room_directory, exist_ok=True) events_file = os.path.join(room_directory, "events") @@ -255,7 +255,7 @@ def finished(self) -> str: return self.base_directory -def load_config(argv_options: List[str]) -> Tuple[HomeServerConfig, argparse.Namespace]: +def load_config(argv_options: list[str]) -> tuple[HomeServerConfig, argparse.Namespace]: parser = 
argparse.ArgumentParser(description="Synapse Admin Command") HomeServerConfig.add_arguments_to_parser(parser) diff --git a/synapse/app/complement_fork_starter.py b/synapse/app/complement_fork_starter.py index b981a7631b..73e33d77a5 100644 --- a/synapse/app/complement_fork_starter.py +++ b/synapse/app/complement_fork_starter.py @@ -26,13 +26,13 @@ import signal import sys from types import FrameType -from typing import Any, Callable, Dict, List, Optional +from typing import Any, Callable, Optional from twisted.internet.main import installReactor # a list of the original signal handlers, before we installed our custom ones. # We restore these in our child processes. -_original_signal_handlers: Dict[int, Any] = {} +_original_signal_handlers: dict[int, Any] = {} class ProxiedReactor: @@ -72,7 +72,7 @@ def __getattr__(self, attr_name: str) -> Any: def _worker_entrypoint( - func: Callable[[], None], proxy_reactor: ProxiedReactor, args: List[str] + func: Callable[[], None], proxy_reactor: ProxiedReactor, args: list[str] ) -> None: """ Entrypoint for a forked worker process. @@ -128,7 +128,7 @@ def main() -> None: # Split up the subsequent arguments into each workers' arguments; # `--` is our delimiter of choice. - args_by_worker: List[List[str]] = [ + args_by_worker: list[list[str]] = [ list(args) for cond, args in itertools.groupby(ns.args, lambda ele: ele != "--") if cond and args @@ -167,7 +167,7 @@ def main() -> None: update_proc.join() print("===== PREPARED DATABASE =====", file=sys.stderr) - processes: List[multiprocessing.Process] = [] + processes: list[multiprocessing.Process] = [] # Install signal handlers to propagate signals to all our children, so that they # shut down cleanly. 
This also inhibits our own exit, but that's good: we want to diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 7518661265..8f512c1577 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -21,7 +21,6 @@ # import logging import sys -from typing import Dict, List from twisted.web.resource import Resource @@ -181,7 +180,7 @@ def _listen_http(self, listener_config: ListenerConfig) -> None: # We always include an admin resource that we populate with servlets as needed admin_resource = JsonResource(self, canonical_json=False) - resources: Dict[str, Resource] = { + resources: dict[str, Resource] = { # We always include a health resource. "/health": HealthResource(), "/_synapse/admin": admin_resource, @@ -314,7 +313,7 @@ def start_listening(self) -> None: self.get_replication_command_handler().start_replication(self) -def load_config(argv_options: List[str]) -> HomeServerConfig: +def load_config(argv_options: list[str]) -> HomeServerConfig: """ Parse the commandline and config files (does not generate config) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index e415d651bc..023a0d877f 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -22,7 +22,7 @@ import logging import os import sys -from typing import Dict, Iterable, List, Optional +from typing import Iterable, Optional from twisted.internet.tcp import Port from twisted.web.resource import EncodingResourceWrapper, Resource @@ -99,7 +99,7 @@ def _listener_http( site_tag = listener_config.get_site_tag() # We always include a health resource. 
- resources: Dict[str, Resource] = {"/health": HealthResource()} + resources: dict[str, Resource] = {"/health": HealthResource()} for res in listener_config.http_options.resources: for name in res.names: @@ -170,7 +170,7 @@ def _listener_http( def _configure_named_resource( self, name: str, compress: bool = False - ) -> Dict[str, Resource]: + ) -> dict[str, Resource]: """Build a resource map for a named resource Args: @@ -180,7 +180,7 @@ def _configure_named_resource( Returns: map from path to HTTP resource """ - resources: Dict[str, Resource] = {} + resources: dict[str, Resource] = {} if name == "client": client_resource: Resource = ClientRestResource(self) if compress: @@ -318,7 +318,7 @@ def start_listening(self) -> None: logger.warning("Unrecognized listener type: %s", listener.type) -def load_or_generate_config(argv_options: List[str]) -> HomeServerConfig: +def load_or_generate_config(argv_options: list[str]) -> HomeServerConfig: """ Parse the commandline and config files diff --git a/synapse/app/phone_stats_home.py b/synapse/app/phone_stats_home.py index 4bbc33cba2..13a0e3db7c 100644 --- a/synapse/app/phone_stats_home.py +++ b/synapse/app/phone_stats_home.py @@ -22,7 +22,7 @@ import math import resource import sys -from typing import TYPE_CHECKING, List, Mapping, Sized, Tuple +from typing import TYPE_CHECKING, Mapping, Sized from prometheus_client import Gauge @@ -54,7 +54,7 @@ # Contains the list of processes we will be monitoring # currently either 0 or 1 -_stats_process: List[Tuple[int, "resource.struct_rusage"]] = [] +_stats_process: list[tuple[int, "resource.struct_rusage"]] = [] # Gauges to expose monthly active user control metrics current_mau_gauge = Gauge( @@ -82,12 +82,12 @@ def phone_stats_home( hs: "HomeServer", stats: JsonDict, - stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process, + stats_process: list[tuple[int, "resource.struct_rusage"]] = _stats_process, ) -> "defer.Deferred[None]": async def _phone_stats_home( hs: 
"HomeServer", stats: JsonDict, - stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process, + stats_process: list[tuple[int, "resource.struct_rusage"]] = _stats_process, ) -> None: """Collect usage statistics and send them to the configured endpoint. diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index 1d0735ca1d..e91fa3a624 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -25,9 +25,7 @@ from enum import Enum from typing import ( TYPE_CHECKING, - Dict, Iterable, - List, Optional, Pattern, Sequence, @@ -59,11 +57,11 @@ # Type for the `device_one_time_keys_count` field in an appservice transaction # user ID -> {device ID -> {algorithm -> count}} -TransactionOneTimeKeysCount = Dict[str, Dict[str, Dict[str, int]]] +TransactionOneTimeKeysCount = dict[str, dict[str, dict[str, int]]] # Type for the `device_unused_fallback_key_types` field in an appservice transaction # user ID -> {device ID -> [algorithm]} -TransactionUnusedFallbackKeys = Dict[str, Dict[str, List[str]]] +TransactionUnusedFallbackKeys = dict[str, dict[str, list[str]]] class ApplicationServiceState(Enum): @@ -145,7 +143,7 @@ def __init__( def _check_namespaces( self, namespaces: Optional[JsonDict] - ) -> Dict[str, List[Namespace]]: + ) -> dict[str, list[Namespace]]: # Sanity check that it is of the form: # { # users: [ {regex: "[A-z]+.*", exclusive: true}, ...], @@ -155,7 +153,7 @@ def _check_namespaces( if namespaces is None: namespaces = {} - result: Dict[str, List[Namespace]] = {} + result: dict[str, list[Namespace]] = {} for ns in ApplicationService.NS_LIST: result[ns] = [] @@ -388,7 +386,7 @@ def is_exclusive_alias(self, alias: str) -> bool: def is_exclusive_room(self, room_id: str) -> bool: return self._is_exclusive(ApplicationService.NS_ROOMS, room_id) - def get_exclusive_user_regexes(self) -> List[Pattern[str]]: + def get_exclusive_user_regexes(self) -> list[Pattern[str]]: """Get the list of regexes used to determine 
if a user is exclusively registered by the AS """ @@ -417,8 +415,8 @@ def __init__( service: ApplicationService, id: int, events: Sequence[EventBase], - ephemeral: List[JsonMapping], - to_device_messages: List[JsonMapping], + ephemeral: list[JsonMapping], + to_device_messages: list[JsonMapping], one_time_keys_count: TransactionOneTimeKeysCount, unused_fallback_keys: TransactionUnusedFallbackKeys, device_list_summary: DeviceListUpdates, diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 55069cc5d3..f08a921998 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -23,13 +23,10 @@ import urllib.parse from typing import ( TYPE_CHECKING, - Dict, Iterable, - List, Mapping, Optional, Sequence, - Tuple, TypeVar, Union, ) @@ -133,14 +130,14 @@ def __init__(self, hs: "HomeServer"): self.clock = hs.get_clock() self.config = hs.config.appservice - self.protocol_meta_cache: ResponseCache[Tuple[str, str]] = ResponseCache( + self.protocol_meta_cache: ResponseCache[tuple[str, str]] = ResponseCache( clock=hs.get_clock(), name="as_protocol_meta", server_name=self.server_name, timeout_ms=HOUR_IN_MS, ) - def _get_headers(self, service: "ApplicationService") -> Dict[bytes, List[bytes]]: + def _get_headers(self, service: "ApplicationService") -> dict[bytes, list[bytes]]: """This makes sure we have always the auth header and opentracing headers set.""" # This is also ensured before in the functions. 
However this is needed to please @@ -210,8 +207,8 @@ async def query_3pe( service: "ApplicationService", kind: str, protocol: str, - fields: Dict[bytes, List[bytes]], - ) -> List[JsonDict]: + fields: dict[bytes, list[bytes]], + ) -> list[JsonDict]: if kind == ThirdPartyEntityKind.USER: required_field = "userid" elif kind == ThirdPartyEntityKind.LOCATION: @@ -225,7 +222,7 @@ async def query_3pe( assert service.hs_token is not None try: - args: Mapping[bytes, Union[List[bytes], str]] = fields + args: Mapping[bytes, Union[list[bytes], str]] = fields if self.config.use_appservice_legacy_authorization: args = { **fields, @@ -320,8 +317,8 @@ async def push_bulk( self, service: "ApplicationService", events: Sequence[EventBase], - ephemeral: List[JsonMapping], - to_device_messages: List[JsonMapping], + ephemeral: list[JsonMapping], + to_device_messages: list[JsonMapping], one_time_keys_count: TransactionOneTimeKeysCount, unused_fallback_keys: TransactionUnusedFallbackKeys, device_list_summary: DeviceListUpdates, @@ -429,9 +426,9 @@ async def push_bulk( return False async def claim_client_keys( - self, service: "ApplicationService", query: List[Tuple[str, str, str, int]] - ) -> Tuple[ - Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str, int]] + self, service: "ApplicationService", query: list[tuple[str, str, str, int]] + ) -> tuple[ + dict[str, dict[str, dict[str, JsonDict]]], list[tuple[str, str, str, int]] ]: """Claim one time keys from an application service. @@ -457,7 +454,7 @@ async def claim_client_keys( assert service.hs_token is not None # Create the expected payload shape. 
- body: Dict[str, Dict[str, List[str]]] = {} + body: dict[str, dict[str, list[str]]] = {} for user_id, device, algorithm, count in query: body.setdefault(user_id, {}).setdefault(device, []).extend( [algorithm] * count @@ -502,8 +499,8 @@ async def claim_client_keys( return response, missing async def query_keys( - self, service: "ApplicationService", query: Dict[str, List[str]] - ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: + self, service: "ApplicationService", query: dict[str, list[str]] + ) -> dict[str, dict[str, dict[str, JsonDict]]]: """Query the application service for keys. Note that any error (including a timeout) is treated as the application @@ -545,7 +542,7 @@ async def query_keys( def _serialize( self, service: "ApplicationService", events: Iterable[EventBase] - ) -> List[JsonDict]: + ) -> list[JsonDict]: time_now = self.clock.time_msec() return [ serialize_event( diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py index b4de759b67..b5fab5f50d 100644 --- a/synapse/appservice/scheduler.py +++ b/synapse/appservice/scheduler.py @@ -61,13 +61,9 @@ Awaitable, Callable, Collection, - Dict, Iterable, - List, Optional, Sequence, - Set, - Tuple, ) from twisted.internet.interfaces import IDelayedCall @@ -183,16 +179,16 @@ class _ServiceQueuer: def __init__(self, txn_ctrl: "_TransactionController", hs: "HomeServer"): # dict of {service_id: [events]} - self.queued_events: Dict[str, List[EventBase]] = {} + self.queued_events: dict[str, list[EventBase]] = {} # dict of {service_id: [events]} - self.queued_ephemeral: Dict[str, List[JsonMapping]] = {} + self.queued_ephemeral: dict[str, list[JsonMapping]] = {} # dict of {service_id: [to_device_message_json]} - self.queued_to_device_messages: Dict[str, List[JsonMapping]] = {} + self.queued_to_device_messages: dict[str, list[JsonMapping]] = {} # dict of {service_id: [device_list_summary]} - self.queued_device_list_summaries: Dict[str, List[DeviceListUpdates]] = {} + 
self.queued_device_list_summaries: dict[str, list[DeviceListUpdates]] = {} # the appservices which currently have a transaction in flight - self.requests_in_flight: Set[str] = set() + self.requests_in_flight: set[str] = set() self.txn_ctrl = txn_ctrl self._msc3202_transaction_extensions_enabled: bool = ( hs.config.experimental.msc3202_transaction_extensions @@ -302,7 +298,7 @@ async def _compute_msc3202_otk_counts_and_fallback_keys( events: Iterable[EventBase], ephemerals: Iterable[JsonMapping], to_device_messages: Iterable[JsonMapping], - ) -> Tuple[TransactionOneTimeKeysCount, TransactionUnusedFallbackKeys]: + ) -> tuple[TransactionOneTimeKeysCount, TransactionUnusedFallbackKeys]: """ Given a list of the events, ephemeral messages and to-device messages, - first computes a list of application services users that may have @@ -313,14 +309,14 @@ async def _compute_msc3202_otk_counts_and_fallback_keys( """ # Set of 'interesting' users who may have updates - users: Set[str] = set() + users: set[str] = set() # The sender is always included users.add(service.sender.to_string()) # All AS users that would receive the PDUs or EDUs sent to these rooms # are classed as 'interesting'. 
- rooms_of_interesting_users: Set[str] = set() + rooms_of_interesting_users: set[str] = set() # PDUs rooms_of_interesting_users.update(event.room_id for event in events) # EDUs @@ -364,7 +360,7 @@ def __init__(self, hs: "HomeServer"): self.as_api = hs.get_application_service_api() # map from service id to recoverer instance - self.recoverers: Dict[str, "_Recoverer"] = {} + self.recoverers: dict[str, "_Recoverer"] = {} # for UTs self.RECOVERER_CLASS = _Recoverer @@ -373,8 +369,8 @@ async def send( self, service: ApplicationService, events: Sequence[EventBase], - ephemeral: Optional[List[JsonMapping]] = None, - to_device_messages: Optional[List[JsonMapping]] = None, + ephemeral: Optional[list[JsonMapping]] = None, + to_device_messages: Optional[list[JsonMapping]] = None, one_time_keys_count: Optional[TransactionOneTimeKeysCount] = None, unused_fallback_keys: Optional[TransactionUnusedFallbackKeys] = None, device_list_summary: Optional[DeviceListUpdates] = None, diff --git a/synapse/config/__main__.py b/synapse/config/__main__.py index ef9d36b507..9169b062bf 100644 --- a/synapse/config/__main__.py +++ b/synapse/config/__main__.py @@ -20,13 +20,12 @@ # # import sys -from typing import List from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig -def main(args: List[str]) -> None: +def main(args: list[str]) -> None: action = args[1] if len(args) > 1 and args[1] == "read" else None # If we're reading a key in the config file, then `args[1]` will be `read` and `args[2]` # will be the key to read. 
diff --git a/synapse/config/_base.py b/synapse/config/_base.py index 5d0560e0f2..ce06905390 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -33,14 +33,10 @@ from typing import ( Any, ClassVar, - Dict, Iterable, Iterator, - List, MutableMapping, Optional, - Tuple, - Type, TypeVar, Union, ) @@ -321,9 +317,9 @@ def read_template(self, filename: str) -> jinja2.Template: def read_templates( self, - filenames: List[str], + filenames: list[str], custom_template_directories: Optional[Iterable[str]] = None, - ) -> List[jinja2.Template]: + ) -> list[jinja2.Template]: """Load a list of template files from disk using the given variables. This function will attempt to load the given templates from the default Synapse @@ -402,7 +398,7 @@ class RootConfig: class, lower-cased and with "Config" removed. """ - config_classes: List[Type[Config]] = [] + config_classes: list[type[Config]] = [] def __init__(self, config_files: StrSequence = ()): # Capture absolute paths here, so we can reload config after we daemonize. 
@@ -471,7 +467,7 @@ def generate_config( generate_secrets: bool = False, report_stats: Optional[bool] = None, open_private_ports: bool = False, - listeners: Optional[List[dict]] = None, + listeners: Optional[list[dict]] = None, tls_certificate_path: Optional[str] = None, tls_private_key_path: Optional[str] = None, ) -> str: @@ -545,7 +541,7 @@ def generate_config( @classmethod def load_config( - cls: Type[TRootConfig], description: str, argv_options: List[str] + cls: type[TRootConfig], description: str, argv_options: list[str] ) -> TRootConfig: """Parse the commandline and config files @@ -605,8 +601,8 @@ def add_arguments_to_parser(cls, config_parser: argparse.ArgumentParser) -> None @classmethod def load_config_with_parser( - cls: Type[TRootConfig], parser: argparse.ArgumentParser, argv_options: List[str] - ) -> Tuple[TRootConfig, argparse.Namespace]: + cls: type[TRootConfig], parser: argparse.ArgumentParser, argv_options: list[str] + ) -> tuple[TRootConfig, argparse.Namespace]: """Parse the commandline and config files with the given parser Doesn't support config-file-generation: used by the worker apps. 
@@ -658,7 +654,7 @@ def load_config_with_parser( @classmethod def load_or_generate_config( - cls: Type[TRootConfig], description: str, argv_options: List[str] + cls: type[TRootConfig], description: str, argv_options: list[str] ) -> Optional[TRootConfig]: """Parse the commandline and config files @@ -858,7 +854,7 @@ def load_or_generate_config( def parse_config_dict( self, - config_dict: Dict[str, Any], + config_dict: dict[str, Any], config_dir_path: str, data_dir_path: str, allow_secrets_in_config: bool = True, @@ -883,7 +879,7 @@ def parse_config_dict( ) def generate_missing_files( - self, config_dict: Dict[str, Any], config_dir_path: str + self, config_dict: dict[str, Any], config_dir_path: str ) -> None: self.invoke_all("generate_files", config_dict, config_dir_path) @@ -930,7 +926,7 @@ def validate_config(self) -> None: """ -def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]: +def read_config_files(config_files: Iterable[str]) -> dict[str, Any]: """Read the config files and shallowly merge them into a dict. Successive configurations are shallowly merged into ones provided earlier, @@ -964,7 +960,7 @@ def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]: return specified_config -def find_config_files(search_paths: List[str]) -> List[str]: +def find_config_files(search_paths: list[str]) -> list[str]: """Finds config files using a list of search paths. If a path is a file then that file path is added to the list. If a search path is a directory then all the "*.yaml" files in that directory are added to the list in @@ -1018,7 +1014,7 @@ class ShardedWorkerHandlingConfig: below). 
""" - instances: List[str] + instances: list[str] def should_handle(self, instance_name: str, key: str) -> bool: """Whether this instance is responsible for handling the given key.""" diff --git a/synapse/config/_base.pyi b/synapse/config/_base.pyi index 02543da388..1a9cb7db47 100644 --- a/synapse/config/_base.pyi +++ b/synapse/config/_base.pyi @@ -2,15 +2,11 @@ import argparse from typing import ( Any, Collection, - Dict, Iterable, Iterator, - List, Literal, MutableMapping, Optional, - Tuple, - Type, TypeVar, Union, overload, @@ -129,8 +125,8 @@ class RootConfig: mas: mas.MasConfig matrix_rtc: matrixrtc.MatrixRtcConfig - config_classes: List[Type["Config"]] = ... - config_files: List[str] + config_classes: list[type["Config"]] = ... + config_files: list[str] def __init__(self, config_files: Collection[str] = ...) -> None: ... def invoke_all( self, func_name: str, *args: Any, **kwargs: Any @@ -139,7 +135,7 @@ class RootConfig: def invoke_all_static(cls, func_name: str, *args: Any, **kwargs: Any) -> None: ... def parse_config_dict( self, - config_dict: Dict[str, Any], + config_dict: dict[str, Any], config_dir_path: str, data_dir_path: str, allow_secrets_in_config: bool = ..., @@ -158,11 +154,11 @@ class RootConfig: ) -> str: ... @classmethod def load_or_generate_config( - cls: Type[TRootConfig], description: str, argv_options: List[str] + cls: type[TRootConfig], description: str, argv_options: list[str] ) -> Optional[TRootConfig]: ... @classmethod def load_config( - cls: Type[TRootConfig], description: str, argv_options: List[str] + cls: type[TRootConfig], description: str, argv_options: list[str] ) -> TRootConfig: ... @classmethod def add_arguments_to_parser( @@ -170,8 +166,8 @@ class RootConfig: ) -> None: ... @classmethod def load_config_with_parser( - cls: Type[TRootConfig], parser: argparse.ArgumentParser, argv_options: List[str] - ) -> Tuple[TRootConfig, argparse.Namespace]: ... 
+ cls: type[TRootConfig], parser: argparse.ArgumentParser, argv_options: list[str] + ) -> tuple[TRootConfig, argparse.Namespace]: ... def generate_missing_files( self, config_dict: dict, config_dir_path: str ) -> None: ... @@ -203,16 +199,16 @@ class Config: def read_template(self, filenames: str) -> jinja2.Template: ... def read_templates( self, - filenames: List[str], + filenames: list[str], custom_template_directories: Optional[Iterable[str]] = None, - ) -> List[jinja2.Template]: ... + ) -> list[jinja2.Template]: ... -def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]: ... -def find_config_files(search_paths: List[str]) -> List[str]: ... +def read_config_files(config_files: Iterable[str]) -> dict[str, Any]: ... +def find_config_files(search_paths: list[str]) -> list[str]: ... class ShardedWorkerHandlingConfig: - instances: List[str] - def __init__(self, instances: List[str]) -> None: ... + instances: list[str] + def __init__(self, instances: list[str]) -> None: ... def should_handle(self, instance_name: str, key: str) -> bool: ... # noqa: F811 class RoutableShardedWorkerHandlingConfig(ShardedWorkerHandlingConfig): diff --git a/synapse/config/_util.py b/synapse/config/_util.py index 731b60a840..3e239c525e 100644 --- a/synapse/config/_util.py +++ b/synapse/config/_util.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict, Type, TypeVar +from typing import Any, TypeVar import jsonschema @@ -79,8 +79,8 @@ def json_error_to_config_error( def parse_and_validate_mapping( config: Any, - model_type: Type[Model], -) -> Dict[str, Model]: + model_type: type[Model], +) -> dict[str, Model]: """Parse `config` as a mapping from strings to a given `Model` type. Args: config: The configuration data to check @@ -93,7 +93,7 @@ def parse_and_validate_mapping( try: # type-ignore: mypy doesn't like constructing `Dict[str, model_type]` because # `model_type` is a runtime variable. 
Pydantic is fine with this. - instances = parse_obj_as(Dict[str, model_type], config) # type: ignore[valid-type] + instances = parse_obj_as(dict[str, model_type], config) # type: ignore[valid-type] except ValidationError as e: raise ConfigError(str(e)) from e return instances diff --git a/synapse/config/api.py b/synapse/config/api.py index 0bb99d4228..e32e03e55e 100644 --- a/synapse/config/api.py +++ b/synapse/config/api.py @@ -20,7 +20,7 @@ # import logging -from typing import Any, Iterable, Optional, Tuple +from typing import Any, Iterable, Optional from synapse.api.constants import EventTypes from synapse.config._base import Config, ConfigError @@ -46,7 +46,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: def _get_prejoin_state_entries( self, config: JsonDict - ) -> Iterable[Tuple[str, Optional[str]]]: + ) -> Iterable[tuple[str, Optional[str]]]: """Get the event types and state keys to include in the prejoin state.""" room_prejoin_state_config = config.get("room_prejoin_state") or {} diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py index 81dbd330cc..b9ed1a702c 100644 --- a/synapse/config/appservice.py +++ b/synapse/config/appservice.py @@ -21,7 +21,7 @@ # import logging -from typing import Any, Dict, List +from typing import Any from urllib import parse as urlparse import yaml @@ -61,13 +61,13 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: def load_appservices( - hostname: str, config_files: List[str] -) -> List[ApplicationService]: + hostname: str, config_files: list[str] +) -> list[ApplicationService]: """Returns a list of Application Services from the config files.""" # Dicts of value -> filename - seen_as_tokens: Dict[str, str] = {} - seen_ids: Dict[str, str] = {} + seen_as_tokens: dict[str, str] = {} + seen_ids: dict[str, str] = {} appservices = [] diff --git a/synapse/config/cache.py b/synapse/config/cache.py index 35a052b254..e51efc3dbd 100644 --- a/synapse/config/cache.py +++ 
b/synapse/config/cache.py @@ -23,7 +23,7 @@ import os import re import threading -from typing import Any, Callable, Dict, Mapping, Optional +from typing import Any, Callable, Mapping, Optional import attr @@ -38,7 +38,7 @@ _CACHE_PREFIX = "SYNAPSE_CACHE_FACTOR" # Map from canonicalised cache name to cache. -_CACHES: Dict[str, Callable[[float], None]] = {} +_CACHES: dict[str, Callable[[float], None]] = {} # a lock on the contents of _CACHES _CACHES_LOCK = threading.Lock() @@ -104,7 +104,7 @@ class CacheConfig(Config): _environ: Mapping[str, str] = os.environ event_cache_size: int - cache_factors: Dict[str, float] + cache_factors: dict[str, float] global_factor: float track_memory_usage: bool expiry_time_msec: Optional[int] diff --git a/synapse/config/cas.py b/synapse/config/cas.py index 60d66d7019..e6e869bb16 100644 --- a/synapse/config/cas.py +++ b/synapse/config/cas.py @@ -20,7 +20,7 @@ # # -from typing import Any, List, Optional +from typing import Any, Optional from synapse.config.sso import SsoAttributeRequirement from synapse.types import JsonDict @@ -107,7 +107,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: def _parsed_required_attributes_def( required_attributes: Any, -) -> List[SsoAttributeRequirement]: +) -> list[SsoAttributeRequirement]: validate_config( REQUIRED_ATTRIBUTES_SCHEMA, required_attributes, diff --git a/synapse/config/database.py b/synapse/config/database.py index c4ca63a1fa..8e9d253820 100644 --- a/synapse/config/database.py +++ b/synapse/config/database.py @@ -22,7 +22,7 @@ import argparse import logging import os -from typing import Any, List +from typing import Any from synapse.config._base import Config, ConfigError from synapse.types import JsonDict @@ -83,7 +83,7 @@ class DatabaseConfig(Config): def __init__(self, *args: Any): super().__init__(*args) - self.databases: List[DatabaseConnectionConfig] = [] + self.databases: list[DatabaseConnectionConfig] = [] def read_config(self, config: JsonDict, **kwargs: Any) -> 
None: # We *experimentally* support specifying multiple databases via the diff --git a/synapse/config/key.py b/synapse/config/key.py index f78ff5114f..3e832b4946 100644 --- a/synapse/config/key.py +++ b/synapse/config/key.py @@ -23,7 +23,7 @@ import hashlib import logging import os -from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional +from typing import TYPE_CHECKING, Any, Iterator, Optional import attr import jsonschema @@ -110,7 +110,7 @@ class TrustedKeyServer: server_name: str # map from key id to key object, or None to disable signature verification. - verify_keys: Optional[Dict[str, VerifyKey]] = None + verify_keys: Optional[dict[str, VerifyKey]] = None class KeyConfig(Config): @@ -250,7 +250,7 @@ def generate_config_section( - server_name: "matrix.org" """ % locals() - def read_signing_keys(self, signing_key_path: str, name: str) -> List[SigningKey]: + def read_signing_keys(self, signing_key_path: str, name: str) -> list[SigningKey]: """Read the signing keys in the given path. 
Args: @@ -280,7 +280,7 @@ def read_signing_keys(self, signing_key_path: str, name: str) -> List[SigningKey def read_old_signing_keys( self, old_signing_keys: Optional[JsonDict] - ) -> Dict[str, "VerifyKeyWithExpiry"]: + ) -> dict[str, "VerifyKeyWithExpiry"]: if old_signing_keys is None: return {} keys = {} @@ -299,7 +299,7 @@ def read_old_signing_keys( ) return keys - def generate_files(self, config: Dict[str, Any], config_dir_path: str) -> None: + def generate_files(self, config: dict[str, Any], config_dir_path: str) -> None: if "signing_key" in config: return @@ -393,7 +393,7 @@ def _perspectives_to_key_servers(config: JsonDict) -> Iterator[JsonDict]: def _parse_key_servers( - key_servers: List[Any], federation_verify_certificates: bool + key_servers: list[Any], federation_verify_certificates: bool ) -> Iterator[TrustedKeyServer]: try: jsonschema.validate(key_servers, TRUSTED_KEY_SERVERS_SCHEMA) @@ -408,7 +408,7 @@ def _parse_key_servers( server_name = server["server_name"] result = TrustedKeyServer(server_name=server_name) - verify_keys: Optional[Dict[str, str]] = server.get("verify_keys") + verify_keys: Optional[dict[str, str]] = server.get("verify_keys") if verify_keys is not None: result.verify_keys = {} for key_id, key_base64 in verify_keys.items(): diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 9dde4c4003..8e355035a9 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -26,7 +26,7 @@ import sys import threading from string import Template -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Optional import yaml from zope.interface import implementer @@ -186,7 +186,7 @@ def add_arguments(parser: argparse.ArgumentParser) -> None: help=argparse.SUPPRESS, ) - def generate_files(self, config: Dict[str, Any], config_dir_path: str) -> None: + def generate_files(self, config: dict[str, Any], config_dir_path: str) -> None: log_config = config.get("log_config") if log_config and not 
os.path.exists(log_config): log_file = self.abspath("homeserver.log") diff --git a/synapse/config/modules.py b/synapse/config/modules.py index 37dc26e130..17319c9e37 100644 --- a/synapse/config/modules.py +++ b/synapse/config/modules.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict, List, Tuple +from typing import Any from synapse.config._base import Config, ConfigError from synapse.types import JsonDict @@ -29,7 +29,7 @@ class ModulesConfig(Config): section = "modules" def read_config(self, config: JsonDict, **kwargs: Any) -> None: - self.loaded_modules: List[Tuple[Any, Dict]] = [] + self.loaded_modules: list[tuple[Any, dict]] = [] configured_modules = config.get("modules") or [] for i, module in enumerate(configured_modules): diff --git a/synapse/config/oembed.py b/synapse/config/oembed.py index 1b6c521087..a4a192302c 100644 --- a/synapse/config/oembed.py +++ b/synapse/config/oembed.py @@ -21,7 +21,7 @@ import importlib.resources as importlib_resources import json import re -from typing import Any, Dict, Iterable, List, Optional, Pattern +from typing import Any, Iterable, Optional, Pattern from urllib import parse as urlparse import attr @@ -37,9 +37,9 @@ class OEmbedEndpointConfig: # The API endpoint to fetch. api_endpoint: str # The patterns to match. - url_patterns: List[Pattern[str]] + url_patterns: list[Pattern[str]] # The supported formats. - formats: Optional[List[str]] + formats: Optional[list[str]] class OembedConfig(Config): @@ -48,10 +48,10 @@ class OembedConfig(Config): section = "oembed" def read_config(self, config: JsonDict, **kwargs: Any) -> None: - oembed_config: Dict[str, Any] = config.get("oembed") or {} + oembed_config: dict[str, Any] = config.get("oembed") or {} # A list of patterns which will be used. 
- self.oembed_patterns: List[OEmbedEndpointConfig] = list( + self.oembed_patterns: list[OEmbedEndpointConfig] = list( self._parse_and_validate_providers(oembed_config) ) @@ -92,7 +92,7 @@ def _parse_and_validate_providers( ) def _parse_and_validate_provider( - self, providers: List[JsonDict], config_path: StrSequence + self, providers: list[JsonDict], config_path: StrSequence ) -> Iterable[OEmbedEndpointConfig]: # Ensure it is the proper form. validate_config( diff --git a/synapse/config/oidc.py b/synapse/config/oidc.py index 3ddf65a3e9..ada89bb8bc 100644 --- a/synapse/config/oidc.py +++ b/synapse/config/oidc.py @@ -21,7 +21,7 @@ # from collections import Counter -from typing import Any, Collection, Iterable, List, Mapping, Optional, Tuple, Type +from typing import Any, Collection, Iterable, Mapping, Optional import attr @@ -213,7 +213,7 @@ def _parse_oidc_provider_configs(config: JsonDict) -> Iterable["OidcProviderConf def _parse_oidc_config_dict( - oidc_config: JsonDict, config_path: Tuple[str, ...] + oidc_config: JsonDict, config_path: tuple[str, ...] ) -> "OidcProviderConfig": """Take the configuration dict and parse it into an OidcProviderConfig @@ -416,7 +416,7 @@ class OidcProviderConfig: # Valid values are 'auto', 'always', and 'never'. pkce_method: str - id_token_signing_alg_values_supported: Optional[List[str]] + id_token_signing_alg_values_supported: Optional[list[str]] """ List of the JWS signing algorithms (`alg` values) that are supported for signing the `id_token`. 
@@ -491,13 +491,13 @@ class OidcProviderConfig: allow_existing_users: bool # the class of the user mapping provider - user_mapping_provider_class: Type + user_mapping_provider_class: type # the config of the user mapping provider user_mapping_provider_config: Any # required attributes to require in userinfo to allow login/registration - attribute_requirements: List[SsoAttributeRequirement] + attribute_requirements: list[SsoAttributeRequirement] # Whether automatic registrations are enabled in the ODIC flow. Defaults to True enable_registration: bool diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py index b2b624aea2..c2894f58dc 100644 --- a/synapse/config/password_auth_providers.py +++ b/synapse/config/password_auth_providers.py @@ -19,7 +19,7 @@ # # -from typing import Any, List, Tuple, Type +from typing import Any from synapse.types import JsonDict from synapse.util.module_loader import load_module @@ -56,7 +56,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: for backwards compatibility. """ - self.password_providers: List[Tuple[Type, Any]] = [] + self.password_providers: list[tuple[type, Any]] = [] providers = [] # We want to be backwards compatible with the old `ldap_config` diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py index b082daa8f7..be2f49f87c 100644 --- a/synapse/config/ratelimiting.py +++ b/synapse/config/ratelimiting.py @@ -19,7 +19,7 @@ # # -from typing import Any, Dict, Optional, cast +from typing import Any, Optional, cast import attr @@ -37,9 +37,9 @@ class RatelimitSettings: @classmethod def parse( cls, - config: Dict[str, Any], + config: dict[str, Any], key: str, - defaults: Optional[Dict[str, float]] = None, + defaults: Optional[dict[str, float]] = None, ) -> "RatelimitSettings": """Parse config[key] as a new-style rate limiter config. @@ -62,7 +62,7 @@ def parse( # By this point we should have hit the rate limiter parameters. 
# We don't actually check this though! - rl_config = cast(Dict[str, float], rl_config) + rl_config = cast(dict[str, float], rl_config) return cls( key=key, diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 283199aa11..c0e7316bc3 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -20,7 +20,7 @@ # # import argparse -from typing import Any, Dict, Optional +from typing import Any, Optional from synapse.api.constants import RoomCreationPreset from synapse.config._base import Config, ConfigError, read_file @@ -266,7 +266,7 @@ def generate_config_section( else: return "" - def generate_files(self, config: Dict[str, Any], config_dir_path: str) -> None: + def generate_files(self, config: dict[str, Any], config_dir_path: str) -> None: # if 'registration_shared_secret_path' is specified, and the target file # does not exist, generate it. registration_shared_secret_path = config.get("registration_shared_secret_path") diff --git a/synapse/config/repository.py b/synapse/config/repository.py index e7d23740f9..221130b0cd 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -21,7 +21,7 @@ import logging import os -from typing import Any, Dict, List, Tuple +from typing import Any import attr @@ -80,8 +80,8 @@ class MediaStorageProviderConfig: def parse_thumbnail_requirements( - thumbnail_sizes: List[JsonDict], -) -> Dict[str, Tuple[ThumbnailRequirement, ...]]: + thumbnail_sizes: list[JsonDict], +) -> dict[str, tuple[ThumbnailRequirement, ...]]: """Takes a list of dictionaries with "width", "height", and "method" keys and creates a map from image media types to the thumbnail size, thumbnailing method, and thumbnail media type to precalculate @@ -92,7 +92,7 @@ def parse_thumbnail_requirements( Returns: Dictionary mapping from media type string to list of ThumbnailRequirement. 
""" - requirements: Dict[str, List[ThumbnailRequirement]] = {} + requirements: dict[str, list[ThumbnailRequirement]] = {} for size in thumbnail_sizes: width = size["width"] height = size["height"] @@ -206,7 +206,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: # # We don't create the storage providers here as not all workers need # them to be started. - self.media_storage_providers: List[tuple] = [] + self.media_storage_providers: list[tuple] = [] for i, provider_config in enumerate(storage_providers): # We special case the module "file_system" so as not to need to @@ -298,7 +298,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.enable_authenticated_media = config.get("enable_authenticated_media", True) - self.media_upload_limits: List[MediaUploadLimit] = [] + self.media_upload_limits: list[MediaUploadLimit] = [] for limit_config in config.get("media_upload_limits", []): time_period_ms = self.parse_duration(limit_config["time_period"]) max_bytes = self.parse_size(limit_config["max_size"]) diff --git a/synapse/config/retention.py b/synapse/config/retention.py index 7e329c7f42..9d34f1e241 100644 --- a/synapse/config/retention.py +++ b/synapse/config/retention.py @@ -20,7 +20,7 @@ # import logging -from typing import Any, List, Optional +from typing import Any, Optional import attr @@ -119,7 +119,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: " greater than 'allowed_lifetime_max'" ) - self.retention_purge_jobs: List[RetentionPurgeJob] = [] + self.retention_purge_jobs: list[RetentionPurgeJob] = [] for purge_job_config in retention_config.get("purge_jobs", []): interval_config = purge_job_config.get("interval") diff --git a/synapse/config/saml2.py b/synapse/config/saml2.py index 9d7ef94507..acba63ee9b 100644 --- a/synapse/config/saml2.py +++ b/synapse/config/saml2.py @@ -20,7 +20,7 @@ # import logging -from typing import Any, List, Set +from typing import Any from synapse.config.sso import 
SsoAttributeRequirement from synapse.types import JsonDict @@ -160,8 +160,11 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: ) # Get the desired saml auth response attributes from the module + # type-ignore: the provider class was already checked for having the method being called + # with the runtime checks above, which mypy is not aware of, and treats as an error + # ever since the typehint of provider class was changed from "typing.Type" to "type" saml2_config_dict = self._default_saml_config_dict( - *self.saml2_user_mapping_provider_class.get_saml_attributes( + *self.saml2_user_mapping_provider_class.get_saml_attributes( # type: ignore[attr-defined] self.saml2_user_mapping_provider_config ) ) @@ -191,7 +194,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: ) def _default_saml_config_dict( - self, required_attributes: Set[str], optional_attributes: Set[str] + self, required_attributes: set[str], optional_attributes: set[str] ) -> JsonDict: """Generate a configuration dictionary with required and optional attributes that will be needed to process new user registration @@ -239,7 +242,7 @@ def _default_saml_config_dict( def _parse_attribute_requirements_def( attribute_requirements: Any, -) -> List[SsoAttributeRequirement]: +) -> list[SsoAttributeRequirement]: validate_config( ATTRIBUTE_REQUIREMENTS_SCHEMA, attribute_requirements, diff --git a/synapse/config/server.py b/synapse/config/server.py index e15bceb296..662ed24a13 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -25,7 +25,7 @@ import os.path import urllib.parse from textwrap import indent -from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, TypedDict, Union +from typing import Any, Iterable, Optional, TypedDict, Union from urllib.request import getproxies_environment import attr @@ -213,7 +213,7 @@ def generate_ip_set( @attr.s(frozen=True) class HttpResourceConfig: - names: List[str] = attr.ib( + names: list[str] = attr.ib( 
factory=list, validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)), ) @@ -228,8 +228,8 @@ class HttpListenerConfig: """Object describing the http-specific parts of the config of a listener""" x_forwarded: bool = False - resources: List[HttpResourceConfig] = attr.Factory(list) - additional_resources: Dict[str, dict] = attr.Factory(dict) + resources: list[HttpResourceConfig] = attr.Factory(list) + additional_resources: dict[str, dict] = attr.Factory(dict) tag: Optional[str] = None request_id_header: Optional[str] = None @@ -239,7 +239,7 @@ class TCPListenerConfig: """Object describing the configuration of a single TCP listener.""" port: int = attr.ib(validator=attr.validators.instance_of(int)) - bind_addresses: List[str] = attr.ib(validator=attr.validators.instance_of(List)) + bind_addresses: list[str] = attr.ib(validator=attr.validators.instance_of(list)) type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES)) tls: bool = False @@ -344,7 +344,7 @@ class ProxyConfig: """ Proxy server to use for HTTPS requests. """ - no_proxy_hosts: Optional[List[str]] + no_proxy_hosts: Optional[list[str]] """ List of hosts, IP addresses, or IP ranges in CIDR format which should not use the proxy. Synapse will directly connect to these hosts. 
@@ -864,11 +864,11 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: ) # Whitelist of domain names that given next_link parameters must have - next_link_domain_whitelist: Optional[List[str]] = config.get( + next_link_domain_whitelist: Optional[list[str]] = config.get( "next_link_domain_whitelist" ) - self.next_link_domain_whitelist: Optional[Set[str]] = None + self.next_link_domain_whitelist: Optional[set[str]] = None if next_link_domain_whitelist is not None: if not isinstance(next_link_domain_whitelist, list): raise ConfigError("'next_link_domain_whitelist' must be a list") @@ -892,7 +892,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: config.get("use_account_validity_in_account_status") or False ) - self.rooms_to_exclude_from_sync: List[str] = ( + self.rooms_to_exclude_from_sync: list[str] = ( config.get("exclude_rooms_from_sync") or [] ) @@ -927,7 +927,7 @@ def generate_config_section( data_dir_path: str, server_name: str, open_private_ports: bool, - listeners: Optional[List[dict]], + listeners: Optional[list[dict]], **kwargs: Any, ) -> str: _, bind_port = parse_and_validate_server_name(server_name) @@ -1028,7 +1028,7 @@ def add_arguments(parser: argparse.ArgumentParser) -> None: help="Turn on the twisted telnet manhole service on the given port.", ) - def read_gc_intervals(self, durations: Any) -> Optional[Tuple[float, float, float]]: + def read_gc_intervals(self, durations: Any) -> Optional[tuple[float, float, float]]: """Reads the three durations for the GC min interval option, returning seconds.""" if durations is None: return None @@ -1048,7 +1048,7 @@ def read_gc_intervals(self, durations: Any) -> Optional[Tuple[float, float, floa def is_threepid_reserved( - reserved_threepids: List[JsonDict], threepid: JsonDict + reserved_threepids: list[JsonDict], threepid: JsonDict ) -> bool: """Check the threepid against the reserved threepid config Args: @@ -1066,8 +1066,8 @@ def is_threepid_reserved( def read_gc_thresholds( - 
thresholds: Optional[List[Any]], -) -> Optional[Tuple[int, int, int]]: + thresholds: Optional[list[Any]], +) -> Optional[tuple[int, int, int]]: """Reads the three integer thresholds for garbage collection. Ensures that the thresholds are integers if thresholds are supplied. """ diff --git a/synapse/config/spam_checker.py b/synapse/config/spam_checker.py index 014c55d702..02d7cee88f 100644 --- a/synapse/config/spam_checker.py +++ b/synapse/config/spam_checker.py @@ -19,7 +19,7 @@ # import logging -from typing import Any, Dict, List, Tuple +from typing import Any from synapse.config import ConfigError from synapse.types import JsonDict @@ -41,7 +41,7 @@ class SpamCheckerConfig(Config): section = "spamchecker" def read_config(self, config: JsonDict, **kwargs: Any) -> None: - self.spam_checkers: List[Tuple[Any, Dict]] = [] + self.spam_checkers: list[tuple[Any, dict]] = [] spam_checkers = config.get("spam_checker") or [] if isinstance(spam_checkers, dict): diff --git a/synapse/config/sso.py b/synapse/config/sso.py index cf27a7ee13..facb418510 100644 --- a/synapse/config/sso.py +++ b/synapse/config/sso.py @@ -19,7 +19,7 @@ # # import logging -from typing import Any, Dict, List, Optional +from typing import Any, Optional import attr @@ -45,7 +45,7 @@ class SsoAttributeRequirement: attribute: str # If neither `value` nor `one_of` is given, the attribute must simply exist. 
value: Optional[str] = None - one_of: Optional[List[str]] = None + one_of: Optional[list[str]] = None JSON_SCHEMA = { "type": "object", @@ -64,7 +64,7 @@ class SSOConfig(Config): section = "sso" def read_config(self, config: JsonDict, **kwargs: Any) -> None: - sso_config: Dict[str, Any] = config.get("sso") or {} + sso_config: dict[str, Any] = config.get("sso") or {} # The sso-specific template_dir self.sso_template_dir = sso_config.get("template_dir") diff --git a/synapse/config/tls.py b/synapse/config/tls.py index a48d81fdc3..d03a77d9d2 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -20,7 +20,7 @@ # import logging -from typing import Any, List, Optional, Pattern +from typing import Any, Optional, Pattern from matrix_common.regex import glob_to_regex @@ -84,7 +84,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: fed_whitelist_entries = [] # Support globs (*) in whitelist values - self.federation_certificate_verification_whitelist: List[Pattern] = [] + self.federation_certificate_verification_whitelist: list[Pattern] = [] for entry in fed_whitelist_entries: try: entry_regex = glob_to_regex(entry.encode("ascii").decode("ascii")) diff --git a/synapse/config/tracer.py b/synapse/config/tracer.py index d31fd41082..ccfeed4d07 100644 --- a/synapse/config/tracer.py +++ b/synapse/config/tracer.py @@ -19,7 +19,7 @@ # # -from typing import Any, List, Set +from typing import Any from synapse.types import JsonDict from synapse.util.check_dependencies import check_requirements @@ -42,7 +42,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: {"sampler": {"type": "const", "param": 1}, "logging": False}, ) - self.force_tracing_for_users: Set[str] = set() + self.force_tracing_for_users: set[str] = set() if not self.opentracer_enabled: return @@ -51,7 +51,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: # The tracer is enabled so sanitize the config - self.opentracer_whitelist: List[str] = 
opentracing_config.get( + self.opentracer_whitelist: list[str] = opentracing_config.get( "homeserver_whitelist", [] ) if not isinstance(self.opentracer_whitelist, list): diff --git a/synapse/config/user_types.py b/synapse/config/user_types.py index 2d9c9f7afb..dd64425d6c 100644 --- a/synapse/config/user_types.py +++ b/synapse/config/user_types.py @@ -12,7 +12,7 @@ # . # -from typing import Any, List, Optional +from typing import Any, Optional from synapse.api.constants import UserTypes from synapse.types import JsonDict @@ -29,9 +29,9 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.default_user_type: Optional[str] = user_types.get( "default_user_type", None ) - self.extra_user_types: List[str] = user_types.get("extra_user_types", []) + self.extra_user_types: list[str] = user_types.get("extra_user_types", []) - all_user_types: List[str] = [] + all_user_types: list[str] = [] all_user_types.extend(UserTypes.ALL_BUILTIN_USER_TYPES) all_user_types.extend(self.extra_user_types) diff --git a/synapse/config/workers.py b/synapse/config/workers.py index 825ba78482..da7148b3a1 100644 --- a/synapse/config/workers.py +++ b/synapse/config/workers.py @@ -22,7 +22,7 @@ import argparse import logging -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union import attr @@ -79,7 +79,7 @@ logger = logging.getLogger(__name__) -def _instance_to_list_converter(obj: Union[str, List[str]]) -> List[str]: +def _instance_to_list_converter(obj: Union[str, list[str]]) -> list[str]: """Helper for allowing parsing a string or list of strings to a config option expecting a list of strings. """ @@ -142,39 +142,39 @@ class WriterLocations: device_lists: The instances that write to the device list stream. 
""" - events: List[str] = attr.ib( + events: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - typing: List[str] = attr.ib( + typing: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - to_device: List[str] = attr.ib( + to_device: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - account_data: List[str] = attr.ib( + account_data: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - receipts: List[str] = attr.ib( + receipts: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - presence: List[str] = attr.ib( + presence: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - push_rules: List[str] = attr.ib( + push_rules: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - device_lists: List[str] = attr.ib( + device_lists: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - thread_subscriptions: List[str] = attr.ib( + thread_subscriptions: list[str] = attr.ib( default=["master"], converter=_instance_to_list_converter, ) @@ -190,8 +190,8 @@ class OutboundFederationRestrictedTo: locations: list of instance locations to connect to proxy via. """ - instances: Optional[List[str]] - locations: List[InstanceLocationConfig] = attr.Factory(list) + instances: Optional[list[str]] + locations: list[InstanceLocationConfig] = attr.Factory(list) def __contains__(self, instance: str) -> bool: # It feels a bit dirty to return `True` if `instances` is `None`, but it makes @@ -295,7 +295,7 @@ def read_config( # A map from instance name to host/port of their HTTP replication endpoint. # Check if the main process is declared. 
The main process itself doesn't need # this data as it would never have to talk to itself. - instance_map: Dict[str, Any] = config.get("instance_map", {}) + instance_map: dict[str, Any] = config.get("instance_map", {}) if self.instance_name is not MAIN_PROCESS_INSTANCE_NAME: # TODO: The next 3 condition blocks can be deleted after some time has @@ -342,7 +342,7 @@ def read_config( ) # type-ignore: the expression `Union[A, B]` is not a Type[Union[A, B]] currently - self.instance_map: Dict[str, InstanceLocationConfig] = ( + self.instance_map: dict[str, InstanceLocationConfig] = ( parse_and_validate_mapping( instance_map, InstanceLocationConfig, # type: ignore[arg-type] @@ -481,7 +481,7 @@ def read_config( def _should_this_worker_perform_duty( self, - config: Dict[str, Any], + config: dict[str, Any], legacy_master_option_name: str, legacy_worker_app_name: str, new_option_name: str, @@ -574,11 +574,11 @@ def _should_this_worker_perform_duty( def _worker_names_performing_this_duty( self, - config: Dict[str, Any], + config: dict[str, Any], legacy_option_name: str, legacy_app_name: str, modern_instance_list_name: str, - ) -> List[str]: + ) -> list[str]: """ Retrieves the names of the workers handling a given duty, by either legacy option or instance list. 
diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index c36398cec0..d13d5d04c3 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -23,7 +23,7 @@ import collections.abc import hashlib import logging -from typing import Any, Callable, Dict, Tuple +from typing import Any, Callable from canonicaljson import encode_canonical_json from signedjson.sign import sign_json @@ -80,8 +80,8 @@ def check_event_content_hash( def compute_content_hash( - event_dict: Dict[str, Any], hash_algorithm: Hasher -) -> Tuple[str, bytes]: + event_dict: dict[str, Any], hash_algorithm: Hasher +) -> tuple[str, bytes]: """Compute the content hash of an event, which is the hash of the unredacted event. @@ -112,7 +112,7 @@ def compute_content_hash( def compute_event_reference_hash( event: EventBase, hash_algorithm: Hasher = hashlib.sha256 -) -> Tuple[str, bytes]: +) -> tuple[str, bytes]: """Computes the event reference hash. This is the hash of the redacted event. @@ -139,7 +139,7 @@ def compute_event_signature( event_dict: JsonDict, signature_name: str, signing_key: SigningKey, -) -> Dict[str, Dict[str, str]]: +) -> dict[str, dict[str, str]]: """Compute the signature of the event for the given name and key. 
Args: diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 258bc29357..24a693fdb1 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -21,7 +21,7 @@ import abc import logging -from typing import TYPE_CHECKING, Callable, Dict, Iterable, List, Optional, Tuple +from typing import TYPE_CHECKING, Callable, Iterable, Optional import attr from signedjson.key import ( @@ -82,7 +82,7 @@ class VerifyJsonRequest: server_name: str get_json_object: Callable[[], JsonDict] minimum_valid_until_ts: int - key_ids: List[str] + key_ids: list[str] @staticmethod def from_json_object( @@ -141,7 +141,7 @@ class _FetchKeyRequest: server_name: str minimum_valid_until_ts: int - key_ids: List[str] + key_ids: list[str] class Keyring: @@ -156,7 +156,7 @@ def __init__( if key_fetchers is None: # Always fetch keys from the database. - mutable_key_fetchers: List[KeyFetcher] = [StoreKeyFetcher(hs)] + mutable_key_fetchers: list[KeyFetcher] = [StoreKeyFetcher(hs)] # Fetch keys from configured trusted key servers, if any exist. key_servers = hs.config.key.key_servers if key_servers: @@ -169,7 +169,7 @@ def __init__( self._key_fetchers = key_fetchers self._fetch_keys_queue: BatchingQueue[ - _FetchKeyRequest, Dict[str, Dict[str, FetchKeyResult]] + _FetchKeyRequest, dict[str, dict[str, FetchKeyResult]] ] = BatchingQueue( name="keyring_server", hs=hs, @@ -182,7 +182,7 @@ def __init__( # build a FetchKeyResult for each of our own keys, to shortcircuit the # fetcher. 
- self._local_verify_keys: Dict[str, FetchKeyResult] = {} + self._local_verify_keys: dict[str, FetchKeyResult] = {} for key_id, key in hs.config.key.old_signing_keys.items(): self._local_verify_keys[key_id] = FetchKeyResult( verify_key=key, valid_until_ts=key.expired @@ -229,8 +229,8 @@ async def verify_json_for_server( return await self.process_request(request) def verify_json_objects_for_server( - self, server_and_json: Iterable[Tuple[str, dict, int]] - ) -> List["defer.Deferred[None]"]: + self, server_and_json: Iterable[tuple[str, dict, int]] + ) -> list["defer.Deferred[None]"]: """Bulk verifies signatures of json objects, bulk fetching keys as necessary. @@ -286,7 +286,7 @@ async def process_request(self, verify_request: VerifyJsonRequest) -> None: Codes.UNAUTHORIZED, ) - found_keys: Dict[str, FetchKeyResult] = {} + found_keys: dict[str, FetchKeyResult] = {} # If we are the originating server, short-circuit the key-fetch for any keys # we already have @@ -368,8 +368,8 @@ async def process_json( ) async def _inner_fetch_key_requests( - self, requests: List[_FetchKeyRequest] - ) -> Dict[str, Dict[str, FetchKeyResult]]: + self, requests: list[_FetchKeyRequest] + ) -> dict[str, dict[str, FetchKeyResult]]: """Processing function for the queue of `_FetchKeyRequest`. Takes a list of key fetch requests, de-duplicates them and then carries out @@ -387,7 +387,7 @@ async def _inner_fetch_key_requests( # First we need to deduplicate requests for the same key. We do this by # taking the *maximum* requested `minimum_valid_until_ts` for each pair # of server name/key ID. - server_to_key_to_ts: Dict[str, Dict[str, int]] = {} + server_to_key_to_ts: dict[str, dict[str, int]] = {} for request in requests: by_server = server_to_key_to_ts.setdefault(request.server_name, {}) for key_id in request.key_ids: @@ -412,7 +412,7 @@ async def _inner_fetch_key_requests( # We now convert the returned list of results into a map from server # name to key ID to FetchKeyResult, to return. 
- to_return: Dict[str, Dict[str, FetchKeyResult]] = {} + to_return: dict[str, dict[str, FetchKeyResult]] = {} for request, results in zip(deduped_requests, results_per_request): to_return_by_server = to_return.setdefault(request.server_name, {}) for key_id, key_result in results.items(): @@ -424,7 +424,7 @@ async def _inner_fetch_key_requests( async def _inner_fetch_key_request( self, verify_request: _FetchKeyRequest - ) -> Dict[str, FetchKeyResult]: + ) -> dict[str, FetchKeyResult]: """Attempt to fetch the given key by calling each key fetcher one by one. If a key is found, check whether its `valid_until_ts` attribute satisfies the @@ -445,7 +445,7 @@ async def _inner_fetch_key_request( """ logger.debug("Starting fetch for %s", verify_request) - found_keys: Dict[str, FetchKeyResult] = {} + found_keys: dict[str, FetchKeyResult] = {} missing_key_ids = set(verify_request.key_ids) for fetcher in self._key_fetchers: @@ -499,8 +499,8 @@ def shutdown(self) -> None: self._queue.shutdown() async def get_keys( - self, server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + self, server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: results = await self._queue.add_to_queue( _FetchKeyRequest( server_name=server_name, @@ -512,8 +512,8 @@ async def get_keys( @abc.abstractmethod async def _fetch_keys( - self, keys_to_fetch: List[_FetchKeyRequest] - ) -> Dict[str, Dict[str, FetchKeyResult]]: + self, keys_to_fetch: list[_FetchKeyRequest] + ) -> dict[str, dict[str, FetchKeyResult]]: pass @@ -526,8 +526,8 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main async def _fetch_keys( - self, keys_to_fetch: List[_FetchKeyRequest] - ) -> Dict[str, Dict[str, FetchKeyResult]]: + self, keys_to_fetch: list[_FetchKeyRequest] + ) -> dict[str, dict[str, FetchKeyResult]]: key_ids_to_fetch = ( (queue_value.server_name, key_id) for queue_value in keys_to_fetch @@ -535,7 +535,7 @@ 
async def _fetch_keys( ) res = await self.store.get_server_keys_json(key_ids_to_fetch) - keys: Dict[str, Dict[str, FetchKeyResult]] = {} + keys: dict[str, dict[str, FetchKeyResult]] = {} for (server_name, key_id), key in res.items(): keys.setdefault(server_name, {})[key_id] = key return keys @@ -549,7 +549,7 @@ def __init__(self, hs: "HomeServer"): async def process_v2_response( self, from_server: str, response_json: JsonDict, time_added_ms: int - ) -> Dict[str, FetchKeyResult]: + ) -> dict[str, FetchKeyResult]: """Parse a 'Server Keys' structure from the result of a /key request This is used to parse either the entirety of the response from @@ -640,11 +640,11 @@ def __init__(self, hs: "HomeServer"): self.key_servers = hs.config.key.key_servers async def _fetch_keys( - self, keys_to_fetch: List[_FetchKeyRequest] - ) -> Dict[str, Dict[str, FetchKeyResult]]: + self, keys_to_fetch: list[_FetchKeyRequest] + ) -> dict[str, dict[str, FetchKeyResult]]: """see KeyFetcher._fetch_keys""" - async def get_key(key_server: TrustedKeyServer) -> Dict: + async def get_key(key_server: TrustedKeyServer) -> dict: try: return await self.get_server_verify_key_v2_indirect( keys_to_fetch, key_server @@ -670,7 +670,7 @@ async def get_key(key_server: TrustedKeyServer) -> Dict: ).addErrback(unwrapFirstError) ) - union_of_keys: Dict[str, Dict[str, FetchKeyResult]] = {} + union_of_keys: dict[str, dict[str, FetchKeyResult]] = {} for result in results: for server_name, keys in result.items(): union_of_keys.setdefault(server_name, {}).update(keys) @@ -678,8 +678,8 @@ async def get_key(key_server: TrustedKeyServer) -> Dict: return union_of_keys async def get_server_verify_key_v2_indirect( - self, keys_to_fetch: List[_FetchKeyRequest], key_server: TrustedKeyServer - ) -> Dict[str, Dict[str, FetchKeyResult]]: + self, keys_to_fetch: list[_FetchKeyRequest], key_server: TrustedKeyServer + ) -> dict[str, dict[str, FetchKeyResult]]: """ Args: keys_to_fetch: @@ -731,8 +731,8 @@ async def 
get_server_verify_key_v2_indirect( "Response from notary server %s: %s", perspective_name, query_response ) - keys: Dict[str, Dict[str, FetchKeyResult]] = {} - added_keys: Dict[Tuple[str, str], FetchKeyResult] = {} + keys: dict[str, dict[str, FetchKeyResult]] = {} + added_keys: dict[tuple[str, str], FetchKeyResult] = {} time_now_ms = self.clock.time_msec() @@ -836,8 +836,8 @@ def __init__(self, hs: "HomeServer"): self.client = hs.get_federation_http_client() async def get_keys( - self, server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + self, server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: results = await self._queue.add_to_queue( _FetchKeyRequest( server_name=server_name, @@ -849,8 +849,8 @@ async def get_keys( return results.get(server_name, {}) async def _fetch_keys( - self, keys_to_fetch: List[_FetchKeyRequest] - ) -> Dict[str, Dict[str, FetchKeyResult]]: + self, keys_to_fetch: list[_FetchKeyRequest] + ) -> dict[str, dict[str, FetchKeyResult]]: """ Args: keys_to_fetch: @@ -879,7 +879,7 @@ async def get_keys(server_name: str) -> None: async def get_server_verify_keys_v2_direct( self, server_name: str - ) -> Dict[str, FetchKeyResult]: + ) -> dict[str, FetchKeyResult]: """ Args: diff --git a/synapse/event_auth.py b/synapse/event_auth.py index 64de3f7ef8..5d927a925a 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -26,15 +26,11 @@ from typing import ( Any, ChainMap, - Dict, Iterable, - List, Mapping, MutableMapping, Optional, Protocol, - Set, - Tuple, Union, cast, ) @@ -91,7 +87,7 @@ async def get_events( redact_behaviour: EventRedactBehaviour, get_prev_content: bool = False, allow_rejected: bool = False, - ) -> Dict[str, "EventBase"]: ... + ) -> dict[str, "EventBase"]: ... 
def validate_event_for_room_version(event: "EventBase") -> None: @@ -993,7 +989,7 @@ def _check_power_levels( user_level = get_user_power_level(event.user_id, auth_events) # Check other levels: - levels_to_check: List[Tuple[str, Optional[str]]] = [ + levels_to_check: list[tuple[str, Optional[str]]] = [ ("users_default", None), ("events_default", None), ("state_default", None), @@ -1191,7 +1187,7 @@ def _verify_third_party_invite( return False -def get_public_keys(invite_event: "EventBase") -> List[Dict[str, Any]]: +def get_public_keys(invite_event: "EventBase") -> list[dict[str, Any]]: public_keys = [] if "public_key" in invite_event.content: o = {"public_key": invite_event.content["public_key"]} @@ -1204,7 +1200,7 @@ def get_public_keys(invite_event: "EventBase") -> List[Dict[str, Any]]: def auth_types_for_event( room_version: RoomVersion, event: Union["EventBase", "EventBuilder"] -) -> Set[Tuple[str, str]]: +) -> set[tuple[str, str]]: """Given an event, return a list of (EventType, StateKey) that may be needed to auth the event. The returned list may be a superset of what would actually be required depending on the full state of the room. diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index db38754280..a353076e0d 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -25,14 +25,10 @@ from typing import ( TYPE_CHECKING, Any, - Dict, Generic, Iterable, - List, Literal, Optional, - Tuple, - Type, TypeVar, Union, overload, @@ -94,20 +90,20 @@ def __init__(self, key: str): def __get__( self, instance: Literal[None], - owner: Optional[Type[_DictPropertyInstance]] = None, + owner: Optional[type[_DictPropertyInstance]] = None, ) -> "DictProperty": ... @overload def __get__( self, instance: _DictPropertyInstance, - owner: Optional[Type[_DictPropertyInstance]] = None, + owner: Optional[type[_DictPropertyInstance]] = None, ) -> T: ... 
def __get__( self, instance: Optional[_DictPropertyInstance], - owner: Optional[Type[_DictPropertyInstance]] = None, + owner: Optional[type[_DictPropertyInstance]] = None, ) -> Union[T, "DictProperty"]: # if the property is accessed as a class property rather than an instance # property, return the property itself rather than the value @@ -160,20 +156,20 @@ def __init__(self, key: str, default: T): def __get__( self, instance: Literal[None], - owner: Optional[Type[_DictPropertyInstance]] = None, + owner: Optional[type[_DictPropertyInstance]] = None, ) -> "DefaultDictProperty": ... @overload def __get__( self, instance: _DictPropertyInstance, - owner: Optional[Type[_DictPropertyInstance]] = None, + owner: Optional[type[_DictPropertyInstance]] = None, ) -> T: ... def __get__( self, instance: Optional[_DictPropertyInstance], - owner: Optional[Type[_DictPropertyInstance]] = None, + owner: Optional[type[_DictPropertyInstance]] = None, ) -> Union[T, "DefaultDictProperty"]: if instance is None: return self @@ -192,7 +188,7 @@ def __init__( self, event_dict: JsonDict, room_version: RoomVersion, - signatures: Dict[str, Dict[str, str]], + signatures: dict[str, dict[str, str]], unsigned: JsonDict, internal_metadata_dict: JsonDict, rejected_reason: Optional[str], @@ -210,7 +206,7 @@ def __init__( depth: DictProperty[int] = DictProperty("depth") content: DictProperty[JsonDict] = DictProperty("content") - hashes: DictProperty[Dict[str, str]] = DictProperty("hashes") + hashes: DictProperty[dict[str, str]] = DictProperty("hashes") origin_server_ts: DictProperty[int] = DictProperty("origin_server_ts") sender: DictProperty[str] = DictProperty("sender") # TODO state_key should be Optional[str]. 
This is generally asserted in Synapse @@ -293,13 +289,13 @@ def __getitem__(self, field: str) -> Optional[Any]: def __contains__(self, field: str) -> bool: return field in self._dict - def items(self) -> List[Tuple[str, Optional[Any]]]: + def items(self) -> list[tuple[str, Optional[Any]]]: return list(self._dict.items()) def keys(self) -> Iterable[str]: return self._dict.keys() - def prev_event_ids(self) -> List[str]: + def prev_event_ids(self) -> list[str]: """Returns the list of prev event IDs. The order matches the order specified in the event, though there is no meaning to it. @@ -457,7 +453,7 @@ def event_id(self) -> str: def room_id(self) -> str: return self._dict["room_id"] - def prev_event_ids(self) -> List[str]: + def prev_event_ids(self) -> list[str]: """Returns the list of prev event IDs. The order matches the order specified in the event, though there is no meaning to it. @@ -558,7 +554,7 @@ def auth_event_ids(self) -> StrCollection: def _event_type_from_format_version( format_version: int, -) -> Type[Union[FrozenEvent, FrozenEventV2, FrozenEventV3]]: +) -> type[Union[FrozenEvent, FrozenEventV2, FrozenEventV3]]: """Returns the python type to use to construct an Event object for the given event format version. 
@@ -669,4 +665,4 @@ class StrippedStateEvent: type: str state_key: str sender: str - content: Dict[str, Any] + content: dict[str, Any] diff --git a/synapse/events/auto_accept_invites.py b/synapse/events/auto_accept_invites.py index 9e17edd227..4c59f0dffe 100644 --- a/synapse/events/auto_accept_invites.py +++ b/synapse/events/auto_accept_invites.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import Any, Dict, Tuple +from typing import Any from synapse.api.constants import AccountDataTypes, EventTypes, Membership from synapse.api.errors import SynapseError @@ -146,7 +146,7 @@ async def _mark_room_as_direct_message( # Be careful: we convert the outer frozendict into a dict here, # but the contents of the dict are still frozen (tuples in lieu of lists, # etc.) - dm_map: Dict[str, Tuple[str, ...]] = dict( + dm_map: dict[str, tuple[str, ...]] = dict( await self._api.account_data_manager.get_global( user_id, AccountDataTypes.DIRECT ) diff --git a/synapse/events/builder.py b/synapse/events/builder.py index 1c9f78c7ca..a57303c999 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Optional, Union import attr from signedjson.types import SigningKey @@ -125,8 +125,8 @@ def is_mine_id(self, user_id: str) -> bool: async def build( self, - prev_event_ids: List[str], - auth_event_ids: Optional[List[str]], + prev_event_ids: list[str], + auth_event_ids: Optional[list[str]], depth: Optional[int] = None, ) -> EventBase: """Transform into a fully signed and hashed event @@ -205,8 +205,8 @@ async def build( format_version = self.room_version.event_format # The types of auth/prev events changes between event versions. 
- prev_events: Union[StrCollection, List[Tuple[str, Dict[str, str]]]] - auth_events: Union[List[str], List[Tuple[str, Dict[str, str]]]] + prev_events: Union[StrCollection, list[tuple[str, dict[str, str]]]] + auth_events: Union[list[str], list[tuple[str, dict[str, str]]]] if format_version == EventFormatVersions.ROOM_V1_V2: auth_events = await self._store.add_event_hashes(auth_event_ids) prev_events = await self._store.add_event_hashes(prev_event_ids) @@ -228,7 +228,7 @@ async def build( # the db) depth = min(depth, MAX_DEPTH) - event_dict: Dict[str, Any] = { + event_dict: dict[str, Any] = { "auth_events": auth_events, "prev_events": prev_events, "type": self.type, diff --git a/synapse/events/presence_router.py b/synapse/events/presence_router.py index 9713b141bc..39dd7ee2b3 100644 --- a/synapse/events/presence_router.py +++ b/synapse/events/presence_router.py @@ -24,11 +24,8 @@ Any, Awaitable, Callable, - Dict, Iterable, - List, Optional, - Set, TypeVar, Union, ) @@ -44,10 +41,10 @@ from synapse.server import HomeServer GET_USERS_FOR_STATES_CALLBACK = Callable[ - [Iterable[UserPresenceState]], Awaitable[Dict[str, Set[UserPresenceState]]] + [Iterable[UserPresenceState]], Awaitable[dict[str, set[UserPresenceState]]] ] # This must either return a set of strings or the constant PresenceRouter.ALL_USERS. -GET_INTERESTED_USERS_CALLBACK = Callable[[str], Awaitable[Union[Set[str], str]]] +GET_INTERESTED_USERS_CALLBACK = Callable[[str], Awaitable[Union[set[str], str]]] logger = logging.getLogger(__name__) @@ -98,7 +95,7 @@ def run(*args: P.args, **kwargs: P.kwargs) -> Awaitable[R]: return run # Register the hooks through the module API. 
- hooks: Dict[str, Optional[Callable[..., Any]]] = { + hooks: dict[str, Optional[Callable[..., Any]]] = { hook: async_wrapper(getattr(presence_router, hook, None)) for hook in presence_router_methods } @@ -116,8 +113,8 @@ class PresenceRouter: def __init__(self, hs: "HomeServer"): # Initially there are no callbacks - self._get_users_for_states_callbacks: List[GET_USERS_FOR_STATES_CALLBACK] = [] - self._get_interested_users_callbacks: List[GET_INTERESTED_USERS_CALLBACK] = [] + self._get_users_for_states_callbacks: list[GET_USERS_FOR_STATES_CALLBACK] = [] + self._get_interested_users_callbacks: list[GET_INTERESTED_USERS_CALLBACK] = [] def register_presence_router_callbacks( self, @@ -143,7 +140,7 @@ def register_presence_router_callbacks( async def get_users_for_states( self, state_updates: Iterable[UserPresenceState], - ) -> Dict[str, Set[UserPresenceState]]: + ) -> dict[str, set[UserPresenceState]]: """ Given an iterable of user presence updates, determine where each one needs to go. @@ -161,7 +158,7 @@ async def get_users_for_states( # Don't include any extra destinations for presence updates return {} - users_for_states: Dict[str, Set[UserPresenceState]] = {} + users_for_states: dict[str, set[UserPresenceState]] = {} # run all the callbacks for get_users_for_states and combine the results for callback in self._get_users_for_states_callbacks: try: @@ -174,7 +171,7 @@ async def get_users_for_states( logger.warning("Failed to run module API callback %s: %s", callback, e) continue - if not isinstance(result, Dict): + if not isinstance(result, dict): logger.warning( "Wrong type returned by module API callback %s: %s, expected Dict", callback, @@ -183,7 +180,7 @@ async def get_users_for_states( continue for key, new_entries in result.items(): - if not isinstance(new_entries, Set): + if not isinstance(new_entries, set): logger.warning( "Wrong type returned by module API callback %s: %s, expected Set", callback, @@ -194,7 +191,7 @@ async def get_users_for_states( return 
users_for_states - async def get_interested_users(self, user_id: str) -> Union[Set[str], str]: + async def get_interested_users(self, user_id: str) -> Union[set[str], str]: """ Retrieve a list of users that `user_id` is interested in receiving the presence of. This will be in addition to those they share a room with. @@ -234,7 +231,7 @@ async def get_interested_users(self, user_id: str) -> Union[Set[str], str]: if result == PresenceRouter.ALL_USERS: return PresenceRouter.ALL_USERS - if not isinstance(result, Set): + if not isinstance(result, set): logger.warning( "Wrong type returned by module API callback %s: %s, expected set", callback, diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py index 63551143d8..764d31ee66 100644 --- a/synapse/events/snapshot.py +++ b/synapse/events/snapshot.py @@ -19,7 +19,7 @@ # # from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional import attr from immutabledict import immutabledict @@ -133,7 +133,7 @@ class EventContext(UnpersistedEventContextBase): """ _storage: "StorageControllers" - state_group_deltas: Dict[Tuple[int, int], StateMap[str]] + state_group_deltas: dict[tuple[int, int], StateMap[str]] rejected: Optional[str] = None _state_group: Optional[int] = None state_group_before_event: Optional[int] = None @@ -149,7 +149,7 @@ def with_state( state_group_before_event: Optional[int], state_delta_due_to_event: Optional[StateMap[str]], partial_state: bool, - state_group_deltas: Dict[Tuple[int, int], StateMap[str]], + state_group_deltas: dict[tuple[int, int], StateMap[str]], ) -> "EventContext": return EventContext( storage=storage, @@ -306,7 +306,7 @@ async def get_prev_state_ids( ) -EventPersistencePair = Tuple[EventBase, EventContext] +EventPersistencePair = tuple[EventBase, EventContext] """ The combination of an event to be persisted and its context. 
""" @@ -365,11 +365,11 @@ class UnpersistedEventContext(UnpersistedEventContextBase): @classmethod async def batch_persist_unpersisted_contexts( cls, - events_and_context: List[Tuple[EventBase, "UnpersistedEventContextBase"]], + events_and_context: list[tuple[EventBase, "UnpersistedEventContextBase"]], room_id: str, last_known_state_group: int, datastore: "StateGroupDataStore", - ) -> List[EventPersistencePair]: + ) -> list[EventPersistencePair]: """ Takes a list of events and their associated unpersisted contexts and persists the unpersisted contexts, returning a list of events and persisted contexts. @@ -472,7 +472,7 @@ async def persist(self, event: EventBase) -> EventContext: partial_state=self.partial_state, ) - def _build_state_group_deltas(self) -> Dict[Tuple[int, int], StateMap]: + def _build_state_group_deltas(self) -> dict[tuple[int, int], StateMap]: """ Collect deltas between the state groups associated with this context """ @@ -510,8 +510,8 @@ def _build_state_group_deltas(self) -> Dict[Tuple[int, int], StateMap]: def _encode_state_group_delta( - state_group_delta: Dict[Tuple[int, int], StateMap[str]], -) -> List[Tuple[int, int, Optional[List[Tuple[str, str, str]]]]]: + state_group_delta: dict[tuple[int, int], StateMap[str]], +) -> list[tuple[int, int, Optional[list[tuple[str, str, str]]]]]: if not state_group_delta: return [] @@ -523,8 +523,8 @@ def _encode_state_group_delta( def _decode_state_group_delta( - input: List[Tuple[int, int, List[Tuple[str, str, str]]]], -) -> Dict[Tuple[int, int], StateMap[str]]: + input: list[tuple[int, int, list[tuple[str, str, str]]]], +) -> dict[tuple[int, int], StateMap[str]]: if not input: return {} @@ -539,7 +539,7 @@ def _decode_state_group_delta( def _encode_state_dict( state_dict: Optional[StateMap[str]], -) -> Optional[List[Tuple[str, str, str]]]: +) -> Optional[list[tuple[str, str, str]]]: """Since dicts of (type, state_key) -> event_id cannot be serialized in JSON we need to convert them to a form that can. 
""" @@ -550,7 +550,7 @@ def _encode_state_dict( def _decode_state_dict( - input: Optional[List[Tuple[str, str, str]]], + input: Optional[list[tuple[str, str, str]]], ) -> Optional[StateMap[str]]: """Decodes a state dict encoded using `_encode_state_dict` above""" if input is None: diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 942072cf84..9fa251abd8 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -27,8 +27,6 @@ Awaitable, Callable, Collection, - Dict, - List, Mapping, Match, MutableMapping, @@ -239,7 +237,7 @@ def add_fields(*fields: str) -> None: return allowed_fields -def _copy_field(src: JsonDict, dst: JsonDict, field: List[str]) -> None: +def _copy_field(src: JsonDict, dst: JsonDict, field: list[str]) -> None: """Copy the field in 'src' to 'dst'. For example, if src={"foo":{"bar":5}} and dst={}, and field=["foo","bar"] @@ -292,7 +290,7 @@ def _escape_slash(m: Match[str]) -> str: return m.group(0) -def _split_field(field: str) -> List[str]: +def _split_field(field: str) -> list[str]: """ Splits strings on unescaped dots and removes escaping. @@ -333,7 +331,7 @@ def _split_field(field: str) -> List[str]: return result -def only_fields(dictionary: JsonDict, fields: List[str]) -> JsonDict: +def only_fields(dictionary: JsonDict, fields: list[str]) -> JsonDict: """Return a new dict with only the fields in 'dictionary' which are present in 'fields'. @@ -419,7 +417,7 @@ class SerializeEventConfig: # the transaction_id in the unsigned section of the event. requester: Optional[Requester] = None # List of event fields to include. If empty, all fields will be returned. - only_event_fields: Optional[List[str]] = None + only_event_fields: Optional[list[str]] = None # Some events can have stripped room state stored in the `unsigned` field. # This is required for invite and knock functionality. If this option is # False, that state will be removed from the event before it is returned. 
@@ -573,7 +571,7 @@ class EventClientSerializer: def __init__(self, hs: "HomeServer") -> None: self._store = hs.get_datastores().main self._auth = hs.get_auth() - self._add_extra_fields_to_unsigned_client_event_callbacks: List[ + self._add_extra_fields_to_unsigned_client_event_callbacks: list[ ADD_EXTRA_FIELDS_TO_UNSIGNED_CLIENT_EVENT_CALLBACK ] = [] @@ -583,7 +581,7 @@ async def serialize_event( time_now: int, *, config: SerializeEventConfig = _DEFAULT_SERIALIZE_EVENT_CONFIG, - bundle_aggregations: Optional[Dict[str, "BundledAggregations"]] = None, + bundle_aggregations: Optional[dict[str, "BundledAggregations"]] = None, ) -> JsonDict: """Serializes a single event. @@ -641,7 +639,7 @@ async def _inject_bundled_aggregations( event: EventBase, time_now: int, config: SerializeEventConfig, - bundled_aggregations: Dict[str, "BundledAggregations"], + bundled_aggregations: dict[str, "BundledAggregations"], serialized_event: JsonDict, ) -> None: """Potentially injects bundled aggregations into the unsigned portion of the serialized event. @@ -718,8 +716,8 @@ async def serialize_events( time_now: int, *, config: SerializeEventConfig = _DEFAULT_SERIALIZE_EVENT_CONFIG, - bundle_aggregations: Optional[Dict[str, "BundledAggregations"]] = None, - ) -> List[JsonDict]: + bundle_aggregations: Optional[dict[str, "BundledAggregations"]] = None, + ) -> list[JsonDict]: """Serializes multiple events. Args: @@ -763,7 +761,7 @@ def register_add_extra_fields_to_unsigned_client_event_callback( def copy_and_fixup_power_levels_contents( old_power_levels: PowerLevelsContent, -) -> Dict[str, Union[int, Dict[str, int]]]: +) -> dict[str, Union[int, dict[str, int]]]: """Copy the content of a power_levels event, unfreezing immutabledicts along the way. 
We accept as input power level values which are strings, provided they represent an @@ -779,11 +777,11 @@ def copy_and_fixup_power_levels_contents( if not isinstance(old_power_levels, collections.abc.Mapping): raise TypeError("Not a valid power-levels content: %r" % (old_power_levels,)) - power_levels: Dict[str, Union[int, Dict[str, int]]] = {} + power_levels: dict[str, Union[int, dict[str, int]]] = {} for k, v in old_power_levels.items(): if isinstance(v, collections.abc.Mapping): - h: Dict[str, int] = {} + h: dict[str, int] = {} power_levels[k] = h for k1, v1 in v.items(): _copy_power_level_value_as_integer(v1, h, k1) diff --git a/synapse/events/validator.py b/synapse/events/validator.py index 4d9ba15829..6fb52f82c1 100644 --- a/synapse/events/validator.py +++ b/synapse/events/validator.py @@ -19,7 +19,7 @@ # # import collections.abc -from typing import List, Type, Union, cast +from typing import Union, cast import jsonschema @@ -283,13 +283,13 @@ def _ensure_state_event(self, event: Union[EventBase, EventBuilder]) -> None: class Mentions(RequestBodyModel): - user_ids: List[StrictStr] = Field(default_factory=list) + user_ids: list[StrictStr] = Field(default_factory=list) room: StrictBool = False # This could return something newer than Draft 7, but that's the current "latest" # validator. 
-def _create_validator(schema: JsonDict) -> Type[jsonschema.Draft7Validator]: +def _create_validator(schema: JsonDict) -> type[jsonschema.Draft7Validator]: validator = jsonschema.validators.validator_for(schema) # by default jsonschema does not consider a immutabledict to be an object so diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index a1c9c286ac..13e445456a 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -20,7 +20,7 @@ # # import logging -from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional, Sequence +from typing import TYPE_CHECKING, Awaitable, Callable, Optional, Sequence from synapse.api.constants import MAX_DEPTH, EventContentFields, EventTypes, Membership from synapse.api.errors import Codes, SynapseError @@ -305,7 +305,7 @@ def _is_invite_via_3pid(event: EventBase) -> bool: def parse_events_from_pdu_json( pdus_json: Sequence[JsonDict], room_version: RoomVersion -) -> List[EventBase]: +) -> list[EventBase]: return [ event_from_pdu_json(pdu_json, room_version) for pdu_json in filter_pdus_for_valid_depth(pdus_json) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 8c91336dbc..cb2fa59f54 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -32,13 +32,10 @@ Callable, Collection, Container, - Dict, Iterable, - List, Mapping, Optional, Sequence, - Tuple, TypeVar, Union, ) @@ -120,8 +117,8 @@ class SendJoinResult: event: EventBase # A string giving the server the event was sent to. 
origin: str - state: List[EventBase] - auth_chain: List[EventBase] + state: list[EventBase] + auth_chain: list[EventBase] # True if 'state' elides non-critical membership events partial_state: bool @@ -135,7 +132,7 @@ class FederationClient(FederationBase): def __init__(self, hs: "HomeServer"): super().__init__(hs) - self.pdu_destination_tried: Dict[str, Dict[str, int]] = {} + self.pdu_destination_tried: dict[str, dict[str, int]] = {} self._clock.looping_call(self._clear_tried_cache, 60 * 1000) self.state = hs.get_state_handler() self.transport_layer = hs.get_federation_transport_client() @@ -145,7 +142,7 @@ def __init__(self, hs: "HomeServer"): # Cache mapping `event_id` to a tuple of the event itself and the `pull_origin` # (which server we pulled the event from) - self._get_pdu_cache: ExpiringCache[str, Tuple[EventBase, str]] = ExpiringCache( + self._get_pdu_cache: ExpiringCache[str, tuple[EventBase, str]] = ExpiringCache( cache_name="get_pdu_cache", server_name=self.server_name, hs=self.hs, @@ -163,8 +160,8 @@ def __init__(self, hs: "HomeServer"): # It is a map of (room ID, suggested-only) -> the response of # get_room_hierarchy. self._get_room_hierarchy_cache: ExpiringCache[ - Tuple[str, bool], - Tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]], + tuple[str, bool], + tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]], ] = ExpiringCache( cache_name="get_room_hierarchy_cache", server_name=self.server_name, @@ -265,7 +262,7 @@ async def claim_client_keys( self, user: UserID, destination: str, - query: Dict[str, Dict[str, Dict[str, int]]], + query: dict[str, dict[str, dict[str, int]]], timeout: Optional[int], ) -> JsonDict: """Claims one-time keys for a device hosted on a remote server. @@ -285,8 +282,8 @@ async def claim_client_keys( # Convert the query with counts into a stable and unstable query and check # if attempting to claim more than 1 OTK. 
- content: Dict[str, Dict[str, str]] = {} - unstable_content: Dict[str, Dict[str, List[str]]] = {} + content: dict[str, dict[str, str]] = {} + unstable_content: dict[str, dict[str, list[str]]] = {} use_unstable = False for user_id, one_time_keys in query.items(): for device_id, algorithms in one_time_keys.items(): @@ -337,7 +334,7 @@ async def claim_client_keys( @tag_args async def backfill( self, dest: str, room_id: str, limit: int, extremities: Collection[str] - ) -> Optional[List[EventBase]]: + ) -> Optional[list[EventBase]]: """Requests some more historic PDUs for the given room from the given destination server. @@ -662,7 +659,7 @@ async def get_pdu( @tag_args async def get_room_state_ids( self, destination: str, room_id: str, event_id: str - ) -> Tuple[List[str], List[str]]: + ) -> tuple[list[str], list[str]]: """Calls the /state_ids endpoint to fetch the state at a particular point in the room, and the auth events for the given event @@ -711,7 +708,7 @@ async def get_room_state( room_id: str, event_id: str, room_version: RoomVersion, - ) -> Tuple[List[EventBase], List[EventBase]]: + ) -> tuple[list[EventBase], list[EventBase]]: """Calls the /state endpoint to fetch the state at a particular point in the room. @@ -772,7 +769,7 @@ async def _check_sigs_and_hash_for_pulled_events_and_fetch( origin: str, pdus: Collection[EventBase], room_version: RoomVersion, - ) -> List[EventBase]: + ) -> list[EventBase]: """ Checks the signatures and hashes of a list of pulled events we got from federation and records any signature failures as failed pull attempts. @@ -806,7 +803,7 @@ async def _check_sigs_and_hash_for_pulled_events_and_fetch( # We limit how many PDUs we check at once, as if we try to do hundreds # of thousands of PDUs at once we see large memory spikes. 
- valid_pdus: List[EventBase] = [] + valid_pdus: list[EventBase] = [] async def _record_failure_callback(event: EventBase, cause: str) -> None: await self.store.record_event_failed_pull_attempt( @@ -916,7 +913,7 @@ async def _check_sigs_and_hash_and_fetch_one( async def get_event_auth( self, destination: str, room_id: str, event_id: str - ) -> List[EventBase]: + ) -> list[EventBase]: res = await self.transport_layer.get_event_auth(destination, room_id, event_id) room_version = await self.store.get_room_version(room_id) @@ -1050,7 +1047,7 @@ async def make_membership_event( membership: str, content: dict, params: Optional[Mapping[str, Union[str, Iterable[str]]]], - ) -> Tuple[str, EventBase, RoomVersion]: + ) -> tuple[str, EventBase, RoomVersion]: """ Creates an m.room.member event, with context, without participating in the room. @@ -1092,7 +1089,7 @@ async def make_membership_event( % (membership, ",".join(valid_memberships)) ) - async def send_request(destination: str) -> Tuple[str, EventBase, RoomVersion]: + async def send_request(destination: str) -> tuple[str, EventBase, RoomVersion]: ret = await self.transport_layer.make_membership_event( destination, room_id, user_id, membership, params ) @@ -1237,7 +1234,7 @@ async def send_request(destination: str) -> SendJoinResult: # We now go and check the signatures and hashes for the event. Note # that we limit how many events we process at a time to keep the # memory overhead from exploding. - valid_pdus_map: Dict[str, EventBase] = {} + valid_pdus_map: dict[str, EventBase] = {} async def _execute(pdu: EventBase) -> None: valid_pdu = await self._check_sigs_and_hash_and_fetch_one( @@ -1507,7 +1504,7 @@ async def _do_send_leave(self, destination: str, pdu: EventBase) -> JsonDict: # content. 
return resp[1] - async def send_knock(self, destinations: List[str], pdu: EventBase) -> JsonDict: + async def send_knock(self, destinations: list[str], pdu: EventBase) -> JsonDict: """Attempts to send a knock event to a given list of servers. Iterates through the list until one attempt succeeds. @@ -1568,7 +1565,7 @@ async def get_public_rooms( remote_server: str, limit: Optional[int] = None, since_token: Optional[str] = None, - search_filter: Optional[Dict] = None, + search_filter: Optional[dict] = None, include_all_networks: bool = False, third_party_instance_id: Optional[str] = None, ) -> JsonDict: @@ -1612,7 +1609,7 @@ async def get_missing_events( limit: int, min_depth: int, timeout: int, - ) -> List[EventBase]: + ) -> list[EventBase]: """Tries to fetch events we are missing. This is called when we receive an event without having received all of its ancestors. @@ -1718,7 +1715,7 @@ async def get_room_hierarchy( destinations: Iterable[str], room_id: str, suggested_only: bool, - ) -> Tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]]: + ) -> tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]]: """ Call other servers to get a hierarchy of the given room. @@ -1749,7 +1746,7 @@ async def get_room_hierarchy( async def send_request( destination: str, - ) -> Tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]]: + ) -> tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]]: try: res = await self.transport_layer.get_room_hierarchy( destination=destination, @@ -1924,8 +1921,8 @@ async def _timestamp_to_event_from_destination( raise InvalidResponseError(str(e)) async def get_account_status( - self, destination: str, user_ids: List[str] - ) -> Tuple[JsonDict, List[str]]: + self, destination: str, user_ids: list[str] + ) -> tuple[JsonDict, list[str]]: """Retrieves account statuses for a given list of users on a given remote homeserver. 
@@ -1991,8 +1988,8 @@ async def federation_download_media( download_ratelimiter: Ratelimiter, ip_address: str, ) -> Union[ - Tuple[int, Dict[bytes, List[bytes]], bytes], - Tuple[int, Dict[bytes, List[bytes]]], + tuple[int, dict[bytes, list[bytes]], bytes], + tuple[int, dict[bytes, list[bytes]]], ]: try: return await self.transport_layer.federation_download_media( @@ -2036,7 +2033,7 @@ async def download_media( max_timeout_ms: int, download_ratelimiter: Ratelimiter, ip_address: str, - ) -> Tuple[int, Dict[bytes, List[bytes]]]: + ) -> tuple[int, dict[bytes, list[bytes]]]: try: return await self.transport_layer.download_media_v3( destination, diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index e7da8fda0d..6e14f4a049 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -27,11 +27,8 @@ Awaitable, Callable, Collection, - Dict, - List, Mapping, Optional, - Tuple, Union, ) @@ -163,10 +160,10 @@ def __init__(self, hs: "HomeServer"): # origins that we are currently processing a transaction from. # a dict from origin to txn id. - self._active_transactions: Dict[str, str] = {} + self._active_transactions: dict[str, str] = {} # We cache results for transaction with the same ID - self._transaction_resp_cache: ResponseCache[Tuple[str, str]] = ResponseCache( + self._transaction_resp_cache: ResponseCache[tuple[str, str]] = ResponseCache( clock=hs.get_clock(), name="fed_txn_handler", server_name=self.server_name, @@ -179,7 +176,7 @@ def __init__(self, hs: "HomeServer"): # We cache responses to state queries, as they take a while and often # come in waves. 
- self._state_resp_cache: ResponseCache[Tuple[str, Optional[str]]] = ( + self._state_resp_cache: ResponseCache[tuple[str, Optional[str]]] = ( ResponseCache( clock=hs.get_clock(), name="state_resp", @@ -187,7 +184,7 @@ def __init__(self, hs: "HomeServer"): timeout_ms=30000, ) ) - self._state_ids_resp_cache: ResponseCache[Tuple[str, str]] = ResponseCache( + self._state_ids_resp_cache: ResponseCache[tuple[str, str]] = ResponseCache( clock=hs.get_clock(), name="state_ids_resp", server_name=self.server_name, @@ -236,8 +233,8 @@ async def _handle_old_staged_events(self) -> None: await self._clock.sleep(random.uniform(0, 0.1)) async def on_backfill_request( - self, origin: str, room_id: str, versions: List[str], limit: int - ) -> Tuple[int, Dict[str, Any]]: + self, origin: str, room_id: str, versions: list[str], limit: int + ) -> tuple[int, dict[str, Any]]: async with self._server_linearizer.queue((origin, room_id)): origin_host, _ = parse_server_name(origin) await self.check_server_matches_acl(origin_host, room_id) @@ -252,7 +249,7 @@ async def on_backfill_request( async def on_timestamp_to_event_request( self, origin: str, room_id: str, timestamp: int, direction: Direction - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: """When we receive a federated `/timestamp_to_event` request, handle all of the logic for validating and fetching the event. @@ -298,7 +295,7 @@ async def on_incoming_transaction( transaction_id: str, destination: str, transaction_data: JsonDict, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # If we receive a transaction we should make sure that kick off handling # any old events in the staging area. 
if not self._started_handling_of_staged_events: @@ -365,7 +362,7 @@ async def on_incoming_transaction( async def _on_incoming_transaction_inner( self, origin: str, transaction: Transaction, request_time: int - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: # CRITICAL SECTION: the first thing we must do (before awaiting) is # add an entry to _active_transactions. assert origin not in self._active_transactions @@ -381,7 +378,7 @@ async def _on_incoming_transaction_inner( async def _handle_incoming_transaction( self, origin: str, transaction: Transaction, request_time: int - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: """Process an incoming transaction and return the HTTP response Args: @@ -429,7 +426,7 @@ async def _handle_incoming_transaction( async def _handle_pdus_in_txn( self, origin: str, transaction: Transaction, request_time: int - ) -> Dict[str, dict]: + ) -> dict[str, dict]: """Process the PDUs in a received transaction. Args: @@ -448,7 +445,7 @@ async def _handle_pdus_in_txn( origin_host, _ = parse_server_name(origin) - pdus_by_room: Dict[str, List[EventBase]] = {} + pdus_by_room: dict[str, list[EventBase]] = {} newest_pdu_ts = 0 @@ -601,7 +598,7 @@ async def _process_edu(edu_dict: JsonDict) -> None: async def on_room_state_request( self, origin: str, room_id: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self._event_auth_handler.assert_host_in_room(room_id, origin) origin_host, _ = parse_server_name(origin) await self.check_server_matches_acl(origin_host, room_id) @@ -625,7 +622,7 @@ async def on_room_state_request( @tag_args async def on_state_ids_request( self, origin: str, room_id: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not event_id: raise NotImplementedError("Specify an event") @@ -653,7 +650,7 @@ async def _on_state_ids_request_compute( async def _on_context_state_request_compute( self, room_id: str, event_id: str - ) -> Dict[str, 
list]: + ) -> dict[str, list]: pdus: Collection[EventBase] event_ids = await self.handler.get_state_ids_for_pdu(room_id, event_id) pdus = await self.store.get_events_as_list(event_ids) @@ -669,7 +666,7 @@ async def _on_context_state_request_compute( async def on_pdu_request( self, origin: str, event_id: str - ) -> Tuple[int, Union[JsonDict, str]]: + ) -> tuple[int, Union[JsonDict, str]]: pdu = await self.handler.get_persisted_pdu(origin, event_id) if pdu: @@ -678,8 +675,8 @@ async def on_pdu_request( return 404, "" async def on_query_request( - self, query_type: str, args: Dict[str, str] - ) -> Tuple[int, Dict[str, Any]]: + self, query_type: str, args: dict[str, str] + ) -> tuple[int, dict[str, Any]]: received_queries_counter.labels( type=query_type, **{SERVER_NAME_LABEL: self.server_name}, @@ -688,8 +685,8 @@ async def on_query_request( return 200, resp async def on_make_join_request( - self, origin: str, room_id: str, user_id: str, supported_versions: List[str] - ) -> Dict[str, Any]: + self, origin: str, room_id: str, user_id: str, supported_versions: list[str] + ) -> dict[str, Any]: origin_host, _ = parse_server_name(origin) await self.check_server_matches_acl(origin_host, room_id) @@ -714,7 +711,7 @@ async def on_make_join_request( async def on_invite_request( self, origin: str, content: JsonDict, room_version_id: str - ) -> Dict[str, Any]: + ) -> dict[str, Any]: room_version = KNOWN_ROOM_VERSIONS.get(room_version_id) if not room_version: raise SynapseError( @@ -748,7 +745,7 @@ async def on_send_join_request( content: JsonDict, room_id: str, caller_supports_partial_state: bool = False, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: set_tag( SynapseTags.SEND_JOIN_RESPONSE_IS_PARTIAL_STATE, caller_supports_partial_state, @@ -809,7 +806,7 @@ async def on_send_join_request( async def on_make_leave_request( self, origin: str, room_id: str, user_id: str - ) -> Dict[str, Any]: + ) -> dict[str, Any]: origin_host, _ = parse_server_name(origin) await 
self.check_server_matches_acl(origin_host, room_id) pdu = await self.handler.on_make_leave_request(origin, room_id, user_id) @@ -826,7 +823,7 @@ async def on_send_leave_request( return {} async def on_make_knock_request( - self, origin: str, room_id: str, user_id: str, supported_versions: List[str] + self, origin: str, room_id: str, user_id: str, supported_versions: list[str] ) -> JsonDict: """We've received a /make_knock/ request, so we create a partial knock event for the room and hand that back, along with the room version, to the knocking @@ -884,7 +881,7 @@ async def on_send_knock_request( origin: str, content: JsonDict, room_id: str, - ) -> Dict[str, List[JsonDict]]: + ) -> dict[str, list[JsonDict]]: """ We have received a knock event for a room. Verify and send the event into the room on the knocking homeserver's behalf. Then reply with some stripped state from the @@ -1034,7 +1031,7 @@ async def _on_send_membership_event( async def on_event_auth( self, origin: str, room_id: str, event_id: str - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: async with self._server_linearizer.queue((origin, room_id)): await self._event_auth_handler.assert_host_in_room(room_id, origin) origin_host, _ = parse_server_name(origin) @@ -1046,20 +1043,20 @@ async def on_event_auth( return 200, res async def on_query_client_keys( - self, origin: str, content: Dict[str, str] - ) -> Tuple[int, Dict[str, Any]]: + self, origin: str, content: dict[str, str] + ) -> tuple[int, dict[str, Any]]: return await self.on_query_request("client_keys", content) async def on_query_user_devices( self, origin: str, user_id: str - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: keys = await self.device_handler.on_federation_query_user_devices(user_id) return 200, keys @trace async def on_claim_client_keys( - self, query: List[Tuple[str, str, str, int]], always_include_fallback_keys: bool - ) -> Dict[str, Any]: + self, query: list[tuple[str, str, str, int]], 
always_include_fallback_keys: bool + ) -> dict[str, Any]: if any( not self.hs.is_mine(UserID.from_string(user_id)) for user_id, _, _, _ in query @@ -1071,7 +1068,7 @@ async def on_claim_client_keys( query, always_include_fallback_keys=always_include_fallback_keys ) - json_result: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + json_result: dict[str, dict[str, dict[str, JsonDict]]] = {} for result in results: for user_id, device_keys in result.items(): for device_id, keys in device_keys.items(): @@ -1098,10 +1095,10 @@ async def on_get_missing_events( self, origin: str, room_id: str, - earliest_events: List[str], - latest_events: List[str], + earliest_events: list[str], + latest_events: list[str], limit: int, - ) -> Dict[str, list]: + ) -> dict[str, list]: async with self._server_linearizer.queue((origin, room_id)): origin_host, _ = parse_server_name(origin) await self.check_server_matches_acl(origin_host, room_id) @@ -1133,7 +1130,7 @@ async def on_openid_userinfo(self, token: str) -> Optional[str]: ts_now_ms = self._clock.time_msec() return await self.store.get_user_id_for_open_id_token(token, ts_now_ms) - def _transaction_dict_from_pdus(self, pdu_list: List[EventBase]) -> JsonDict: + def _transaction_dict_from_pdus(self, pdu_list: list[EventBase]) -> JsonDict: """Returns a new Transaction containing the given PDUs suitable for transmission. """ @@ -1208,7 +1205,7 @@ async def _handle_received_pdu(self, origin: str, pdu: EventBase) -> None: async def _get_next_nonspam_staged_event_for_room( self, room_id: str, room_version: RoomVersion - ) -> Optional[Tuple[str, EventBase]]: + ) -> Optional[tuple[str, EventBase]]: """Fetch the first non-spam event from staging queue. 
Args: @@ -1363,13 +1360,13 @@ async def _process_incoming_pdus_in_room_inner( lock = new_lock async def exchange_third_party_invite( - self, sender_user_id: str, target_user_id: str, room_id: str, signed: Dict + self, sender_user_id: str, target_user_id: str, room_id: str, signed: dict ) -> None: await self.handler.exchange_third_party_invite( sender_user_id, target_user_id, room_id, signed ) - async def on_exchange_third_party_invite_request(self, event_dict: Dict) -> None: + async def on_exchange_third_party_invite_request(self, event_dict: dict) -> None: await self.handler.on_exchange_third_party_invite_request(event_dict) async def check_server_matches_acl(self, server_name: str, room_id: str) -> None: @@ -1407,13 +1404,13 @@ def __init__(self, hs: "HomeServer"): # the case. self._send_edu = ReplicationFederationSendEduRestServlet.make_client(hs) - self.edu_handlers: Dict[str, Callable[[str, dict], Awaitable[None]]] = {} - self.query_handlers: Dict[str, Callable[[dict], Awaitable[JsonDict]]] = {} + self.edu_handlers: dict[str, Callable[[str, dict], Awaitable[None]]] = {} + self.query_handlers: dict[str, Callable[[dict], Awaitable[JsonDict]]] = {} # Map from type to instance names that we should route EDU handling to. # We randomly choose one instance from the list to route to for each new # EDU received. 
- self._edu_type_to_instance: Dict[str, List[str]] = {} + self._edu_type_to_instance: dict[str, list[str]] = {} def register_edu_handler( self, edu_type: str, handler: Callable[[str, JsonDict], Awaitable[None]] @@ -1455,7 +1452,7 @@ def register_query_handler( self.query_handlers[query_type] = handler def register_instances_for_edu( - self, edu_type: str, instance_names: List[str] + self, edu_type: str, instance_names: list[str] ) -> None: """Register that the EDU handler is on multiple instances.""" self._edu_type_to_instance[edu_type] = instance_names diff --git a/synapse/federation/persistence.py b/synapse/federation/persistence.py index 8340b48503..5628130429 100644 --- a/synapse/federation/persistence.py +++ b/synapse/federation/persistence.py @@ -27,7 +27,7 @@ """ import logging -from typing import Optional, Tuple +from typing import Optional from synapse.federation.units import Transaction from synapse.storage.databases.main import DataStore @@ -44,7 +44,7 @@ def __init__(self, datastore: DataStore): async def have_responded( self, origin: str, transaction: Transaction - ) -> Optional[Tuple[int, JsonDict]]: + ) -> Optional[tuple[int, JsonDict]]: """Have we already responded to a transaction with the same id and origin? 
diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 759df9836b..80f31798e8 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -40,14 +40,10 @@ from enum import Enum from typing import ( TYPE_CHECKING, - Dict, Hashable, Iterable, - List, Optional, Sized, - Tuple, - Type, ) import attr @@ -77,7 +73,7 @@ class QueueNames(str, Enum): PRESENCE_DESTINATIONS = "presence_destinations" -queue_name_to_gauge_map: Dict[QueueNames, LaterGauge] = {} +queue_name_to_gauge_map: dict[QueueNames, LaterGauge] = {} for queue_name in QueueNames: queue_name_to_gauge_map[queue_name] = LaterGauge( @@ -100,23 +96,23 @@ def __init__(self, hs: "HomeServer"): # We may have multiple federation sender instances, so we need to track # their positions separately. self._sender_instances = hs.config.worker.federation_shard_config.instances - self._sender_positions: Dict[str, int] = {} + self._sender_positions: dict[str, int] = {} # Pending presence map user_id -> UserPresenceState - self.presence_map: Dict[str, UserPresenceState] = {} + self.presence_map: dict[str, UserPresenceState] = {} # Stores the destinations we need to explicitly send presence to about a # given user. 
# Stream position -> (user_id, destinations) - self.presence_destinations: SortedDict[int, Tuple[str, Iterable[str]]] = ( + self.presence_destinations: SortedDict[int, tuple[str, Iterable[str]]] = ( SortedDict() ) # (destination, key) -> EDU - self.keyed_edu: Dict[Tuple[str, tuple], Edu] = {} + self.keyed_edu: dict[tuple[str, tuple], Edu] = {} # stream position -> (destination, key) - self.keyed_edu_changed: SortedDict[int, Tuple[str, tuple]] = SortedDict() + self.keyed_edu_changed: SortedDict[int, tuple[str, tuple]] = SortedDict() self.edus: SortedDict[int, Edu] = SortedDict() @@ -295,7 +291,7 @@ def federation_ack(self, instance_name: str, token: int) -> None: async def get_replication_rows( self, instance_name: str, from_token: int, to_token: int, target_row_count: int - ) -> Tuple[List[Tuple[int, Tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: """Get rows to be sent over federation between the two tokens Args: @@ -318,7 +314,7 @@ async def get_replication_rows( # list of tuple(int, BaseFederationRow), where the first is the position # of the federation stream. - rows: List[Tuple[int, BaseFederationRow]] = [] + rows: list[tuple[int, BaseFederationRow]] = [] # Fetch presence to send to destinations i = self.presence_destinations.bisect_right(from_token) @@ -413,7 +409,7 @@ def add_to_buffer(self, buff: "ParsedFederationStreamData") -> None: @attr.s(slots=True, frozen=True, auto_attribs=True) class PresenceDestinationsRow(BaseFederationRow): state: UserPresenceState - destinations: List[str] + destinations: list[str] TypeId = "pd" @@ -436,7 +432,7 @@ class KeyedEduRow(BaseFederationRow): typing EDUs clobber based on room_id. """ - key: Tuple[str, ...] # the edu key passed to send_edu + key: tuple[str, ...] 
# the edu key passed to send_edu edu: Edu TypeId = "k" @@ -471,7 +467,7 @@ def add_to_buffer(self, buff: "ParsedFederationStreamData") -> None: buff.edus.setdefault(self.edu.destination, []).append(self.edu) -_rowtypes: Tuple[Type[BaseFederationRow], ...] = ( +_rowtypes: tuple[type[BaseFederationRow], ...] = ( PresenceDestinationsRow, KeyedEduRow, EduRow, @@ -483,16 +479,16 @@ def add_to_buffer(self, buff: "ParsedFederationStreamData") -> None: @attr.s(slots=True, frozen=True, auto_attribs=True) class ParsedFederationStreamData: # list of tuples of UserPresenceState and destinations - presence_destinations: List[Tuple[UserPresenceState, List[str]]] + presence_destinations: list[tuple[UserPresenceState, list[str]]] # dict of destination -> { key -> Edu } - keyed_edus: Dict[str, Dict[Tuple[str, ...], Edu]] + keyed_edus: dict[str, dict[tuple[str, ...], Edu]] # dict of destination -> [Edu] - edus: Dict[str, List[Edu]] + edus: dict[str, list[Edu]] async def process_rows_for_federation( transaction_queue: FederationSender, - rows: List[FederationStream.FederationStreamRow], + rows: list[FederationStream.FederationStreamRow], ) -> None: """Parse a list of rows from the federation stream and put them in the transaction queue ready for sending to the relevant homeservers. 
diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py index 4410ffc5c5..229ae647c0 100644 --- a/synapse/federation/sender/__init__.py +++ b/synapse/federation/sender/__init__.py @@ -135,13 +135,10 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, Hashable, Iterable, - List, Literal, Optional, - Tuple, ) import attr @@ -312,7 +309,7 @@ def federation_ack(self, instance_name: str, token: int) -> None: @abc.abstractmethod async def get_replication_rows( self, instance_name: str, from_token: int, to_token: int, target_row_count: int - ) -> Tuple[List[Tuple[int, Tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: raise NotImplementedError() @@ -420,7 +417,7 @@ def __init__(self, hs: "HomeServer"): self._federation_shard_config = hs.config.worker.federation_shard_config # map from destination to PerDestinationQueue - self._per_destination_queues: Dict[str, PerDestinationQueue] = {} + self._per_destination_queues: dict[str, PerDestinationQueue] = {} transaction_queue_pending_destinations_gauge.register_hook( homeserver_instance_id=hs.get_instance_id(), @@ -724,7 +721,7 @@ async def handle_event(event: EventBase) -> None: **{SERVER_NAME_LABEL: self.server_name}, ).observe((now - ts) / 1000) - async def handle_room_events(events: List[EventBase]) -> None: + async def handle_room_events(events: list[EventBase]) -> None: logger.debug( "Handling %i events in room %s", len(events), events[0].room_id ) @@ -736,7 +733,7 @@ async def handle_room_events(events: List[EventBase]) -> None: for event in events: await handle_event(event) - events_by_room: Dict[str, List[EventBase]] = {} + events_by_room: dict[str, list[EventBase]] = {} for event_id in event_ids: # `event_entries` is unsorted, so we have to iterate over `event_ids` @@ -1124,7 +1121,7 @@ def federation_ack(self, instance_name: str, token: int) -> None: @staticmethod async def get_replication_rows( instance_name: str, from_token: int, to_token: int, 
target_row_count: int - ) -> Tuple[List[Tuple[int, Tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: # Dummy implementation for case where federation sender isn't offloaded # to a worker. return [], 0, False diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py index 845af92fac..ecf4789d76 100644 --- a/synapse/federation/sender/per_destination_queue.py +++ b/synapse/federation/sender/per_destination_queue.py @@ -23,7 +23,7 @@ import logging from collections import OrderedDict from types import TracebackType -from typing import TYPE_CHECKING, Dict, Hashable, Iterable, List, Optional, Tuple, Type +from typing import TYPE_CHECKING, Hashable, Iterable, Optional import attr from prometheus_client import Counter @@ -145,16 +145,16 @@ def __init__( self._last_successful_stream_ordering: Optional[int] = None # a queue of pending PDUs - self._pending_pdus: List[EventBase] = [] + self._pending_pdus: list[EventBase] = [] # XXX this is never actually used: see # https://github.com/matrix-org/synapse/issues/7549 - self._pending_edus: List[Edu] = [] + self._pending_edus: list[Edu] = [] # Pending EDUs by their "key". Keyed EDUs are EDUs that get clobbered # based on their key (e.g. typing events by room_id) # Map of (edu_type, key) -> Edu - self._pending_edus_keyed: Dict[Tuple[str, Hashable], Edu] = {} + self._pending_edus_keyed: dict[tuple[str, Hashable], Edu] = {} # Map of user_id -> UserPresenceState of pending presence to be sent to this # destination @@ -164,7 +164,7 @@ def __init__( # # Each receipt can only have a single receipt per # (room ID, receipt type, user ID, thread ID) tuple. - self._pending_receipt_edus: List[Dict[str, Dict[str, Dict[str, dict]]]] = [] + self._pending_receipt_edus: list[dict[str, dict[str, dict[str, dict]]]] = [] # stream_id of last successfully sent to-device message. # NB: may be a long or an int. 
@@ -340,7 +340,7 @@ def attempt_new_transaction(self) -> None: ) async def _transaction_transmission_loop(self) -> None: - pending_pdus: List[EventBase] = [] + pending_pdus: list[EventBase] = [] try: self.transmission_loop_running = True # This will throw if we wouldn't retry. We do this here so we fail @@ -665,12 +665,12 @@ def _get_receipt_edus(self, limit: int) -> Iterable[Edu]: if not self._pending_receipt_edus: self._rrs_pending_flush = False - def _pop_pending_edus(self, limit: int) -> List[Edu]: + def _pop_pending_edus(self, limit: int) -> list[Edu]: pending_edus = self._pending_edus pending_edus, self._pending_edus = pending_edus[:limit], pending_edus[limit:] return pending_edus - async def _get_device_update_edus(self, limit: int) -> Tuple[List[Edu], int]: + async def _get_device_update_edus(self, limit: int) -> tuple[list[Edu], int]: last_device_list = self._last_device_list_stream_id # Retrieve list of new device updates to send to the destination @@ -691,7 +691,7 @@ async def _get_device_update_edus(self, limit: int) -> Tuple[List[Edu], int]: return edus, now_stream_id - async def _get_to_device_message_edus(self, limit: int) -> Tuple[List[Edu], int]: + async def _get_to_device_message_edus(self, limit: int) -> tuple[list[Edu], int]: last_device_stream_id = self._last_device_stream_id to_device_stream_id = self._store.get_to_device_stream_token() contents, stream_id = await self._store.get_new_device_msgs_for_remote( @@ -745,9 +745,9 @@ class _TransactionQueueManager: _device_stream_id: Optional[int] = None _device_list_id: Optional[int] = None _last_stream_ordering: Optional[int] = None - _pdus: List[EventBase] = attr.Factory(list) + _pdus: list[EventBase] = attr.Factory(list) - async def __aenter__(self) -> Tuple[List[EventBase], List[Edu]]: + async def __aenter__(self) -> tuple[list[EventBase], list[Edu]]: # First we calculate the EDUs we want to send, if any. 
# There's a maximum number of EDUs that can be sent with a transaction, @@ -767,7 +767,7 @@ async def __aenter__(self) -> Tuple[List[EventBase], List[Edu]]: if self.queue._pending_presence: # Only send max 50 presence entries in the EDU, to bound the amount # of data we're sending. - presence_to_add: List[JsonDict] = [] + presence_to_add: list[JsonDict] = [] while ( self.queue._pending_presence and len(presence_to_add) < MAX_PRESENCE_STATES_PER_EDU @@ -845,7 +845,7 @@ async def __aenter__(self) -> Tuple[List[EventBase], List[Edu]]: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> None: diff --git a/synapse/federation/sender/transaction_manager.py b/synapse/federation/sender/transaction_manager.py index f47c011487..99aa05ebd6 100644 --- a/synapse/federation/sender/transaction_manager.py +++ b/synapse/federation/sender/transaction_manager.py @@ -18,7 +18,7 @@ # # import logging -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING from prometheus_client import Gauge @@ -82,8 +82,8 @@ def shutdown(self) -> None: async def send_new_transaction( self, destination: str, - pdus: List[EventBase], - edus: List[Edu], + pdus: list[EventBase], + edus: list[Edu], ) -> None: """ Args: diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 02e56e8e27..ee15b4804e 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -28,13 +28,10 @@ BinaryIO, Callable, Collection, - Dict, Generator, Iterable, - List, Mapping, Optional, - Tuple, Union, ) @@ -238,7 +235,7 @@ async def backfill( async def timestamp_to_event( self, destination: str, room_id: str, timestamp: int, direction: Direction - ) -> Union[JsonDict, List]: + ) -> Union[JsonDict, list]: """ Calls a remote federating server at `destination` asking for their closest event to the given timestamp in 
the given direction. @@ -428,7 +425,7 @@ async def send_join_v2( omit_members: bool, ) -> "SendJoinResponse": path = _create_v2_path("/send_join/%s/%s", room_id, event_id) - query_params: Dict[str, str] = {} + query_params: dict[str, str] = {} # lazy-load state on join query_params["omit_members"] = "true" if omit_members else "false" @@ -442,7 +439,7 @@ async def send_join_v2( async def send_leave_v1( self, destination: str, room_id: str, event_id: str, content: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: path = _create_v1_path("/send_leave/%s/%s", room_id, event_id) return await self.client.put_json( @@ -508,7 +505,7 @@ async def send_knock_v1( async def send_invite_v1( self, destination: str, room_id: str, event_id: str, content: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: path = _create_v1_path("/invite/%s/%s", room_id, event_id) return await self.client.put_json( @@ -533,7 +530,7 @@ async def get_public_rooms( remote_server: str, limit: Optional[int] = None, since_token: Optional[str] = None, - search_filter: Optional[Dict] = None, + search_filter: Optional[dict] = None, include_all_networks: bool = False, third_party_instance_id: Optional[str] = None, ) -> JsonDict: @@ -546,7 +543,7 @@ async def get_public_rooms( if search_filter: # this uses MSC2197 (Search Filtering over Federation) - data: Dict[str, Any] = {"include_all_networks": include_all_networks} + data: dict[str, Any] = {"include_all_networks": include_all_networks} if third_party_instance_id: data["third_party_instance_id"] = third_party_instance_id if limit: @@ -570,7 +567,7 @@ async def get_public_rooms( ) raise else: - args: Dict[str, Union[str, Iterable[str]]] = { + args: dict[str, Union[str, Iterable[str]]] = { "include_all_networks": "true" if include_all_networks else "false" } if third_party_instance_id: @@ -854,7 +851,7 @@ async def get_room_hierarchy_unstable( ) async def get_account_status( - self, destination: str, user_ids: List[str] + 
self, destination: str, user_ids: list[str] ) -> JsonDict: """ Args: @@ -878,7 +875,7 @@ async def download_media_r0( max_timeout_ms: int, download_ratelimiter: Ratelimiter, ip_address: str, - ) -> Tuple[int, Dict[bytes, List[bytes]]]: + ) -> tuple[int, dict[bytes, list[bytes]]]: path = f"/_matrix/media/r0/download/{destination}/{media_id}" return await self.client.get_file( destination, @@ -905,7 +902,7 @@ async def download_media_v3( max_timeout_ms: int, download_ratelimiter: Ratelimiter, ip_address: str, - ) -> Tuple[int, Dict[bytes, List[bytes]]]: + ) -> tuple[int, dict[bytes, list[bytes]]]: path = f"/_matrix/media/v3/download/{destination}/{media_id}" return await self.client.get_file( destination, @@ -936,7 +933,7 @@ async def federation_download_media( max_timeout_ms: int, download_ratelimiter: Ratelimiter, ip_address: str, - ) -> Tuple[int, Dict[bytes, List[bytes]], bytes]: + ) -> tuple[int, dict[bytes, list[bytes]], bytes]: path = f"/_matrix/federation/v1/media/download/{media_id}" return await self.client.federation_get_file( destination, @@ -993,9 +990,9 @@ class SendJoinResponse: """The parsed response of a `/send_join` request.""" # The list of auth events from the /send_join response. - auth_events: List[EventBase] + auth_events: list[EventBase] # The list of state from the /send_join response. - state: List[EventBase] + state: list[EventBase] # The raw join event from the /send_join response. event_dict: JsonDict # The parsed join event from the /send_join response. 
This will be None if @@ -1006,19 +1003,19 @@ class SendJoinResponse: members_omitted: bool = False # List of servers in the room - servers_in_room: Optional[List[str]] = None + servers_in_room: Optional[list[str]] = None @attr.s(slots=True, auto_attribs=True) class StateRequestResponse: """The parsed response of a `/state` request.""" - auth_events: List[EventBase] - state: List[EventBase] + auth_events: list[EventBase] + state: list[EventBase] @ijson.coroutine -def _event_parser(event_dict: JsonDict) -> Generator[None, Tuple[str, Any], None]: +def _event_parser(event_dict: JsonDict) -> Generator[None, tuple[str, Any], None]: """Helper function for use with `ijson.kvitems_coro` to parse key-value pairs to add them to a given dictionary. """ @@ -1030,7 +1027,7 @@ def _event_parser(event_dict: JsonDict) -> Generator[None, Tuple[str, Any], None @ijson.coroutine def _event_list_parser( - room_version: RoomVersion, events: List[EventBase] + room_version: RoomVersion, events: list[EventBase] ) -> Generator[None, JsonDict, None]: """Helper function for use with `ijson.items_coro` to parse an array of events and add them to the given list. @@ -1086,7 +1083,7 @@ class SendJoinParser(ByteParser[SendJoinResponse]): def __init__(self, room_version: RoomVersion, v1_api: bool): self._response = SendJoinResponse([], [], event_dict={}) self._room_version = room_version - self._coros: List[Generator[None, bytes, None]] = [] + self._coros: list[Generator[None, bytes, None]] = [] # The V1 API has the shape of `[200, {...}]`, which we handle by # prefixing with `item.*`. 
@@ -1159,7 +1156,7 @@ class _StateParser(ByteParser[StateRequestResponse]): def __init__(self, room_version: RoomVersion): self._response = StateRequestResponse([], []) self._room_version = room_version - self._coros: List[Generator[None, bytes, None]] = [ + self._coros: list[Generator[None, bytes, None]] = [ ijson.items_coro( _event_list_parser(room_version, self._response.state), "pdus.item", diff --git a/synapse/federation/transport/server/__init__.py b/synapse/federation/transport/server/__init__.py index c4905e63dd..d5f05f7290 100644 --- a/synapse/federation/transport/server/__init__.py +++ b/synapse/federation/transport/server/__init__.py @@ -20,7 +20,7 @@ # # import logging -from typing import TYPE_CHECKING, Dict, Iterable, List, Literal, Optional, Tuple, Type +from typing import TYPE_CHECKING, Iterable, Literal, Optional from synapse.api.errors import FederationDeniedError, SynapseError from synapse.federation.transport.server._base import ( @@ -52,7 +52,7 @@ class TransportLayerServer(JsonResource): """Handles incoming federation HTTP requests""" - def __init__(self, hs: "HomeServer", servlet_groups: Optional[List[str]] = None): + def __init__(self, hs: "HomeServer", servlet_groups: Optional[list[str]] = None): """Initialize the TransportLayerServer Will by default register all servlets. 
For custom behaviour, pass in @@ -130,8 +130,8 @@ def __init__( self.allow_access = hs.config.server.allow_public_rooms_over_federation async def on_GET( - self, origin: str, content: Literal[None], query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: str, content: Literal[None], query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: if not self.allow_access: raise FederationDeniedError(origin) @@ -164,8 +164,8 @@ async def on_GET( return 200, data async def on_POST( - self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: str, content: JsonDict, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: # This implements MSC2197 (Search Filtering over Federation) if not self.allow_access: raise FederationDeniedError(origin) @@ -242,8 +242,8 @@ async def on_GET( self, origin: Optional[str], content: Literal[None], - query: Dict[bytes, List[bytes]], - ) -> Tuple[int, JsonDict]: + query: dict[bytes, list[bytes]], + ) -> tuple[int, JsonDict]: token = parse_string_from_args(query, "access_token") if token is None: return ( @@ -265,7 +265,7 @@ async def on_GET( return 200, {"sub": user_id} -SERVLET_GROUPS: Dict[str, Iterable[Type[BaseFederationServlet]]] = { +SERVLET_GROUPS: dict[str, Iterable[type[BaseFederationServlet]]] = { "federation": FEDERATION_SERVLET_CLASSES, "room_list": (PublicRoomList,), "openid": (OpenIdUserInfo,), diff --git a/synapse/federation/transport/server/_base.py b/synapse/federation/transport/server/_base.py index cba309635b..146cbebb27 100644 --- a/synapse/federation/transport/server/_base.py +++ b/synapse/federation/transport/server/_base.py @@ -24,7 +24,7 @@ import re import time from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Awaitable, Callable, Dict, Optional, Tuple, cast +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional, cast from synapse.api.errors import Codes, FederationDeniedError, SynapseError from 
synapse.api.urls import FEDERATION_V1_PREFIX @@ -165,7 +165,7 @@ async def reset_retry_timings(self, origin: str) -> None: logger.exception("Error resetting retry timings on %s", origin) -def _parse_auth_header(header_bytes: bytes) -> Tuple[str, str, str, Optional[str]]: +def _parse_auth_header(header_bytes: bytes) -> tuple[str, str, str, Optional[str]]: """Parse an X-Matrix auth header Args: @@ -185,7 +185,7 @@ def _parse_auth_header(header_bytes: bytes) -> Tuple[str, str, str, Optional[str rf"{space_or_tab}*,{space_or_tab}*", re.split(r"^X-Matrix +", header_str, maxsplit=1)[1], ) - param_dict: Dict[str, str] = { + param_dict: dict[str, str] = { k.lower(): v for k, v in [param.split("=", maxsplit=1) for param in params] } @@ -252,7 +252,7 @@ class BaseFederationServlet: components as specified in the path match regexp. Returns: - Optional[Tuple[int, object]]: either (response code, response object) to + Optional[tuple[int, object]]: either (response code, response object) to return a JSON response, or None if the request has already been handled. 
Raises: @@ -282,14 +282,14 @@ def __init__( self.ratelimiter = ratelimiter self.server_name = server_name - def _wrap(self, func: Callable[..., Awaitable[Tuple[int, Any]]]) -> ServletCallback: + def _wrap(self, func: Callable[..., Awaitable[tuple[int, Any]]]) -> ServletCallback: authenticator = self.authenticator ratelimiter = self.ratelimiter @functools.wraps(func) async def new_func( request: SynapseRequest, *args: Any, **kwargs: str - ) -> Optional[Tuple[int, Any]]: + ) -> Optional[tuple[int, Any]]: """A callback which can be passed to HttpServer.RegisterPaths Args: diff --git a/synapse/federation/transport/server/federation.py b/synapse/federation/transport/server/federation.py index eb96ff27f9..54c7dac1b7 100644 --- a/synapse/federation/transport/server/federation.py +++ b/synapse/federation/transport/server/federation.py @@ -22,14 +22,10 @@ from collections import Counter from typing import ( TYPE_CHECKING, - Dict, - List, Literal, Mapping, Optional, Sequence, - Tuple, - Type, Union, ) @@ -93,9 +89,9 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], transaction_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """Called on PUT /send// Args: @@ -158,9 +154,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], event_id: str, - ) -> Tuple[int, Union[JsonDict, str]]: + ) -> tuple[int, Union[JsonDict, str]]: return await self.handler.on_pdu_request(origin, event_id) @@ -173,9 +169,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: return await self.handler.on_room_state_request( origin, room_id, @@ -191,9 +187,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, 
list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: return await self.handler.on_state_ids_request( origin, room_id, @@ -209,9 +205,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: versions = [x.decode("ascii") for x in query[b"v"]] limit = parse_integer_from_args(query, "limit", None) @@ -248,9 +244,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: timestamp = parse_integer_from_args(query, "ts", required=True) direction_str = parse_string_from_args( query, "dir", allowed_values=["f", "b"], required=True @@ -271,9 +267,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], query_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: args = {k.decode("utf8"): v[0].decode("utf-8") for k, v in query.items()} args["origin"] = origin return await self.handler.on_query_request(query_type, args) @@ -287,10 +283,10 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """ Args: origin: The authenticated server_name of the calling server @@ -323,10 +319,10 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: result = await self.handler.on_make_leave_request(origin, room_id, user_id) return 200, result @@ -339,10 +335,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, 
List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, Tuple[int, JsonDict]]: + ) -> tuple[int, tuple[int, JsonDict]]: result = await self.handler.on_send_leave_request(origin, content, room_id) return 200, (200, result) @@ -357,10 +353,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: result = await self.handler.on_send_leave_request(origin, content, room_id) return 200, result @@ -373,10 +369,10 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # Retrieve the room versions the remote homeserver claims to support supported_versions = parse_strings_from_args( query, "ver", required=True, encoding="utf-8" @@ -396,10 +392,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: result = await self.handler.on_send_knock_request(origin, content, room_id) return 200, result @@ -412,10 +408,10 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: return await self.handler.on_event_auth(origin, room_id, event_id) @@ -427,10 +423,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, Tuple[int, JsonDict]]: + ) -> tuple[int, tuple[int, JsonDict]]: # TODO(paul): assert that event_id parsed from path actually # match those given in content result = await 
self.handler.on_send_join_request(origin, content, room_id) @@ -447,10 +443,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # TODO(paul): assert that event_id parsed from path actually # match those given in content @@ -470,10 +466,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, Tuple[int, JsonDict]]: + ) -> tuple[int, tuple[int, JsonDict]]: # We don't get a room version, so we have to assume its EITHER v1 or # v2. This is "fine" as the only difference between V1 and V2 is the # state resolution algorithm, and we don't use that for processing @@ -497,10 +493,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # TODO(paul): assert that room_id/event_id parsed from path actually # match those given in content @@ -535,9 +531,9 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.handler.on_exchange_third_party_invite_request(content) return 200, {} @@ -547,8 +543,8 @@ class FederationClientKeysQueryServlet(BaseFederationServerServlet): CATEGORY = "Federation requests" async def on_POST( - self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: str, content: JsonDict, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: return await self.handler.on_query_client_keys(origin, content) @@ -560,9 +556,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, 
List[bytes]], + query: dict[bytes, list[bytes]], user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: return await self.handler.on_query_user_devices(origin, user_id) @@ -571,10 +567,10 @@ class FederationClientKeysClaimServlet(BaseFederationServerServlet): CATEGORY = "Federation requests" async def on_POST( - self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: str, content: JsonDict, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: # Generate a count for each algorithm, which is hard-coded to 1. - key_query: List[Tuple[str, str, str, int]] = [] + key_query: list[tuple[str, str, str, int]] = [] for user_id, device_keys in content.get("one_time_keys", {}).items(): for device_id, algorithm in device_keys.items(): key_query.append((user_id, device_id, algorithm, 1)) @@ -597,10 +593,10 @@ class FederationUnstableClientKeysClaimServlet(BaseFederationServerServlet): CATEGORY = "Federation requests" async def on_POST( - self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: str, content: JsonDict, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: # Generate a count for each algorithm. 
- key_query: List[Tuple[str, str, str, int]] = [] + key_query: list[tuple[str, str, str, int]] = [] for user_id, device_keys in content.get("one_time_keys", {}).items(): for device_id, algorithms in device_keys.items(): counts = Counter(algorithms) @@ -621,9 +617,9 @@ async def on_POST( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: limit = int(content.get("limit", 10)) earliest_events = content.get("earliest_events", []) latest_events = content.get("latest_events", []) @@ -646,8 +642,8 @@ class On3pidBindServlet(BaseFederationServerServlet): REQUIRE_AUTH = False async def on_POST( - self, origin: Optional[str], content: JsonDict, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: Optional[str], content: JsonDict, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: if "invites" in content: last_exception = None for invite in content["invites"]: @@ -682,8 +678,8 @@ async def on_GET( self, origin: Optional[str], content: Literal[None], - query: Dict[bytes, List[bytes]], - ) -> Tuple[int, JsonDict]: + query: dict[bytes, list[bytes]], + ) -> tuple[int, JsonDict]: return ( 200, { @@ -715,7 +711,7 @@ async def on_GET( content: Literal[None], query: Mapping[bytes, Sequence[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: suggested_only = parse_boolean_from_args(query, "suggested_only", default=False) return 200, await self.handler.get_federation_hierarchy( origin, room_id, suggested_only @@ -746,9 +742,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: is_public = await self._store.is_room_world_readable_or_publicly_joinable( room_id ) @@ -780,7 +776,7 @@ async def on_POST( content: JsonDict, query: Mapping[bytes, 
Sequence[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if "user_ids" not in content: raise SynapseError( 400, "Required parameter 'user_ids' is missing", Codes.MISSING_PARAM @@ -882,7 +878,7 @@ async def on_GET( self.media_repo.mark_recently_accessed(None, media_id) -FEDERATION_SERVLET_CLASSES: Tuple[Type[BaseFederationServlet], ...] = ( +FEDERATION_SERVLET_CLASSES: tuple[type[BaseFederationServlet], ...] = ( FederationSendServlet, FederationEventServlet, FederationStateV1Servlet, diff --git a/synapse/federation/units.py b/synapse/federation/units.py index 3bb5f824b7..bff45bc2a9 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -24,7 +24,7 @@ """ import logging -from typing import List, Optional, Sequence +from typing import Optional, Sequence import attr @@ -70,7 +70,7 @@ def strip_context(self) -> None: getattr(self, "content", {})["org.matrix.opentracing_context"] = "{}" -def _none_to_list(edus: Optional[List[JsonDict]]) -> List[JsonDict]: +def _none_to_list(edus: Optional[list[JsonDict]]) -> list[JsonDict]: if edus is None: return [] return edus @@ -98,8 +98,8 @@ class Transaction: origin: str destination: str origin_server_ts: int - pdus: List[JsonDict] = attr.ib(factory=list, converter=_none_to_list) - edus: List[JsonDict] = attr.ib(factory=list, converter=_none_to_list) + pdus: list[JsonDict] = attr.ib(factory=list, converter=_none_to_list) + edus: list[JsonDict] = attr.ib(factory=list, converter=_none_to_list) def get_dict(self) -> JsonDict: """A JSON-ready dictionary of valid keys which aren't internal.""" @@ -113,7 +113,7 @@ def get_dict(self) -> JsonDict: return result -def filter_pdus_for_valid_depth(pdus: Sequence[JsonDict]) -> List[JsonDict]: +def filter_pdus_for_valid_depth(pdus: Sequence[JsonDict]) -> list[JsonDict]: filtered_pdus = [] for pdu in pdus: # Drop PDUs that have a depth that is outside of the range allowed @@ -129,5 +129,5 @@ def filter_pdus_for_valid_depth(pdus: 
Sequence[JsonDict]) -> List[JsonDict]: def serialize_and_filter_pdus( pdus: Sequence[EventBase], time_now: Optional[int] = None -) -> List[JsonDict]: +) -> list[JsonDict]: return filter_pdus_for_valid_depth([pdu.get_pdu_json(time_now) for pdu in pdus]) diff --git a/synapse/handlers/account.py b/synapse/handlers/account.py index 37cc3d3ff5..855027f08e 100644 --- a/synapse/handlers/account.py +++ b/synapse/handlers/account.py @@ -19,7 +19,7 @@ # # -from typing import TYPE_CHECKING, Dict, List, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import Codes, SynapseError from synapse.types import JsonDict, UserID @@ -40,9 +40,9 @@ def __init__(self, hs: "HomeServer"): async def get_account_statuses( self, - user_ids: List[str], + user_ids: list[str], allow_remote: bool, - ) -> Tuple[JsonDict, List[str]]: + ) -> tuple[JsonDict, list[str]]: """Get account statuses for a list of user IDs. If one or more account(s) belong to remote homeservers, retrieve their status(es) @@ -63,7 +63,7 @@ async def get_account_statuses( """ statuses = {} failures = [] - remote_users: List[UserID] = [] + remote_users: list[UserID] = [] for raw_user_id in user_ids: try: @@ -127,8 +127,8 @@ async def _get_local_account_status(self, user_id: UserID) -> JsonDict: return status async def _get_remote_account_statuses( - self, remote_users: List[UserID] - ) -> Tuple[JsonDict, List[str]]: + self, remote_users: list[UserID] + ) -> tuple[JsonDict, list[str]]: """Send out federation requests to retrieve the statuses of remote accounts. Args: @@ -140,7 +140,7 @@ async def _get_remote_account_statuses( """ # Group remote users by destination, so we only send one request per remote # homeserver. 
- by_destination: Dict[str, List[str]] = {} + by_destination: dict[str, list[str]] = {} for user in remote_users: if user.domain not in by_destination: by_destination[user.domain] = [] @@ -149,7 +149,7 @@ async def _get_remote_account_statuses( # Retrieve the statuses and failures for remote accounts. final_statuses: JsonDict = {} - final_failures: List[str] = [] + final_failures: list[str] = [] for destination, users in by_destination.items(): statuses, failures = await self._federation_client.get_account_status( destination, diff --git a/synapse/handlers/account_data.py b/synapse/handlers/account_data.py index 228132db48..4492612859 100644 --- a/synapse/handlers/account_data.py +++ b/synapse/handlers/account_data.py @@ -21,7 +21,7 @@ # import logging import random -from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional, Tuple +from typing import TYPE_CHECKING, Awaitable, Callable, Optional from synapse.api.constants import AccountDataTypes from synapse.replication.http.account_data import ( @@ -67,7 +67,7 @@ def __init__(self, hs: "HomeServer"): self._remove_tag_client = ReplicationRemoveTagRestServlet.make_client(hs) self._account_data_writers = hs.config.worker.writers.account_data - self._on_account_data_updated_callbacks: List[ + self._on_account_data_updated_callbacks: list[ ON_ACCOUNT_DATA_UPDATED_CALLBACK ] = [] @@ -325,7 +325,7 @@ async def get_new_events( room_ids: StrCollection, is_guest: bool, explicit_room_id: Optional[str] = None, - ) -> Tuple[List[JsonDict], int]: + ) -> tuple[list[JsonDict], int]: user_id = user.to_string() last_stream_id = from_key diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py index eed50ef69a..a805de1f35 100644 --- a/synapse/handlers/account_validity.py +++ b/synapse/handlers/account_validity.py @@ -21,7 +21,7 @@ import email.mime.multipart import email.utils import logging -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional 
from synapse.api.errors import AuthError, StoreError, SynapseError from synapse.metrics.background_process_metrics import wrap_as_background_process @@ -222,7 +222,7 @@ async def _send_renewal_email(self, user_id: str, expiration_ts: int) -> None: await self.store.set_renewal_mail_status(user_id=user_id, email_sent=True) - async def _get_email_addresses_for_user(self, user_id: str) -> List[str]: + async def _get_email_addresses_for_user(self, user_id: str) -> list[str]: """Retrieve the list of email addresses attached to a user's account. Args: @@ -263,7 +263,7 @@ async def _get_renewal_token(self, user_id: str) -> str: attempts += 1 raise StoreError(500, "Couldn't generate a unique string as refresh string.") - async def renew_account(self, renewal_token: str) -> Tuple[bool, bool, int]: + async def renew_account(self, renewal_token: str) -> tuple[bool, bool, int]: """Renews the account attached to a given renewal token by pushing back the expiration date by the current validity period in the server's configuration. diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py index e90d675b59..3faaa4d2b3 100644 --- a/synapse/handlers/admin.py +++ b/synapse/handlers/admin.py @@ -24,13 +24,9 @@ from typing import ( TYPE_CHECKING, Any, - Dict, - List, Mapping, Optional, Sequence, - Set, - Tuple, ) import attr @@ -218,7 +214,7 @@ async def export_user_data(self, user_id: str, writer: "ExfiltrationWriter") -> to_key = RoomStreamToken(stream=stream_ordering) # Events that we've processed in this room - written_events: Set[str] = set() + written_events: set[str] = set() # We need to track gaps in the events stream so that we can then # write out the state at those events. We do this by keeping track @@ -231,7 +227,7 @@ async def export_user_data(self, user_id: str, writer: "ExfiltrationWriter") -> # The reverse mapping to above, i.e. map from unseen event to events # that have the unseen event in their prev_events, i.e. the unseen # events "children". 
- unseen_to_child_events: Dict[str, Set[str]] = {} + unseen_to_child_events: dict[str, set[str]] = {} # We fetch events in the room the user could see by fetching *all* # events that we have and then filtering, this isn't the most @@ -412,7 +408,7 @@ async def start_redact_events( async def _redact_all_events( self, task: ScheduledTask - ) -> Tuple[TaskStatus, Optional[Mapping[str, Any]], Optional[str]]: + ) -> tuple[TaskStatus, Optional[Mapping[str, Any]], Optional[str]]: """ Task to redact all of a users events in the given rooms, tracking which, if any, events whose redaction failed @@ -518,7 +514,7 @@ class ExfiltrationWriter(metaclass=abc.ABCMeta): """Interface used to specify how to write exported data.""" @abc.abstractmethod - def write_events(self, room_id: str, events: List[EventBase]) -> None: + def write_events(self, room_id: str, events: list[EventBase]) -> None: """Write a batch of events for a room.""" raise NotImplementedError() diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 6536d9fe51..5240178d80 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -22,12 +22,9 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, Iterable, - List, Mapping, Optional, - Tuple, Union, ) @@ -143,7 +140,7 @@ async def _notify_interested_services(self, max_token: RoomStreamToken) -> None: event_to_received_ts.keys(), get_prev_content=True ) - events_by_room: Dict[str, List[EventBase]] = {} + events_by_room: dict[str, list[EventBase]] = {} for event in events: events_by_room.setdefault(event.room_id, []).append(event) @@ -341,7 +338,7 @@ def notify_interested_services_ephemeral( @wrap_as_background_process("notify_interested_services_ephemeral") async def _notify_interested_services_ephemeral( self, - services: List[ApplicationService], + services: list[ApplicationService], stream_key: StreamKeyType, new_token: Union[int, MultiWriterStreamToken], users: Collection[Union[str, UserID]], @@ -429,7 +426,7 
@@ async def _notify_interested_services_ephemeral( async def _handle_typing( self, service: ApplicationService, new_token: int - ) -> List[JsonMapping]: + ) -> list[JsonMapping]: """ Return the typing events since the given stream token that the given application service should receive. @@ -464,7 +461,7 @@ async def _handle_typing( async def _handle_receipts( self, service: ApplicationService, new_token: MultiWriterStreamToken - ) -> List[JsonMapping]: + ) -> list[JsonMapping]: """ Return the latest read receipts that the given application service should receive. @@ -503,7 +500,7 @@ async def _handle_presence( service: ApplicationService, users: Collection[Union[str, UserID]], new_token: Optional[int], - ) -> List[JsonMapping]: + ) -> list[JsonMapping]: """ Return the latest presence updates that the given application service should receive. @@ -523,7 +520,7 @@ async def _handle_presence( A list of json dictionaries containing data derived from the presence events that should be sent to the given application service. """ - events: List[JsonMapping] = [] + events: list[JsonMapping] = [] presence_source = self.event_sources.sources.presence from_key = await self.store.get_type_stream_id_for_appservice( service, "presence" @@ -563,7 +560,7 @@ async def _get_to_device_messages( service: ApplicationService, new_token: int, users: Collection[Union[str, UserID]], - ) -> List[JsonDict]: + ) -> list[JsonDict]: """ Given an application service, determine which events it should receive from those between the last-recorded to-device message stream token for this @@ -585,7 +582,7 @@ async def _get_to_device_messages( ) # Filter out users that this appservice is not interested in - users_appservice_is_interested_in: List[str] = [] + users_appservice_is_interested_in: list[str] = [] for user in users: # FIXME: We should do this farther up the call stack. We currently repeat # this operation in _handle_presence. 
@@ -612,7 +609,7 @@ async def _get_to_device_messages( # # So we mangle this dict into a flat list of to-device messages with the relevant # user ID and device ID embedded inside each message dict. - message_payload: List[JsonDict] = [] + message_payload: list[JsonDict] = [] for ( user_id, device_id, @@ -761,8 +758,8 @@ async def query_room_alias_exists( return None async def query_3pe( - self, kind: str, protocol: str, fields: Dict[bytes, List[bytes]] - ) -> List[JsonDict]: + self, kind: str, protocol: str, fields: dict[bytes, list[bytes]] + ) -> list[JsonDict]: services = self._get_services_for_3pn(protocol) results = await make_deferred_yieldable( @@ -786,9 +783,9 @@ async def query_3pe( async def get_3pe_protocols( self, only_protocol: Optional[str] = None - ) -> Dict[str, JsonDict]: + ) -> dict[str, JsonDict]: services = self.store.get_app_services() - protocols: Dict[str, List[JsonDict]] = {} + protocols: dict[str, list[JsonDict]] = {} # Collect up all the individual protocol responses out of the ASes for s in services: @@ -804,7 +801,7 @@ async def get_3pe_protocols( if info is not None: protocols[p].append(info) - def _merge_instances(infos: List[JsonDict]) -> JsonDict: + def _merge_instances(infos: list[JsonDict]) -> JsonDict: # Merge the 'instances' lists of multiple results, but just take # the other fields from the first as they ought to be identical # copy the result so as not to corrupt the cached one @@ -822,7 +819,7 @@ def _merge_instances(infos: List[JsonDict]) -> JsonDict: async def _get_services_for_event( self, event: EventBase - ) -> List[ApplicationService]: + ) -> list[ApplicationService]: """Retrieve a list of application services interested in this event. 
Args: @@ -842,11 +839,11 @@ async def _get_services_for_event( return interested_list - def _get_services_for_user(self, user_id: str) -> List[ApplicationService]: + def _get_services_for_user(self, user_id: str) -> list[ApplicationService]: services = self.store.get_app_services() return [s for s in services if (s.is_interested_in_user(user_id))] - def _get_services_for_3pn(self, protocol: str) -> List[ApplicationService]: + def _get_services_for_3pn(self, protocol: str) -> list[ApplicationService]: services = self.store.get_app_services() return [s for s in services if s.is_interested_in_protocol(protocol)] @@ -872,9 +869,9 @@ async def _check_user_exists(self, user_id: str) -> bool: return True async def claim_e2e_one_time_keys( - self, query: Iterable[Tuple[str, str, str, int]] - ) -> Tuple[ - Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str, int]] + self, query: Iterable[tuple[str, str, str, int]] + ) -> tuple[ + dict[str, dict[str, dict[str, JsonDict]]], list[tuple[str, str, str, int]] ]: """Claim one time keys from application services. @@ -896,7 +893,7 @@ async def claim_e2e_one_time_keys( services = self.store.get_app_services() # Partition the users by appservice. - query_by_appservice: Dict[str, List[Tuple[str, str, str, int]]] = {} + query_by_appservice: dict[str, list[tuple[str, str, str, int]]] = {} missing = [] for user_id, device, algorithm, count in query: if not self.store.get_if_app_services_interested_in_user(user_id): @@ -929,7 +926,7 @@ async def claim_e2e_one_time_keys( # Patch together the results -- they are all independent (since they # require exclusive control over the users, which is the outermost key). 
- claimed_keys: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + claimed_keys: dict[str, dict[str, dict[str, JsonDict]]] = {} for success, result in results: if success: claimed_keys.update(result[0]) @@ -938,8 +935,8 @@ async def claim_e2e_one_time_keys( return claimed_keys, missing async def query_keys( - self, query: Mapping[str, Optional[List[str]]] - ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: + self, query: Mapping[str, Optional[list[str]]] + ) -> dict[str, dict[str, dict[str, JsonDict]]]: """Query application services for device keys. Users which are exclusively owned by an application service are queried @@ -954,7 +951,7 @@ async def query_keys( services = self.store.get_app_services() # Partition the users by appservice. - query_by_appservice: Dict[str, Dict[str, List[str]]] = {} + query_by_appservice: dict[str, dict[str, list[str]]] = {} for user_id, device_ids in query.items(): if not self.store.get_if_app_services_interested_in_user(user_id): continue @@ -986,7 +983,7 @@ async def query_keys( # Patch together the results -- they are all independent (since they # require exclusive control over the users). They get returned as a single # dictionary. - key_queries: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + key_queries: dict[str, dict[str, dict[str, JsonDict]]] = {} for success, result in results: if success: key_queries.update(result) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index f4583e33c3..e282f38b9e 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -31,13 +31,9 @@ Any, Awaitable, Callable, - Dict, Iterable, - List, Mapping, Optional, - Tuple, - Type, Union, cast, ) @@ -102,7 +98,7 @@ def convert_client_dict_legacy_fields_to_identifier( submission: JsonDict, -) -> Dict[str, str]: +) -> dict[str, str]: """ Convert a legacy-formatted login submission to an identifier dict. 
@@ -154,7 +150,7 @@ def convert_client_dict_legacy_fields_to_identifier( return identifier -def login_id_phone_to_thirdparty(identifier: JsonDict) -> Dict[str, str]: +def login_id_phone_to_thirdparty(identifier: JsonDict) -> dict[str, str]: """ Convert a phone login identifier type to a generic threepid identifier. @@ -205,7 +201,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.auth_blocking = hs.get_auth_blocking() self.clock = hs.get_clock() - self.checkers: Dict[str, UserInteractiveAuthChecker] = {} + self.checkers: dict[str, UserInteractiveAuthChecker] = {} for auth_checker_class in INTERACTIVE_AUTH_CHECKERS: inst = auth_checker_class(hs) if inst.is_enabled(): @@ -280,7 +276,7 @@ def __init__(self, hs: "HomeServer"): # A mapping of user ID to extra attributes to include in the login # response. - self._extra_attributes: Dict[str, SsoLoginExtraAttributes] = {} + self._extra_attributes: dict[str, SsoLoginExtraAttributes] = {} self._auth_delegation_enabled = ( hs.config.mas.enabled or hs.config.experimental.msc3861.enabled @@ -290,10 +286,10 @@ async def validate_user_via_ui_auth( self, requester: Requester, request: SynapseRequest, - request_body: Dict[str, Any], + request_body: dict[str, Any], description: str, can_skip_ui_auth: bool = False, - ) -> Tuple[dict, Optional[str]]: + ) -> tuple[dict, Optional[str]]: """ Checks that the user is who they claim to be, via a UI auth. @@ -440,12 +436,12 @@ def get_enabled_auth_types(self) -> Iterable[str]: async def check_ui_auth( self, - flows: List[List[str]], + flows: list[list[str]], request: SynapseRequest, - clientdict: Dict[str, Any], + clientdict: dict[str, Any], description: str, get_new_session_data: Optional[Callable[[], JsonDict]] = None, - ) -> Tuple[dict, dict, str]: + ) -> tuple[dict, dict, str]: """ Takes a dictionary sent by the client in the login / registration protocol and handles the User-Interactive Auth flow. 
@@ -579,7 +575,7 @@ async def check_ui_auth( ) # check auth type currently being presented - errordict: Dict[str, Any] = {} + errordict: dict[str, Any] = {} if "type" in authdict: login_type: str = authdict["type"] try: @@ -617,7 +613,7 @@ async def check_ui_auth( raise InteractiveAuthIncompleteError(session.session_id, ret) async def add_oob_auth( - self, stagetype: str, authdict: Dict[str, Any], clientip: str + self, stagetype: str, authdict: dict[str, Any], clientip: str ) -> None: """ Adds the result of out-of-band authentication into an existing auth @@ -641,7 +637,7 @@ async def add_oob_auth( authdict["session"], stagetype, result ) - def get_session_id(self, clientdict: Dict[str, Any]) -> Optional[str]: + def get_session_id(self, clientdict: dict[str, Any]) -> Optional[str]: """ Gets the session ID for a client given the client dictionary @@ -702,8 +698,8 @@ async def _expire_old_sessions(self) -> None: await self.store.delete_old_ui_auth_sessions(expiration_time) async def _check_auth_dict( - self, authdict: Dict[str, Any], clientip: str - ) -> Union[Dict[str, Any], str]: + self, authdict: dict[str, Any], clientip: str + ) -> Union[dict[str, Any], str]: """Attempt to validate the auth dict provided by a client Args: @@ -750,9 +746,9 @@ def _get_params_terms(self) -> dict: def _auth_dict_for_flows( self, - flows: List[List[str]], + flows: list[list[str]], session_id: str, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: public_flows = [] for f in flows: public_flows.append(f) @@ -762,7 +758,7 @@ def _auth_dict_for_flows( LoginType.TERMS: self._get_params_terms, } - params: Dict[str, Any] = {} + params: dict[str, Any] = {} for f in public_flows: for stage in f: @@ -780,7 +776,7 @@ async def refresh_token( refresh_token: str, access_token_valid_until_ms: Optional[int], refresh_token_valid_until_ms: Optional[int], - ) -> Tuple[str, str, Optional[int]]: + ) -> tuple[str, str, Optional[int]]: """ Consumes a refresh token and generate both a new access token and a 
new refresh token from it. @@ -934,7 +930,7 @@ async def create_refresh_token_for_user_id( device_id: str, expiry_ts: Optional[int], ultimate_session_expiry_ts: Optional[int], - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Creates a new refresh token for the user with the given user ID. @@ -1067,7 +1063,7 @@ async def is_user_approved(self, user_id: str) -> bool: async def _find_user_id_and_pwd_hash( self, user_id: str - ) -> Optional[Tuple[str, str]]: + ) -> Optional[tuple[str, str]]: """Checks to see if a user with the given id exists. Will check case insensitively, but will return None if there are multiple inexact matches. @@ -1142,10 +1138,10 @@ def get_supported_login_types(self) -> Iterable[str]: async def validate_login( self, - login_submission: Dict[str, Any], + login_submission: dict[str, Any], ratelimit: bool = False, is_reauth: bool = False, - ) -> Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]: + ) -> tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]: """Authenticates the user for the /login API Also used by the user-interactive auth flow to validate auth types which don't @@ -1300,8 +1296,8 @@ async def validate_login( async def _validate_userid_login( self, username: str, - login_submission: Dict[str, Any], - ) -> Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]: + login_submission: dict[str, Any], + ) -> tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]: """Helper for validate_login Handles login, once we've mapped 3pids onto userids @@ -1390,7 +1386,7 @@ async def _validate_userid_login( async def check_password_provider_3pid( self, medium: str, address: str, password: str - ) -> Tuple[Optional[str], Optional[Callable[["LoginResponse"], Awaitable[None]]]]: + ) -> tuple[Optional[str], Optional[Callable[["LoginResponse"], Awaitable[None]]]]: """Check if a password provider is able to validate a thirdparty login Args: @@ -1905,7 +1901,7 @@ async def _sso_login_callback(self, 
login_result: "LoginResponse") -> None: extra_attributes = self._extra_attributes.get(login_result["user_id"]) if extra_attributes: - login_result_dict = cast(Dict[str, Any], login_result) + login_result_dict = cast(dict[str, Any], login_result) login_result_dict.update(extra_attributes.extra_attributes) def _expire_sso_extra_attributes(self) -> None: @@ -1941,7 +1937,7 @@ def load_legacy_password_auth_providers(hs: "HomeServer") -> None: def load_single_legacy_password_auth_provider( - module: Type, + module: type, config: JsonDict, api: "ModuleApi", ) -> None: @@ -1966,7 +1962,7 @@ def async_wrapper(f: Optional[Callable]) -> Optional[Callable[..., Awaitable]]: async def wrapped_check_password( username: str, login_type: str, login_dict: JsonDict - ) -> Optional[Tuple[str, Optional[Callable]]]: + ) -> Optional[tuple[str, Optional[Callable]]]: # We've already made sure f is not None above, but mypy doesn't do well # across function boundaries so we need to tell it f is definitely not # None. @@ -1985,12 +1981,12 @@ async def wrapped_check_password( return wrapped_check_password # We need to wrap check_auth as in the old form it could return - # just a str, but now it must return Optional[Tuple[str, Optional[Callable]] + # just a str, but now it must return Optional[tuple[str, Optional[Callable]] if f.__name__ == "check_auth": async def wrapped_check_auth( username: str, login_type: str, login_dict: JsonDict - ) -> Optional[Tuple[str, Optional[Callable]]]: + ) -> Optional[tuple[str, Optional[Callable]]]: # We've already made sure f is not None above, but mypy doesn't do well # across function boundaries so we need to tell it f is definitely not # None. 
@@ -2006,12 +2002,12 @@ async def wrapped_check_auth( return wrapped_check_auth # We need to wrap check_3pid_auth as in the old form it could return - # just a str, but now it must return Optional[Tuple[str, Optional[Callable]] + # just a str, but now it must return Optional[tuple[str, Optional[Callable]] if f.__name__ == "check_3pid_auth": async def wrapped_check_3pid_auth( medium: str, address: str, password: str - ) -> Optional[Tuple[str, Optional[Callable]]]: + ) -> Optional[tuple[str, Optional[Callable]]]: # We've already made sure f is not None above, but mypy doesn't do well # across function boundaries so we need to tell it f is definitely not # None. @@ -2026,7 +2022,7 @@ async def wrapped_check_3pid_auth( return wrapped_check_3pid_auth - def run(*args: Tuple, **kwargs: Dict) -> Awaitable: + def run(*args: tuple, **kwargs: dict) -> Awaitable: # mypy doesn't do well across function boundaries so we need to tell it # f is definitely not None. assert f is not None @@ -2079,14 +2075,14 @@ def run(*args: Tuple, **kwargs: Dict) -> Awaitable: CHECK_3PID_AUTH_CALLBACK = Callable[ [str, str, str], Awaitable[ - Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]] + Optional[tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]] ], ] ON_LOGGED_OUT_CALLBACK = Callable[[str, Optional[str], str], Awaitable] CHECK_AUTH_CALLBACK = Callable[ [str, str, JsonDict], Awaitable[ - Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]] + Optional[tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]] ], ] GET_USERNAME_FOR_REGISTRATION_CALLBACK = Callable[ @@ -2108,21 +2104,21 @@ class PasswordAuthProvider: def __init__(self) -> None: # lists of callbacks - self.check_3pid_auth_callbacks: List[CHECK_3PID_AUTH_CALLBACK] = [] - self.on_logged_out_callbacks: List[ON_LOGGED_OUT_CALLBACK] = [] - self.get_username_for_registration_callbacks: List[ + self.check_3pid_auth_callbacks: list[CHECK_3PID_AUTH_CALLBACK] = 
[] + self.on_logged_out_callbacks: list[ON_LOGGED_OUT_CALLBACK] = [] + self.get_username_for_registration_callbacks: list[ GET_USERNAME_FOR_REGISTRATION_CALLBACK ] = [] - self.get_displayname_for_registration_callbacks: List[ + self.get_displayname_for_registration_callbacks: list[ GET_DISPLAYNAME_FOR_REGISTRATION_CALLBACK ] = [] - self.is_3pid_allowed_callbacks: List[IS_3PID_ALLOWED_CALLBACK] = [] + self.is_3pid_allowed_callbacks: list[IS_3PID_ALLOWED_CALLBACK] = [] # Mapping from login type to login parameters - self._supported_login_types: Dict[str, Tuple[str, ...]] = {} + self._supported_login_types: dict[str, tuple[str, ...]] = {} # Mapping from login type to auth checker callbacks - self.auth_checker_callbacks: Dict[str, List[CHECK_AUTH_CALLBACK]] = {} + self.auth_checker_callbacks: dict[str, list[CHECK_AUTH_CALLBACK]] = {} def register_password_auth_provider_callbacks( self, @@ -2130,7 +2126,7 @@ def register_password_auth_provider_callbacks( on_logged_out: Optional[ON_LOGGED_OUT_CALLBACK] = None, is_3pid_allowed: Optional[IS_3PID_ALLOWED_CALLBACK] = None, auth_checkers: Optional[ - Dict[Tuple[str, Tuple[str, ...]], CHECK_AUTH_CALLBACK] + dict[tuple[str, tuple[str, ...]], CHECK_AUTH_CALLBACK] ] = None, get_username_for_registration: Optional[ GET_USERNAME_FOR_REGISTRATION_CALLBACK @@ -2207,7 +2203,7 @@ def get_supported_login_types(self) -> Mapping[str, Iterable[str]]: async def check_auth( self, username: str, login_type: str, login_dict: JsonDict - ) -> Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]]: + ) -> Optional[tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]]: """Check if the user has presented valid login credentials Args: @@ -2245,7 +2241,7 @@ async def check_auth( if not isinstance(result, tuple) or len(result) != 2: logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" - " Optional[Tuple[str, Optional[Callable]]]", + " Optional[tuple[str, 
Optional[Callable]]]", callback, result, ) @@ -2258,7 +2254,7 @@ async def check_auth( if not isinstance(str_result, str): logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" - " Optional[Tuple[str, Optional[Callable]]]", + " Optional[tuple[str, Optional[Callable]]]", callback, result, ) @@ -2269,7 +2265,7 @@ async def check_auth( if not callable(callback_result): logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" - " Optional[Tuple[str, Optional[Callable]]]", + " Optional[tuple[str, Optional[Callable]]]", callback, result, ) @@ -2284,7 +2280,7 @@ async def check_auth( async def check_3pid_auth( self, medium: str, address: str, password: str - ) -> Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]]: + ) -> Optional[tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]]: # This function is able to return a deferred that either # resolves None, meaning authentication failure, or upon # success, to a str (which is the user_id) or a tuple of @@ -2308,7 +2304,7 @@ async def check_3pid_auth( if not isinstance(result, tuple) or len(result) != 2: logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" - " Optional[Tuple[str, Optional[Callable]]]", + " Optional[tuple[str, Optional[Callable]]]", callback, result, ) @@ -2321,7 +2317,7 @@ async def check_3pid_auth( if not isinstance(str_result, str): logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" - " Optional[Tuple[str, Optional[Callable]]]", + " Optional[tuple[str, Optional[Callable]]]", callback, result, ) @@ -2332,7 +2328,7 @@ async def check_3pid_auth( if not callable(callback_result): logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" - " Optional[Tuple[str, Optional[Callable]]]", + " Optional[tuple[str, 
Optional[Callable]]]", callback, result, ) diff --git a/synapse/handlers/cas.py b/synapse/handlers/cas.py index fbe79c2e4c..438dcf9f2c 100644 --- a/synapse/handlers/cas.py +++ b/synapse/handlers/cas.py @@ -20,7 +20,7 @@ # import logging import urllib.parse -from typing import TYPE_CHECKING, Dict, List, Optional +from typing import TYPE_CHECKING, Optional from xml.etree import ElementTree as ET import attr @@ -54,7 +54,7 @@ def __str__(self) -> str: @attr.s(slots=True, frozen=True, auto_attribs=True) class CasResponse: username: str - attributes: Dict[str, List[Optional[str]]] + attributes: dict[str, list[Optional[str]]] class CasHandler: @@ -99,7 +99,7 @@ def __init__(self, hs: "HomeServer"): self._sso_handler.register_identity_provider(self) - def _build_service_param(self, args: Dict[str, str]) -> str: + def _build_service_param(self, args: dict[str, str]) -> str: """ Generates a value to use as the "service" parameter when redirecting or querying the CAS service. @@ -116,7 +116,7 @@ def _build_service_param(self, args: Dict[str, str]) -> str: ) async def _validate_ticket( - self, ticket: str, service_args: Dict[str, str] + self, ticket: str, service_args: dict[str, str] ) -> CasResponse: """ Validate a CAS ticket with the server, and return the parsed the response. @@ -186,7 +186,7 @@ def _parse_cas_response(self, cas_response_body: bytes) -> CasResponse: # Iterate through the nodes and pull out the user and any extra attributes. 
user = None - attributes: Dict[str, List[Optional[str]]] = {} + attributes: dict[str, list[Optional[str]]] = {} for child in root[0]: if child.tag.endswith("user"): user = child.text diff --git a/synapse/handlers/delayed_events.py b/synapse/handlers/delayed_events.py index 79dd3e8416..b89b7416e6 100644 --- a/synapse/handlers/delayed_events.py +++ b/synapse/handlers/delayed_events.py @@ -13,7 +13,7 @@ # import logging -from typing import TYPE_CHECKING, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Optional from twisted.internet.interfaces import IDelayedCall @@ -226,7 +226,7 @@ async def _unsafe_process_new_event(self) -> None: await self._store.update_delayed_events_stream_pos(max_pos) - async def _handle_state_deltas(self, deltas: List[StateDelta]) -> None: + async def _handle_state_deltas(self, deltas: list[StateDelta]) -> None: """ Process current state deltas to cancel other users' pending delayed events that target the same state. @@ -502,8 +502,8 @@ async def _send_on_timeout(self) -> None: await self._send_events(events) - async def _send_events(self, events: List[DelayedEventDetails]) -> None: - sent_state: Set[Tuple[RoomID, EventType, StateKey]] = set() + async def _send_events(self, events: list[DelayedEventDetails]) -> None: + sent_state: set[tuple[RoomID, EventType, StateKey]] = set() for event in events: if event.state_key is not None: state_info = (event.room_id, event.type, event.state_key) @@ -547,7 +547,7 @@ def _schedule_next_at(self, next_send_ts: Timestamp) -> None: else: self._next_delayed_event_call.reset(delay_sec) - async def get_all_for_user(self, requester: Requester) -> List[JsonDict]: + async def get_all_for_user(self, requester: Requester) -> list[JsonDict]: """Return all pending delayed events requested by the given user.""" await self._delayed_event_mgmt_ratelimiter.ratelimit( requester, diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index c6024597b7..f0558fc737 100644 --- 
a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -25,13 +25,9 @@ from typing import ( TYPE_CHECKING, AbstractSet, - Dict, Iterable, - List, Mapping, Optional, - Set, - Tuple, cast, ) @@ -407,7 +403,7 @@ async def update_device(self, user_id: str, device_id: str, content: dict) -> No raise @trace - async def get_devices_by_user(self, user_id: str) -> List[JsonDict]: + async def get_devices_by_user(self, user_id: str) -> list[JsonDict]: """ Retrieve the given user's devices @@ -431,7 +427,7 @@ async def get_devices_by_user(self, user_id: str) -> List[JsonDict]: async def get_dehydrated_device( self, user_id: str - ) -> Optional[Tuple[str, JsonDict]]: + ) -> Optional[tuple[str, JsonDict]]: """Retrieve the information for a dehydrated device. Args: @@ -568,7 +564,7 @@ async def get_device_changes_in_shared_rooms( room_ids: StrCollection, from_token: StreamToken, now_token: Optional[StreamToken] = None, - ) -> Set[str]: + ) -> set[str]: """Get the set of users whose devices have changed who share a room with the given user. """ @@ -644,8 +640,8 @@ async def get_user_ids_changed( # Check for newly joined or left rooms. We need to make sure that we add # to newly joined in the case membership goes from join -> leave -> join # again. - newly_joined_rooms: Set[str] = set() - newly_left_rooms: Set[str] = set() + newly_joined_rooms: set[str] = set() + newly_left_rooms: set[str] = set() for change in membership_changes: # We check for changes in "joinedness", i.e. if the membership has # changed to or from JOIN. @@ -661,10 +657,10 @@ async def get_user_ids_changed( # the user is currently in. # List of membership changes per room - room_to_deltas: Dict[str, List[StateDelta]] = {} + room_to_deltas: dict[str, list[StateDelta]] = {} # The set of event IDs of membership events (so we can fetch their # associated membership). - memberships_to_fetch: Set[str] = set() + memberships_to_fetch: set[str] = set() # TODO: Only pull out membership events? 
state_changes = await self.store.get_current_state_deltas_for_rooms( @@ -695,8 +691,8 @@ async def get_user_ids_changed( # We now want to find any user that have newly joined/invited/knocked, # or newly left, similarly to above. - newly_joined_or_invited_or_knocked_users: Set[str] = set() - newly_left_users: Set[str] = set() + newly_joined_or_invited_or_knocked_users: set[str] = set() + newly_left_users: set[str] = set() for _, deltas in room_to_deltas.items(): for delta in deltas: # Get the prev/new memberships for the delta @@ -838,7 +834,7 @@ async def on_federation_query_user_devices(self, user_id: str) -> JsonDict: # Check if the application services have any results. if self._query_appservices_for_keys: # Query the appservice for all devices for this user. - query: Dict[str, Optional[List[str]]] = {user_id: None} + query: dict[str, Optional[list[str]]] = {user_id: None} # Query the appservices for any keys. appservice_results = await self._appservice_handler.query_keys(query) @@ -898,7 +894,7 @@ async def notify_device_update( async def notify_user_signature_update( self, from_user_id: str, - user_ids: List[str], + user_ids: list[str], ) -> None: """Notify a device writer that a user have made new signatures of other users. @@ -927,7 +923,7 @@ async def handle_new_device_update(self) -> None: async def _delete_device_messages( self, task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: """Scheduler task to delete device messages in batch of `DEVICE_MSGS_DELETE_BATCH_LIMIT`.""" assert task.params is not None user_id = task.params["user_id"] @@ -1051,7 +1047,7 @@ async def notify_device_update( await self.handle_new_device_update() async def notify_user_signature_update( - self, from_user_id: str, user_ids: List[str] + self, from_user_id: str, user_ids: list[str] ) -> None: """Notify a user that they have made new signatures of other users. 
@@ -1112,7 +1108,7 @@ async def _handle_new_device_update_async(self) -> None: # hosts we've already poked about for this update. This is so that we # don't poke the same remote server about the same update repeatedly. current_stream_id = None - hosts_already_sent_to: Set[str] = set() + hosts_already_sent_to: set[str] = set() try: stream_id, room_id = await self.store.get_device_change_last_converted_pos() @@ -1311,7 +1307,7 @@ async def handle_room_un_partial_stated(self, room_id: str) -> None: def _update_device_from_client_ips( - device: JsonDict, client_ips: Mapping[Tuple[str, str], DeviceLastConnectionInfo] + device: JsonDict, client_ips: Mapping[tuple[str, str], DeviceLastConnectionInfo] ) -> None: ip = client_ips.get((device["user_id"], device["device_id"])) device.update( @@ -1338,8 +1334,8 @@ def __init__(self, hs: "HomeServer"): async def multi_user_device_resync( self, - user_ids: List[str], - ) -> Dict[str, Optional[JsonMapping]]: + user_ids: list[str], + ) -> dict[str, Optional[JsonMapping]]: """ Like `user_device_resync` but operates on multiple users **from the same origin** at once. @@ -1365,7 +1361,7 @@ async def process_cross_signing_key_update( user_id: str, master_key: Optional[JsonDict], self_signing_key: Optional[JsonDict], - ) -> List[str]: + ) -> list[str]: """Process the given new master and self-signing key for the given remote user. Args: @@ -1455,14 +1451,14 @@ def __init__(self, hs: "HomeServer", device_handler: DeviceWriterHandler): ) # user_id -> list of updates waiting to be handled. - self._pending_updates: Dict[ - str, List[Tuple[str, str, Iterable[str], JsonDict]] + self._pending_updates: dict[ + str, list[tuple[str, str, Iterable[str], JsonDict]] ] = {} # Recently seen stream ids. We don't bother keeping these in the DB, # but they're useful to have them about to reduce the number of spurious # resyncs. 
- self._seen_updates: ExpiringCache[str, Set[str]] = ExpiringCache( + self._seen_updates: ExpiringCache[str, set[str]] = ExpiringCache( cache_name="device_update_edu", server_name=self.server_name, hs=self.hs, @@ -1619,12 +1615,12 @@ async def _handle_device_updates(self, user_id: str) -> None: ) async def _need_to_do_resync( - self, user_id: str, updates: Iterable[Tuple[str, str, Iterable[str], JsonDict]] + self, user_id: str, updates: Iterable[tuple[str, str, Iterable[str], JsonDict]] ) -> bool: """Given a list of updates for a user figure out if we need to do a full resync, or whether we have enough data that we can just apply the delta. """ - seen_updates: Set[str] = self._seen_updates.get(user_id, set()) + seen_updates: set[str] = self._seen_updates.get(user_id, set()) extremity = await self.store.get_device_list_last_stream_id_for_remote(user_id) @@ -1702,8 +1698,8 @@ async def _maybe_retry_device_resync(self) -> None: self._resync_retry_lock.release() async def multi_user_device_resync( - self, user_ids: List[str], mark_failed_as_stale: bool = True - ) -> Dict[str, Optional[JsonMapping]]: + self, user_ids: list[str], mark_failed_as_stale: bool = True + ) -> dict[str, Optional[JsonMapping]]: """ Like `user_device_resync` but operates on multiple users **from the same origin** at once. @@ -1739,7 +1735,7 @@ async def multi_user_device_resync( async def _user_device_resync_returning_failed( self, user_id: str - ) -> Tuple[Optional[JsonMapping], bool]: + ) -> tuple[Optional[JsonMapping], bool]: """Fetches all devices for a user and updates the device cache with them. 
Args: diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index 860e24d79d..4dcdcc42fe 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -21,7 +21,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Optional from synapse.api.constants import EduTypes, EventContentFields, ToDeviceEventTypes from synapse.api.errors import Codes, SynapseError @@ -158,7 +158,7 @@ async def _check_for_unknown_devices( self, message_type: str, sender_user_id: str, - by_device: Dict[str, Dict[str, Any]], + by_device: dict[str, dict[str, Any]], ) -> None: """Checks inbound device messages for unknown remote devices, and if found marks the remote cache for the user as stale. @@ -207,7 +207,7 @@ async def send_device_message( self, requester: Requester, message_type: str, - messages: Dict[str, Dict[str, JsonDict]], + messages: dict[str, dict[str, JsonDict]], ) -> None: """ Handle a request from a user to send to-device message(s). 
@@ -222,7 +222,7 @@ async def send_device_message( set_tag(SynapseTags.TO_DEVICE_TYPE, message_type) set_tag(SynapseTags.TO_DEVICE_SENDER, sender_user_id) local_messages = {} - remote_messages: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + remote_messages: dict[str, dict[str, dict[str, JsonDict]]] = {} for user_id, by_device in messages.items(): if not UserID.is_valid(user_id): logger.warning( diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 11284ccd0b..865c32d19e 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -21,7 +21,7 @@ import logging import string -from typing import TYPE_CHECKING, Iterable, List, Literal, Optional, Sequence +from typing import TYPE_CHECKING, Iterable, Literal, Optional, Sequence from synapse.api.constants import MAX_ALIAS_LENGTH, EventTypes from synapse.api.errors import ( @@ -108,7 +108,7 @@ async def create_association( requester: Requester, room_alias: RoomAlias, room_id: str, - servers: Optional[List[str]] = None, + servers: Optional[list[str]] = None, check_membership: bool = True, ) -> None: """Attempt to create a new alias diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index b10472f1d2..85a150b71a 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -20,7 +20,7 @@ # # import logging -from typing import TYPE_CHECKING, Dict, Iterable, List, Mapping, Optional, Tuple +from typing import TYPE_CHECKING, Iterable, Mapping, Optional import attr from canonicaljson import encode_canonical_json @@ -162,8 +162,8 @@ async def query_devices( async with self._query_devices_linearizer.queue((from_user_id, from_device_id)): async def filter_device_key_query( - query: Dict[str, List[str]], - ) -> Dict[str, List[str]]: + query: dict[str, list[str]], + ) -> dict[str, list[str]]: if not self.config.experimental.msc4263_limit_key_queries_to_users_who_share_rooms: # Only ignore invalid user IDs, which is the same behaviour as if # the 
user existed but had no keys. @@ -188,7 +188,7 @@ async def filter_device_key_query( if user_id in allowed_user_ids } - device_keys_query: Dict[str, List[str]] = await filter_device_key_query( + device_keys_query: dict[str, list[str]] = await filter_device_key_query( query_body.get("device_keys", {}) ) @@ -209,7 +209,7 @@ async def filter_device_key_query( # First get local devices. # A map of destination -> failure response. - failures: Dict[str, JsonDict] = {} + failures: dict[str, JsonDict] = {} results = {} if local_query: local_result = await self.query_local_devices(local_query) @@ -224,10 +224,10 @@ async def filter_device_key_query( # Now attempt to get any remote devices from our local cache. # A map of destination -> user ID -> device IDs. - remote_queries_not_in_cache: Dict[str, Dict[str, Iterable[str]]] = {} + remote_queries_not_in_cache: dict[str, dict[str, Iterable[str]]] = {} if remote_queries: user_ids = set() - user_and_device_ids: List[Tuple[str, str]] = [] + user_and_device_ids: list[tuple[str, str]] = [] for user_id, device_ids in remote_queries.items(): if device_ids: user_and_device_ids.extend( @@ -355,9 +355,9 @@ async def _query_devices_for_destination( self, results: JsonDict, cross_signing_keys: JsonDict, - failures: Dict[str, JsonDict], + failures: dict[str, JsonDict], destination: str, - destination_query: Dict[str, Iterable[str]], + destination_query: dict[str, Iterable[str]], timeout: int, ) -> None: """This is called when we are querying the device list of a user on @@ -480,7 +480,7 @@ async def _query_devices_for_destination( @cancellable async def get_cross_signing_keys_from_cache( self, query: Iterable[str], from_user_id: Optional[str] - ) -> Dict[str, Dict[str, JsonMapping]]: + ) -> dict[str, dict[str, JsonMapping]]: """Get cross-signing keys for users from the database Args: @@ -527,9 +527,9 @@ async def get_cross_signing_keys_from_cache( @cancellable async def query_local_devices( self, - query: Mapping[str, 
Optional[List[str]]], + query: Mapping[str, Optional[list[str]]], include_displaynames: bool = True, - ) -> Dict[str, Dict[str, dict]]: + ) -> dict[str, dict[str, dict]]: """Get E2E device keys for local users Args: @@ -542,9 +542,9 @@ async def query_local_devices( A map from user_id -> device_id -> device details """ set_tag("local_query", str(query)) - local_query: List[Tuple[str, Optional[str]]] = [] + local_query: list[tuple[str, Optional[str]]] = [] - result_dict: Dict[str, Dict[str, dict]] = {} + result_dict: dict[str, dict[str, dict]] = {} for user_id, device_ids in query.items(): # we use UserID.from_string to catch invalid user ids if not self.is_mine(UserID.from_string(user_id)): @@ -594,7 +594,7 @@ async def query_local_devices( return result_dict async def on_federation_query_client_keys( - self, query_body: Dict[str, Dict[str, Optional[List[str]]]] + self, query_body: dict[str, dict[str, Optional[list[str]]]] ) -> JsonDict: """Handle a device key query from a federated server: @@ -614,7 +614,7 @@ async def on_federation_query_client_keys( - self_signing_key: An optional dictionary of user ID -> self-signing key info. """ - device_keys_query: Dict[str, Optional[List[str]]] = query_body.get( + device_keys_query: dict[str, Optional[list[str]]] = query_body.get( "device_keys", {} ) if any( @@ -639,9 +639,9 @@ async def on_federation_query_client_keys( async def claim_local_one_time_keys( self, - local_query: List[Tuple[str, str, str, int]], + local_query: list[tuple[str, str, str, int]], always_include_fallback_keys: bool, - ) -> Iterable[Dict[str, Dict[str, Dict[str, JsonDict]]]]: + ) -> Iterable[dict[str, dict[str, dict[str, JsonDict]]]]: """Claim one time keys for local users. 1. Attempt to claim OTKs from the database. 
@@ -735,7 +735,7 @@ async def claim_local_one_time_keys( @trace async def claim_one_time_keys( self, - query: Dict[str, Dict[str, Dict[str, int]]], + query: dict[str, dict[str, dict[str, int]]], user: UserID, timeout: Optional[int], always_include_fallback_keys: bool, @@ -754,8 +754,8 @@ async def claim_one_time_keys( one_time_keys: chain of maps user ID -> device ID -> key ID -> key. failures: map from remote destination to a JsonDict describing the error. """ - local_query: List[Tuple[str, str, str, int]] = [] - remote_queries: Dict[str, Dict[str, Dict[str, Dict[str, int]]]] = {} + local_query: list[tuple[str, str, str, int]] = [] + remote_queries: dict[str, dict[str, dict[str, dict[str, int]]]] = {} for user_id, one_time_keys in query.items(): # we use UserID.from_string to catch invalid user ids @@ -775,7 +775,7 @@ async def claim_one_time_keys( ) # A map of user ID -> device ID -> key ID -> key. - json_result: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + json_result: dict[str, dict[str, dict[str, JsonDict]]] = {} for result in results: for user_id, device_keys in result.items(): for device_id, keys in device_keys.items(): @@ -785,7 +785,7 @@ async def claim_one_time_keys( ).update({key_id: key}) # Remote failures. - failures: Dict[str, JsonDict] = {} + failures: dict[str, JsonDict] = {} @trace async def claim_client_keys(destination: str) -> None: @@ -1131,7 +1131,7 @@ async def upload_signatures_for_device_keys( async def _process_self_signatures( self, user_id: str, signatures: JsonDict - ) -> Tuple[List["SignatureListItem"], Dict[str, Dict[str, dict]]]: + ) -> tuple[list["SignatureListItem"], dict[str, dict[str, dict]]]: """Process uploaded signatures of the user's own keys. 
Signatures of the user's own keys from this API come in two forms: @@ -1149,8 +1149,8 @@ async def _process_self_signatures( Raises: SynapseError: if the input is malformed """ - signature_list: List["SignatureListItem"] = [] - failures: Dict[str, Dict[str, JsonDict]] = {} + signature_list: list["SignatureListItem"] = [] + failures: dict[str, dict[str, JsonDict]] = {} if not signatures: return signature_list, failures @@ -1250,8 +1250,8 @@ def _check_master_key_signature( master_key_id: str, signed_master_key: JsonDict, stored_master_key: JsonMapping, - devices: Dict[str, Dict[str, JsonDict]], - ) -> List["SignatureListItem"]: + devices: dict[str, dict[str, JsonDict]], + ) -> list["SignatureListItem"]: """Check signatures of a user's master key made by their devices. Args: @@ -1294,8 +1294,8 @@ def _check_master_key_signature( return master_key_signature_list async def _process_other_signatures( - self, user_id: str, signatures: Dict[str, dict] - ) -> Tuple[List["SignatureListItem"], Dict[str, Dict[str, dict]]]: + self, user_id: str, signatures: dict[str, dict] + ) -> tuple[list["SignatureListItem"], dict[str, dict[str, dict]]]: """Process uploaded signatures of other users' keys. These will be the target user's master keys, signed by the uploading user's user-signing key. @@ -1311,8 +1311,8 @@ async def _process_other_signatures( Raises: SynapseError: if the input is malformed """ - signature_list: List["SignatureListItem"] = [] - failures: Dict[str, Dict[str, JsonDict]] = {} + signature_list: list["SignatureListItem"] = [] + failures: dict[str, dict[str, JsonDict]] = {} if not signatures: return signature_list, failures @@ -1396,7 +1396,7 @@ async def _process_other_signatures( async def _get_e2e_cross_signing_verify_key( self, user_id: str, key_type: str, from_user_id: Optional[str] = None - ) -> Tuple[JsonMapping, str, VerifyKey]: + ) -> tuple[JsonMapping, str, VerifyKey]: """Fetch locally or remotely query for a cross-signing public key. 
First, attempt to fetch the cross-signing public key from storage. @@ -1451,7 +1451,7 @@ async def _retrieve_cross_signing_keys_for_remote_user( self, user: UserID, desired_key_type: str, - ) -> Optional[Tuple[JsonMapping, str, VerifyKey]]: + ) -> Optional[tuple[JsonMapping, str, VerifyKey]]: """Queries cross-signing keys for a remote user and saves them to the database Only the key specified by `key_type` will be returned, while all retrieved keys @@ -1541,7 +1541,7 @@ async def _retrieve_cross_signing_keys_for_remote_user( return desired_key_data - async def check_cross_signing_setup(self, user_id: str) -> Tuple[bool, bool]: + async def check_cross_signing_setup(self, user_id: str) -> tuple[bool, bool]: """Checks if the user has cross-signing set up Args: @@ -1599,7 +1599,7 @@ async def has_different_keys(self, user_id: str, body: JsonDict) -> bool: async def _delete_old_one_time_keys_task( self, task: ScheduledTask - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: """Scheduler task to delete old one time keys. Until Synapse 1.119, Synapse used to issue one-time-keys in a random order, leading to the possibility @@ -1773,7 +1773,7 @@ def __init__(self, hs: "HomeServer"): ) # user_id -> list of updates waiting to be handled. 
- self._pending_updates: Dict[str, List[Tuple[JsonDict, JsonDict]]] = {} + self._pending_updates: dict[str, list[tuple[JsonDict, JsonDict]]] = {} async def incoming_signing_key_update( self, origin: str, edu_content: JsonDict @@ -1819,7 +1819,7 @@ async def _handle_signing_key_updates(self, user_id: str) -> None: # This can happen since we batch updates return - device_ids: List[str] = [] + device_ids: list[str] = [] logger.info("pending updates: %r", pending_updates) diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index 623fd33f13..094b4bc27c 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Dict, Literal, Optional, cast +from typing import TYPE_CHECKING, Literal, Optional, cast from synapse.api.errors import ( Codes, @@ -65,8 +65,8 @@ async def get_room_keys( version: str, room_id: Optional[str] = None, session_id: Optional[str] = None, - ) -> Dict[ - Literal["rooms"], Dict[str, Dict[Literal["sessions"], Dict[str, RoomKey]]] + ) -> dict[ + Literal["rooms"], dict[str, dict[Literal["sessions"], dict[str, RoomKey]]] ]: """Bulk get the E2E room keys for a given backup, optionally filtered to a given room, or a given session. diff --git a/synapse/handlers/event_auth.py b/synapse/handlers/event_auth.py index 1f1f67dc0d..b2caca8ce7 100644 --- a/synapse/handlers/event_auth.py +++ b/synapse/handlers/event_auth.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, List, Mapping, Optional, Union +from typing import TYPE_CHECKING, Mapping, Optional, Union from synapse import event_auth from synapse.api.constants import ( @@ -92,7 +92,7 @@ def compute_auth_events( event: Union[EventBase, EventBuilder], current_state_ids: StateMap[str], for_verification: bool = False, - ) -> List[str]: + ) -> list[str]: """Given an event and current state return the list of event IDs used to auth an event. 
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py index 3f46032a43..9522d5a696 100644 --- a/synapse/handlers/events.py +++ b/synapse/handlers/events.py @@ -21,7 +21,7 @@ import logging import random -from typing import TYPE_CHECKING, Iterable, List, Optional +from typing import TYPE_CHECKING, Iterable, Optional from synapse.api.constants import EduTypes, EventTypes, Membership, PresenceState from synapse.api.errors import AuthError, SynapseError @@ -100,7 +100,7 @@ async def get_stream( # When the user joins a new room, or another user joins a currently # joined room, we need to send down presence for those users. - to_add: List[JsonDict] = [] + to_add: list[JsonDict] = [] for event in events: if not isinstance(event, EventBase): continue diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index adc20f4ad0..3eb1d166f8 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -30,12 +30,8 @@ from typing import ( TYPE_CHECKING, AbstractSet, - Dict, Iterable, - List, Optional, - Set, - Tuple, Union, ) @@ -168,12 +164,12 @@ def __init__(self, hs: "HomeServer"): # Tracks running partial state syncs by room ID. # Partial state syncs currently only run on the main process, so it's okay to # track them in-memory for now. - self._active_partial_state_syncs: Set[str] = set() + self._active_partial_state_syncs: set[str] = set() # Tracks partial state syncs we may want to restart. # A dictionary mapping room IDs to (initial destination, other destinations) # tuples. - self._partial_state_syncs_maybe_needing_restart: Dict[ - str, Tuple[Optional[str], AbstractSet[str]] + self._partial_state_syncs_maybe_needing_restart: dict[ + str, tuple[Optional[str], AbstractSet[str]] ] = {} # A lock guarding the partial state flag for rooms. 
# When the lock is held for a given room, no other concurrent code may @@ -272,7 +268,7 @@ async def _maybe_backfill_inner( # we now have a list of potential places to backpaginate from. We prefer to # start with the most recent (ie, max depth), so let's sort the list. - sorted_backfill_points: List[_BackfillPoint] = sorted( + sorted_backfill_points: list[_BackfillPoint] = sorted( backwards_extremities, key=lambda e: -int(e.depth), ) @@ -380,7 +376,7 @@ async def _maybe_backfill_inner( # there is it's often sufficiently long ago that clients would stop # attempting to paginate before backfill reached the visible history. - extremities_to_request: List[str] = [] + extremities_to_request: list[str] = [] for bp in sorted_backfill_points: if len(extremities_to_request) >= 5: break @@ -562,7 +558,7 @@ async def send_invite(self, target_host: str, event: EventBase) -> EventBase: return pdu - async def on_event_auth(self, event_id: str) -> List[EventBase]: + async def on_event_auth(self, event_id: str) -> list[EventBase]: event = await self.store.get_event(event_id) auth = await self.store.get_auth_chain( event.room_id, list(event.auth_event_ids()), include_given=True @@ -571,7 +567,7 @@ async def on_event_auth(self, event_id: str) -> List[EventBase]: async def do_invite_join( self, target_hosts: Iterable[str], room_id: str, joinee: str, content: JsonDict - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Attempts to join the `joinee` to the room `room_id` via the servers contained in `target_hosts`. @@ -807,11 +803,11 @@ async def do_invite_join( async def do_knock( self, - target_hosts: List[str], + target_hosts: list[str], room_id: str, knockee: str, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Sends the knock to the remote server. This first triggers a make_knock request that returns a partial @@ -840,7 +836,7 @@ async def do_knock( # Ask the remote server to create a valid knock event for us. 
Once received, # we sign the event - params: Dict[str, Iterable[str]] = {"ver": supported_room_versions} + params: dict[str, Iterable[str]] = {"ver": supported_room_versions} origin, event, event_format_version = await self._make_and_verify_event( target_hosts, room_id, knockee, Membership.KNOCK, content, params=params ) @@ -889,7 +885,7 @@ async def do_knock( return event.event_id, stream_id async def _handle_queued_pdus( - self, room_queue: List[Tuple[EventBase, str]] + self, room_queue: list[tuple[EventBase, str]] ) -> None: """Process PDUs which got queued up while we were busy send_joining. @@ -1144,7 +1140,7 @@ async def on_invite_request( async def do_remotely_reject_invite( self, target_hosts: Iterable[str], room_id: str, user_id: str, content: JsonDict - ) -> Tuple[EventBase, int]: + ) -> tuple[EventBase, int]: origin, event, room_version = await self._make_and_verify_event( target_hosts, room_id, user_id, "leave", content=content ) @@ -1178,8 +1174,8 @@ async def _make_and_verify_event( user_id: str, membership: str, content: JsonDict, - params: Optional[Dict[str, Union[str, Iterable[str]]]] = None, - ) -> Tuple[str, EventBase, RoomVersion]: + params: Optional[dict[str, Union[str, Iterable[str]]]] = None, + ) -> tuple[str, EventBase, RoomVersion]: ( origin, event, @@ -1306,7 +1302,7 @@ async def on_make_knock_request( @trace @tag_args - async def get_state_ids_for_pdu(self, room_id: str, event_id: str) -> List[str]: + async def get_state_ids_for_pdu(self, room_id: str, event_id: str) -> list[str]: """Returns the state at the event. i.e. 
not including said event.""" event = await self.store.get_event(event_id, check_room_id=room_id) if event.internal_metadata.outlier: @@ -1339,8 +1335,8 @@ async def get_state_ids_for_pdu(self, room_id: str, event_id: str) -> List[str]: return list(state_map.values()) async def on_backfill_request( - self, origin: str, room_id: str, pdu_list: List[str], limit: int - ) -> List[EventBase]: + self, origin: str, room_id: str, pdu_list: list[str], limit: int + ) -> list[EventBase]: # We allow partially joined rooms since in this case we are filtering out # non-local events in `filter_events_for_server`. await self._event_auth_handler.assert_host_in_room(room_id, origin, True) @@ -1416,10 +1412,10 @@ async def on_get_missing_events( self, origin: str, room_id: str, - earliest_events: List[str], - latest_events: List[str], + earliest_events: list[str], + latest_events: list[str], limit: int, - ) -> List[EventBase]: + ) -> list[EventBase]: # We allow partially joined rooms since in this case we are filtering out # non-local events in `filter_events_for_server`. await self._event_auth_handler.assert_host_in_room(room_id, origin, True) @@ -1602,7 +1598,7 @@ async def add_display_name_to_third_party_invite( event_dict: JsonDict, event: EventBase, context: UnpersistedEventContextBase, - ) -> Tuple[EventBase, UnpersistedEventContextBase]: + ) -> tuple[EventBase, UnpersistedEventContextBase]: key = ( EventTypes.ThirdPartyInvite, event.content["third_party_invite"]["signed"]["token"], @@ -1758,7 +1754,7 @@ async def _check_key_revocation(self, public_key: str, url: str) -> None: raise AuthError(403, "Third party certificate was invalid") async def get_room_complexity( - self, remote_room_hosts: List[str], room_id: str + self, remote_room_hosts: list[str], room_id: str ) -> Optional[dict]: """ Fetch the complexity of a remote room over federation. 
diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py index d6390b79c7..32b603e947 100644 --- a/synapse/handlers/federation_event.py +++ b/synapse/handlers/federation_event.py @@ -27,13 +27,9 @@ TYPE_CHECKING, Collection, Container, - Dict, Iterable, - List, Optional, Sequence, - Set, - Tuple, ) from prometheus_client import Counter, Histogram @@ -190,7 +186,7 @@ def __init__(self, hs: "HomeServer"): # For each room, a list of (pdu, origin) tuples. # TODO: replace this with something more elegant, probably based around the # federation event staging area. - self.room_queues: Dict[str, List[Tuple[EventBase, str]]] = {} + self.room_queues: dict[str, list[tuple[EventBase, str]]] = {} self._room_pdu_linearizer = Linearizer(name="fed_room_pdu", clock=self._clock) @@ -511,8 +507,8 @@ async def process_remote_join( self, origin: str, room_id: str, - auth_events: List[EventBase], - state: List[EventBase], + auth_events: list[EventBase], + state: list[EventBase], event: EventBase, room_version: RoomVersion, partial_state: bool, @@ -595,7 +591,7 @@ async def process_remote_join( ) missing_event_ids = prev_event_ids - seen_event_ids - state_maps_to_resolve: List[StateMap[str]] = [] + state_maps_to_resolve: list[StateMap[str]] = [] # Fetch the state after the prev events that we know about. 
state_maps_to_resolve.extend( @@ -755,7 +751,7 @@ async def backfill( @trace async def _get_missing_events_for_pdu( - self, origin: str, pdu: EventBase, prevs: Set[str], min_depth: int + self, origin: str, pdu: EventBase, prevs: set[str], min_depth: int ) -> None: """ Args: @@ -902,7 +898,7 @@ async def _process_pulled_events( [event.event_id for event in events] ) - new_events: List[EventBase] = [] + new_events: list[EventBase] = [] for event in events: event_id = event.event_id @@ -1186,7 +1182,7 @@ async def _compute_event_context_with_maybe_missing_prevs( partial_state = any(partial_state_flags.values()) # state_maps is a list of mappings from (type, state_key) to event_id - state_maps: List[StateMap[str]] = [] + state_maps: list[StateMap[str]] = [] # Ask the remote server for the states we don't # know about @@ -1647,7 +1643,7 @@ async def _get_events_and_persist( room_version = await self._store.get_room_version(room_id) - events: List[EventBase] = [] + events: list[EventBase] = [] async def get_event(event_id: str) -> None: with nested_logging_context(event_id): @@ -1753,7 +1749,7 @@ async def _auth_and_persist_outliers( ) auth_map.update(persisted_events) - events_and_contexts_to_persist: List[EventPersistencePair] = [] + events_and_contexts_to_persist: list[EventPersistencePair] = [] async def prep(event: EventBase) -> None: with nested_logging_context(suffix=event.event_id): @@ -2050,7 +2046,7 @@ async def _check_for_soft_fail( state_sets_d = await self._state_storage_controller.get_state_groups_ids( event.room_id, extrem_ids ) - state_sets: List[StateMap[str]] = list(state_sets_d.values()) + state_sets: list[StateMap[str]] = list(state_sets_d.values()) state_ids = await context.get_prev_state_ids() state_sets.append(state_ids) current_state_ids = ( diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index be757201fc..0f507b3317 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -24,7 +24,7 @@ import 
logging import urllib.parse -from typing import TYPE_CHECKING, Awaitable, Callable, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Awaitable, Callable, Optional import attr @@ -105,7 +105,7 @@ async def ratelimit_request_token_requests( ) async def threepid_from_creds( - self, id_server: str, creds: Dict[str, str] + self, id_server: str, creds: dict[str, str] ) -> Optional[JsonDict]: """ Retrieve and validate a threepid identifier from a "credentials" dictionary against a @@ -693,7 +693,7 @@ async def ask_id_server_for_third_party_invite( inviter_display_name: str, inviter_avatar_url: str, id_access_token: str, - ) -> Tuple[str, List[Dict[str, str]], Dict[str, str], str]: + ) -> tuple[str, list[dict[str, str]], dict[str, str], str]: """ Asks an identity server for a third party invite. @@ -779,7 +779,7 @@ async def ask_id_server_for_third_party_invite( return token, public_keys, fallback_public_key, display_name -def create_id_access_token_header(id_access_token: str) -> List[str]: +def create_id_access_token_header(id_access_token: str) -> list[str]: """Create an Authorization header for passing to SimpleHttpClient as the header value of an HTTP request. 
diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py index 75d64d2d50..1c6f8bf53b 100644 --- a/synapse/handlers/initial_sync.py +++ b/synapse/handlers/initial_sync.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse.api.constants import ( AccountDataTypes, @@ -69,7 +69,7 @@ def __init__(self, hs: "HomeServer"): self.clock = hs.get_clock() self.validator = EventValidator() self.snapshot_cache: ResponseCache[ - Tuple[ + tuple[ str, Optional[StreamToken], Optional[StreamToken], @@ -451,7 +451,7 @@ async def _room_initial_sync_joined( presence_handler = self.hs.get_presence_handler() - async def get_presence() -> List[JsonDict]: + async def get_presence() -> list[JsonDict]: # If presence is disabled, return an empty list if not self.hs.config.server.presence_enabled: return [] @@ -468,7 +468,7 @@ async def get_presence() -> List[JsonDict]: for s in states ] - async def get_receipts() -> List[JsonMapping]: + async def get_receipts() -> list[JsonMapping]: receipts = await self.store.get_linearized_receipts_for_room( room_id, to_key=now_token.receipt_key ) diff --git a/synapse/handlers/jwt.py b/synapse/handlers/jwt.py index 400f3a59aa..f1715f6495 100644 --- a/synapse/handlers/jwt.py +++ b/synapse/handlers/jwt.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from authlib.jose import JsonWebToken, JWTClaims from authlib.jose.errors import BadSignatureError, InvalidClaimError, JoseError @@ -41,7 +41,7 @@ def __init__(self, hs: "HomeServer"): self.jwt_issuer = hs.config.jwt.jwt_issuer self.jwt_audiences = hs.config.jwt.jwt_audiences - def validate_login(self, login_submission: JsonDict) -> Tuple[str, Optional[str]]: + def validate_login(self, login_submission: JsonDict) -> tuple[str, Optional[str]]: """ Authenticates 
the user for the /login API diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index e874b60000..2ad1dbe73f 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -22,7 +22,7 @@ import logging import random from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Sequence, Tuple +from typing import TYPE_CHECKING, Any, Mapping, Optional, Sequence from canonicaljson import encode_canonical_json @@ -180,7 +180,7 @@ async def get_state_events( room_id: str, state_filter: Optional[StateFilter] = None, at_token: Optional[StreamToken] = None, - ) -> List[dict]: + ) -> list[dict]: """Retrieve all state events for a given room. If the user is joined to the room then return the current state. If the user has left the room return the state events from when they left. If an explicit @@ -538,7 +538,7 @@ def __init__(self, hs: "HomeServer"): # # map from room id to time-of-last-attempt. # - self._rooms_to_exclude_from_dummy_event_insertion: Dict[str, int] = {} + self._rooms_to_exclude_from_dummy_event_insertion: dict[str, int] = {} # The number of forward extremeities before a dummy event is sent. 
self._dummy_events_threshold = hs.config.server.dummy_events_threshold @@ -578,16 +578,16 @@ async def create_event( requester: Requester, event_dict: dict, txn_id: Optional[str] = None, - prev_event_ids: Optional[List[str]] = None, - auth_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + auth_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, require_consent: bool = True, outlier: bool = False, depth: Optional[int] = None, state_map: Optional[StateMap[str]] = None, for_batch: bool = False, current_state_group: Optional[int] = None, - ) -> Tuple[EventBase, UnpersistedEventContextBase]: + ) -> tuple[EventBase, UnpersistedEventContextBase]: """ Given a dict from a client, create a new event. If bool for_batch is true, will create an event using the prev_event_ids, and will create an event context for @@ -961,14 +961,14 @@ async def create_and_send_nonmember_event( self, requester: Requester, event_dict: dict, - prev_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, ratelimit: bool = True, txn_id: Optional[str] = None, ignore_shadow_ban: bool = False, outlier: bool = False, depth: Optional[int] = None, - ) -> Tuple[EventBase, int]: + ) -> tuple[EventBase, int]: """ Creates an event, then sends it. 
@@ -1098,14 +1098,14 @@ async def _create_and_send_nonmember_event_locked( self, requester: Requester, event_dict: dict, - prev_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, ratelimit: bool = True, txn_id: Optional[str] = None, ignore_shadow_ban: bool = False, outlier: bool = False, depth: Optional[int] = None, - ) -> Tuple[EventBase, int]: + ) -> tuple[EventBase, int]: room_id = event_dict["room_id"] # If we don't have any prev event IDs specified then we need to @@ -1220,14 +1220,14 @@ async def create_new_client_event( self, builder: EventBuilder, requester: Optional[Requester] = None, - prev_event_ids: Optional[List[str]] = None, - auth_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + auth_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, depth: Optional[int] = None, state_map: Optional[StateMap[str]] = None, for_batch: bool = False, current_state_group: Optional[int] = None, - ) -> Tuple[EventBase, UnpersistedEventContextBase]: + ) -> tuple[EventBase, UnpersistedEventContextBase]: """Create a new event for a local client. If bool for_batch is true, will create an event using the prev_event_ids, and will create an event context for the event using the parameters state_map and current_state_group, thus these parameters @@ -1471,9 +1471,9 @@ async def _validate_event_relation(self, event: EventBase) -> None: async def handle_new_client_event( self, requester: Requester, - events_and_context: List[EventPersistencePair], + events_and_context: list[EventPersistencePair], ratelimit: bool = True, - extra_users: Optional[List[UserID]] = None, + extra_users: Optional[list[UserID]] = None, ignore_shadow_ban: bool = False, ) -> EventBase: """Processes new events. 
Please note that if batch persisting events, an error in @@ -1683,9 +1683,9 @@ async def create_and_send_new_client_events( async def _persist_events( self, requester: Requester, - events_and_context: List[EventPersistencePair], + events_and_context: list[EventPersistencePair], ratelimit: bool = True, - extra_users: Optional[List[UserID]] = None, + extra_users: Optional[list[UserID]] = None, ) -> EventBase: """Actually persists new events. Should only be called by `handle_new_client_event`, and see its docstring for documentation of @@ -1769,7 +1769,7 @@ async def _persist_events( raise async def cache_joined_hosts_for_events( - self, events_and_context: List[EventPersistencePair] + self, events_and_context: list[EventPersistencePair] ) -> None: """Precalculate the joined hosts at each of the given events, when using Redis, so that external federation senders don't have to recalculate it themselves. @@ -1875,9 +1875,9 @@ async def _validate_canonical_alias( async def persist_and_notify_client_events( self, requester: Requester, - events_and_context: List[EventPersistencePair], + events_and_context: list[EventPersistencePair], ratelimit: bool = True, - extra_users: Optional[List[UserID]] = None, + extra_users: Optional[list[UserID]] = None, ) -> EventBase: """Called when we have fully built the events, have already calculated the push actions for the events, and checked auth. @@ -2285,7 +2285,7 @@ def _expire_rooms_to_exclude_from_dummy_event_insertion(self) -> None: async def _rebuild_event_after_third_party_rules( self, third_party_result: dict, original_event: EventBase - ) -> Tuple[EventBase, UnpersistedEventContextBase]: + ) -> tuple[EventBase, UnpersistedEventContextBase]: # the third_party_event_rules want to replace the event. # we do some basic checks, and then return the replacement event. 
diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py index 39505463bb..f140912b2a 100644 --- a/synapse/handlers/oidc.py +++ b/synapse/handlers/oidc.py @@ -26,11 +26,8 @@ from typing import ( TYPE_CHECKING, Any, - Dict, Generic, - List, Optional, - Type, TypedDict, TypeVar, Union, @@ -113,14 +110,14 @@ class Token(TypedDict): #: A JWK, as per RFC7517 sec 4. The type could be more precise than that, but #: there is no real point of doing this in our case. -JWK = Dict[str, str] +JWK = dict[str, str] C = TypeVar("C") #: A JWK Set, as per RFC7517 sec 5. class JWKS(TypedDict): - keys: List[JWK] + keys: list[JWK] class OidcHandler: @@ -134,7 +131,7 @@ def __init__(self, hs: "HomeServer"): assert provider_confs self._macaroon_generator = hs.get_macaroon_generator() - self._providers: Dict[str, "OidcProvider"] = { + self._providers: dict[str, "OidcProvider"] = { p.idp_id: OidcProvider(hs, self._macaroon_generator, p) for p in provider_confs } @@ -332,7 +329,7 @@ async def handle_backchannel_logout(self, request: SynapseRequest) -> None: # At this point we properly checked both claims types issuer: str = iss - audience: List[str] = aud + audience: list[str] = aud except (TypeError, KeyError): raise SynapseError(400, "Invalid issuer/audience in logout_token") @@ -428,8 +425,10 @@ def __init__( # from the IdP's jwks_uri, if required. 
self._jwks = RetryOnExceptionCachedCall(self._load_jwks) + # type-ignore: we will not be instantiating a subclass of the provider class, + # so the warning about directly accessing __init__ being unsound does not apply here user_mapping_provider_init_method = ( - provider.user_mapping_provider_class.__init__ + provider.user_mapping_provider_class.__init__ # type: ignore[misc] ) if len(inspect.signature(user_mapping_provider_init_method).parameters) == 3: self._user_mapping_provider = provider.user_mapping_provider_class( @@ -758,7 +757,7 @@ async def _exchange_code(self, code: str, code_verifier: str) -> Token: """ metadata = await self.load_metadata() token_endpoint = metadata.get("token_endpoint") - raw_headers: Dict[str, str] = { + raw_headers: dict[str, str] = { "Content-Type": "application/x-www-form-urlencoded", "User-Agent": self._http_client.user_agent.decode("ascii"), "Accept": "application/json", @@ -902,9 +901,9 @@ async def _fetch_userinfo(self, token: Token) -> UserInfo: async def _verify_jwt( self, - alg_values: List[str], + alg_values: list[str], token: str, - claims_cls: Type[C], + claims_cls: type[C], claims_options: Optional[dict] = None, claims_params: Optional[dict] = None, ) -> C: @@ -1589,7 +1588,7 @@ class UserAttributeDict(TypedDict): confirm_localpart: bool display_name: Optional[str] picture: Optional[str] # may be omitted by older `OidcMappingProviders` - emails: List[str] + emails: list[str] class OidcMappingProvider(Generic[C]): @@ -1678,7 +1677,7 @@ class JinjaOidcMappingConfig: localpart_template: Optional[Template] display_name_template: Optional[Template] email_template: Optional[Template] - extra_attributes: Dict[str, Template] + extra_attributes: dict[str, Template] confirm_localpart: bool = False @@ -1778,7 +1777,7 @@ def render_template_field(template: Optional[Template]) -> Optional[str]: if display_name == "": display_name = None - emails: List[str] = [] + emails: list[str] = [] email = 
render_template_field(self._config.email_template) if email: emails.append(email) @@ -1794,7 +1793,7 @@ def render_template_field(template: Optional[Template]) -> Optional[str]: ) async def get_extra_attributes(self, userinfo: UserInfo, token: Token) -> JsonDict: - extras: Dict[str, str] = {} + extras: dict[str, str] = {} for key, template in self._config.extra_attributes.items(): try: extras[key] = template.render(user=userinfo).strip() diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index 02a67581e7..7274a512b0 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Optional, cast from twisted.python.failure import Failure @@ -91,7 +91,7 @@ def __init__(self, hs: "HomeServer"): self.pagination_lock = ReadWriteLock() # IDs of rooms in which there currently an active purge *or delete* operation. - self._purges_in_progress_by_room: Set[str] = set() + self._purges_in_progress_by_room: set[str] = set() self._event_serializer = hs.get_event_client_serializer() self._retention_default_max_lifetime = ( @@ -279,7 +279,7 @@ async def start_purge_history( async def _purge_history( self, task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: """ Scheduler action to purge some history of a room. 
""" @@ -343,7 +343,7 @@ async def get_delete_task(self, delete_id: str) -> Optional[ScheduledTask]: async def get_delete_tasks_by_room( self, room_id: str, only_active: Optional[bool] = False - ) -> List[ScheduledTask]: + ) -> list[ScheduledTask]: """Get complete, failed or active delete tasks by room Args: @@ -363,7 +363,7 @@ async def get_delete_tasks_by_room( async def _purge_room( self, task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: """ Scheduler action to purge a room. """ @@ -523,7 +523,7 @@ async def get_messages( # We use a `Set` because there can be multiple events at a given depth # and we only care about looking at the unique continum of depths to # find gaps. - event_depths: Set[int] = {event.depth for event in events} + event_depths: set[int] = {event.depth for event in events} sorted_event_depths = sorted(event_depths) # Inspect the depths of the returned events to see if there are any gaps @@ -691,7 +691,7 @@ async def get_messages( async def _shutdown_and_purge_room( self, task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: """ Scheduler action to shutdown and purge a room. """ diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 1610683066..d8150a5857 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -86,14 +86,9 @@ Callable, Collection, ContextManager, - Dict, Generator, Iterable, - List, Optional, - Set, - Tuple, - Type, ) from prometheus_client import Counter @@ -236,7 +231,7 @@ def __init__(self, hs: "HomeServer"): self._federation_queue = PresenceFederationQueue(hs, self) - self.VALID_PRESENCE: Tuple[str, ...] = ( + self.VALID_PRESENCE: tuple[str, ...] 
= ( PresenceState.ONLINE, PresenceState.UNAVAILABLE, PresenceState.OFFLINE, @@ -276,7 +271,7 @@ async def user_syncing( @abc.abstractmethod def get_currently_syncing_users_for_replication( self, - ) -> Iterable[Tuple[str, Optional[str]]]: + ) -> Iterable[tuple[str, Optional[str]]]: """Get an iterable of syncing users and devices on this worker, to send to the presence handler This is called when a replication connection is established. It should return @@ -293,7 +288,7 @@ async def get_state(self, target_user: UserID) -> UserPresenceState: async def get_states( self, target_user_ids: Iterable[str] - ) -> List[UserPresenceState]: + ) -> list[UserPresenceState]: """Get the presence state for users.""" updates_d = await self.current_state_for_users(target_user_ids) @@ -306,7 +301,7 @@ async def get_states( async def current_state_for_users( self, user_ids: Iterable[str] - ) -> Dict[str, UserPresenceState]: + ) -> dict[str, UserPresenceState]: """Get the current presence state for multiple users. Returns: @@ -417,7 +412,7 @@ def get_federation_queue(self) -> "PresenceFederationQueue": return self._federation_queue async def maybe_send_presence_to_interested_destinations( - self, states: List[UserPresenceState] + self, states: list[UserPresenceState] ) -> None: """If this instance is a federation sender, send the states to all destinations that are interested. Filters out any states for remote @@ -501,7 +496,7 @@ class _NullContextManager(ContextManager[None]): def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: @@ -522,8 +517,8 @@ def __init__(self, hs: "HomeServer"): # The number of ongoing syncs on this process, by (user ID, device ID). # Empty if _presence_enabled is false. 
- self._user_device_to_num_current_syncs: Dict[ - Tuple[str, Optional[str]], int + self._user_device_to_num_current_syncs: dict[ + tuple[str, Optional[str]], int ] = {} self.notifier = hs.get_notifier() @@ -531,7 +526,7 @@ def __init__(self, hs: "HomeServer"): # (user_id, device_id) -> last_sync_ms. Lists the devices that have stopped # syncing but we haven't notified the presence writer of that yet - self._user_devices_going_offline: Dict[Tuple[str, Optional[str]], int] = {} + self._user_devices_going_offline: dict[tuple[str, Optional[str]], int] = {} self._bump_active_client = ReplicationBumpPresenceActiveTime.make_client(hs) self._set_state_client = ReplicationPresenceSetState.make_client(hs) @@ -645,7 +640,7 @@ def _user_syncing() -> Generator[None, None, None]: return _user_syncing() async def notify_from_replication( - self, states: List[UserPresenceState], stream_id: int + self, states: list[UserPresenceState], stream_id: int ) -> None: parties = await get_interested_parties(self.store, self.presence_router, states) room_ids_to_states, users_to_states = parties @@ -704,7 +699,7 @@ async def process_replication_rows( def get_currently_syncing_users_for_replication( self, - ) -> Iterable[Tuple[str, Optional[str]]]: + ) -> Iterable[tuple[str, Optional[str]]]: return [ user_id_device_id for user_id_device_id, count in self._user_device_to_num_current_syncs.items() @@ -790,8 +785,8 @@ def __init__(self, hs: "HomeServer"): ) # The per-device presence state, maps user to devices to per-device presence state. 
- self._user_to_device_to_current_state: Dict[ - str, Dict[Optional[str], UserDevicePresenceState] + self._user_to_device_to_current_state: dict[ + str, dict[Optional[str], UserDevicePresenceState] ] = {} now = self.clock.time_msec() @@ -833,7 +828,7 @@ def __init__(self, hs: "HomeServer"): # Set of users who have presence in the `user_to_current_state` that # have not yet been persisted - self.unpersisted_users_changes: Set[str] = set() + self.unpersisted_users_changes: set[str] = set() hs.register_async_shutdown_handler( phase="before", @@ -843,8 +838,8 @@ def __init__(self, hs: "HomeServer"): # Keeps track of the number of *ongoing* syncs on this process. While # this is non zero a user will never go offline. - self._user_device_to_num_current_syncs: Dict[ - Tuple[str, Optional[str]], int + self._user_device_to_num_current_syncs: dict[ + tuple[str, Optional[str]], int ] = {} # Keeps track of the number of *ongoing* syncs on other processes. @@ -857,10 +852,10 @@ def __init__(self, hs: "HomeServer"): # we assume that all the sync requests on that process have stopped. # Stored as a dict from process_id to set of (user_id, device_id), and # a dict of process_id to millisecond timestamp last updated. - self.external_process_to_current_syncs: Dict[ - str, Set[Tuple[str, Optional[str]]] + self.external_process_to_current_syncs: dict[ + str, set[tuple[str, Optional[str]]] ] = {} - self.external_process_last_updated_ms: Dict[str, int] = {} + self.external_process_last_updated_ms: dict[str, int] = {} self.external_sync_linearizer = Linearizer( name="external_sync_linearizer", clock=self.clock @@ -1151,7 +1146,7 @@ async def bump_presence_active_time( # Update the user state, this will always update last_active_ts and # might update the presence state. 
prev_state = await self.current_state_for_user(user_id) - new_fields: Dict[str, Any] = { + new_fields: dict[str, Any] = { "last_active_ts": now, "state": _combine_device_states(devices.values()), } @@ -1221,7 +1216,7 @@ def _user_syncing() -> Generator[None, None, None]: def get_currently_syncing_users_for_replication( self, - ) -> Iterable[Tuple[str, Optional[str]]]: + ) -> Iterable[tuple[str, Optional[str]]]: # since we are the process handling presence, there is nothing to do here. return [] @@ -1317,7 +1312,7 @@ async def update_external_syncs_clear(self, process_id: str) -> None: ) self.external_process_last_updated_ms.pop(process_id, None) - async def _persist_and_notify(self, states: List[UserPresenceState]) -> None: + async def _persist_and_notify(self, states: list[UserPresenceState]) -> None: """Persist states in the database, poke the notifier and send to interested remote servers """ @@ -1477,7 +1472,7 @@ async def is_visible(self, observed_user: UserID, observer_user: UserID) -> bool async def get_all_presence_updates( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, list]], int, bool]: + ) -> tuple[list[tuple[int, list]], int, bool]: """ Gets a list of presence update rows from between the given stream ids. Each row has: @@ -1562,7 +1557,7 @@ async def _unsafe_process(self) -> None: # We may get multiple deltas for different rooms, but we want to # handle them on a room by room basis, so we batch them up by # room. 
- deltas_by_room: Dict[str, List[StateDelta]] = {} + deltas_by_room: dict[str, list[StateDelta]] = {} for delta in deltas: deltas_by_room.setdefault(delta.room_id, []).append(delta) @@ -1576,7 +1571,7 @@ async def _unsafe_process(self) -> None: name="presence", **{SERVER_NAME_LABEL: self.server_name} ).set(max_pos) - async def _handle_state_delta(self, room_id: str, deltas: List[StateDelta]) -> None: + async def _handle_state_delta(self, room_id: str, deltas: list[StateDelta]) -> None: """Process current state deltas for the room to find new joins that need to be handled. """ @@ -1849,7 +1844,7 @@ async def get_new_events( explicit_room_id: Optional[str] = None, include_offline: bool = True, service: Optional[ApplicationService] = None, - ) -> Tuple[List[UserPresenceState], int]: + ) -> tuple[list[UserPresenceState], int]: # The process for getting presence events are: # 1. Get the rooms the user is in. # 2. Get the list of user in the rooms. @@ -2001,7 +1996,7 @@ async def _filter_all_presence_updates_for_user( user_id: str, include_offline: bool, from_key: Optional[int] = None, - ) -> List[UserPresenceState]: + ) -> list[UserPresenceState]: """ Computes the presence updates a user should receive. @@ -2058,7 +2053,7 @@ async def _filter_all_presence_updates_for_user( def _filter_offline_presence_state( self, presence_updates: Iterable[UserPresenceState] - ) -> List[UserPresenceState]: + ) -> list[UserPresenceState]: """Given an iterable containing user presence updates, return a list with any offline presence states removed. 
@@ -2079,12 +2074,12 @@ def get_current_key(self) -> int: def handle_timeouts( - user_states: List[UserPresenceState], + user_states: list[UserPresenceState], is_mine_fn: Callable[[str], bool], - syncing_user_devices: AbstractSet[Tuple[str, Optional[str]]], - user_to_devices: Dict[str, Dict[Optional[str], UserDevicePresenceState]], + syncing_user_devices: AbstractSet[tuple[str, Optional[str]]], + user_to_devices: dict[str, dict[Optional[str], UserDevicePresenceState]], now: int, -) -> List[UserPresenceState]: +) -> list[UserPresenceState]: """Checks the presence of users that have timed out and updates as appropriate. @@ -2120,8 +2115,8 @@ def handle_timeouts( def handle_timeout( state: UserPresenceState, is_mine: bool, - syncing_device_ids: AbstractSet[Tuple[str, Optional[str]]], - user_devices: Dict[Optional[str], UserDevicePresenceState], + syncing_device_ids: AbstractSet[tuple[str, Optional[str]]], + user_devices: dict[Optional[str], UserDevicePresenceState], now: int, ) -> Optional[UserPresenceState]: """Checks the presence of the user to see if any of the timers have elapsed @@ -2218,7 +2213,7 @@ def handle_update( wheel_timer: WheelTimer, now: int, persist: bool, -) -> Tuple[UserPresenceState, bool, bool]: +) -> tuple[UserPresenceState, bool, bool]: """Given a presence update: 1. Add any appropriate timers. 2. Check if we should notify anyone. @@ -2344,8 +2339,8 @@ def _combine_device_states( async def get_interested_parties( - store: DataStore, presence_router: PresenceRouter, states: List[UserPresenceState] -) -> Tuple[Dict[str, List[UserPresenceState]], Dict[str, List[UserPresenceState]]]: + store: DataStore, presence_router: PresenceRouter, states: list[UserPresenceState] +) -> tuple[dict[str, list[UserPresenceState]], dict[str, list[UserPresenceState]]]: """Given a list of states return which entities (rooms, users) are interested in the given states. 
@@ -2358,8 +2353,8 @@ async def get_interested_parties( A 2-tuple of `(room_ids_to_states, users_to_states)`, with each item being a dict of `entity_name` -> `[UserPresenceState]` """ - room_ids_to_states: Dict[str, List[UserPresenceState]] = {} - users_to_states: Dict[str, List[UserPresenceState]] = {} + room_ids_to_states: dict[str, list[UserPresenceState]] = {} + users_to_states: dict[str, list[UserPresenceState]] = {} for state in states: room_ids = await store.get_rooms_for_user(state.user_id) for room_id in room_ids: @@ -2382,8 +2377,8 @@ async def get_interested_parties( async def get_interested_remotes( store: DataStore, presence_router: PresenceRouter, - states: List[UserPresenceState], -) -> List[Tuple[StrCollection, Collection[UserPresenceState]]]: + states: list[UserPresenceState], +) -> list[tuple[StrCollection, Collection[UserPresenceState]]]: """Given a list of presence states figure out which remote servers should be sent which. @@ -2397,14 +2392,14 @@ async def get_interested_remotes( Returns: A map from destinations to presence states to send to that destination. """ - hosts_and_states: List[Tuple[StrCollection, Collection[UserPresenceState]]] = [] + hosts_and_states: list[tuple[StrCollection, Collection[UserPresenceState]]] = [] # First we look up the rooms each user is in (as well as any explicit # subscriptions), then for each distinct room we look up the remote # hosts in those rooms. for state in states: room_ids = await store.get_rooms_for_user(state.user_id) - hosts: Set[str] = set() + hosts: set[str] = set() for room_id in room_ids: room_hosts = await store.get_current_hosts_in_room(room_id) hosts.update(room_hosts) @@ -2473,12 +2468,12 @@ def __init__(self, hs: "HomeServer", presence_handler: BasePresenceHandler): # stream_id, destinations, user_ids)`. We don't store the full states # for efficiency, and remote workers will already have the full states # cached. 
- self._queue: List[Tuple[int, int, StrCollection, Set[str]]] = [] + self._queue: list[tuple[int, int, StrCollection, set[str]]] = [] self._next_id = 1 # Map from instance name to current token - self._current_tokens: Dict[str, int] = {} + self._current_tokens: dict[str, int] = {} if self._queue_presence_updates: self._clock.looping_call(self._clear_queue, self._CLEAR_ITEMS_EVERY_MS) @@ -2547,7 +2542,7 @@ async def get_replication_rows( from_token: int, upto_token: int, target_row_count: int, - ) -> Tuple[List[Tuple[int, Tuple[str, str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str, str]]], int, bool]: """Get all the updates between the two tokens. We return rows in the form of `(destination, user_id)` to keep the size @@ -2583,7 +2578,7 @@ async def get_replication_rows( # handle the case where `from_token` stream ID has already been dropped. start_idx = max(from_token + 1 - self._next_id, -len(self._queue)) - to_send: List[Tuple[int, Tuple[str, str]]] = [] + to_send: list[tuple[int, tuple[str, str]]] = [] limited = False new_id = upto_token for _, stream_id, destinations, user_ids in self._queue[start_idx:]: @@ -2631,7 +2626,7 @@ async def process_replication_rows( if not self._federation: return - hosts_to_users: Dict[str, Set[str]] = {} + hosts_to_users: dict[str, set[str]] = {} for row in rows: hosts_to_users.setdefault(row.destination, set()).add(row.user_id) diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 9dda89d85b..240a235a0e 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -20,7 +20,7 @@ # import logging import random -from typing import TYPE_CHECKING, List, Optional, Union +from typing import TYPE_CHECKING, Optional, Union from synapse.api.constants import ProfileFields from synapse.api.errors import ( @@ -69,7 +69,7 @@ def __init__(self, hs: "HomeServer"): self.request_ratelimiter = hs.get_request_ratelimiter() self.max_avatar_size: Optional[int] = hs.config.server.max_avatar_size - 
self.allowed_avatar_mimetypes: Optional[List[str]] = ( + self.allowed_avatar_mimetypes: Optional[list[str]] = ( hs.config.server.allowed_avatar_mimetypes ) diff --git a/synapse/handlers/push_rules.py b/synapse/handlers/push_rules.py index 4ef6a04c51..643fa72f3f 100644 --- a/synapse/handlers/push_rules.py +++ b/synapse/handlers/push_rules.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any, Optional, Union import attr @@ -127,7 +127,7 @@ def notify_user(self, user_id: str) -> None: async def push_rules_for_user( self, user: UserID - ) -> Dict[str, Dict[str, List[Dict[str, Any]]]]: + ) -> dict[str, dict[str, list[dict[str, Any]]]]: """ Push rules aren't really account data, but get formatted as such for /sync. """ @@ -137,7 +137,7 @@ async def push_rules_for_user( return rules -def check_actions(actions: List[Union[str, JsonDict]]) -> None: +def check_actions(actions: list[Union[str, JsonDict]]) -> None: """Check if the given actions are spec compliant. 
Args: diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py index c776654d12..ad41113b5b 100644 --- a/synapse/handlers/receipts.py +++ b/synapse/handlers/receipts.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Iterable, List, Optional, Sequence, Tuple +from typing import TYPE_CHECKING, Iterable, Optional, Sequence from synapse.api.constants import EduTypes, ReceiptTypes from synapse.appservice import ApplicationService @@ -136,10 +136,10 @@ async def _received_remote_receipt(self, origin: str, content: JsonDict) -> None await self._handle_new_receipts(receipts) - async def _handle_new_receipts(self, receipts: List[ReadReceipt]) -> bool: + async def _handle_new_receipts(self, receipts: list[ReadReceipt]) -> bool: """Takes a list of receipts, stores them and informs the notifier.""" - receipts_persisted: List[ReadReceipt] = [] + receipts_persisted: list[ReadReceipt] = [] for receipt in receipts: stream_id = await self.store.insert_receipt( receipt.room_id, @@ -216,7 +216,7 @@ def __init__(self, hs: "HomeServer"): @staticmethod def filter_out_private_receipts( rooms: Sequence[JsonMapping], user_id: str - ) -> List[JsonMapping]: + ) -> list[JsonMapping]: """ Filters a list of serialized receipts (as returned by /sync and /initialSync) and removes private read receipts of other users. @@ -233,7 +233,7 @@ def filter_out_private_receipts( The same as rooms, but filtered. """ - result: List[JsonMapping] = [] + result: list[JsonMapping] = [] # Iterate through each room's receipt content. 
for room in rooms: @@ -287,7 +287,7 @@ async def get_new_events( is_guest: bool, explicit_room_id: Optional[str] = None, to_key: Optional[MultiWriterStreamToken] = None, - ) -> Tuple[List[JsonMapping], MultiWriterStreamToken]: + ) -> tuple[list[JsonMapping], MultiWriterStreamToken]: """ Find read receipts for given rooms (> `from_token` and <= `to_token`) """ @@ -313,7 +313,7 @@ async def get_new_events_as( from_key: MultiWriterStreamToken, to_key: MultiWriterStreamToken, service: ApplicationService, - ) -> Tuple[List[JsonMapping], MultiWriterStreamToken]: + ) -> tuple[list[JsonMapping], MultiWriterStreamToken]: """Returns a set of new read receipt events that an appservice may be interested in. diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index c3ff0cfaf8..8b620a91bc 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -26,9 +26,7 @@ from typing import ( TYPE_CHECKING, Iterable, - List, Optional, - Tuple, TypedDict, ) @@ -241,7 +239,7 @@ async def register_user( address: Optional[str] = None, bind_emails: Optional[Iterable[str]] = None, by_admin: bool = False, - user_agent_ips: Optional[List[Tuple[str, str]]] = None, + user_agent_ips: Optional[list[tuple[str, str]]] = None, auth_provider_id: Optional[str] = None, approved: bool = False, ) -> str: @@ -655,7 +653,7 @@ async def post_consent_actions(self, user_id: str) -> None: async def appservice_register( self, user_localpart: str, as_token: str - ) -> Tuple[str, ApplicationService]: + ) -> tuple[str, ApplicationService]: user = UserID(user_localpart, self.hs.hostname) user_id = user.to_string() service = self.store.get_app_service_by_token(as_token) @@ -780,7 +778,7 @@ async def register_device( auth_provider_id: Optional[str] = None, should_issue_refresh_token: bool = False, auth_provider_session_id: Optional[str] = None, - ) -> Tuple[str, str, Optional[int], Optional[str]]: + ) -> tuple[str, str, Optional[int], Optional[str]]: """Register a device for a 
user and generate an access token. The access token will be limited by the homeserver's session_lifetime config. diff --git a/synapse/handlers/relations.py b/synapse/handlers/relations.py index b1158ee77d..217681f7c0 100644 --- a/synapse/handlers/relations.py +++ b/synapse/handlers/relations.py @@ -23,10 +23,7 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, - FrozenSet, Iterable, - List, Mapping, Optional, Sequence, @@ -212,7 +209,7 @@ async def redact_events_related_to( requester: Requester, event_id: str, initial_redaction_event: EventBase, - relation_types: List[str], + relation_types: list[str], ) -> None: """Redacts all events related to the given event ID with one of the given relation types. @@ -267,7 +264,7 @@ async def redact_events_related_to( ) async def get_references_for_events( - self, event_ids: Collection[str], ignored_users: FrozenSet[str] = frozenset() + self, event_ids: Collection[str], ignored_users: frozenset[str] = frozenset() ) -> Mapping[str, Sequence[_RelatedEvent]]: """Get a list of references to the given events. @@ -308,11 +305,11 @@ async def get_references_for_events( async def _get_threads_for_events( self, - events_by_id: Dict[str, EventBase], - relations_by_id: Dict[str, str], + events_by_id: dict[str, EventBase], + relations_by_id: dict[str, str], user_id: str, - ignored_users: FrozenSet[str], - ) -> Dict[str, _ThreadAggregation]: + ignored_users: frozenset[str], + ) -> dict[str, _ThreadAggregation]: """Get the bundled aggregations for threads for the requested events. Args: @@ -437,7 +434,7 @@ async def _get_threads_for_events( @trace async def get_bundled_aggregations( self, events: Iterable[EventBase], user_id: str - ) -> Dict[str, BundledAggregations]: + ) -> dict[str, BundledAggregations]: """Generate bundled aggregations for events. Args: @@ -456,7 +453,7 @@ async def get_bundled_aggregations( # De-duplicated events by ID to handle the same event requested multiple times. 
events_by_id = {} # A map of event ID to the relation in that event, if there is one. - relations_by_id: Dict[str, str] = {} + relations_by_id: dict[str, str] = {} for event in events: # State events do not get bundled aggregations. if event.is_state(): @@ -479,7 +476,7 @@ async def get_bundled_aggregations( events_by_id[event.event_id] = event # event ID -> bundled aggregation in non-serialized form. - results: Dict[str, BundledAggregations] = {} + results: dict[str, BundledAggregations] = {} # Fetch any ignored users of the requesting user. ignored_users = await self._main_store.ignored_users(user_id) diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index db6dc5efd0..f242accef1 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -33,10 +33,7 @@ Any, Awaitable, Callable, - Dict, - List, Optional, - Tuple, cast, ) @@ -112,11 +109,11 @@ @attr.s(slots=True, frozen=True, auto_attribs=True) class EventContext: - events_before: List[EventBase] + events_before: list[EventBase] event: EventBase - events_after: List[EventBase] - state: List[EventBase] - aggregations: Dict[str, BundledAggregations] + events_after: list[EventBase] + state: list[EventBase] + aggregations: dict[str, BundledAggregations] start: str end: str @@ -143,7 +140,7 @@ def __init__(self, hs: "HomeServer"): ) # Room state based off defined presets - self._presets_dict: Dict[str, Dict[str, Any]] = { + self._presets_dict: dict[str, dict[str, Any]] = { RoomCreationPreset.PRIVATE_CHAT: { "join_rules": JoinRules.INVITE, "history_visibility": HistoryVisibility.SHARED, @@ -184,7 +181,7 @@ def __init__(self, hs: "HomeServer"): # If a user tries to update the same room multiple times in quick # succession, only process the first attempt and return its result to # subsequent requests - self._upgrade_response_cache: ResponseCache[Tuple[str, str]] = ResponseCache( + self._upgrade_response_cache: ResponseCache[tuple[str, str]] = ResponseCache( clock=hs.get_clock(), 
name="room_upgrade", server_name=self.server_name, @@ -201,7 +198,7 @@ async def upgrade_room( requester: Requester, old_room_id: str, new_version: RoomVersion, - additional_creators: Optional[List[str]], + additional_creators: Optional[list[str]], auto_member: bool = False, ratelimit: bool = True, ) -> str: @@ -339,14 +336,14 @@ async def _upgrade_room( self, requester: Requester, old_room_id: str, - old_room: Tuple[bool, str, bool], + old_room: tuple[bool, str, bool], new_room_id: str, new_version: RoomVersion, tombstone_event: EventBase, tombstone_context: synapse.events.snapshot.EventContext, - additional_creators: Optional[List[str]], + additional_creators: Optional[list[str]], creation_event_with_context: Optional[ - Tuple[EventBase, synapse.events.snapshot.EventContext] + tuple[EventBase, synapse.events.snapshot.EventContext] ] = None, auto_member: bool = False, ) -> str: @@ -437,7 +434,7 @@ async def _update_upgraded_room_pls( old_room_id: str, new_room_id: str, old_room_state: StateMap[str], - additional_creators: Optional[List[str]], + additional_creators: Optional[list[str]], ) -> None: """Send updated power levels in both rooms after an upgrade @@ -529,7 +526,7 @@ def _calculate_upgraded_room_creation_content( old_room_create_event: EventBase, tombstone_event_id: Optional[str], new_room_version: RoomVersion, - additional_creators: Optional[List[str]], + additional_creators: Optional[list[str]], ) -> JsonDict: creation_content: JsonDict = { "room_version": new_room_version.identifier, @@ -561,9 +558,9 @@ async def clone_existing_room( new_room_id: str, new_room_version: RoomVersion, tombstone_event_id: str, - additional_creators: Optional[List[str]], + additional_creators: Optional[list[str]], creation_event_with_context: Optional[ - Tuple[EventBase, synapse.events.snapshot.EventContext] + tuple[EventBase, synapse.events.snapshot.EventContext] ] = None, auto_member: bool = False, ) -> None: @@ -600,7 +597,7 @@ async def clone_existing_room( 
initial_state: MutableStateMap = {} # Replicate relevant room events - types_to_copy: List[Tuple[str, Optional[str]]] = [ + types_to_copy: list[tuple[str, Optional[str]]] = [ (EventTypes.JoinRules, ""), (EventTypes.Name, ""), (EventTypes.Topic, ""), @@ -1044,7 +1041,7 @@ async def create_room( ratelimit: bool = True, creator_join_profile: Optional[JsonDict] = None, ignore_forced_encryption: bool = False, - ) -> Tuple[str, Optional[RoomAlias], int]: + ) -> tuple[str, Optional[RoomAlias], int]: """Creates a new room. Args: @@ -1394,7 +1391,7 @@ async def _generate_create_event_for_room_id( creation_content: JsonDict, is_public: bool, room_version: RoomVersion, - ) -> Tuple[EventBase, synapse.events.snapshot.EventContext]: + ) -> tuple[EventBase, synapse.events.snapshot.EventContext]: ( creation_event, new_unpersisted_context, @@ -1426,7 +1423,7 @@ async def _send_events_for_new_room( room_id: str, room_version: RoomVersion, room_config: JsonDict, - invite_list: List[str], + invite_list: list[str], initial_state: MutableStateMap, creation_content: JsonDict, room_alias: Optional[RoomAlias] = None, @@ -1434,9 +1431,9 @@ async def _send_events_for_new_room( creator_join_profile: Optional[JsonDict] = None, ignore_forced_encryption: bool = False, creation_event_with_context: Optional[ - Tuple[EventBase, synapse.events.snapshot.EventContext] + tuple[EventBase, synapse.events.snapshot.EventContext] ] = None, - ) -> Tuple[int, str, int]: + ) -> tuple[int, str, int]: """Sends the initial events into a new room. Sends the room creation, membership, and power level events into the room sequentially, then creates and batches up the rest of the events to persist as a batch to the DB. @@ -1485,7 +1482,7 @@ async def _send_events_for_new_room( depth = 1 # the most recently created event - prev_event: List[str] = [] + prev_event: list[str] = [] # a map of event types, state keys -> event_ids. 
We collect these mappings this as events are # created (but not persisted to the db) to determine state for future created events # (as this info can't be pulled from the db) @@ -1496,7 +1493,7 @@ async def create_event( content: JsonDict, for_batch: bool, **kwargs: Any, - ) -> Tuple[EventBase, synapse.events.snapshot.UnpersistedEventContextBase]: + ) -> tuple[EventBase, synapse.events.snapshot.UnpersistedEventContextBase]: """ Creates an event and associated event context. Args: @@ -1792,7 +1789,7 @@ def _validate_room_config( f"You cannot create an encrypted room. user_level ({room_admin_level}) < send_level ({encryption_level})", ) - def _room_preset_config(self, room_config: JsonDict) -> Tuple[str, dict]: + def _room_preset_config(self, room_config: JsonDict) -> tuple[str, dict]: # The spec says rooms should default to private visibility if # `visibility` is not specified. visibility = room_config.get("visibility", "private") @@ -1814,9 +1811,9 @@ def _room_preset_config(self, room_config: JsonDict) -> Tuple[str, dict]: def _remove_creators_from_pl_users_map( self, - users_map: Dict[str, int], + users_map: dict[str, int], creator: str, - additional_creators: Optional[List[str]], + additional_creators: Optional[list[str]], ) -> None: creators = [creator] if additional_creators: @@ -1916,7 +1913,7 @@ async def get_event_context( # The user is peeking if they aren't in the room already is_peeking = not is_user_in_room - async def filter_evts(events: List[EventBase]) -> List[EventBase]: + async def filter_evts(events: list[EventBase]) -> list[EventBase]: if use_admin_priviledge: return events return await filter_events_for_client( @@ -2021,7 +2018,7 @@ async def get_event_for_timestamp( room_id: str, timestamp: int, direction: Direction, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Find the closest event to the given timestamp in the given direction. 
If we can't find an event locally or the event we have locally is next to a gap, it will ask other federated homeservers for an event. @@ -2172,7 +2169,7 @@ async def get_new_events( room_ids: StrCollection, is_guest: bool, explicit_room_id: Optional[str] = None, - ) -> Tuple[List[EventBase], RoomStreamToken]: + ) -> tuple[list[EventBase], RoomStreamToken]: # We just ignore the key for now. to_key = self.get_current_key() diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 9d4307fb07..97a5d07c7c 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Any, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Optional import attr import msgpack @@ -67,14 +67,14 @@ def __init__(self, hs: "HomeServer"): self.hs = hs self.enable_room_list_search = hs.config.roomdirectory.enable_room_list_search self.response_cache: ResponseCache[ - Tuple[Optional[int], Optional[str], Optional[ThirdPartyInstanceID]] + tuple[Optional[int], Optional[str], Optional[ThirdPartyInstanceID]] ] = ResponseCache( clock=hs.get_clock(), name="room_list", server_name=self.server_name, ) self.remote_response_cache: ResponseCache[ - Tuple[str, Optional[int], Optional[str], bool, Optional[str]] + tuple[str, Optional[int], Optional[str], bool, Optional[str]] ] = ResponseCache( clock=hs.get_clock(), name="remote_room_list", @@ -175,7 +175,7 @@ async def _get_public_room_list( if since_token: batch_token = RoomListNextBatch.from_token(since_token) - bounds: Optional[Tuple[int, str]] = ( + bounds: Optional[tuple[int, str]] = ( batch_token.last_joined_members, batch_token.last_room_id, ) @@ -226,7 +226,7 @@ def build_room_entry(room: LargestRoomStats) -> JsonDict: return {k: v for k, v in entry.items() if v is not None} # Build a list of up to `limit` entries. 
- room_entries: List[JsonDict] = [] + room_entries: list[JsonDict] = [] rooms_iterator = results if forwards else reversed(results) # Track the first and last 'considered' rooms so that we can provide correct diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 2ab9b70f8c..03cfc99260 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -23,7 +23,7 @@ import logging import random from http import HTTPStatus -from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Iterable, Optional from synapse import types from synapse.api.constants import ( @@ -217,11 +217,11 @@ def _on_user_joined_room(self, event_id: str, room_id: str) -> None: async def _remote_join( self, requester: Requester, - remote_room_hosts: List[str], + remote_room_hosts: list[str], room_id: str, user: UserID, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Try and join a room that this server is not in Args: @@ -241,11 +241,11 @@ async def _remote_join( async def remote_knock( self, requester: Requester, - remote_room_hosts: List[str], + remote_room_hosts: list[str], room_id: str, user: UserID, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Try and knock on a room that this server is not in Args: @@ -263,7 +263,7 @@ async def remote_reject_invite( txn_id: Optional[str], requester: Requester, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Rejects an out-of-band invite we have received from a remote server @@ -286,7 +286,7 @@ async def remote_rescind_knock( txn_id: Optional[str], requester: Requester, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Rescind a local knock made on a remote room. 
Args: @@ -396,8 +396,8 @@ async def _local_membership_update( target: UserID, room_id: str, membership: str, - prev_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, depth: Optional[int] = None, txn_id: Optional[str] = None, ratelimit: bool = True, @@ -405,7 +405,7 @@ async def _local_membership_update( require_consent: bool = True, outlier: bool = False, origin_server_ts: Optional[int] = None, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Internal membership update function to get an existing event or create and persist a new event for the new membership change. @@ -573,18 +573,18 @@ async def update_membership( room_id: str, action: str, txn_id: Optional[str] = None, - remote_room_hosts: Optional[List[str]] = None, + remote_room_hosts: Optional[list[str]] = None, third_party_signed: Optional[dict] = None, ratelimit: bool = True, content: Optional[dict] = None, new_room: bool = False, require_consent: bool = True, outlier: bool = False, - prev_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, depth: Optional[int] = None, origin_server_ts: Optional[int] = None, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Update a user's membership in a room. 
Params: @@ -687,18 +687,18 @@ async def update_membership_locked( room_id: str, action: str, txn_id: Optional[str] = None, - remote_room_hosts: Optional[List[str]] = None, + remote_room_hosts: Optional[list[str]] = None, third_party_signed: Optional[dict] = None, ratelimit: bool = True, content: Optional[dict] = None, new_room: bool = False, require_consent: bool = True, outlier: bool = False, - prev_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, depth: Optional[int] = None, origin_server_ts: Optional[int] = None, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Helper for update_membership. Assumes that the membership linearizer is already held for the room. @@ -1224,12 +1224,12 @@ async def _should_perform_remote_join( self, user_id: str, room_id: str, - remote_room_hosts: List[str], + remote_room_hosts: list[str], content: JsonDict, is_partial_state_room: bool, is_host_in_room: bool, partial_state_before_join: StateMap[str], - ) -> Tuple[bool, List[str]]: + ) -> tuple[bool, list[str]]: """ Check whether the server should do a remote join (as opposed to a local join) for a user. @@ -1565,7 +1565,7 @@ async def kick_guest_users(self, current_state: Iterable[EventBase]) -> None: async def lookup_room_alias( self, room_alias: RoomAlias - ) -> Tuple[RoomID, List[str]]: + ) -> tuple[RoomID, list[str]]: """ Get the room ID associated with a room alias. @@ -1612,9 +1612,9 @@ async def do_3pid_invite( requester: Requester, txn_id: Optional[str], id_access_token: str, - prev_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, depth: Optional[int] = None, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Invite a 3PID to a room. 
Args: @@ -1726,9 +1726,9 @@ async def _make_and_store_3pid_invite( user: UserID, txn_id: Optional[str], id_access_token: str, - prev_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, depth: Optional[int] = None, - ) -> Tuple[EventBase, int]: + ) -> tuple[EventBase, int]: room_state = await self._storage_controllers.state.get_current_state( room_id, StateFilter.from_types( @@ -1863,7 +1863,7 @@ def __init__(self, hs: "HomeServer"): self.distributor.declare("user_left_room") async def _is_remote_room_too_complex( - self, room_id: str, remote_room_hosts: List[str] + self, room_id: str, remote_room_hosts: list[str] ) -> Optional[bool]: """ Check if complexity of a remote room is too great. @@ -1899,11 +1899,11 @@ async def _is_local_room_too_complex(self, room_id: str) -> bool: async def _remote_join( self, requester: Requester, - remote_room_hosts: List[str], + remote_room_hosts: list[str], room_id: str, user: UserID, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Implements RoomMemberHandler._remote_join""" # filter ourselves out of remote_room_hosts: do_invite_join ignores it # and if it is the only entry we'd like to return a 404 rather than a @@ -1980,7 +1980,7 @@ async def remote_reject_invite( txn_id: Optional[str], requester: Requester, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Rejects an out-of-band invite received from a remote user @@ -2017,7 +2017,7 @@ async def remote_rescind_knock( txn_id: Optional[str], requester: Requester, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Rescinds a local knock made on a remote room @@ -2046,7 +2046,7 @@ async def _generate_local_out_of_band_leave( txn_id: Optional[str], requester: Requester, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Generate a local leave event for a room This can be called after we e.g fail to reject an invite via a remote server. 
@@ -2126,11 +2126,11 @@ async def _generate_local_out_of_band_leave( async def remote_knock( self, requester: Requester, - remote_room_hosts: List[str], + remote_room_hosts: list[str], room_id: str, user: UserID, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Sends a knock to a room. Attempts to do so via one remote out of a given list. Args: @@ -2270,7 +2270,7 @@ async def _unsafe_process(self) -> None: await self._store.update_room_forgetter_stream_pos(max_pos) - async def _handle_deltas(self, deltas: List[StateDelta]) -> None: + async def _handle_deltas(self, deltas: list[StateDelta]) -> None: """Called with the state deltas to process""" for delta in deltas: if delta.event_type != EventTypes.Member: @@ -2300,7 +2300,7 @@ async def _handle_deltas(self, deltas: List[StateDelta]) -> None: raise -def get_users_which_can_issue_invite(auth_events: StateMap[EventBase]) -> List[str]: +def get_users_which_can_issue_invite(auth_events: StateMap[EventBase]) -> list[str]: """ Return the list of users which can issue invites. @@ -2346,7 +2346,7 @@ def get_users_which_can_issue_invite(auth_events: StateMap[EventBase]) -> List[s return result -def get_servers_from_users(users: List[str]) -> Set[str]: +def get_servers_from_users(users: list[str]) -> set[str]: """ Resolve a list of users into their servers. 
diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py index 0616a9864d..0927c031f7 100644 --- a/synapse/handlers/room_member_worker.py +++ b/synapse/handlers/room_member_worker.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse.handlers.room_member import NoKnownServersError, RoomMemberHandler from synapse.replication.http.membership import ( @@ -51,11 +51,11 @@ def __init__(self, hs: "HomeServer"): async def _remote_join( self, requester: Requester, - remote_room_hosts: List[str], + remote_room_hosts: list[str], room_id: str, user: UserID, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Implements RoomMemberHandler._remote_join""" if len(remote_room_hosts) == 0: raise NoKnownServersError() @@ -76,7 +76,7 @@ async def remote_reject_invite( txn_id: Optional[str], requester: Requester, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Rejects an out-of-band invite received from a remote user @@ -96,7 +96,7 @@ async def remote_rescind_knock( txn_id: Optional[str], requester: Requester, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Rescinds a local knock made on a remote room @@ -121,11 +121,11 @@ async def remote_rescind_knock( async def remote_knock( self, requester: Requester, - remote_room_hosts: List[str], + remote_room_hosts: list[str], room_id: str, user: UserID, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Sends a knock to a room. 
Implements RoomMemberHandler.remote_knock diff --git a/synapse/handlers/room_summary.py b/synapse/handlers/room_summary.py index 838fee6a30..a948202056 100644 --- a/synapse/handlers/room_summary.py +++ b/synapse/handlers/room_summary.py @@ -22,7 +22,7 @@ import itertools import logging import re -from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Sequence, Set, Tuple +from typing import TYPE_CHECKING, Iterable, Optional, Sequence import attr @@ -83,9 +83,9 @@ class _PaginationSession: # The time the pagination session was created, in milliseconds. creation_time_ms: int # The queue of rooms which are still to process. - room_queue: List["_RoomQueueEntry"] + room_queue: list["_RoomQueueEntry"] # A set of rooms which have been processed. - processed_rooms: Set[str] + processed_rooms: set[str] class RoomSummaryHandler: @@ -112,14 +112,14 @@ def __init__(self, hs: "HomeServer"): # If a user tries to fetch the same page multiple times in quick succession, # only process the first attempt and return its result to subsequent requests. self._pagination_response_cache: ResponseCache[ - Tuple[ + tuple[ str, str, bool, Optional[int], Optional[int], Optional[str], - Optional[Tuple[str, ...]], + Optional[tuple[str, ...]], ] ] = ResponseCache( clock=hs.get_clock(), @@ -136,7 +136,7 @@ async def get_room_hierarchy( max_depth: Optional[int] = None, limit: Optional[int] = None, from_token: Optional[str] = None, - remote_room_hosts: Optional[Tuple[str, ...]] = None, + remote_room_hosts: Optional[tuple[str, ...]] = None, ) -> JsonDict: """ Implementation of the room hierarchy C-S API. 
@@ -196,7 +196,7 @@ async def _get_room_hierarchy( max_depth: Optional[int] = None, limit: Optional[int] = None, from_token: Optional[str] = None, - remote_room_hosts: Optional[Tuple[str, ...]] = None, + remote_room_hosts: Optional[tuple[str, ...]] = None, ) -> JsonDict: """See docstring for SpaceSummaryHandler.get_room_hierarchy.""" @@ -262,7 +262,7 @@ async def _get_room_hierarchy( # Rooms we have already processed. processed_rooms = set() - rooms_result: List[JsonDict] = [] + rooms_result: list[JsonDict] = [] # Cap the limit to a server-side maximum. if limit is None: @@ -286,12 +286,12 @@ async def _get_room_hierarchy( # federation. The rationale for caching these and *maybe* using them # is to prefer any information local to the homeserver before trusting # data received over federation. - children_room_entries: Dict[str, JsonDict] = {} + children_room_entries: dict[str, JsonDict] = {} # A set of room IDs which are children that did not have information # returned over federation and are known to be inaccessible to the # current server. We should not reach out over federation to try to # summarise these rooms. - inaccessible_children: Set[str] = set() + inaccessible_children: set[str] = set() # If the room is known locally, summarise it! is_in_room = await self._store.is_host_joined(room_id, self._server_name) @@ -418,8 +418,8 @@ async def get_federation_hierarchy( # Room is inaccessible to the requesting server. raise SynapseError(404, "Unknown room: %s" % (requested_room_id,)) - children_rooms_result: List[JsonDict] = [] - inaccessible_children: List[str] = [] + children_rooms_result: list[JsonDict] = [] + inaccessible_children: list[str] = [] # Iterate through each child and potentially add it, but not its children, # to the response. 
@@ -496,7 +496,7 @@ async def _summarize_local_room( # we only care about suggested children child_events = filter(_is_suggested_child_event, child_events) - stripped_events: List[JsonDict] = [ + stripped_events: list[JsonDict] = [ { "type": e.type, "state_key": e.state_key, @@ -510,7 +510,7 @@ async def _summarize_local_room( async def _summarize_remote_room_hierarchy( self, room: "_RoomQueueEntry", suggested_only: bool - ) -> Tuple[Optional["_RoomEntry"], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional["_RoomEntry"], dict[str, JsonDict], set[str]]: """ Request room entries and a list of event entries for a given room by querying a remote server. @@ -835,7 +835,7 @@ async def get_room_summary( self, requester: Optional[str], room_id: str, - remote_room_hosts: Optional[List[str]] = None, + remote_room_hosts: Optional[list[str]] = None, ) -> JsonDict: """ Implementation of the room summary C-S API from MSC3266 @@ -995,7 +995,7 @@ def _is_suggested_child_event(edge_event: EventBase) -> bool: def _child_events_comparison_key( child: EventBase, -) -> Tuple[bool, Optional[str], int, str]: +) -> tuple[bool, Optional[str], int, str]: """ Generate a value for comparing two child events for ordering. 
diff --git a/synapse/handlers/saml.py b/synapse/handlers/saml.py index 81bec7499c..218fbcaaa7 100644 --- a/synapse/handlers/saml.py +++ b/synapse/handlers/saml.py @@ -20,7 +20,7 @@ # import logging import re -from typing import TYPE_CHECKING, Callable, Dict, Optional, Set, Tuple +from typing import TYPE_CHECKING, Callable, Optional import attr import saml2 @@ -90,7 +90,7 @@ def __init__(self, hs: "HomeServer"): self.idp_brand = hs.config.saml2.idp_brand # a map from saml session id to Saml2SessionData object - self._outstanding_requests_dict: Dict[str, Saml2SessionData] = {} + self._outstanding_requests_dict: dict[str, Saml2SessionData] = {} self._sso_handler = hs.get_sso_handler() self._sso_handler.register_identity_provider(self) @@ -393,7 +393,7 @@ def dot_replace_for_mxid(username: str) -> str: return username -MXID_MAPPER_MAP: Dict[str, Callable[[str], str]] = { +MXID_MAPPER_MAP: dict[str, Callable[[str], str]] = { "hexencode": map_username_to_mxid_localpart, "dotreplace": dot_replace_for_mxid, } @@ -509,7 +509,7 @@ def parse_config(config: dict) -> SamlConfig: return SamlConfig(mxid_source_attribute, mxid_mapper) @staticmethod - def get_saml_attributes(config: SamlConfig) -> Tuple[Set[str], Set[str]]: + def get_saml_attributes(config: SamlConfig) -> tuple[set[str], set[str]]: """Returns the required attributes of a SAML Args: diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 1a71135d5f..8f39c6ec6b 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -21,7 +21,7 @@ import itertools import logging -from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Iterable, Optional import attr from unpaddedbase64 import decode_base64, encode_base64 @@ -46,13 +46,13 @@ class _SearchResult: # The count of results. count: int # A mapping of event ID to the rank of that event. - rank_map: Dict[str, int] + rank_map: dict[str, int] # A list of the resulting events. 
- allowed_events: List[EventBase] + allowed_events: list[EventBase] # A map of room ID to results. - room_groups: Dict[str, JsonDict] + room_groups: dict[str, JsonDict] # A set of event IDs to highlight. - highlights: Set[str] + highlights: set[str] class SearchHandler: @@ -230,11 +230,11 @@ async def _search( batch_group_key: Optional[str], batch_token: Optional[str], search_term: str, - keys: List[str], + keys: list[str], filter_dict: JsonDict, order_by: str, include_state: bool, - group_keys: List[str], + group_keys: list[str], event_context: Optional[bool], before_limit: Optional[int], after_limit: Optional[int], @@ -286,7 +286,7 @@ async def _search( # If doing a subset of all rooms search, check if any of the rooms # are from an upgraded room, and search their contents as well if search_filter.rooms: - historical_room_ids: List[str] = [] + historical_room_ids: list[str] = [] for room_id in search_filter.rooms: # Add any previous rooms to the search if they exist ids = await self.get_old_rooms_from_upgraded_room(room_id) @@ -307,7 +307,7 @@ async def _search( } } - sender_group: Optional[Dict[str, JsonDict]] + sender_group: Optional[dict[str, JsonDict]] if order_by == "rank": search_result, sender_group = await self._search_by_rank( @@ -442,7 +442,7 @@ async def _search_by_rank( search_term: str, keys: Iterable[str], search_filter: Filter, - ) -> Tuple[_SearchResult, Dict[str, JsonDict]]: + ) -> tuple[_SearchResult, dict[str, JsonDict]]: """ Performs a full text search for a user ordering by rank. 
@@ -461,9 +461,9 @@ async def _search_by_rank( """ rank_map = {} # event_id -> rank of event # Holds result of grouping by room, if applicable - room_groups: Dict[str, JsonDict] = {} + room_groups: dict[str, JsonDict] = {} # Holds result of grouping by sender, if applicable - sender_group: Dict[str, JsonDict] = {} + sender_group: dict[str, JsonDict] = {} search_result = await self.store.search_msgs(room_ids, search_term, keys) @@ -520,7 +520,7 @@ async def _search_by_recent( batch_group: Optional[str], batch_group_key: Optional[str], batch_token: Optional[str], - ) -> Tuple[_SearchResult, Optional[str]]: + ) -> tuple[_SearchResult, Optional[str]]: """ Performs a full text search for a user ordering by recent. @@ -542,14 +542,14 @@ async def _search_by_recent( """ rank_map = {} # event_id -> rank of event # Holds result of grouping by room, if applicable - room_groups: Dict[str, JsonDict] = {} + room_groups: dict[str, JsonDict] = {} # Holds the next_batch for the entire result set if one of those exists global_next_batch = None highlights = set() - room_events: List[EventBase] = [] + room_events: list[EventBase] = [] i = 0 pagination_token = batch_token @@ -632,11 +632,11 @@ async def _search_by_recent( async def _calculate_event_contexts( self, user: UserID, - allowed_events: List[EventBase], + allowed_events: list[EventBase], before_limit: int, after_limit: int, include_profile: bool, - ) -> Dict[str, JsonDict]: + ) -> dict[str, JsonDict]: """ Calculates the contextual events for any search results. 
diff --git a/synapse/handlers/send_email.py b/synapse/handlers/send_email.py index 6469b182c8..02fd48dbad 100644 --- a/synapse/handlers/send_email.py +++ b/synapse/handlers/send_email.py @@ -24,7 +24,7 @@ from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from io import BytesIO -from typing import TYPE_CHECKING, Dict, Optional +from typing import TYPE_CHECKING, Optional from twisted.internet.defer import Deferred from twisted.internet.endpoints import HostnameEndpoint @@ -136,7 +136,7 @@ async def send_email( app_name: str, html: str, text: str, - additional_headers: Optional[Dict[str, str]] = None, + additional_headers: Optional[dict[str, str]] = None, ) -> None: """Send a multipart email with the given information. diff --git a/synapse/handlers/sliding_sync/__init__.py b/synapse/handlers/sliding_sync/__init__.py index 255a041d0e..cea4b857ee 100644 --- a/synapse/handlers/sliding_sync/__init__.py +++ b/synapse/handlers/sliding_sync/__init__.py @@ -15,7 +15,7 @@ import itertools import logging from itertools import chain -from typing import TYPE_CHECKING, AbstractSet, Dict, List, Mapping, Optional, Set, Tuple +from typing import TYPE_CHECKING, AbstractSet, Mapping, Optional from prometheus_client import Histogram from typing_extensions import assert_never @@ -116,7 +116,7 @@ async def wait_for_sync_for_user( sync_config: SlidingSyncConfig, from_token: Optional[SlidingSyncStreamToken] = None, timeout_ms: int = 0, - ) -> Tuple[SlidingSyncResult, bool]: + ) -> tuple[SlidingSyncResult, bool]: """ Get the sync for a client if we have new data for it now. Otherwise wait for new data to arrive on the server. 
If the timeout expires, then @@ -262,7 +262,7 @@ async def current_sync_for_user( relevant_rooms_to_send_map = interested_rooms.relevant_rooms_to_send_map # Fetch room data - rooms: Dict[str, SlidingSyncResult.RoomResult] = {} + rooms: dict[str, SlidingSyncResult.RoomResult] = {} new_connection_state = previous_connection_state.get_mutable() @@ -490,7 +490,7 @@ async def get_current_state_deltas_for_room( room_membership_for_user_at_to_token: RoomsForUserType, from_token: RoomStreamToken, to_token: RoomStreamToken, - ) -> List[StateDelta]: + ) -> list[StateDelta]: """ Get the state deltas between two tokens taking into account the user's membership. If the user is LEAVE/BAN, we will only get the state deltas up to @@ -677,8 +677,8 @@ async def get_room_sync_data( # membership. Currently, we have to make all of these optional because # `invite`/`knock` rooms only have `stripped_state`. See # https://github.com/matrix-org/matrix-spec-proposals/pull/3575#discussion_r1653045932 - timeline_events: List[EventBase] = [] - bundled_aggregations: Optional[Dict[str, BundledAggregations]] = None + timeline_events: list[EventBase] = [] + bundled_aggregations: Optional[dict[str, BundledAggregations]] = None limited: Optional[bool] = None prev_batch_token: Optional[StreamToken] = None num_live: Optional[int] = None @@ -813,7 +813,7 @@ async def get_room_sync_data( # Figure out any stripped state events for invite/knocks. This allows the # potential joiner to identify the room. - stripped_state: List[JsonDict] = [] + stripped_state: list[JsonDict] = [] if room_membership_for_user_at_to_token.membership in ( Membership.INVITE, Membership.KNOCK, @@ -924,7 +924,7 @@ async def get_room_sync_data( # see https://github.com/matrix-org/matrix-spec/issues/380. This means that # clients won't be able to calculate the room name when necessary and just a # pitfall we have to deal with until that spec issue is resolved. 
- hero_user_ids: List[str] = [] + hero_user_ids: list[str] = [] # TODO: Should we also check for `EventTypes.CanonicalAlias` # (`m.room.canonical_alias`) as a fallback for the room name? see # https://github.com/matrix-org/matrix-spec-proposals/pull/3575#discussion_r1671260153 @@ -1036,7 +1036,7 @@ async def get_room_sync_data( ) required_state_filter = StateFilter.all() else: - required_state_types: List[Tuple[str, Optional[str]]] = [] + required_state_types: list[tuple[str, Optional[str]]] = [] num_wild_state_keys = 0 lazy_load_room_members = False num_others = 0 @@ -1057,7 +1057,7 @@ async def get_room_sync_data( lazy_load_room_members = True # Everyone in the timeline is relevant - timeline_membership: Set[str] = set() + timeline_membership: set[str] = set() if timeline_events is not None: for timeline_event in timeline_events: # Anyone who sent a message is relevant @@ -1219,7 +1219,7 @@ async def get_room_sync_data( room_avatar = avatar_event.content.get("url") # Assemble heroes: extract the info from the state we just fetched - heroes: List[SlidingSyncResult.RoomResult.StrippedHero] = [] + heroes: list[SlidingSyncResult.RoomResult.StrippedHero] = [] for hero_user_id in hero_user_ids: member_event = room_state.get((EventTypes.Member, hero_user_id)) if member_event is not None: @@ -1374,7 +1374,7 @@ async def _get_bump_stamp( self, room_id: str, to_token: StreamToken, - timeline: List[EventBase], + timeline: list[EventBase], check_outside_timeline: bool, ) -> Optional[int]: """Get a bump stamp for the room, if we have a bump event and it has @@ -1479,7 +1479,7 @@ def _required_state_changes( prev_required_state_map: Mapping[str, AbstractSet[str]], request_required_state_map: Mapping[str, AbstractSet[str]], state_deltas: StateMap[str], -) -> Tuple[Optional[Mapping[str, AbstractSet[str]]], StateFilter]: +) -> tuple[Optional[Mapping[str, AbstractSet[str]]], StateFilter]: """Calculates the changes between the required state room config from the previous requests 
compared with the current request. @@ -1524,15 +1524,15 @@ def _required_state_changes( # Contains updates to the required state map compared with the previous room # config. This has the same format as `RoomSyncConfig.required_state` - changes: Dict[str, AbstractSet[str]] = {} + changes: dict[str, AbstractSet[str]] = {} # The set of types/state keys that we need to fetch and return to the # client. Passed to `StateFilter.from_types(...)` - added: List[Tuple[str, Optional[str]]] = [] + added: list[tuple[str, Optional[str]]] = [] # Convert the list of state deltas to map from type to state_keys that have # changed. - changed_types_to_state_keys: Dict[str, Set[str]] = {} + changed_types_to_state_keys: dict[str, set[str]] = {} for event_type, state_key in state_deltas: changed_types_to_state_keys.setdefault(event_type, set()).add(state_key) diff --git a/synapse/handlers/sliding_sync/extensions.py b/synapse/handlers/sliding_sync/extensions.py index 25ee954b7f..221af86f7d 100644 --- a/synapse/handlers/sliding_sync/extensions.py +++ b/synapse/handlers/sliding_sync/extensions.py @@ -18,12 +18,10 @@ TYPE_CHECKING, AbstractSet, ChainMap, - Dict, Mapping, MutableMapping, Optional, Sequence, - Set, cast, ) @@ -85,7 +83,7 @@ async def get_extensions_response( previous_connection_state: "PerConnectionState", new_connection_state: "MutablePerConnectionState", actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList], - actual_room_ids: Set[str], + actual_room_ids: set[str], actual_room_response_map: Mapping[str, SlidingSyncResult.RoomResult], to_token: StreamToken, from_token: Optional[SlidingSyncStreamToken], @@ -208,7 +206,7 @@ def find_relevant_room_ids_for_extension( requested_room_ids: Optional[StrCollection], actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList], actual_room_ids: AbstractSet[str], - ) -> Set[str]: + ) -> set[str]: """ Handle the reserved `lists`/`rooms` keys for extensions. 
Extensions should only return results for rooms in the Sliding Sync response. This matches up the @@ -231,7 +229,7 @@ def find_relevant_room_ids_for_extension( # We only want to include account data for rooms that are already in the sliding # sync response AND that were requested in the account data request. - relevant_room_ids: Set[str] = set() + relevant_room_ids: set[str] = set() # See what rooms from the room subscriptions we should get account data for if requested_room_ids is not None: @@ -406,7 +404,7 @@ async def get_account_data_extension_response( previous_connection_state: "PerConnectionState", new_connection_state: "MutablePerConnectionState", actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList], - actual_room_ids: Set[str], + actual_room_ids: set[str], account_data_request: SlidingSyncConfig.Extensions.AccountDataExtension, to_token: StreamToken, from_token: Optional[SlidingSyncStreamToken], @@ -481,7 +479,7 @@ async def get_account_data_extension_response( # down account data previously or not, so we split the relevant # rooms up into different collections based on status. 
live_rooms = set() - previously_rooms: Dict[str, int] = {} + previously_rooms: dict[str, int] = {} initial_rooms = set() for room_id in relevant_room_ids: @@ -638,7 +636,7 @@ async def get_receipts_extension_response( previous_connection_state: "PerConnectionState", new_connection_state: "MutablePerConnectionState", actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList], - actual_room_ids: Set[str], + actual_room_ids: set[str], actual_room_response_map: Mapping[str, SlidingSyncResult.RoomResult], receipts_request: SlidingSyncConfig.Extensions.ReceiptsExtension, to_token: StreamToken, @@ -671,13 +669,13 @@ async def get_receipts_extension_response( actual_room_ids=actual_room_ids, ) - room_id_to_receipt_map: Dict[str, JsonMapping] = {} + room_id_to_receipt_map: dict[str, JsonMapping] = {} if len(relevant_room_ids) > 0: # We need to handle the different cases depending on if we have sent # down receipts previously or not, so we split the relevant rooms # up into different collections based on status. 
live_rooms = set() - previously_rooms: Dict[str, MultiWriterStreamToken] = {} + previously_rooms: dict[str, MultiWriterStreamToken] = {} initial_rooms = set() for room_id in relevant_room_ids: @@ -842,7 +840,7 @@ async def get_typing_extension_response( self, sync_config: SlidingSyncConfig, actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList], - actual_room_ids: Set[str], + actual_room_ids: set[str], actual_room_response_map: Mapping[str, SlidingSyncResult.RoomResult], typing_request: SlidingSyncConfig.Extensions.TypingExtension, to_token: StreamToken, @@ -872,7 +870,7 @@ async def get_typing_extension_response( actual_room_ids=actual_room_ids, ) - room_id_to_typing_map: Dict[str, JsonMapping] = {} + room_id_to_typing_map: dict[str, JsonMapping] = {} if len(relevant_room_ids) > 0: # Note: We don't need to take connection tracking into account for typing # notifications because they'll get anything still relevant and hasn't timed @@ -942,8 +940,8 @@ async def get_thread_subscriptions_extension_response( if len(updates) == 0: return None - subscribed_threads: Dict[str, Dict[str, _ThreadSubscription]] = {} - unsubscribed_threads: Dict[str, Dict[str, _ThreadUnsubscription]] = {} + subscribed_threads: dict[str, dict[str, _ThreadSubscription]] = {} + unsubscribed_threads: dict[str, dict[str, _ThreadUnsubscription]] = {} for stream_id, room_id, thread_root_id, subscribed, automatic in updates: if subscribed: subscribed_threads.setdefault(room_id, {})[thread_root_id] = ( diff --git a/synapse/handlers/sliding_sync/room_lists.py b/synapse/handlers/sliding_sync/room_lists.py index 19116590f7..fc77fd3c65 100644 --- a/synapse/handlers/sliding_sync/room_lists.py +++ b/synapse/handlers/sliding_sync/room_lists.py @@ -18,14 +18,10 @@ from typing import ( TYPE_CHECKING, AbstractSet, - Dict, - List, Literal, Mapping, MutableMapping, Optional, - Set, - Tuple, Union, cast, ) @@ -113,7 +109,7 @@ class SlidingSyncInterestedRooms: lists: Mapping[str, 
SlidingSyncResult.SlidingWindowList] relevant_room_map: Mapping[str, RoomSyncConfig] relevant_rooms_to_send_map: Mapping[str, RoomSyncConfig] - all_rooms: Set[str] + all_rooms: set[str] room_membership_for_user_map: Mapping[str, RoomsForUserType] newly_joined_rooms: AbstractSet[str] @@ -231,12 +227,12 @@ async def _compute_interested_rooms_new_tables( user_id = sync_config.user.to_string() # Assemble sliding window lists - lists: Dict[str, SlidingSyncResult.SlidingWindowList] = {} + lists: dict[str, SlidingSyncResult.SlidingWindowList] = {} # Keep track of the rooms that we can display and need to fetch more info about - relevant_room_map: Dict[str, RoomSyncConfig] = {} + relevant_room_map: dict[str, RoomSyncConfig] = {} # The set of room IDs of all rooms that could appear in any list. These # include rooms that are outside the list ranges. - all_rooms: Set[str] = set() + all_rooms: set[str] = set() # Note: this won't include rooms the user has left themselves. We add back # `newly_left` rooms below. This is more efficient than fetching all rooms and @@ -472,7 +468,7 @@ async def _compute_interested_rooms_new_tables( all_rooms.update(filtered_sync_room_map) - ops: List[SlidingSyncResult.SlidingWindowList.Operation] = [] + ops: list[SlidingSyncResult.SlidingWindowList.Operation] = [] if list_config.ranges: # Optimization: If we are asking for the full range, we don't @@ -487,7 +483,7 @@ async def _compute_interested_rooms_new_tables( and list_config.ranges[0][1] >= len(filtered_sync_room_map) - 1 ): - sorted_room_info: List[RoomsForUserType] = list( + sorted_room_info: list[RoomsForUserType] = list( filtered_sync_room_map.values() ) else: @@ -496,7 +492,7 @@ async def _compute_interested_rooms_new_tables( # Cast is safe because RoomsForUserSlidingSync is part # of the `RoomsForUserType` union. Why can't it detect this? 
cast( - Dict[str, RoomsForUserType], filtered_sync_room_map + dict[str, RoomsForUserType], filtered_sync_room_map ), to_token, # We only need to sort the rooms up to the end @@ -506,7 +502,7 @@ async def _compute_interested_rooms_new_tables( ) for range in list_config.ranges: - room_ids_in_list: List[str] = [] + room_ids_in_list: list[str] = [] # We're going to loop through the sorted list of rooms starting # at the range start index and keep adding rooms until we fill @@ -639,12 +635,12 @@ async def _compute_interested_rooms_fallback( dm_room_ids = await self._get_dm_rooms_for_user(sync_config.user.to_string()) # Assemble sliding window lists - lists: Dict[str, SlidingSyncResult.SlidingWindowList] = {} + lists: dict[str, SlidingSyncResult.SlidingWindowList] = {} # Keep track of the rooms that we can display and need to fetch more info about - relevant_room_map: Dict[str, RoomSyncConfig] = {} + relevant_room_map: dict[str, RoomSyncConfig] = {} # The set of room IDs of all rooms that could appear in any list. These # include rooms that are outside the list ranges. 
- all_rooms: Set[str] = set() + all_rooms: set[str] = set() if sync_config.lists: with start_active_span("assemble_sliding_window_lists"): @@ -691,10 +687,10 @@ async def _compute_interested_rooms_fallback( filtered_sync_room_map, to_token ) - ops: List[SlidingSyncResult.SlidingWindowList.Operation] = [] + ops: list[SlidingSyncResult.SlidingWindowList.Operation] = [] if list_config.ranges: for range in list_config.ranges: - room_ids_in_list: List[str] = [] + room_ids_in_list: list[str] = [] # We're going to loop through the sorted list of rooms starting # at the range start index and keep adding rooms until we fill @@ -811,14 +807,14 @@ async def _filter_relevant_rooms_to_send( self, previous_connection_state: PerConnectionState, from_token: Optional[StreamToken], - relevant_room_map: Dict[str, RoomSyncConfig], - ) -> Dict[str, RoomSyncConfig]: + relevant_room_map: dict[str, RoomSyncConfig], + ) -> dict[str, RoomSyncConfig]: """Filters the `relevant_room_map` down to those rooms that may have updates we need to fetch and return.""" # Filtered subset of `relevant_room_map` for rooms that may have updates # (in the event stream) - relevant_rooms_to_send_map: Dict[str, RoomSyncConfig] = relevant_room_map + relevant_rooms_to_send_map: dict[str, RoomSyncConfig] = relevant_room_map if relevant_room_map: with start_active_span("filter_relevant_rooms_to_send"): if from_token: @@ -908,7 +904,7 @@ async def _get_rewind_changes_to_current_membership_to_token( # # First, we need to get the max stream_ordering of each event persister instance # that we queried events from. 
- instance_to_max_stream_ordering_map: Dict[str, int] = {} + instance_to_max_stream_ordering_map: dict[str, int] = {} for room_for_user in rooms_for_user.values(): instance_name = room_for_user.event_pos.instance_name stream_ordering = room_for_user.event_pos.stream @@ -966,12 +962,12 @@ async def _get_rewind_changes_to_current_membership_to_token( # Otherwise we're about to make changes to `rooms_for_user`, so we turn # it into a mutable dict. - changes: Dict[str, Optional[RoomsForUser]] = {} + changes: dict[str, Optional[RoomsForUser]] = {} # Assemble a list of the first membership event after the `to_token` so we can # step backward to the previous membership that would apply to the from/to # range. - first_membership_change_by_room_id_after_to_token: Dict[ + first_membership_change_by_room_id_after_to_token: dict[ str, CurrentStateDeltaMembership ] = {} for membership_change in current_state_delta_membership_changes_after_to_token: @@ -1033,7 +1029,7 @@ async def get_room_membership_for_user_at_to_token( user: UserID, to_token: StreamToken, from_token: Optional[StreamToken], - ) -> Tuple[Dict[str, RoomsForUserType], AbstractSet[str], AbstractSet[str]]: + ) -> tuple[dict[str, RoomsForUserType], AbstractSet[str], AbstractSet[str]]: """ Fetch room IDs that the user has had membership in (the full room list including long-lost left rooms that will be filtered, sorted, and sliced). @@ -1108,7 +1104,7 @@ async def get_room_membership_for_user_at_to_token( # Since we fetched the users room list at some point in time after the # tokens, we need to revert/rewind some membership changes to match the point in # time of the `to_token`. 
- rooms_for_user: Dict[str, RoomsForUserType] = { + rooms_for_user: dict[str, RoomsForUserType] = { room.room_id: room for room in room_for_user_list } changes = await self._get_rewind_changes_to_current_membership_to_token( @@ -1143,7 +1139,7 @@ async def _get_newly_joined_and_left_rooms( user_id: str, to_token: StreamToken, from_token: Optional[StreamToken], - ) -> Tuple[AbstractSet[str], Mapping[str, RoomsForUserStateReset]]: + ) -> tuple[AbstractSet[str], Mapping[str, RoomsForUserStateReset]]: """Fetch the sets of rooms that the user newly joined or left in the given token range. @@ -1163,8 +1159,8 @@ async def _get_newly_joined_and_left_rooms( need to check if a membership still exists in the room. """ - newly_joined_room_ids: Set[str] = set() - newly_left_room_map: Dict[str, RoomsForUserStateReset] = {} + newly_joined_room_ids: set[str] = set() + newly_left_room_map: dict[str, RoomsForUserStateReset] = {} if not from_token: return newly_joined_room_ids, newly_left_room_map @@ -1190,7 +1186,7 @@ async def _get_newly_joined_and_left_rooms_fallback( user_id: str, to_token: StreamToken, from_token: Optional[StreamToken], - ) -> Tuple[AbstractSet[str], Mapping[str, RoomsForUserStateReset]]: + ) -> tuple[AbstractSet[str], Mapping[str, RoomsForUserStateReset]]: """Fetch the sets of rooms that the user newly joined or left in the given token range. @@ -1209,8 +1205,8 @@ async def _get_newly_joined_and_left_rooms_fallback( was state reset out of the room. To actually check for a state reset, you need to check if a membership still exists in the room. """ - newly_joined_room_ids: Set[str] = set() - newly_left_room_map: Dict[str, RoomsForUserStateReset] = {} + newly_joined_room_ids: set[str] = set() + newly_left_room_map: dict[str, RoomsForUserStateReset] = {} # We need to figure out the # @@ -1232,20 +1228,20 @@ async def _get_newly_joined_and_left_rooms_fallback( # 1) Assemble a list of the last membership events in some given ranges. 
Someone # could have left and joined multiple times during the given range but we only # care about end-result so we grab the last one. - last_membership_change_by_room_id_in_from_to_range: Dict[ + last_membership_change_by_room_id_in_from_to_range: dict[ str, CurrentStateDeltaMembership ] = {} # We also want to assemble a list of the first membership events during the token # range so we can step backward to the previous membership that would apply to # before the token range to see if we have `newly_joined` the room. - first_membership_change_by_room_id_in_from_to_range: Dict[ + first_membership_change_by_room_id_in_from_to_range: dict[ str, CurrentStateDeltaMembership ] = {} # Keep track if the room has a non-join event in the token range so we can later # tell if it was a `newly_joined` room. If the last membership event in the # token range is a join and there is also some non-join in the range, we know # they `newly_joined`. - has_non_join_event_by_room_id_in_from_to_range: Dict[str, bool] = {} + has_non_join_event_by_room_id_in_from_to_range: dict[str, bool] = {} for ( membership_change ) in current_state_delta_membership_changes_in_from_to_range: @@ -1355,9 +1351,9 @@ async def _get_dm_rooms_for_user( async def filter_rooms_relevant_for_sync( self, user: UserID, - room_membership_for_user_map: Dict[str, RoomsForUserType], + room_membership_for_user_map: dict[str, RoomsForUserType], newly_left_room_ids: AbstractSet[str], - ) -> Dict[str, RoomsForUserType]: + ) -> dict[str, RoomsForUserType]: """ Filter room IDs that should/can be listed for this user in the sync response (the full room list that will be further filtered, sorted, and sliced). 
@@ -1402,7 +1398,7 @@ async def filter_rooms_relevant_for_sync( async def check_room_subscription_allowed_for_user( self, room_id: str, - room_membership_for_user_map: Dict[str, RoomsForUserType], + room_membership_for_user_map: dict[str, RoomsForUserType], to_token: StreamToken, ) -> Optional[RoomsForUserType]: """ @@ -1469,8 +1465,8 @@ async def check_room_subscription_allowed_for_user( async def _bulk_get_stripped_state_for_rooms_from_sync_room_map( self, room_ids: StrCollection, - sync_room_map: Dict[str, RoomsForUserType], - ) -> Dict[str, Optional[StateMap[StrippedStateEvent]]]: + sync_room_map: dict[str, RoomsForUserType], + ) -> dict[str, Optional[StateMap[StrippedStateEvent]]]: """ Fetch stripped state for a list of room IDs. Stripped state is only applicable to invite/knock rooms. Other rooms will have `None` as their @@ -1488,7 +1484,7 @@ async def _bulk_get_stripped_state_for_rooms_from_sync_room_map( Mapping from room_id to mapping of (type, state_key) to stripped state event. 
""" - room_id_to_stripped_state_map: Dict[ + room_id_to_stripped_state_map: dict[ str, Optional[StateMap[StrippedStateEvent]] ] = {} @@ -1500,7 +1496,7 @@ async def _bulk_get_stripped_state_for_rooms_from_sync_room_map( ] # Gather a list of event IDs we can grab stripped state from - invite_or_knock_event_ids: List[str] = [] + invite_or_knock_event_ids: list[str] = [] for room_id in room_ids_to_fetch: if sync_room_map[room_id].membership in ( Membership.INVITE, @@ -1565,10 +1561,10 @@ async def _bulk_get_partial_current_state_content_for_rooms( # `content.algorithm` from `EventTypes.RoomEncryption` "room_encryption", ], - room_ids: Set[str], - sync_room_map: Dict[str, RoomsForUserType], + room_ids: set[str], + sync_room_map: dict[str, RoomsForUserType], to_token: StreamToken, - room_id_to_stripped_state_map: Dict[ + room_id_to_stripped_state_map: dict[ str, Optional[StateMap[StrippedStateEvent]] ], ) -> Mapping[str, Union[Optional[str], StateSentinel]]: @@ -1593,7 +1589,7 @@ async def _bulk_get_partial_current_state_content_for_rooms( the given state event (event_type, ""), otherwise `None`. Rooms unknown to this server will return `ROOM_UNKNOWN_SENTINEL`. """ - room_id_to_content: Dict[str, Union[Optional[str], StateSentinel]] = {} + room_id_to_content: dict[str, Union[Optional[str], StateSentinel]] = {} # As a bulk shortcut, use the current state if the server is particpating in the # room (meaning we have current state). 
Ideally, for leave/ban rooms, we would @@ -1650,7 +1646,7 @@ async def _bulk_get_partial_current_state_content_for_rooms( # Update our `room_id_to_content` map based on the stripped state # (applies to invite/knock rooms) - rooms_ids_without_stripped_state: Set[str] = set() + rooms_ids_without_stripped_state: set[str] = set() for room_id in room_ids_without_results: stripped_state_map = room_id_to_stripped_state_map.get( room_id, Sentinel.UNSET_SENTINEL @@ -1730,12 +1726,12 @@ async def _bulk_get_partial_current_state_content_for_rooms( async def filter_rooms( self, user: UserID, - sync_room_map: Dict[str, RoomsForUserType], + sync_room_map: dict[str, RoomsForUserType], previous_connection_state: PerConnectionState, filters: SlidingSyncConfig.SlidingSyncList.Filters, to_token: StreamToken, dm_room_ids: AbstractSet[str], - ) -> Dict[str, RoomsForUserType]: + ) -> dict[str, RoomsForUserType]: """ Filter rooms based on the sync request. @@ -1753,7 +1749,7 @@ async def filter_rooms( """ user_id = user.to_string() - room_id_to_stripped_state_map: Dict[ + room_id_to_stripped_state_map: dict[ str, Optional[StateMap[StrippedStateEvent]] ] = {} @@ -1891,7 +1887,7 @@ async def filter_rooms( with start_active_span("filters.tags"): # Fetch the user tags for their rooms room_tags = await self.store.get_tags_for_user(user_id) - room_id_to_tag_name_set: Dict[str, Set[str]] = { + room_id_to_tag_name_set: dict[str, set[str]] = { room_id: set(tags.keys()) for room_id, tags in room_tags.items() } @@ -1947,7 +1943,7 @@ async def filter_rooms_using_tables( filters: SlidingSyncConfig.SlidingSyncList.Filters, to_token: StreamToken, dm_room_ids: AbstractSet[str], - ) -> Dict[str, RoomsForUserSlidingSync]: + ) -> dict[str, RoomsForUserSlidingSync]: """ Filter rooms based on the sync request. 
@@ -2059,7 +2055,7 @@ async def filter_rooms_using_tables( with start_active_span("filters.tags"): # Fetch the user tags for their rooms room_tags = await self.store.get_tags_for_user(user_id) - room_id_to_tag_name_set: Dict[str, Set[str]] = { + room_id_to_tag_name_set: dict[str, set[str]] = { room_id: set(tags.keys()) for room_id, tags in room_tags.items() } @@ -2109,10 +2105,10 @@ async def filter_rooms_using_tables( @trace async def sort_rooms( self, - sync_room_map: Dict[str, RoomsForUserType], + sync_room_map: dict[str, RoomsForUserType], to_token: StreamToken, limit: Optional[int] = None, - ) -> List[RoomsForUserType]: + ) -> list[RoomsForUserType]: """ Sort by `stream_ordering` of the last event that the user should see in the room. `stream_ordering` is unique so we get a stable sort. @@ -2133,11 +2129,11 @@ async def sort_rooms( # Assemble a map of room ID to the `stream_ordering` of the last activity that the # user should see in the room (<= `to_token`) - last_activity_in_room_map: Dict[str, int] = {} + last_activity_in_room_map: dict[str, int] = {} # Same as above, except for positions that we know are in the event # stream cache. 
- cached_positions: Dict[str, int] = {} + cached_positions: dict[str, int] = {} earliest_cache_position = ( self.store._events_stream_cache.get_earliest_known_position() diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py index 735cfa0a0f..641241287e 100644 --- a/synapse/handlers/sso.py +++ b/synapse/handlers/sso.py @@ -27,14 +27,11 @@ Any, Awaitable, Callable, - Dict, Iterable, - List, Mapping, NoReturn, Optional, Protocol, - Set, ) from urllib.parse import urlencode @@ -227,10 +224,10 @@ def __init__(self, hs: "HomeServer"): self._mapping_lock = Linearizer(clock=hs.get_clock(), name="sso_user_mapping") # a map from session id to session data - self._username_mapping_sessions: Dict[str, UsernameMappingSession] = {} + self._username_mapping_sessions: dict[str, UsernameMappingSession] = {} # map from idp_id to SsoIdentityProvider - self._identity_providers: Dict[str, SsoIdentityProvider] = {} + self._identity_providers: dict[str, SsoIdentityProvider] = {} self._consent_at_registration = hs.config.consent.user_consent_at_registration @@ -999,7 +996,7 @@ async def handle_submit_username_request( session.use_avatar = use_avatar emails_from_idp = set(session.emails) - filtered_emails: Set[str] = set() + filtered_emails: set[str] = set() # we iterate through the list rather than just building a set conjunction, so # that we can log attempts to use unknown addresses @@ -1142,7 +1139,7 @@ def _expire_old_sessions(self) -> None: def check_required_attributes( self, request: SynapseRequest, - attributes: Mapping[str, List[Any]], + attributes: Mapping[str, list[Any]], attribute_requirements: Iterable[SsoAttributeRequirement], ) -> bool: """ @@ -1259,7 +1256,7 @@ def get_username_mapping_session_cookie_from_request(request: IRequest) -> str: def _check_attribute_requirement( - attributes: Mapping[str, List[Any]], req: SsoAttributeRequirement + attributes: Mapping[str, list[Any]], req: SsoAttributeRequirement ) -> bool: """Check if SSO attributes meet the proper 
requirements. diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py index 5b4a2cc62d..0804f72c47 100644 --- a/synapse/handlers/stats.py +++ b/synapse/handlers/stats.py @@ -25,10 +25,8 @@ TYPE_CHECKING, Any, Counter as CounterType, - Dict, Iterable, Optional, - Tuple, ) from synapse.api.constants import EventContentFields, EventTypes, Membership @@ -157,7 +155,7 @@ async def _unsafe_process(self) -> None: async def _handle_deltas( self, deltas: Iterable[StateDelta] - ) -> Tuple[Dict[str, CounterType[str]], Dict[str, CounterType[str]]]: + ) -> tuple[dict[str, CounterType[str]], dict[str, CounterType[str]]]: """Called with the state deltas to process Returns: @@ -165,10 +163,10 @@ async def _handle_deltas( mapping from room/user ID to changes in the various fields. """ - room_to_stats_deltas: Dict[str, CounterType[str]] = {} - user_to_stats_deltas: Dict[str, CounterType[str]] = {} + room_to_stats_deltas: dict[str, CounterType[str]] = {} + user_to_stats_deltas: dict[str, CounterType[str]] = {} - room_to_state_updates: Dict[str, Dict[str, Any]] = {} + room_to_state_updates: dict[str, dict[str, Any]] = {} for delta in deltas: logger.debug( diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 2a6652b585..a19b75203b 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -24,14 +24,9 @@ TYPE_CHECKING, AbstractSet, Any, - Dict, - FrozenSet, - List, Mapping, Optional, Sequence, - Set, - Tuple, ) import attr @@ -113,7 +108,7 @@ LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE = 100 -SyncRequestKey = Tuple[Any, ...] +SyncRequestKey = tuple[Any, ...] @attr.s(slots=True, frozen=True, auto_attribs=True) @@ -132,7 +127,7 @@ class TimelineBatch: limited: bool # A mapping of event ID to the bundled aggregations for the above events. # This is only calculated if limited is true. 
- bundled_aggregations: Optional[Dict[str, BundledAggregations]] = None + bundled_aggregations: Optional[dict[str, BundledAggregations]] = None def __bool__(self) -> bool: """Make the result appear empty if there are no updates. This is used @@ -151,8 +146,8 @@ class JoinedSyncResult: room_id: str timeline: TimelineBatch state: StateMap[EventBase] - ephemeral: List[JsonDict] - account_data: List[JsonDict] + ephemeral: list[JsonDict] + account_data: list[JsonDict] unread_notifications: JsonDict unread_thread_notifications: JsonDict summary: Optional[JsonDict] @@ -174,7 +169,7 @@ class ArchivedSyncResult: room_id: str timeline: TimelineBatch state: StateMap[EventBase] - account_data: List[JsonDict] + account_data: list[JsonDict] def __bool__(self) -> bool: """Make the result appear empty if there are no updates. This is used @@ -209,11 +204,11 @@ class _RoomChanges: and left room IDs since last sync. """ - room_entries: List["RoomSyncResultBuilder"] - invited: List[InvitedSyncResult] - knocked: List[KnockedSyncResult] - newly_joined_rooms: List[str] - newly_left_rooms: List[str] + room_entries: list["RoomSyncResultBuilder"] + invited: list[InvitedSyncResult] + knocked: list[KnockedSyncResult] + newly_joined_rooms: list[str] + newly_left_rooms: list[str] @attr.s(slots=True, frozen=True, auto_attribs=True) @@ -236,16 +231,16 @@ class SyncResult: """ next_batch: StreamToken - presence: List[UserPresenceState] - account_data: List[JsonDict] - joined: List[JoinedSyncResult] - invited: List[InvitedSyncResult] - knocked: List[KnockedSyncResult] - archived: List[ArchivedSyncResult] - to_device: List[JsonDict] + presence: list[UserPresenceState] + account_data: list[JsonDict] + joined: list[JoinedSyncResult] + invited: list[InvitedSyncResult] + knocked: list[KnockedSyncResult] + archived: list[ArchivedSyncResult] + to_device: list[JsonDict] device_lists: DeviceListUpdates device_one_time_keys_count: JsonMapping - device_unused_fallback_key_types: List[str] + 
device_unused_fallback_key_types: list[str] def __bool__(self) -> bool: """Make the result appear empty if there are no updates. This is used @@ -267,7 +262,7 @@ def __bool__(self) -> bool: def empty( next_batch: StreamToken, device_one_time_keys_count: JsonMapping, - device_unused_fallback_key_types: List[str], + device_unused_fallback_key_types: list[str], ) -> "SyncResult": "Return a new empty result" return SyncResult( @@ -319,7 +314,7 @@ def __init__(self, hs: "HomeServer"): # ExpiringCache((User, Device)) -> LruCache(user_id => event_id) self.lazy_loaded_members_cache: ExpiringCache[ - Tuple[str, Optional[str]], LruCache[str, str] + tuple[str, Optional[str]], LruCache[str, str] ] = ExpiringCache( cache_name="lazy_loaded_members_cache", server_name=self.server_name, @@ -419,7 +414,7 @@ async def _wait_for_sync_for_user( ) device_id = sync_config.device_id one_time_keys_count: JsonMapping = {} - unused_fallback_key_types: List[str] = [] + unused_fallback_key_types: list[str] = [] if device_id: user_id = sync_config.user.to_string() # TODO: We should have a way to let clients differentiate between the states of: @@ -543,7 +538,7 @@ async def ephemeral_by_room( sync_result_builder: "SyncResultBuilder", now_token: StreamToken, since_token: Optional[StreamToken] = None, - ) -> Tuple[StreamToken, Dict[str, List[JsonDict]]]: + ) -> tuple[StreamToken, dict[str, list[JsonDict]]]: """Get the ephemeral events for each room the user is in Args: sync_result_builder @@ -610,7 +605,7 @@ async def _load_filtered_recents( sync_config: SyncConfig, upto_token: StreamToken, since_token: Optional[StreamToken] = None, - potential_recents: Optional[List[EventBase]] = None, + potential_recents: Optional[list[EventBase]] = None, newly_joined_room: bool = False, ) -> TimelineBatch: """Create a timeline batch for the room @@ -669,7 +664,7 @@ async def _load_filtered_recents( # We check if there are any state events, if there are then we pass # all current state events to the 
filter_events function. This is to # ensure that we always include current state in the timeline - current_state_ids: FrozenSet[str] = frozenset() + current_state_ids: frozenset[str] = frozenset() if any(e.is_state() for e in recents): # FIXME(faster_joins): We use the partial state here as # we don't want to block `/sync` on finishing a lazy join. @@ -968,7 +963,7 @@ async def compute_summary( return summary def get_lazy_loaded_members_cache( - self, cache_key: Tuple[str, Optional[str]] + self, cache_key: tuple[str, Optional[str]] ) -> LruCache[str, str]: cache: Optional[LruCache[str, str]] = self.lazy_loaded_members_cache.get( cache_key @@ -1029,11 +1024,11 @@ async def compute_state_delta( ): # The memberships needed for events in the timeline. # Only calculated when `lazy_load_members` is on. - members_to_fetch: Optional[Set[str]] = None + members_to_fetch: Optional[set[str]] = None # A dictionary mapping user IDs to the first event in the timeline sent by # them. Only calculated when `lazy_load_members` is on. - first_event_by_sender_map: Optional[Dict[str, EventBase]] = None + first_event_by_sender_map: Optional[dict[str, EventBase]] = None # The contribution to the room state from state events in the timeline. # Only contains the last event for any given state key. 
@@ -1159,7 +1154,7 @@ async def compute_state_delta( if t[0] == EventTypes.Member: cache.set(t[1], event_id) - state: Dict[str, EventBase] = {} + state: dict[str, EventBase] = {} if state_ids: state = await self.store.get_events(list(state_ids.values())) @@ -1177,7 +1172,7 @@ async def _compute_state_delta_for_full_sync( sync_config: SyncConfig, batch: TimelineBatch, end_token: StreamToken, - members_to_fetch: Optional[Set[str]], + members_to_fetch: Optional[set[str]], timeline_state: StateMap[str], joined: bool, ) -> StateMap[str]: @@ -1327,7 +1322,7 @@ async def _compute_state_delta_for_incremental_sync( batch: TimelineBatch, since_token: StreamToken, end_token: StreamToken, - members_to_fetch: Optional[Set[str]], + members_to_fetch: Optional[set[str]], timeline_state: StateMap[str], ) -> StateMap[str]: """Calculate the state events to be included in an incremental sync response. @@ -1562,7 +1557,7 @@ async def _find_missing_partial_state_memberships( # Identify memberships missing from `found_state_ids` and pick out the auth # events in which to look for them. 
- auth_event_ids: Set[str] = set() + auth_event_ids: set[str] = set() for member in members_to_fetch: if (EventTypes.Member, member) in found_state_ids: continue @@ -1765,7 +1760,7 @@ async def generate_sync_result( logger.debug("Fetching OTK data") device_id = sync_config.device_id one_time_keys_count: JsonMapping = {} - unused_fallback_key_types: List[str] = [] + unused_fallback_key_types: list[str] = [] if device_id: # TODO: We should have a way to let clients differentiate between the states of: # * no change in OTK count since the provided since token @@ -1855,7 +1850,7 @@ async def get_sync_result_builder( self.rooms_to_exclude_globally, ) - last_membership_change_by_room_id: Dict[str, EventBase] = {} + last_membership_change_by_room_id: dict[str, EventBase] = {} for event in membership_change_events: last_membership_change_by_room_id[event.room_id] = event @@ -1914,7 +1909,7 @@ async def get_sync_result_builder( # - are full-stated # - became fully-stated at some point during the sync period # (These rooms will have been omitted during a previous eager sync.) - forced_newly_joined_room_ids: Set[str] = set() + forced_newly_joined_room_ids: set[str] = set() if since_token and not sync_config.filter_collection.lazy_load_members(): un_partial_stated_rooms = ( await self.store.get_un_partial_stated_rooms_between( @@ -2123,7 +2118,7 @@ async def _generate_sync_entry_for_presence( async def _generate_sync_entry_for_rooms( self, sync_result_builder: "SyncResultBuilder" - ) -> Tuple[AbstractSet[str], AbstractSet[str]]: + ) -> tuple[AbstractSet[str], AbstractSet[str]]: """Generates the rooms portion of the sync response. Populates the `sync_result_builder` with the result. 
@@ -2172,7 +2167,7 @@ async def _generate_sync_entry_for_rooms( or sync_result_builder.sync_config.filter_collection.blocks_all_room_ephemeral() ) if block_all_room_ephemeral: - ephemeral_by_room: Dict[str, List[JsonDict]] = {} + ephemeral_by_room: dict[str, list[JsonDict]] = {} else: now_token, ephemeral_by_room = await self.ephemeral_by_room( sync_result_builder, @@ -2266,7 +2261,7 @@ async def _have_rooms_changed( async def _get_room_changes_for_incremental_sync( self, sync_result_builder: "SyncResultBuilder", - ignored_users: FrozenSet[str], + ignored_users: frozenset[str], ) -> _RoomChanges: """Determine the changes in rooms to report to the user. @@ -2297,17 +2292,17 @@ async def _get_room_changes_for_incremental_sync( assert since_token - mem_change_events_by_room_id: Dict[str, List[EventBase]] = {} + mem_change_events_by_room_id: dict[str, list[EventBase]] = {} for event in membership_change_events: mem_change_events_by_room_id.setdefault(event.room_id, []).append(event) - newly_joined_rooms: List[str] = list( + newly_joined_rooms: list[str] = list( sync_result_builder.forced_newly_joined_room_ids ) - newly_left_rooms: List[str] = [] - room_entries: List[RoomSyncResultBuilder] = [] - invited: List[InvitedSyncResult] = [] - knocked: List[KnockedSyncResult] = [] + newly_left_rooms: list[str] = [] + room_entries: list[RoomSyncResultBuilder] = [] + invited: list[InvitedSyncResult] = [] + knocked: list[KnockedSyncResult] = [] invite_config = await self.store.get_invite_config_for_user(user_id) for room_id, events in mem_change_events_by_room_id.items(): # The body of this loop will add this room to at least one of the five lists @@ -2444,7 +2439,7 @@ async def _get_room_changes_for_incremental_sync( # This is all screaming out for a refactor, as the logic here is # subtle and the moving parts numerous. 
if leave_event.internal_metadata.is_out_of_band_membership(): - batch_events: Optional[List[EventBase]] = [leave_event] + batch_events: Optional[list[EventBase]] = [leave_event] else: batch_events = None @@ -2526,7 +2521,7 @@ async def _get_room_changes_for_incremental_sync( async def _get_room_changes_for_initial_sync( self, sync_result_builder: "SyncResultBuilder", - ignored_users: FrozenSet[str], + ignored_users: frozenset[str], ) -> _RoomChanges: """Returns entries for all rooms for the user. @@ -2612,7 +2607,7 @@ async def _generate_room_entry( self, sync_result_builder: "SyncResultBuilder", room_builder: "RoomSyncResultBuilder", - ephemeral: List[JsonDict], + ephemeral: list[JsonDict], tags: Optional[Mapping[str, JsonMapping]], account_data: Mapping[str, JsonMapping], always_include: bool = False, @@ -2791,7 +2786,7 @@ async def _generate_room_entry( ) if room_builder.rtype == "joined": - unread_notifications: Dict[str, int] = {} + unread_notifications: dict[str, int] = {} room_sync = JoinedSyncResult( room_id=room_id, timeline=batch, @@ -2858,7 +2853,7 @@ async def _generate_room_entry( raise Exception("Unrecognized rtype: %r", room_builder.rtype) -def _action_has_highlight(actions: List[JsonDict]) -> bool: +def _action_has_highlight(actions: list[JsonDict]) -> bool: for action in actions: try: if action.get("set_tweak", None) == "highlight": @@ -3014,20 +3009,20 @@ class SyncResultBuilder: full_state: bool since_token: Optional[StreamToken] now_token: StreamToken - joined_room_ids: FrozenSet[str] - excluded_room_ids: FrozenSet[str] - forced_newly_joined_room_ids: FrozenSet[str] - membership_change_events: List[EventBase] - - presence: List[UserPresenceState] = attr.Factory(list) - account_data: List[JsonDict] = attr.Factory(list) - joined: List[JoinedSyncResult] = attr.Factory(list) - invited: List[InvitedSyncResult] = attr.Factory(list) - knocked: List[KnockedSyncResult] = attr.Factory(list) - archived: List[ArchivedSyncResult] = attr.Factory(list) - 
to_device: List[JsonDict] = attr.Factory(list) - - def calculate_user_changes(self) -> Tuple[AbstractSet[str], AbstractSet[str]]: + joined_room_ids: frozenset[str] + excluded_room_ids: frozenset[str] + forced_newly_joined_room_ids: frozenset[str] + membership_change_events: list[EventBase] + + presence: list[UserPresenceState] = attr.Factory(list) + account_data: list[JsonDict] = attr.Factory(list) + joined: list[JoinedSyncResult] = attr.Factory(list) + invited: list[InvitedSyncResult] = attr.Factory(list) + knocked: list[KnockedSyncResult] = attr.Factory(list) + archived: list[ArchivedSyncResult] = attr.Factory(list) + to_device: list[JsonDict] = attr.Factory(list) + + def calculate_user_changes(self) -> tuple[AbstractSet[str], AbstractSet[str]]: """Work out which other users have joined or left rooms we are joined to. This data only is only useful for an incremental sync. @@ -3105,7 +3100,7 @@ class RoomSyncResultBuilder: room_id: str rtype: str - events: Optional[List[EventBase]] + events: Optional[list[EventBase]] newly_joined: bool full_state: bool since_token: Optional[StreamToken] diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index 77c5b747c3..17e43858c9 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -20,7 +20,7 @@ # import logging import random -from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Iterable, Optional import attr @@ -96,15 +96,15 @@ def __init__(self, hs: "HomeServer"): ) # map room IDs to serial numbers - self._room_serials: Dict[str, int] = {} + self._room_serials: dict[str, int] = {} # map room IDs to sets of users currently typing - self._room_typing: Dict[str, Set[str]] = {} + self._room_typing: dict[str, set[str]] = {} - self._member_last_federation_poke: Dict[RoomMember, int] = {} + self._member_last_federation_poke: dict[RoomMember, int] = {} self.wheel_timer: WheelTimer[RoomMember] = WheelTimer(bucket_size=5000) 
self._latest_room_serial = 0 - self._rooms_updated: Set[str] = set() + self._rooms_updated: set[str] = set() self.clock.looping_call(self._handle_timeouts, 5000) self.clock.looping_call(self._prune_old_typing, FORGET_TIMEOUT) @@ -195,7 +195,7 @@ async def _push_remote(self, member: RoomMember, typing: bool) -> None: logger.exception("Error pushing typing notif to remotes") def process_replication_rows( - self, token: int, rows: List[TypingStream.TypingStreamRow] + self, token: int, rows: list[TypingStream.TypingStreamRow] ) -> None: """Should be called whenever we receive updates for typing stream.""" @@ -226,7 +226,7 @@ def process_replication_rows( ) async def _send_changes_in_typing_to_remotes( - self, room_id: str, prev_typing: Set[str], now_typing: Set[str] + self, room_id: str, prev_typing: set[str], now_typing: set[str] ) -> None: """Process a change in typing of a room from replication, sending EDUs for any local users. @@ -280,7 +280,7 @@ def __init__(self, hs: "HomeServer"): hs.get_distributor().observe("user_left_room", self.user_left_room) # clock time we expect to stop - self._member_typing_until: Dict[RoomMember, int] = {} + self._member_typing_until: dict[RoomMember, int] = {} # caches which room_ids changed at which serials self._typing_stream_change_cache = StreamChangeCache( @@ -452,7 +452,7 @@ def _push_update_local(self, member: RoomMember, typing: bool) -> None: async def get_all_typing_updates( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, list]], int, bool]: + ) -> tuple[list[tuple[int, list]], int, bool]: """Get updates for typing replication stream. Args: @@ -504,7 +504,7 @@ async def get_all_typing_updates( return rows, current_id, limited def process_replication_rows( - self, token: int, rows: List[TypingStream.TypingStreamRow] + self, token: int, rows: list[TypingStream.TypingStreamRow] ) -> None: # The writing process should never get updates from replication. 
raise Exception("Typing writer instance got typing info over replication") @@ -531,7 +531,7 @@ def _make_event_for(self, room_id: str) -> JsonMapping: async def get_new_events_as( self, from_key: int, service: ApplicationService - ) -> Tuple[List[JsonMapping], int]: + ) -> tuple[list[JsonMapping], int]: """Returns a set of new typing events that an appservice may be interested in. @@ -578,7 +578,7 @@ async def get_new_events( is_guest: bool, explicit_room_id: Optional[str] = None, to_key: Optional[int] = None, - ) -> Tuple[List[JsonMapping], int]: + ) -> tuple[list[JsonMapping], int]: """ Find typing notifications for given rooms (> `from_token` and <= `to_token`) """ diff --git a/synapse/handlers/ui_auth/checkers.py b/synapse/handlers/ui_auth/checkers.py index f3c295d9f2..cbae33eaec 100644 --- a/synapse/handlers/ui_auth/checkers.py +++ b/synapse/handlers/ui_auth/checkers.py @@ -21,7 +21,7 @@ import logging from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Any, ClassVar, Sequence, Type +from typing import TYPE_CHECKING, Any, ClassVar, Sequence from twisted.web.client import PartialDownloadError @@ -321,7 +321,7 @@ async def check_auth(self, authdict: dict, clientip: str) -> Any: ) -INTERACTIVE_AUTH_CHECKERS: Sequence[Type[UserInteractiveAuthChecker]] = [ +INTERACTIVE_AUTH_CHECKERS: Sequence[type[UserInteractiveAuthChecker]] = [ DummyAuthChecker, TermsAuthChecker, RecaptchaAuthChecker, diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index 28961f5925..fd05aff4c8 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -21,7 +21,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Optional from twisted.internet.interfaces import IDelayedCall @@ -129,7 +129,7 @@ def __init__(self, hs: "HomeServer"): # Guard to ensure we only have one process for refreshing remote profiles # for the given 
servers. # Set of server names. - self._is_refreshing_remote_profiles_for_servers: Set[str] = set() + self._is_refreshing_remote_profiles_for_servers: set[str] = set() if self.update_user_directory: self.notifier.add_replication_callback(self.notify_new_event) @@ -270,7 +270,7 @@ async def _unsafe_process(self) -> None: await self.store.update_user_directory_stream_pos(max_pos) - async def _handle_deltas(self, deltas: List[StateDelta]) -> None: + async def _handle_deltas(self, deltas: list[StateDelta]) -> None: """Called with the state deltas to process""" for delta in deltas: logger.debug( @@ -466,7 +466,7 @@ async def _track_user_joined_room(self, room_id: str, joining_user_id: str) -> N or await self.store.should_include_local_user_in_dir(other) ) ] - updates_to_users_who_share_rooms: Set[Tuple[str, str]] = set() + updates_to_users_who_share_rooms: set[tuple[str, str]] = set() # First, if the joining user is our local user then we need an # update for every other user in the room. diff --git a/synapse/handlers/worker_lock.py b/synapse/handlers/worker_lock.py index ca1e2b166c..af5498c560 100644 --- a/synapse/handlers/worker_lock.py +++ b/synapse/handlers/worker_lock.py @@ -26,10 +26,7 @@ TYPE_CHECKING, AsyncContextManager, Collection, - Dict, Optional, - Tuple, - Type, Union, ) from weakref import WeakSet @@ -75,8 +72,8 @@ def __init__(self, hs: "HomeServer") -> None: # Map from lock name/key to set of `WaitingLock` that are active for # that lock. 
- self._locks: Dict[ - Tuple[str, str], WeakSet[Union[WaitingLock, WaitingMultiLock]] + self._locks: dict[ + tuple[str, str], WeakSet[Union[WaitingLock, WaitingMultiLock]] ] = {} self._clock.looping_call(self._cleanup_locks, 30_000) @@ -141,7 +138,7 @@ def acquire_read_write_lock( def acquire_multi_read_write_lock( self, - lock_names: Collection[Tuple[str, str]], + lock_names: Collection[tuple[str, str]], *, write: bool, ) -> "WaitingMultiLock": @@ -261,7 +258,7 @@ async def __aenter__(self) -> None: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> Optional[bool]: @@ -289,7 +286,7 @@ def _get_next_retry_interval(self) -> float: @attr.s(auto_attribs=True, eq=False) class WaitingMultiLock: - lock_names: Collection[Tuple[str, str]] + lock_names: Collection[tuple[str, str]] write: bool @@ -341,7 +338,7 @@ async def __aenter__(self) -> None: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> Optional[bool]: diff --git a/synapse/http/additional_resource.py b/synapse/http/additional_resource.py index 59eae841d5..1a17b8461f 100644 --- a/synapse/http/additional_resource.py +++ b/synapse/http/additional_resource.py @@ -18,7 +18,7 @@ # # -from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional, Tuple +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional from twisted.web.server import Request @@ -41,7 +41,7 @@ class AdditionalResource(DirectServeJsonResource): def __init__( self, hs: "HomeServer", - handler: Callable[[Request], Awaitable[Optional[Tuple[int, Any]]]], + handler: Callable[[Request], Awaitable[Optional[tuple[int, Any]]]], ): """Initialise AdditionalResource @@ -56,7 +56,7 @@ def __init__( super().__init__(clock=hs.get_clock()) self._handler = handler - async def _async_render(self, 
request: Request) -> Optional[Tuple[int, Any]]: + async def _async_render(self, request: Request) -> Optional[tuple[int, Any]]: # Cheekily pass the result straight through, so we don't need to worry # if its an awaitable or not. return await self._handler(request) diff --git a/synapse/http/client.py b/synapse/http/client.py index 370cdc3568..ff1f7c7128 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -27,12 +27,9 @@ Any, BinaryIO, Callable, - Dict, - List, Mapping, Optional, Protocol, - Tuple, Union, ) @@ -135,10 +132,10 @@ # the entries can either be Lists or bytes. RawHeaderValue = Union[ StrSequence, - List[bytes], - List[Union[str, bytes]], - Tuple[bytes, ...], - Tuple[Union[str, bytes], ...], + list[bytes], + list[Union[str, bytes]], + tuple[bytes, ...], + tuple[Union[str, bytes], ...], ] @@ -205,7 +202,7 @@ def __init__( def resolveHostName( self, recv: IResolutionReceiver, hostname: str, portNumber: int = 0 ) -> IResolutionReceiver: - addresses: List[IAddress] = [] + addresses: list[IAddress] = [] def _callback() -> None: has_bad_ip = False @@ -349,7 +346,7 @@ class BaseHttpClient: def __init__( self, hs: "HomeServer", - treq_args: Optional[Dict[str, Any]] = None, + treq_args: Optional[dict[str, Any]] = None, ): self.hs = hs self.server_name = hs.hostname @@ -479,7 +476,7 @@ async def request( async def post_urlencoded_get_json( self, uri: str, - args: Optional[Mapping[str, Union[str, List[str]]]] = None, + args: Optional[Mapping[str, Union[str, list[str]]]] = None, headers: Optional[RawHeaders] = None, ) -> Any: """ @@ -707,7 +704,7 @@ async def get_file( max_size: Optional[int] = None, headers: Optional[RawHeaders] = None, is_allowed_content_type: Optional[Callable[[str], bool]] = None, - ) -> Tuple[int, Dict[bytes, List[bytes]], str, int]: + ) -> tuple[int, dict[bytes, list[bytes]], str, int]: """GETs a file from a given URL Args: url: The URL to GET @@ -815,7 +812,7 @@ class SimpleHttpClient(BaseHttpClient): def __init__( self, hs: 
"HomeServer", - treq_args: Optional[Dict[str, Any]] = None, + treq_args: Optional[dict[str, Any]] = None, ip_allowlist: Optional[IPSet] = None, ip_blocklist: Optional[IPSet] = None, use_proxy: bool = False, diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py index 9d87514be0..f8482d9c48 100644 --- a/synapse/http/federation/matrix_federation_agent.py +++ b/synapse/http/federation/matrix_federation_agent.py @@ -19,7 +19,7 @@ # import logging import urllib.parse -from typing import Any, Generator, List, Optional +from typing import Any, Generator, Optional from urllib.request import ( # type: ignore[attr-defined] proxy_bypass_environment, ) @@ -413,7 +413,7 @@ async def _do_connect(self, protocol_factory: IProtocolFactory) -> IProtocol: # to try and if that doesn't work then we'll have an exception. raise Exception("Failed to resolve server %r" % (self._parsed_uri.netloc,)) - async def _resolve_server(self) -> List[Server]: + async def _resolve_server(self) -> list[Server]: """Resolves the server name to a list of hosts and ports to attempt to connect to. 
""" diff --git a/synapse/http/federation/srv_resolver.py b/synapse/http/federation/srv_resolver.py index 639bf309d6..76a51e4873 100644 --- a/synapse/http/federation/srv_resolver.py +++ b/synapse/http/federation/srv_resolver.py @@ -22,7 +22,7 @@ import logging import random import time -from typing import Any, Callable, Dict, List +from typing import Any, Callable import attr @@ -34,7 +34,7 @@ logger = logging.getLogger(__name__) -SERVER_CACHE: Dict[bytes, List["Server"]] = {} +SERVER_CACHE: dict[bytes, list["Server"]] = {} @attr.s(auto_attribs=True, slots=True, frozen=True) @@ -58,11 +58,11 @@ class Server: expires: int = 0 -def _sort_server_list(server_list: List[Server]) -> List[Server]: +def _sort_server_list(server_list: list[Server]) -> list[Server]: """Given a list of SRV records sort them into priority order and shuffle each priority with the given weight. """ - priority_map: Dict[int, List[Server]] = {} + priority_map: dict[int, list[Server]] = {} for server in server_list: priority_map.setdefault(server.priority, []).append(server) @@ -116,14 +116,14 @@ class SrvResolver: def __init__( self, dns_client: Any = client, - cache: Dict[bytes, List[Server]] = SERVER_CACHE, + cache: dict[bytes, list[Server]] = SERVER_CACHE, get_time: Callable[[], float] = time.time, ): self._dns_client = dns_client self._cache = cache self._get_time = get_time - async def resolve_service(self, service_name: bytes) -> List[Server]: + async def resolve_service(self, service_name: bytes) -> list[Server]: """Look up a SRV record Args: diff --git a/synapse/http/federation/well_known_resolver.py b/synapse/http/federation/well_known_resolver.py index 2f52abcc03..ac4d954c2c 100644 --- a/synapse/http/federation/well_known_resolver.py +++ b/synapse/http/federation/well_known_resolver.py @@ -22,7 +22,7 @@ import random import time from io import BytesIO -from typing import Callable, Dict, Optional, Tuple +from typing import Callable, Optional import attr @@ -188,7 +188,7 @@ async def 
get_well_known(self, server_name: bytes) -> WellKnownLookupResult: return WellKnownLookupResult(delegated_server=result) - async def _fetch_well_known(self, server_name: bytes) -> Tuple[bytes, float]: + async def _fetch_well_known(self, server_name: bytes) -> tuple[bytes, float]: """Actually fetch and parse a .well-known, without checking the cache Args: @@ -251,7 +251,7 @@ async def _fetch_well_known(self, server_name: bytes) -> Tuple[bytes, float]: async def _make_well_known_request( self, server_name: bytes, retry: bool - ) -> Tuple[IResponse, bytes]: + ) -> tuple[IResponse, bytes]: """Make the well known request. This will retry the request if requested and it fails (with unable @@ -348,7 +348,7 @@ def _cache_period_from_headers( return None -def _parse_cache_control(headers: Headers) -> Dict[bytes, Optional[bytes]]: +def _parse_cache_control(headers: Headers) -> dict[bytes, Optional[bytes]]: cache_controls = {} cache_control_headers = headers.getRawHeaders(b"cache-control") or [] for hdr in cache_control_headers: diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 4d72c72d01..d0e47cf8dc 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -31,13 +31,10 @@ Any, BinaryIO, Callable, - Dict, Generic, - List, Literal, Optional, TextIO, - Tuple, TypeVar, Union, cast, @@ -253,7 +250,7 @@ def _validate(v: Any) -> bool: return isinstance(v, dict) -class LegacyJsonSendParser(_BaseJsonParser[Tuple[int, JsonDict]]): +class LegacyJsonSendParser(_BaseJsonParser[tuple[int, JsonDict]]): """Ensure the legacy responses of /send_join & /send_leave are correct.""" def __init__(self) -> None: @@ -667,7 +664,7 @@ async def _send_request( ) # Inject the span into the headers - headers_dict: Dict[bytes, List[bytes]] = {} + headers_dict: dict[bytes, list[bytes]] = {} opentracing.inject_header_dict(headers_dict, request.destination) headers_dict[b"User-Agent"] = [self.version_string_bytes] @@ 
-913,7 +910,7 @@ def build_auth_headers( url_bytes: bytes, content: Optional[JsonDict] = None, destination_is: Optional[bytes] = None, - ) -> List[bytes]: + ) -> list[bytes]: """ Builds the Authorization headers for a federation request Args: @@ -1291,7 +1288,7 @@ async def get_json_with_headers( ignore_backoff: bool = False, try_trailing_slash_on_400: bool = False, parser: Literal[None] = None, - ) -> Tuple[JsonDict, Dict[bytes, List[bytes]]]: ... + ) -> tuple[JsonDict, dict[bytes, list[bytes]]]: ... @overload async def get_json_with_headers( @@ -1304,7 +1301,7 @@ async def get_json_with_headers( ignore_backoff: bool = ..., try_trailing_slash_on_400: bool = ..., parser: ByteParser[T] = ..., - ) -> Tuple[T, Dict[bytes, List[bytes]]]: ... + ) -> tuple[T, dict[bytes, list[bytes]]]: ... async def get_json_with_headers( self, @@ -1316,7 +1313,7 @@ async def get_json_with_headers( ignore_backoff: bool = False, try_trailing_slash_on_400: bool = False, parser: Optional[ByteParser[T]] = None, - ) -> Tuple[Union[JsonDict, T], Dict[bytes, List[bytes]]]: + ) -> tuple[Union[JsonDict, T], dict[bytes, list[bytes]]]: """GETs some json from the given host homeserver and path Args: @@ -1484,7 +1481,7 @@ async def get_file( retry_on_dns_fail: bool = True, ignore_backoff: bool = False, follow_redirects: bool = False, - ) -> Tuple[int, Dict[bytes, List[bytes]]]: + ) -> tuple[int, dict[bytes, list[bytes]]]: """GETs a file from a given homeserver Args: destination: The remote server to send the HTTP request to. @@ -1645,7 +1642,7 @@ async def federation_get_file( args: Optional[QueryParams] = None, retry_on_dns_fail: bool = True, ignore_backoff: bool = False, - ) -> Tuple[int, Dict[bytes, List[bytes]], bytes]: + ) -> tuple[int, dict[bytes, list[bytes]], bytes]: """GETs a file from a given homeserver over the federation /download endpoint Args: destination: The remote server to send the HTTP request to. 
diff --git a/synapse/http/proxy.py b/synapse/http/proxy.py index fa17432984..583dd092bd 100644 --- a/synapse/http/proxy.py +++ b/synapse/http/proxy.py @@ -22,7 +22,7 @@ import json import logging import urllib.parse -from typing import TYPE_CHECKING, Any, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Any, Optional, cast from twisted.internet import protocol from twisted.internet.interfaces import ITCPTransport @@ -66,7 +66,7 @@ def parse_connection_header_value( connection_header_value: Optional[bytes], -) -> Set[str]: +) -> set[str]: """ Parse the `Connection` header to determine which headers we should not be copied over from the remote response. @@ -86,7 +86,7 @@ def parse_connection_header_value( The set of header names that should not be copied over from the remote response. The keys are lowercased. """ - extra_headers_to_remove: Set[str] = set() + extra_headers_to_remove: set[str] = set() if connection_header_value: extra_headers_to_remove = { connection_option.decode("ascii").strip().lower() @@ -140,7 +140,7 @@ def _check_auth(self, request: Request) -> None: "Invalid Proxy-Authorization header.", Codes.UNAUTHORIZED ) - async def _async_render(self, request: "SynapseRequest") -> Tuple[int, Any]: + async def _async_render(self, request: "SynapseRequest") -> tuple[int, Any]: uri = urllib.parse.urlparse(request.uri) assert uri.scheme == b"matrix-federation" diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py index ab413990c5..67e04b18d9 100644 --- a/synapse/http/proxyagent.py +++ b/synapse/http/proxyagent.py @@ -21,7 +21,7 @@ import logging import random import re -from typing import Any, Collection, Dict, List, Optional, Sequence, Tuple, Union, cast +from typing import Any, Collection, Optional, Sequence, Union, cast from urllib.parse import urlparse from urllib.request import ( # type: ignore[attr-defined] proxy_bypass_environment, @@ -139,7 +139,7 @@ def __init__( else: self.proxy_reactor = proxy_reactor - 
self._endpoint_kwargs: Dict[str, Any] = {} + self._endpoint_kwargs: dict[str, Any] = {} if connectTimeout is not None: self._endpoint_kwargs["timeout"] = connectTimeout if bindAddress is not None: @@ -182,7 +182,7 @@ def __init__( "`federation_proxy_credentials` are required when using `federation_proxy_locations`" ) - endpoints: List[IStreamClientEndpoint] = [] + endpoints: list[IStreamClientEndpoint] = [] for federation_proxy_location in federation_proxy_locations: endpoint: IStreamClientEndpoint if isinstance(federation_proxy_location, InstanceTcpLocationConfig): @@ -369,7 +369,7 @@ def http_proxy_endpoint( timeout: float = 30, bindAddress: Optional[Union[bytes, str, tuple[Union[bytes, str], int]]] = None, attemptDelay: Optional[float] = None, -) -> Tuple[Optional[IStreamClientEndpoint], Optional[ProxyCredentials]]: +) -> tuple[Optional[IStreamClientEndpoint], Optional[ProxyCredentials]]: """Parses an http proxy setting and returns an endpoint for the proxy Args: @@ -418,7 +418,7 @@ def http_proxy_endpoint( def parse_proxy( proxy: bytes, default_scheme: bytes = b"http", default_port: int = 1080 -) -> Tuple[bytes, bytes, int, Optional[ProxyCredentials]]: +) -> tuple[bytes, bytes, int, Optional[ProxyCredentials]]: """ Parse a proxy connection string. 
@@ -487,7 +487,7 @@ def connect( return run_in_background(self._do_connect, protocol_factory) async def _do_connect(self, protocol_factory: IProtocolFactory) -> IProtocol: - failures: List[Failure] = [] + failures: list[Failure] = [] for endpoint in random.sample(self._endpoints, k=len(self._endpoints)): try: return await endpoint.connect(protocol_factory) diff --git a/synapse/http/replicationagent.py b/synapse/http/replicationagent.py index d70575dbd5..f4799bd1b2 100644 --- a/synapse/http/replicationagent.py +++ b/synapse/http/replicationagent.py @@ -20,7 +20,7 @@ # import logging -from typing import Dict, Optional +from typing import Optional from zope.interface import implementer @@ -60,7 +60,7 @@ class ReplicationEndpointFactory: def __init__( self, reactor: ISynapseReactor, - instance_map: Dict[str, InstanceLocationConfig], + instance_map: dict[str, InstanceLocationConfig], context_factory: IPolicyForHTTPS, ) -> None: self.reactor = reactor @@ -117,7 +117,7 @@ class ReplicationAgent(_AgentBase): def __init__( self, reactor: ISynapseReactor, - instance_map: Dict[str, InstanceLocationConfig], + instance_map: dict[str, InstanceLocationConfig], contextFactory: IPolicyForHTTPS, connectTimeout: Optional[float] = None, bindAddress: Optional[bytes] = None, diff --git a/synapse/http/request_metrics.py b/synapse/http/request_metrics.py index 83f52edb7c..5cc8a2ebd8 100644 --- a/synapse/http/request_metrics.py +++ b/synapse/http/request_metrics.py @@ -22,7 +22,7 @@ import logging import threading import traceback -from typing import Dict, Mapping, Set, Tuple +from typing import Mapping from prometheus_client.core import Counter, Histogram @@ -133,13 +133,13 @@ labelnames=["method", "servlet", SERVER_NAME_LABEL], ) -_in_flight_requests: Set["RequestMetrics"] = set() +_in_flight_requests: set["RequestMetrics"] = set() # Protects the _in_flight_requests set from concurrent access _in_flight_requests_lock = threading.Lock() -def _get_in_flight_counts() -> Mapping[Tuple[str, 
...], int]: +def _get_in_flight_counts() -> Mapping[tuple[str, ...], int]: """Returns a count of all in flight requests by (method, server_name)""" # Cast to a list to prevent it changing while the Prometheus # thread is collecting metrics @@ -152,7 +152,7 @@ def _get_in_flight_counts() -> Mapping[Tuple[str, ...], int]: # Map from (method, name) -> int, the number of in flight requests of that # type. The key type is Tuple[str, str], but we leave the length unspecified # for compatability with LaterGauge's annotations. - counts: Dict[Tuple[str, ...], int] = {} + counts: dict[tuple[str, ...], int] = {} for request_metric in request_metrics: key = ( request_metric.method, diff --git a/synapse/http/server.py b/synapse/http/server.py index d5af8758ac..1f4728fba2 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -33,14 +33,11 @@ Any, Awaitable, Callable, - Dict, Iterable, Iterator, - List, Optional, Pattern, Protocol, - Tuple, Union, cast, ) @@ -267,7 +264,7 @@ async def wrapped_async_request_handler( # it is actually called with a SynapseRequest and a kwargs dict for the params, # but I can't figure out how to represent that. ServletCallback = Callable[ - ..., Union[None, Awaitable[None], Tuple[int, Any], Awaitable[Tuple[int, Any]]] + ..., Union[None, Awaitable[None], tuple[int, Any], Awaitable[tuple[int, Any]]] ] @@ -354,7 +351,7 @@ async def _async_render_wrapper(self, request: "SynapseRequest") -> None: async def _async_render( self, request: "SynapseRequest" - ) -> Optional[Tuple[int, Any]]: + ) -> Optional[tuple[int, Any]]: """Delegates to `_async_render_` methods, or returns a 400 if no appropriate method exists. Can be overridden in sub classes for different routing. @@ -491,7 +488,7 @@ def __init__( self.clock = hs.get_clock() super().__init__(canonical_json, extract_context, clock=self.clock) # Map of path regex -> method -> callback. 
- self._routes: Dict[Pattern[str], Dict[bytes, _PathEntry]] = {} + self._routes: dict[Pattern[str], dict[bytes, _PathEntry]] = {} self.hs = hs def register_paths( @@ -527,7 +524,7 @@ def register_paths( def _get_handler_for_request( self, request: "SynapseRequest" - ) -> Tuple[ServletCallback, str, Dict[str, str]]: + ) -> tuple[ServletCallback, str, dict[str, str]]: """Finds a callback method to handle the given request. Returns: @@ -556,7 +553,7 @@ def _get_handler_for_request( # Huh. No one wanted to handle that? Fiiiiiine. raise UnrecognizedRequestError(code=404) - async def _async_render(self, request: "SynapseRequest") -> Tuple[int, Any]: + async def _async_render(self, request: "SynapseRequest") -> tuple[int, Any]: callback, servlet_classname, group_dict = self._get_handler_for_request(request) request.is_render_cancellable = is_function_cancellable(callback) @@ -758,7 +755,7 @@ def __init__( # Start producing if `registerProducer` was successful self.resumeProducing() - def _send_data(self, data: List[bytes]) -> None: + def _send_data(self, data: list[bytes]) -> None: """ Send a list of bytes as a chunk of a response. """ diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index 71e809b3f1..66694e0607 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -27,13 +27,10 @@ from http import HTTPStatus from typing import ( TYPE_CHECKING, - List, Literal, Mapping, Optional, Sequence, - Tuple, - Type, TypeVar, overload, ) @@ -548,7 +545,7 @@ def parse_json_from_args( def parse_enum( request: Request, name: str, - E: Type[EnumT], + E: type[EnumT], default: EnumT, ) -> EnumT: ... @@ -557,7 +554,7 @@ def parse_enum( def parse_enum( request: Request, name: str, - E: Type[EnumT], + E: type[EnumT], *, required: Literal[True], ) -> EnumT: ... 
@@ -566,7 +563,7 @@ def parse_enum( def parse_enum( request: Request, name: str, - E: Type[EnumT], + E: type[EnumT], default: Optional[EnumT] = None, required: bool = False, ) -> Optional[EnumT]: @@ -637,18 +634,18 @@ def parse_strings_from_args( *, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[List[str]]: ... +) -> Optional[list[str]]: ... @overload def parse_strings_from_args( args: Mapping[bytes, Sequence[bytes]], name: str, - default: List[str], + default: list[str], *, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> List[str]: ... +) -> list[str]: ... @overload @@ -659,29 +656,29 @@ def parse_strings_from_args( required: Literal[True], allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> List[str]: ... +) -> list[str]: ... @overload def parse_strings_from_args( args: Mapping[bytes, Sequence[bytes]], name: str, - default: Optional[List[str]] = None, + default: Optional[list[str]] = None, *, required: bool = False, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[List[str]]: ... +) -> Optional[list[str]]: ... def parse_strings_from_args( args: Mapping[bytes, Sequence[bytes]], name: str, - default: Optional[List[str]] = None, + default: Optional[list[str]] = None, required: bool = False, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[List[str]]: +) -> Optional[list[str]]: """ Parse a string parameter from the request query string list. @@ -892,7 +889,7 @@ def parse_json_object_from_request( Model = TypeVar("Model", bound=BaseModel) -def validate_json_object(content: JsonDict, model_type: Type[Model]) -> Model: +def validate_json_object(content: JsonDict, model_type: type[Model]) -> Model: """Validate a deserialized JSON object using the given pydantic model. 
Raises: @@ -922,7 +919,7 @@ def validate_json_object(content: JsonDict, model_type: Type[Model]) -> Model: def parse_and_validate_json_object_from_request( - request: Request, model_type: Type[Model] + request: Request, model_type: type[Model] ) -> Model: """Parse a JSON object from the body of a twisted HTTP request, then deserialise and validate using the given pydantic model. @@ -988,8 +985,8 @@ def __init__(self, hs: "HomeServer"): self.room_member_handler = hs.get_room_member_handler() async def resolve_room_id( - self, room_identifier: str, remote_room_hosts: Optional[List[str]] = None - ) -> Tuple[str, Optional[List[str]]]: + self, room_identifier: str, remote_room_hosts: Optional[list[str]] = None + ) -> tuple[str, Optional[list[str]]]: """ Resolve a room identifier to a room ID, if necessary. diff --git a/synapse/http/site.py b/synapse/http/site.py index cf31b64d80..ccf6ff27f0 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -22,7 +22,7 @@ import logging import time from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Generator, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Generator, Optional, Union import attr from zope.interface import implementer @@ -266,7 +266,7 @@ def get_method(self) -> str: return self.method.decode("ascii") return method - def get_authenticated_entity(self) -> Tuple[Optional[str], Optional[str]]: + def get_authenticated_entity(self) -> tuple[Optional[str], Optional[str]]: """ Get the "authenticated" entity of the request, which might be the user performing the action, or a user being puppeted by a server admin. 
@@ -783,7 +783,7 @@ def __init__( self.access_logger = logging.getLogger(logger_name) self.server_version_string = server_version_string.encode("ascii") - self.connections: List[Protocol] = [] + self.connections: list[Protocol] = [] def buildProtocol(self, addr: IAddress) -> SynapseProtocol: protocol = SynapseProtocol( diff --git a/synapse/logging/_remote.py b/synapse/logging/_remote.py index ac34fa6525..a3444221a0 100644 --- a/synapse/logging/_remote.py +++ b/synapse/logging/_remote.py @@ -25,7 +25,7 @@ from collections import deque from ipaddress import IPv4Address, IPv6Address, ip_address from math import floor -from typing import Callable, Deque, Optional +from typing import Callable, Optional import attr from zope.interface import implementer @@ -66,7 +66,7 @@ class LogProducer: # (connected and registerProducer) which are part of the implementation. transport: Connection _format: Callable[[logging.LogRecord], str] - _buffer: Deque[logging.LogRecord] + _buffer: deque[logging.LogRecord] _paused: bool = attr.ib(default=False, init=False) def pauseProducing(self) -> None: @@ -120,7 +120,7 @@ def __init__( self.port = port self.maximum_buffer = maximum_buffer - self._buffer: Deque[logging.LogRecord] = deque() + self._buffer: deque[logging.LogRecord] = deque() self._connection_waiter: Optional[Deferred] = None self._producer: Optional[LogProducer] = None diff --git a/synapse/logging/context.py b/synapse/logging/context.py index 1b9c770311..6a4425ff1d 100644 --- a/synapse/logging/context.py +++ b/synapse/logging/context.py @@ -41,8 +41,6 @@ Callable, Literal, Optional, - Tuple, - Type, TypeVar, Union, overload, @@ -393,7 +391,7 @@ def __enter__(self) -> "LoggingContext": def __exit__( self, - type: Optional[Type[BaseException]], + type: Optional[type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: @@ -498,13 +496,13 @@ def get_resource_usage(self) -> ContextResourceUsage: return res - def _get_cputime(self, current: 
"resource.struct_rusage") -> Tuple[float, float]: + def _get_cputime(self, current: "resource.struct_rusage") -> tuple[float, float]: """Get the cpu usage time between start() and the given rusage Args: rusage: the current resource usage - Returns: Tuple[float, float]: seconds in user mode, seconds in system mode + Returns: tuple[float, float]: seconds in user mode, seconds in system mode """ assert self.usage_start is not None @@ -672,7 +670,7 @@ def __enter__(self) -> None: def __exit__( self, - type: Optional[Type[BaseException]], + type: Optional[type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: diff --git a/synapse/logging/formatter.py b/synapse/logging/formatter.py index 228e5ed278..e5d73a47a8 100644 --- a/synapse/logging/formatter.py +++ b/synapse/logging/formatter.py @@ -23,7 +23,7 @@ import traceback from io import StringIO from types import TracebackType -from typing import Optional, Tuple, Type +from typing import Optional class LogFormatter(logging.Formatter): @@ -38,8 +38,8 @@ class LogFormatter(logging.Formatter): def formatException( self, - ei: Tuple[ - Optional[Type[BaseException]], + ei: tuple[ + Optional[type[BaseException]], Optional[BaseException], Optional[TracebackType], ], diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py index 1c89a358df..fbb9971b32 100644 --- a/synapse/logging/opentracing.py +++ b/synapse/logging/opentracing.py @@ -183,13 +183,10 @@ def set_fates(clotho, lachesis, atropos, father="Zues", mother="Themis"): Callable, Collection, ContextManager, - Dict, Generator, Iterable, - List, Optional, Pattern, - Type, TypeVar, Union, cast, @@ -292,7 +289,7 @@ def report_span(self, span: "opentracing.Span") -> None: except Exception: logger.exception("Failed to report span") - RustReporter: Optional[Type[_WrappedRustReporter]] = _WrappedRustReporter + RustReporter: Optional[type[_WrappedRustReporter]] = _WrappedRustReporter except ImportError: RustReporter = 
None @@ -536,8 +533,8 @@ def whitelisted_homeserver(destination: str) -> bool: def start_active_span( operation_name: str, child_of: Optional[Union["opentracing.Span", "opentracing.SpanContext"]] = None, - references: Optional[List["opentracing.Reference"]] = None, - tags: Optional[Dict[str, str]] = None, + references: Optional[list["opentracing.Reference"]] = None, + tags: Optional[dict[str, str]] = None, start_time: Optional[float] = None, ignore_active_span: bool = False, finish_on_close: bool = True, @@ -577,7 +574,7 @@ def start_active_span_follows_from( operation_name: str, contexts: Collection, child_of: Optional[Union["opentracing.Span", "opentracing.SpanContext"]] = None, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, start_time: Optional[float] = None, ignore_active_span: bool = False, *, @@ -631,10 +628,10 @@ def start_active_span_follows_from( def start_active_span_from_edu( - edu_content: Dict[str, Any], + edu_content: dict[str, Any], operation_name: str, - references: Optional[List["opentracing.Reference"]] = None, - tags: Optional[Dict[str, str]] = None, + references: Optional[list["opentracing.Reference"]] = None, + tags: Optional[dict[str, str]] = None, start_time: Optional[float] = None, ignore_active_span: bool = False, finish_on_close: bool = True, @@ -709,7 +706,7 @@ def set_tag(key: str, value: Union[str, bool, int, float]) -> None: @ensure_active_span("log") -def log_kv(key_values: Dict[str, Any], timestamp: Optional[float] = None) -> None: +def log_kv(key_values: dict[str, Any], timestamp: Optional[float] = None) -> None: """Log to the active span""" assert opentracing.tracer.active_span is not None opentracing.tracer.active_span.log_kv(key_values, timestamp) @@ -760,7 +757,7 @@ def is_context_forced_tracing( @ensure_active_span("inject the span into a header dict") def inject_header_dict( - headers: Dict[bytes, List[bytes]], + headers: dict[bytes, list[bytes]], destination: Optional[str] = None, 
check_destination: bool = True, ) -> None: @@ -792,7 +789,7 @@ def inject_header_dict( span = opentracing.tracer.active_span - carrier: Dict[str, str] = {} + carrier: dict[str, str] = {} assert span is not None opentracing.tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, carrier) @@ -820,16 +817,16 @@ def inject_response_headers(response_headers: Headers) -> None: @ensure_active_span("inject the span into a header dict") -def inject_request_headers(headers: Dict[str, str]) -> None: +def inject_request_headers(headers: dict[str, str]) -> None: span = opentracing.tracer.active_span assert span is not None opentracing.tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, headers) @ensure_active_span( - "get the active span context as a dict", ret=cast(Dict[str, str], {}) + "get the active span context as a dict", ret=cast(dict[str, str], {}) ) -def get_active_span_text_map(destination: Optional[str] = None) -> Dict[str, str]: +def get_active_span_text_map(destination: Optional[str] = None) -> dict[str, str]: """ Gets a span context as a dict. This can be used instead of manually injecting a span into an empty carrier. @@ -844,7 +841,7 @@ def get_active_span_text_map(destination: Optional[str] = None) -> Dict[str, str if destination and not whitelisted_homeserver(destination): return {} - carrier: Dict[str, str] = {} + carrier: dict[str, str] = {} assert opentracing.tracer.active_span is not None opentracing.tracer.inject( opentracing.tracer.active_span.context, opentracing.Format.TEXT_MAP, carrier @@ -859,7 +856,7 @@ def active_span_context_as_string() -> str: Returns: The active span context encoded as a string. """ - carrier: Dict[str, str] = {} + carrier: dict[str, str] = {} if opentracing: assert opentracing.tracer.active_span is not None opentracing.tracer.inject( @@ -888,12 +885,12 @@ def span_context_from_string(carrier: str) -> Optional["opentracing.SpanContext" Returns: The active span context decoded from a string. 
""" - payload: Dict[str, str] = json_decoder.decode(carrier) + payload: dict[str, str] = json_decoder.decode(carrier) return opentracing.tracer.extract(opentracing.Format.TEXT_MAP, payload) @only_if_tracing -def extract_text_map(carrier: Dict[str, str]) -> Optional["opentracing.SpanContext"]: +def extract_text_map(carrier: dict[str, str]) -> Optional["opentracing.SpanContext"]: """ Wrapper method for opentracing's tracer.extract for TEXT_MAP. Args: diff --git a/synapse/media/_base.py b/synapse/media/_base.py index d3a9a66f5a..319ca662e2 100644 --- a/synapse/media/_base.py +++ b/synapse/media/_base.py @@ -29,12 +29,8 @@ TYPE_CHECKING, Awaitable, BinaryIO, - Dict, Generator, - List, Optional, - Tuple, - Type, ) import attr @@ -505,7 +501,7 @@ def __enter__(self) -> None: # noqa: B027 def __exit__( # noqa: B027 self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: @@ -570,7 +566,7 @@ def thumbnail_length(self) -> Optional[int]: return self.thumbnail.length -def get_filename_from_headers(headers: Dict[bytes, List[bytes]]) -> Optional[str]: +def get_filename_from_headers(headers: dict[bytes, list[bytes]]) -> Optional[str]: """ Get the filename of the downloaded file by inspecting the Content-Disposition HTTP header. @@ -618,7 +614,7 @@ def get_filename_from_headers(headers: Dict[bytes, List[bytes]]) -> Optional[str return upload_name -def _parse_header(line: bytes) -> Tuple[bytes, Dict[bytes, bytes]]: +def _parse_header(line: bytes) -> tuple[bytes, dict[bytes, bytes]]: """Parse a Content-type like header. Cargo-culted from `cgi`, but works on bytes rather than strings. 
diff --git a/synapse/media/filepath.py b/synapse/media/filepath.py index 3d7863e2fb..7659971661 100644 --- a/synapse/media/filepath.py +++ b/synapse/media/filepath.py @@ -24,7 +24,7 @@ import os import re import string -from typing import Any, Callable, List, TypeVar, Union, cast +from typing import Any, Callable, TypeVar, Union, cast NEW_FORMAT_ID_RE = re.compile(r"^\d\d\d\d-\d\d-\d\d") @@ -46,7 +46,7 @@ def _wrapped(self: "MediaFilePaths", *args: Any, **kwargs: Any) -> str: GetPathMethod = TypeVar( - "GetPathMethod", bound=Union[Callable[..., str], Callable[..., List[str]]] + "GetPathMethod", bound=Union[Callable[..., str], Callable[..., list[str]]] ) @@ -73,7 +73,7 @@ def _wrap_with_jail_check_inner(func: GetPathMethod) -> GetPathMethod: @functools.wraps(func) def _wrapped( self: "MediaFilePaths", *args: Any, **kwargs: Any - ) -> Union[str, List[str]]: + ) -> Union[str, list[str]]: path_or_paths = func(self, *args, **kwargs) if isinstance(path_or_paths, list): @@ -303,7 +303,7 @@ def url_cache_filepath_rel(self, media_id: str) -> str: url_cache_filepath = _wrap_in_base_path(url_cache_filepath_rel) @_wrap_with_jail_check(relative=False) - def url_cache_filepath_dirs_to_delete(self, media_id: str) -> List[str]: + def url_cache_filepath_dirs_to_delete(self, media_id: str) -> list[str]: "The dirs to try and remove if we delete the media_id file" if NEW_FORMAT_ID_RE.match(media_id): return [ @@ -376,7 +376,7 @@ def url_cache_thumbnail_directory_rel(self, media_id: str) -> str: ) @_wrap_with_jail_check(relative=False) - def url_cache_thumbnail_dirs_to_delete(self, media_id: str) -> List[str]: + def url_cache_thumbnail_dirs_to_delete(self, media_id: str) -> list[str]: "The dirs to try and remove if we delete the media_id thumbnails" # Media id is of the form # E.g.: 2017-09-28-fsdRDt24DS234dsf diff --git a/synapse/media/media_repository.py b/synapse/media/media_repository.py index 238dc6cb2f..eda1410767 100644 --- a/synapse/media/media_repository.py +++ 
b/synapse/media/media_repository.py @@ -24,7 +24,7 @@ import os import shutil from io import BytesIO -from typing import IO, TYPE_CHECKING, Dict, List, Optional, Set, Tuple +from typing import IO, TYPE_CHECKING, Optional import attr from matrix_common.types.mxc_uri import MXCUri @@ -109,8 +109,8 @@ def __init__(self, hs: "HomeServer"): self.remote_media_linearizer = Linearizer(name="media_remote", clock=self.clock) - self.recently_accessed_remotes: Set[Tuple[str, str]] = set() - self.recently_accessed_locals: Set[str] = set() + self.recently_accessed_remotes: set[tuple[str, str]] = set() + self.recently_accessed_locals: set[str] = set() self.federation_domain_whitelist = ( hs.config.federation.federation_domain_whitelist @@ -221,7 +221,7 @@ def mark_recently_accessed(self, server_name: Optional[str], media_id: str) -> N self.recently_accessed_locals.add(media_id) @trace - async def create_media_id(self, auth_user: UserID) -> Tuple[str, int]: + async def create_media_id(self, auth_user: UserID) -> tuple[str, int]: """Create and store a media ID for a local user and return the MXC URI and its expiration. @@ -242,7 +242,7 @@ async def create_media_id(self, auth_user: UserID) -> Tuple[str, int]: return f"mxc://{self.server_name}/{media_id}", now + self.unused_expiration_time @trace - async def reached_pending_media_limit(self, auth_user: UserID) -> Tuple[bool, int]: + async def reached_pending_media_limit(self, auth_user: UserID) -> tuple[bool, int]: """Check if the user is over the limit for pending media uploads. Args: @@ -696,7 +696,7 @@ async def _get_remote_media_impl( ip_address: str, use_federation_endpoint: bool, allow_authenticated: bool, - ) -> Tuple[Optional[Responder], RemoteMedia]: + ) -> tuple[Optional[Responder], RemoteMedia]: """Looks for media in local cache, if not there then attempt to download from remote server. 
@@ -1052,7 +1052,7 @@ async def _federation_download_remote_file( def _get_thumbnail_requirements( self, media_type: str - ) -> Tuple[ThumbnailRequirement, ...]: + ) -> tuple[ThumbnailRequirement, ...]: scpos = media_type.find(";") if scpos > 0: media_type = media_type[:scpos] @@ -1099,7 +1099,7 @@ async def generate_local_exact_thumbnail( t_method: str, t_type: str, url_cache: bool, - ) -> Optional[Tuple[str, FileInfo]]: + ) -> Optional[tuple[str, FileInfo]]: input_path = await self.media_storage.ensure_media_is_in_local_cache( FileInfo(None, media_id, url_cache=url_cache) ) @@ -1308,7 +1308,7 @@ async def _generate_thumbnails( # We deduplicate the thumbnail sizes by ignoring the cropped versions if # they have the same dimensions of a scaled one. - thumbnails: Dict[Tuple[int, int, str], str] = {} + thumbnails: dict[tuple[int, int, str], str] = {} for requirement in requirements: if requirement.method == "crop": thumbnails.setdefault( @@ -1461,7 +1461,7 @@ async def _apply_media_retention_rules(self) -> None: delete_protected_media=False, ) - async def delete_old_remote_media(self, before_ts: int) -> Dict[str, int]: + async def delete_old_remote_media(self, before_ts: int) -> dict[str, int]: old_media = await self.store.get_remote_media_ids( before_ts, include_quarantined_media=False ) @@ -1497,8 +1497,8 @@ async def delete_old_remote_media(self, before_ts: int) -> Dict[str, int]: return {"deleted": deleted} async def delete_local_media_ids( - self, media_ids: List[str] - ) -> Tuple[List[str], int]: + self, media_ids: list[str] + ) -> tuple[list[str], int]: """ Delete the given local or remote media ID from this server @@ -1516,7 +1516,7 @@ async def delete_old_local_media( keep_profiles: bool = True, delete_quarantined_media: bool = False, delete_protected_media: bool = False, - ) -> Tuple[List[str], int]: + ) -> tuple[list[str], int]: """ Delete local or remote media from this server by size and timestamp. Removes media files, any thumbnails and cached URLs. 
@@ -1543,8 +1543,8 @@ async def delete_old_local_media( return await self._remove_local_media_from_disk(old_media) async def _remove_local_media_from_disk( - self, media_ids: List[str] - ) -> Tuple[List[str], int]: + self, media_ids: list[str] + ) -> tuple[list[str], int]: """ Delete local or remote media from this server. Removes media files, any thumbnails and cached URLs. diff --git a/synapse/media/media_storage.py b/synapse/media/media_storage.py index 99d002a8df..f6be9edf50 100644 --- a/synapse/media/media_storage.py +++ b/synapse/media/media_storage.py @@ -34,11 +34,8 @@ AsyncIterator, BinaryIO, Callable, - List, Optional, Sequence, - Tuple, - Type, Union, cast, ) @@ -205,7 +202,7 @@ async def write_to_file(self, source: IO, output: IO) -> None: @contextlib.asynccontextmanager async def store_into_file( self, file_info: FileInfo - ) -> AsyncIterator[Tuple[BinaryIO, str]]: + ) -> AsyncIterator[tuple[BinaryIO, str]]: """Async Context manager used to get a file like object to write into, as described by file_info. 
@@ -423,7 +420,7 @@ def write_to_consumer(self, consumer: IConsumer) -> Deferred: def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: @@ -674,7 +671,7 @@ def __init__( self, name: bytes, value: Any, - params: Optional[List[Tuple[Any, Any]]] = None, + params: Optional[list[tuple[Any, Any]]] = None, ): self.name = name self.value = value diff --git a/synapse/media/oembed.py b/synapse/media/oembed.py index 45b481f229..059d8ad1cf 100644 --- a/synapse/media/oembed.py +++ b/synapse/media/oembed.py @@ -21,7 +21,7 @@ import html import logging import urllib.parse -from typing import TYPE_CHECKING, List, Optional, cast +from typing import TYPE_CHECKING, Optional, cast import attr @@ -118,7 +118,7 @@ def autodiscover_from_html(self, tree: "etree._Element") -> Optional[str]: # Search for link elements with the proper rel and type attributes. # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. for tag in cast( - List["etree._Element"], + list["etree._Element"], tree.xpath("//link[@rel='alternate'][@type='application/json+oembed']"), ): if "href" in tag.attrib: @@ -127,7 +127,7 @@ def autodiscover_from_html(self, tree: "etree._Element") -> Optional[str]: # Some providers (e.g. Flickr) use alternative instead of alternate. # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. 
for tag in cast( - List["etree._Element"], + list["etree._Element"], tree.xpath("//link[@rel='alternative'][@type='application/json+oembed']"), ): if "href" in tag.attrib: @@ -223,10 +223,10 @@ def parse_oembed_response(self, url: str, raw_body: bytes) -> OEmbedResult: return OEmbedResult(open_graph_response, author_name, cache_age) -def _fetch_urls(tree: "etree._Element", tag_name: str) -> List[str]: +def _fetch_urls(tree: "etree._Element", tag_name: str) -> list[str]: results = [] # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. - for tag in cast(List["etree._Element"], tree.xpath("//*/" + tag_name)): + for tag in cast(list["etree._Element"], tree.xpath("//*/" + tag_name)): if "src" in tag.attrib: results.append(cast(str, tag.attrib["src"])) return results diff --git a/synapse/media/preview_html.py b/synapse/media/preview_html.py index 38ae126a23..6a8e479152 100644 --- a/synapse/media/preview_html.py +++ b/synapse/media/preview_html.py @@ -24,12 +24,9 @@ from typing import ( TYPE_CHECKING, Callable, - Dict, Generator, Iterable, - List, Optional, - Set, Union, cast, ) @@ -83,7 +80,7 @@ def _get_html_media_encodings( The character encoding of the body, as a string. """ # There's no point in returning an encoding more than once. - attempted_encodings: Set[str] = set() + attempted_encodings: set[str] = set() # Limit searches to the first 1kb, since it ought to be at the top. body_start = body[:1024] @@ -190,7 +187,7 @@ def _get_meta_tags( property: str, prefix: str, property_mapper: Optional[Callable[[str], Optional[str]]] = None, -) -> Dict[str, Optional[str]]: +) -> dict[str, Optional[str]]: """ Search for meta tags prefixed with a particular string. @@ -207,10 +204,10 @@ def _get_meta_tags( """ # This actually returns Dict[str, str], but the caller sets this as a variable # which is Dict[str, Optional[str]]. 
- results: Dict[str, Optional[str]] = {} + results: dict[str, Optional[str]] = {} # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. for tag in cast( - List["etree._Element"], + list["etree._Element"], tree.xpath( f"//*/meta[starts-with(@{property}, '{prefix}:')][@content][not(@content='')]" ), @@ -256,7 +253,7 @@ def _map_twitter_to_open_graph(key: str) -> Optional[str]: return "og" + key[7:] -def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]]: +def parse_html_to_open_graph(tree: "etree._Element") -> dict[str, Optional[str]]: """ Parse the HTML document into an Open Graph response. @@ -315,7 +312,7 @@ def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]] # Attempt to find a title from the title tag, or the biggest header on the page. # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. title = cast( - List["etree._ElementUnicodeResult"], + list["etree._ElementUnicodeResult"], tree.xpath("((//title)[1] | (//h1)[1] | (//h2)[1] | (//h3)[1])/text()"), ) if title: @@ -326,7 +323,7 @@ def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]] if "og:image" not in og: # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. 
meta_image = cast( - List["etree._ElementUnicodeResult"], + list["etree._ElementUnicodeResult"], tree.xpath( "//*/meta[translate(@itemprop, 'IMAGE', 'image')='image'][not(@content='')]/@content[1]" ), @@ -340,7 +337,7 @@ def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]] # # TODO: consider inlined CSS styles as well as width & height attribs images = cast( - List["etree._Element"], + list["etree._Element"], tree.xpath("//img[@src][number(@width)>10][number(@height)>10]"), ) images = sorted( @@ -352,7 +349,7 @@ def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]] # If no images were found, try to find *any* images. if not images: # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. - images = cast(List["etree._Element"], tree.xpath("//img[@src][1]")) + images = cast(list["etree._Element"], tree.xpath("//img[@src][1]")) if images: og["og:image"] = cast(str, images[0].attrib["src"]) @@ -360,7 +357,7 @@ def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]] else: # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. favicons = cast( - List["etree._ElementUnicodeResult"], + list["etree._ElementUnicodeResult"], tree.xpath("//link[@href][contains(@rel, 'icon')]/@href[1]"), ) if favicons: @@ -370,7 +367,7 @@ def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]] # Check the first meta description tag for content. # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. 
meta_description = cast( - List["etree._ElementUnicodeResult"], + list["etree._ElementUnicodeResult"], tree.xpath( "//*/meta[translate(@name, 'DESCRIPTION', 'description')='description'][not(@content='')]/@content[1]" ), @@ -443,7 +440,7 @@ def parse_html_description(tree: "etree._Element") -> Optional[str]: def _iterate_over_text( tree: Optional["etree._Element"], - tags_to_ignore: Set[object], + tags_to_ignore: set[object], stack_limit: int = 1024, ) -> Generator[str, None, None]: """Iterate over the tree returning text nodes in a depth first fashion, @@ -463,7 +460,7 @@ def _iterate_over_text( # This is a stack whose items are elements to iterate over *or* strings # to be returned. - elements: List[Union[str, "etree._Element"]] = [tree] + elements: list[Union[str, "etree._Element"]] = [tree] while elements: el = elements.pop() diff --git a/synapse/media/thumbnailer.py b/synapse/media/thumbnailer.py index 5d9afda322..cc2fe7318b 100644 --- a/synapse/media/thumbnailer.py +++ b/synapse/media/thumbnailer.py @@ -22,7 +22,7 @@ import logging from io import BytesIO from types import TracebackType -from typing import TYPE_CHECKING, List, Optional, Tuple, Type +from typing import TYPE_CHECKING, Optional from PIL import Image @@ -116,7 +116,7 @@ def __init__(self, input_path: str): logger.info("Error parsing image EXIF information: %s", e) @trace - def transpose(self) -> Tuple[int, int]: + def transpose(self) -> tuple[int, int]: """Transpose the image using its EXIF Orientation tag Returns: @@ -134,7 +134,7 @@ def transpose(self) -> Tuple[int, int]: self.image.info["exif"] = None return self.image.size - def aspect(self, max_width: int, max_height: int) -> Tuple[int, int]: + def aspect(self, max_width: int, max_height: int) -> tuple[int, int]: """Calculate the largest size that preserves aspect ratio which fits within the given rectangle:: @@ -246,7 +246,7 @@ def __enter__(self) -> "Thumbnailer": def __exit__( self, - type: Optional[Type[BaseException]], + type: 
Optional[type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: @@ -553,7 +553,7 @@ async def _select_and_respond_with_thumbnail( desired_height: int, desired_method: str, desired_type: str, - thumbnail_infos: List[ThumbnailInfo], + thumbnail_infos: list[ThumbnailInfo], media_id: str, file_id: str, url_cache: bool, @@ -719,7 +719,7 @@ def _select_thumbnail( desired_height: int, desired_method: str, desired_type: str, - thumbnail_infos: List[ThumbnailInfo], + thumbnail_infos: list[ThumbnailInfo], file_id: str, url_cache: bool, server_name: Optional[str], @@ -750,12 +750,12 @@ def _select_thumbnail( if desired_method == "crop": # Thumbnails that match equal or larger sizes of desired width/height. - crop_info_list: List[ - Tuple[int, int, int, bool, Optional[int], ThumbnailInfo] + crop_info_list: list[ + tuple[int, int, int, bool, Optional[int], ThumbnailInfo] ] = [] # Other thumbnails. - crop_info_list2: List[ - Tuple[int, int, int, bool, Optional[int], ThumbnailInfo] + crop_info_list2: list[ + tuple[int, int, int, bool, Optional[int], ThumbnailInfo] ] = [] for info in thumbnail_infos: # Skip thumbnails generated with different methods. @@ -801,9 +801,9 @@ def _select_thumbnail( thumbnail_info = min(crop_info_list2, key=lambda t: t[:-1])[-1] elif desired_method == "scale": # Thumbnails that match equal or larger sizes of desired width/height. - info_list: List[Tuple[int, bool, int, ThumbnailInfo]] = [] + info_list: list[tuple[int, bool, int, ThumbnailInfo]] = [] # Other thumbnails. - info_list2: List[Tuple[int, bool, int, ThumbnailInfo]] = [] + info_list2: list[tuple[int, bool, int, ThumbnailInfo]] = [] for info in thumbnail_infos: # Skip thumbnails generated with different methods. 
diff --git a/synapse/media/url_previewer.py b/synapse/media/url_previewer.py index 1a82cc46e3..2a63842fb7 100644 --- a/synapse/media/url_previewer.py +++ b/synapse/media/url_previewer.py @@ -28,7 +28,7 @@ import shutil import sys import traceback -from typing import TYPE_CHECKING, BinaryIO, Iterable, Optional, Tuple +from typing import TYPE_CHECKING, BinaryIO, Iterable, Optional from urllib.parse import urljoin, urlparse, urlsplit from urllib.request import urlopen @@ -705,7 +705,7 @@ async def _precache_image_url( async def _handle_oembed_response( self, url: str, media_info: MediaInfo, expiration_ms: int - ) -> Tuple[JsonDict, Optional[str], int]: + ) -> tuple[JsonDict, Optional[str], int]: """ Parse the downloaded oEmbed info. diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 2ffb14070b..def21ac942 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -28,15 +28,11 @@ from importlib import metadata from typing import ( Callable, - Dict, Generic, Iterable, Mapping, Optional, Sequence, - Set, - Tuple, - Type, TypeVar, Union, cast, @@ -161,10 +157,10 @@ class LaterGauge(Collector): name: str desc: str labelnames: Optional[StrSequence] = attr.ib(hash=False) - _instance_id_to_hook_map: Dict[ + _instance_id_to_hook_map: dict[ Optional[str], # instance_id Callable[ - [], Union[Mapping[Tuple[str, ...], Union[int, float]], Union[int, float]] + [], Union[Mapping[tuple[str, ...], Union[int, float]], Union[int, float]] ], ] = attr.ib(factory=dict, hash=False) """ @@ -206,7 +202,7 @@ def register_hook( *, homeserver_instance_id: Optional[str], hook: Callable[ - [], Union[Mapping[Tuple[str, ...], Union[int, float]], Union[int, float]] + [], Union[Mapping[tuple[str, ...], Union[int, float]], Union[int, float]] ], ) -> None: """ @@ -260,7 +256,7 @@ def __attrs_post_init__(self) -> None: all_later_gauges_to_clean_up_on_shutdown[self.name] = self -all_later_gauges_to_clean_up_on_shutdown: Dict[str, LaterGauge] = {} 
+all_later_gauges_to_clean_up_on_shutdown: dict[str, LaterGauge] = {} """ Track all `LaterGauge` instances so we can remove any associated hooks during homeserver shutdown. @@ -302,15 +298,15 @@ def __init__( # Create a class which have the sub_metrics values as attributes, which # default to 0 on initialization. Used to pass to registered callbacks. - self._metrics_class: Type[MetricsEntry] = attr.make_class( + self._metrics_class: type[MetricsEntry] = attr.make_class( "_MetricsEntry", attrs={x: attr.ib(default=0) for x in sub_metrics}, slots=True, ) # Counts number of in flight blocks for a given set of label values - self._registrations: Dict[ - Tuple[str, ...], Set[Callable[[MetricsEntry], None]] + self._registrations: dict[ + tuple[str, ...], set[Callable[[MetricsEntry], None]] ] = {} # Protects access to _registrations @@ -320,7 +316,7 @@ def __init__( def register( self, - key: Tuple[str, ...], + key: tuple[str, ...], callback: Callable[[MetricsEntry], None], ) -> None: """Registers that we've entered a new block with labels `key`. @@ -349,7 +345,7 @@ def register( def unregister( self, - key: Tuple[str, ...], + key: tuple[str, ...], callback: Callable[[MetricsEntry], None], ) -> None: """ @@ -424,7 +420,7 @@ def __init__( name: str, documentation: str, gsum_value: float, - buckets: Optional[Sequence[Tuple[str, float]]] = None, + buckets: Optional[Sequence[tuple[str, float]]] = None, labelnames: StrSequence = (), labelvalues: StrSequence = (), unit: str = "", diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py index 05e84038ac..c871598680 100644 --- a/synapse/metrics/background_process_metrics.py +++ b/synapse/metrics/background_process_metrics.py @@ -29,13 +29,10 @@ Awaitable, Callable, ContextManager, - Dict, Generator, Iterable, Optional, Protocol, - Set, - Type, TypeVar, Union, ) @@ -134,7 +131,7 @@ # map from description to a counter, so that we can name our logcontexts # incrementally. 
(It actually duplicates _background_process_start_count, but # it's much simpler to do so than to try to combine them.) -_background_process_counts: Dict[str, int] = {} +_background_process_counts: dict[str, int] = {} # Set of all running background processes that became active active since the # last time metrics were scraped (i.e. background processes that performed some @@ -144,7 +141,7 @@ # background processes stacking up behind a lock or linearizer, where we then # only need to iterate over and update metrics for the process that have # actually been active and can ignore the idle ones. -_background_processes_active_since_last_scrape: "Set[_BackgroundProcess]" = set() +_background_processes_active_since_last_scrape: "set[_BackgroundProcess]" = set() # A lock that covers the above set and dict _bg_metrics_lock = threading.Lock() @@ -531,7 +528,7 @@ def start(self, rusage: "Optional[resource.struct_rusage]") -> None: def __exit__( self, - type: Optional[Type[BaseException]], + type: Optional[type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index ea0887966a..9287747cea 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -26,13 +26,10 @@ Awaitable, Callable, Collection, - Dict, Generator, Iterable, - List, Mapping, Optional, - Tuple, TypeVar, Union, ) @@ -559,7 +556,7 @@ def register_password_auth_provider_callbacks( check_3pid_auth: Optional[CHECK_3PID_AUTH_CALLBACK] = None, on_logged_out: Optional[ON_LOGGED_OUT_CALLBACK] = None, auth_checkers: Optional[ - Dict[Tuple[str, Tuple[str, ...]], CHECK_AUTH_CALLBACK] + dict[tuple[str, tuple[str, ...]], CHECK_AUTH_CALLBACK] ] = None, is_3pid_allowed: Optional[IS_3PID_ALLOWED_CALLBACK] = None, get_username_for_registration: Optional[ @@ -829,7 +826,7 @@ async def get_profile_for_user(self, localpart: str) -> ProfileInfo: user_id = 
UserID.from_string(f"@{localpart}:{server_name}") return await self._store.get_profileinfo(user_id) - async def get_threepids_for_user(self, user_id: str) -> List[Dict[str, str]]: + async def get_threepids_for_user(self, user_id: str) -> list[dict[str, str]]: """Look up the threepids (email addresses and phone numbers) associated with the given Matrix user ID. @@ -865,8 +862,8 @@ def register( self, localpart: str, displayname: Optional[str] = None, - emails: Optional[List[str]] = None, - ) -> Generator["defer.Deferred[Any]", Any, Tuple[str, str]]: + emails: Optional[list[str]] = None, + ) -> Generator["defer.Deferred[Any]", Any, tuple[str, str]]: """Registers a new user with given localpart and optional displayname, emails. Also returns an access token for the new user. @@ -896,7 +893,7 @@ def register_user( self, localpart: str, displayname: Optional[str] = None, - emails: Optional[List[str]] = None, + emails: Optional[list[str]] = None, admin: bool = False, ) -> "defer.Deferred[str]": """Registers a new user with given localpart and optional displayname, emails. @@ -931,7 +928,7 @@ def register_device( user_id: str, device_id: Optional[str] = None, initial_display_name: Optional[str] = None, - ) -> "defer.Deferred[Tuple[str, str, Optional[int], Optional[str]]]": + ) -> "defer.Deferred[tuple[str, str, Optional[int], Optional[str]]]": """Register a device for a user and generate an access token. Added in Synapse v1.2.0. @@ -1085,7 +1082,7 @@ def register_cached_function(self, cached_func: CachedFunction) -> None: ) async def invalidate_cache( - self, cached_func: CachedFunction, keys: Tuple[Any, ...] + self, cached_func: CachedFunction, keys: tuple[Any, ...] ) -> None: """Invalidate a cache entry of a cached function across workers. The cached function needs to be registered on all workers first with `register_cached_function`. 
@@ -1138,7 +1135,7 @@ async def complete_sso_login_async( @defer.inlineCallbacks def get_state_events_in_room( - self, room_id: str, types: Iterable[Tuple[str, Optional[str]]] + self, room_id: str, types: Iterable[tuple[str, Optional[str]]] ) -> Generator[defer.Deferred, Any, Iterable[EventBase]]: """Gets current state events for the given room. @@ -1170,7 +1167,7 @@ async def update_room_membership( room_id: str, new_membership: str, content: Optional[JsonDict] = None, - remote_room_hosts: Optional[List[str]] = None, + remote_room_hosts: Optional[list[str]] = None, ) -> EventBase: """Updates the membership of a user to the given value. @@ -1346,7 +1343,7 @@ async def send_local_online_presence_to(self, users: Iterable[str]) -> None: ) async def set_presence_for_users( - self, users: Mapping[str, Tuple[str, Optional[str]]] + self, users: Mapping[str, tuple[str, Optional[str]]] ) -> None: """ Update the internal presence state of users. @@ -1490,7 +1487,7 @@ async def send_http_push_notification( content: JsonDict, tweaks: Optional[JsonMapping] = None, default_payload: Optional[JsonMapping] = None, - ) -> Dict[str, bool]: + ) -> dict[str, bool]: """Send an HTTP push notification that is forwarded to the registered push gateway for the specified user/device. @@ -1554,9 +1551,9 @@ async def send_mail( def read_templates( self, - filenames: List[str], + filenames: list[str], custom_template_directory: Optional[str] = None, - ) -> List[jinja2.Template]: + ) -> list[jinja2.Template]: """Read and load the content of the template files at the given location. By default, Synapse will look for these templates in its configured template directory, but another directory to search in can be provided. @@ -1595,7 +1592,7 @@ def is_mine(self, id: Union[str, DomainSpecificString]) -> bool: async def get_user_ip_and_agents( self, user_id: str, since_ts: int = 0 - ) -> List[UserIpAndAgent]: + ) -> list[UserIpAndAgent]: """ Return the list of user IPs and agents for a user. 
@@ -1638,7 +1635,7 @@ async def get_user_ip_and_agents( async def get_room_state( self, room_id: str, - event_filter: Optional[Iterable[Tuple[str, Optional[str]]]] = None, + event_filter: Optional[Iterable[tuple[str, Optional[str]]]] = None, ) -> StateMap[EventBase]: """Returns the current state of the given room. @@ -1803,7 +1800,7 @@ async def store_remote_3pid_association( await self._store.add_user_bound_threepid(user_id, medium, address, id_server) def check_push_rule_actions( - self, actions: List[Union[str, Dict[str, str]]] + self, actions: list[Union[str, dict[str, str]]] ) -> None: """Checks if the given push rule actions are valid according to the Matrix specification. @@ -1827,7 +1824,7 @@ async def set_push_rule_action( scope: str, kind: str, rule_id: str, - actions: List[Union[str, Dict[str, str]]], + actions: list[Union[str, dict[str, str]]], ) -> None: """Changes the actions of an existing push rule for the given user. @@ -1866,7 +1863,7 @@ async def set_push_rule_action( async def get_monthly_active_users_by_service( self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: """Generates list of monthly active users and their services. Please see corresponding storage docstring for more details. @@ -1912,7 +1909,7 @@ async def get_canonical_room_alias(self, room_id: RoomID) -> Optional[RoomAlias] return RoomAlias.from_string(room_alias_str) return None - async def lookup_room_alias(self, room_alias: str) -> Tuple[str, List[str]]: + async def lookup_room_alias(self, room_alias: str) -> tuple[str, list[str]]: """ Get the room ID associated with a room alias. @@ -1942,7 +1939,7 @@ async def create_room( config: JsonDict, ratelimit: bool = True, creator_join_profile: Optional[JsonDict] = None, - ) -> Tuple[str, Optional[str]]: + ) -> tuple[str, Optional[str]]: """Creates a new room. Added in Synapse v1.65.0. 
diff --git a/synapse/module_api/callbacks/account_validity_callbacks.py b/synapse/module_api/callbacks/account_validity_callbacks.py index a989249280..da01414d9a 100644 --- a/synapse/module_api/callbacks/account_validity_callbacks.py +++ b/synapse/module_api/callbacks/account_validity_callbacks.py @@ -20,7 +20,7 @@ # import logging -from typing import Awaitable, Callable, List, Optional, Tuple +from typing import Awaitable, Callable, Optional from twisted.web.http import Request @@ -33,15 +33,15 @@ # Temporary hooks to allow for a transition from `/_matrix/client` endpoints # to `/_synapse/client/account_validity`. See `register_callbacks` below. ON_LEGACY_SEND_MAIL_CALLBACK = Callable[[str], Awaitable] -ON_LEGACY_RENEW_CALLBACK = Callable[[str], Awaitable[Tuple[bool, bool, int]]] +ON_LEGACY_RENEW_CALLBACK = Callable[[str], Awaitable[tuple[bool, bool, int]]] ON_LEGACY_ADMIN_REQUEST = Callable[[Request], Awaitable] class AccountValidityModuleApiCallbacks: def __init__(self) -> None: - self.is_user_expired_callbacks: List[IS_USER_EXPIRED_CALLBACK] = [] - self.on_user_registration_callbacks: List[ON_USER_REGISTRATION_CALLBACK] = [] - self.on_user_login_callbacks: List[ON_USER_LOGIN_CALLBACK] = [] + self.is_user_expired_callbacks: list[IS_USER_EXPIRED_CALLBACK] = [] + self.on_user_registration_callbacks: list[ON_USER_REGISTRATION_CALLBACK] = [] + self.on_user_login_callbacks: list[ON_USER_LOGIN_CALLBACK] = [] self.on_legacy_send_mail_callback: Optional[ON_LEGACY_SEND_MAIL_CALLBACK] = None self.on_legacy_renew_callback: Optional[ON_LEGACY_RENEW_CALLBACK] = None diff --git a/synapse/module_api/callbacks/media_repository_callbacks.py b/synapse/module_api/callbacks/media_repository_callbacks.py index 7d3aed9d66..7cb56e558b 100644 --- a/synapse/module_api/callbacks/media_repository_callbacks.py +++ b/synapse/module_api/callbacks/media_repository_callbacks.py @@ -13,7 +13,7 @@ # import logging -from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional +from 
typing import TYPE_CHECKING, Awaitable, Callable, Optional from synapse.config.repository import MediaUploadLimit from synapse.types import JsonDict @@ -30,7 +30,7 @@ IS_USER_ALLOWED_TO_UPLOAD_MEDIA_OF_SIZE_CALLBACK = Callable[[str, int], Awaitable[bool]] GET_MEDIA_UPLOAD_LIMITS_FOR_USER_CALLBACK = Callable[ - [str], Awaitable[Optional[List[MediaUploadLimit]]] + [str], Awaitable[Optional[list[MediaUploadLimit]]] ] ON_MEDIA_UPLOAD_LIMIT_EXCEEDED_CALLBACK = Callable[ @@ -42,16 +42,16 @@ class MediaRepositoryModuleApiCallbacks: def __init__(self, hs: "HomeServer") -> None: self.server_name = hs.hostname self.clock = hs.get_clock() - self._get_media_config_for_user_callbacks: List[ + self._get_media_config_for_user_callbacks: list[ GET_MEDIA_CONFIG_FOR_USER_CALLBACK ] = [] - self._is_user_allowed_to_upload_media_of_size_callbacks: List[ + self._is_user_allowed_to_upload_media_of_size_callbacks: list[ IS_USER_ALLOWED_TO_UPLOAD_MEDIA_OF_SIZE_CALLBACK ] = [] - self._get_media_upload_limits_for_user_callbacks: List[ + self._get_media_upload_limits_for_user_callbacks: list[ GET_MEDIA_UPLOAD_LIMITS_FOR_USER_CALLBACK ] = [] - self._on_media_upload_limit_exceeded_callbacks: List[ + self._on_media_upload_limit_exceeded_callbacks: list[ ON_MEDIA_UPLOAD_LIMIT_EXCEEDED_CALLBACK ] = [] @@ -117,7 +117,7 @@ async def is_user_allowed_to_upload_media_of_size( async def get_media_upload_limits_for_user( self, user_id: str - ) -> Optional[List[MediaUploadLimit]]: + ) -> Optional[list[MediaUploadLimit]]: """ Get the first non-None list of MediaUploadLimits for the user from the registered callbacks. If a list is returned it will be sorted in descending order of duration. 
@@ -128,7 +128,7 @@ async def get_media_upload_limits_for_user( name=f"{callback.__module__}.{callback.__qualname__}", server_name=self.server_name, ): - res: Optional[List[MediaUploadLimit]] = await delay_cancellation( + res: Optional[list[MediaUploadLimit]] = await delay_cancellation( callback(user_id) ) if res is not None: # to allow [] to be returned meaning no limit diff --git a/synapse/module_api/callbacks/ratelimit_callbacks.py b/synapse/module_api/callbacks/ratelimit_callbacks.py index a580ea7d7c..6afcda1216 100644 --- a/synapse/module_api/callbacks/ratelimit_callbacks.py +++ b/synapse/module_api/callbacks/ratelimit_callbacks.py @@ -13,7 +13,7 @@ # import logging -from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional +from typing import TYPE_CHECKING, Awaitable, Callable, Optional import attr @@ -45,7 +45,7 @@ class RatelimitModuleApiCallbacks: def __init__(self, hs: "HomeServer") -> None: self.server_name = hs.hostname self.clock = hs.get_clock() - self._get_ratelimit_override_for_user_callbacks: List[ + self._get_ratelimit_override_for_user_callbacks: list[ GET_RATELIMIT_OVERRIDE_FOR_USER_CALLBACK ] = [] diff --git a/synapse/module_api/callbacks/spamchecker_callbacks.py b/synapse/module_api/callbacks/spamchecker_callbacks.py index 428e733979..4c331c4210 100644 --- a/synapse/module_api/callbacks/spamchecker_callbacks.py +++ b/synapse/module_api/callbacks/spamchecker_callbacks.py @@ -29,10 +29,8 @@ Awaitable, Callable, Collection, - List, Literal, Optional, - Tuple, Union, cast, ) @@ -63,7 +61,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -83,7 +81,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. 
- Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -99,7 +97,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -115,7 +113,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -131,7 +129,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -144,7 +142,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -167,7 +165,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -183,7 +181,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -199,7 +197,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. 
- Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], ] ], ] @@ -211,7 +209,7 @@ [ Optional[dict], Optional[str], - Collection[Tuple[str, str]], + Collection[tuple[str, str]], ], Awaitable[RegistrationBehaviour], ] @@ -219,7 +217,7 @@ [ Optional[dict], Optional[str], - Collection[Tuple[str, str]], + Collection[tuple[str, str]], Optional[str], ], Awaitable[RegistrationBehaviour], @@ -234,7 +232,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -245,7 +243,7 @@ str, Optional[str], Optional[str], - Collection[Tuple[Optional[str], str]], + Collection[tuple[Optional[str], str]], Optional[str], ], Awaitable[ @@ -256,7 +254,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], ] ], ] @@ -266,7 +264,7 @@ def load_legacy_spam_checkers(hs: "synapse.server.HomeServer") -> None: """Wrapper that loads spam checkers configured using the old configuration, and registers the spam checker hooks they implement. 
""" - spam_checkers: List[Any] = [] + spam_checkers: list[Any] = [] api = hs.get_module_api() for module, config in hs.config.spamchecker.spam_checkers: # Older spam checkers don't accept the `api` argument, so we @@ -312,7 +310,7 @@ def async_wrapper(f: Optional[Callable]) -> Optional[Callable[..., Awaitable]]: def wrapper( email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], auth_provider_id: Optional[str], ) -> Union[Awaitable[RegistrationBehaviour], RegistrationBehaviour]: # Assertion required because mypy can't prove we won't @@ -359,36 +357,36 @@ def __init__(self, hs: "synapse.server.HomeServer") -> None: self.server_name = hs.hostname self.clock = hs.get_clock() - self._check_event_for_spam_callbacks: List[CHECK_EVENT_FOR_SPAM_CALLBACK] = [] - self._should_drop_federated_event_callbacks: List[ + self._check_event_for_spam_callbacks: list[CHECK_EVENT_FOR_SPAM_CALLBACK] = [] + self._should_drop_federated_event_callbacks: list[ SHOULD_DROP_FEDERATED_EVENT_CALLBACK ] = [] - self._user_may_join_room_callbacks: List[USER_MAY_JOIN_ROOM_CALLBACK] = [] - self._user_may_invite_callbacks: List[USER_MAY_INVITE_CALLBACK] = [] - self._federated_user_may_invite_callbacks: List[ + self._user_may_join_room_callbacks: list[USER_MAY_JOIN_ROOM_CALLBACK] = [] + self._user_may_invite_callbacks: list[USER_MAY_INVITE_CALLBACK] = [] + self._federated_user_may_invite_callbacks: list[ FEDERATED_USER_MAY_INVITE_CALLBACK ] = [] - self._user_may_send_3pid_invite_callbacks: List[ + self._user_may_send_3pid_invite_callbacks: list[ USER_MAY_SEND_3PID_INVITE_CALLBACK ] = [] - self._user_may_create_room_callbacks: List[USER_MAY_CREATE_ROOM_CALLBACK] = [] - self._user_may_send_state_event_callbacks: List[ + self._user_may_create_room_callbacks: list[USER_MAY_CREATE_ROOM_CALLBACK] = [] + self._user_may_send_state_event_callbacks: list[ USER_MAY_SEND_STATE_EVENT_CALLBACK ] = [] - 
self._user_may_create_room_alias_callbacks: List[ + self._user_may_create_room_alias_callbacks: list[ USER_MAY_CREATE_ROOM_ALIAS_CALLBACK ] = [] - self._user_may_publish_room_callbacks: List[USER_MAY_PUBLISH_ROOM_CALLBACK] = [] - self._check_username_for_spam_callbacks: List[ + self._user_may_publish_room_callbacks: list[USER_MAY_PUBLISH_ROOM_CALLBACK] = [] + self._check_username_for_spam_callbacks: list[ CHECK_USERNAME_FOR_SPAM_CALLBACK ] = [] - self._check_registration_for_spam_callbacks: List[ + self._check_registration_for_spam_callbacks: list[ CHECK_REGISTRATION_FOR_SPAM_CALLBACK ] = [] - self._check_media_file_for_spam_callbacks: List[ + self._check_media_file_for_spam_callbacks: list[ CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK ] = [] - self._check_login_for_spam_callbacks: List[CHECK_LOGIN_FOR_SPAM_CALLBACK] = [] + self._check_login_for_spam_callbacks: list[CHECK_LOGIN_FOR_SPAM_CALLBACK] = [] def register_callbacks( self, @@ -471,7 +469,7 @@ def register_callbacks( @trace async def check_event_for_spam( self, event: "synapse.events.EventBase" - ) -> Union[Tuple[Codes, JsonDict], str]: + ) -> Union[tuple[Codes, JsonDict], str]: """Checks if a given event is considered "spammy" by this server. If the server considers an event spammy, then it will be rejected if @@ -561,7 +559,7 @@ async def should_drop_federated_event( async def user_may_join_room( self, user_id: str, room_id: str, is_invited: bool - ) -> Union[Tuple[Codes, JsonDict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, JsonDict], Literal["NOT_SPAM"]]: """Checks if a given users is allowed to join a room. Not called when a user creates a room. 
@@ -605,7 +603,7 @@ async def user_may_join_room( async def user_may_invite( self, inviter_userid: str, invitee_userid: str, room_id: str - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user may send an invite Args: @@ -650,7 +648,7 @@ async def user_may_invite( async def federated_user_may_invite( self, event: "synapse.events.EventBase" - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user may send an invite Args: @@ -691,7 +689,7 @@ async def federated_user_may_invite( async def user_may_send_3pid_invite( self, inviter_userid: str, medium: str, address: str, room_id: str - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user may invite a given threepid into the room Note that if the threepid is already associated with a Matrix user ID, Synapse @@ -739,7 +737,7 @@ async def user_may_send_3pid_invite( async def user_may_create_room( self, userid: str, room_config: JsonDict - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user may create a room Args: @@ -805,7 +803,7 @@ async def user_may_send_state_event( event_type: str, state_key: str, content: JsonDict, - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user may create a room with a given visibility Args: user_id: The ID of the user attempting to create a room @@ -838,7 +836,7 @@ async def user_may_send_state_event( async def user_may_create_room_alias( self, userid: str, room_alias: RoomAlias - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user may create a room alias Args: @@ -876,7 +874,7 @@ async def 
user_may_create_room_alias( async def user_may_publish_room( self, userid: str, room_id: str - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user may publish a room to the directory Args: @@ -964,7 +962,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], auth_provider_id: Optional[str] = None, ) -> RegistrationBehaviour: """Checks if we should allow the given registration request. @@ -1000,7 +998,7 @@ async def check_registration_for_spam( @trace async def check_media_file_for_spam( self, file_wrapper: ReadableFileWrapper, file_info: FileInfo - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a piece of newly uploaded media should be blocked. This will be called for local uploads, downloads of remote media, each @@ -1062,9 +1060,9 @@ async def check_login_for_spam( user_id: str, device_id: Optional[str], initial_display_name: Optional[str], - request_info: Collection[Tuple[Optional[str], str]], + request_info: Collection[tuple[Optional[str], str]], auth_provider_id: Optional[str] = None, - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if we should allow the given registration request. 
Args: diff --git a/synapse/module_api/callbacks/third_party_event_rules_callbacks.py b/synapse/module_api/callbacks/third_party_event_rules_callbacks.py index 9f7a04372d..2b886cbabb 100644 --- a/synapse/module_api/callbacks/third_party_event_rules_callbacks.py +++ b/synapse/module_api/callbacks/third_party_event_rules_callbacks.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional from twisted.internet.defer import CancelledError @@ -37,7 +37,7 @@ CHECK_EVENT_ALLOWED_CALLBACK = Callable[ - [EventBase, StateMap[EventBase]], Awaitable[Tuple[bool, Optional[dict]]] + [EventBase, StateMap[EventBase]], Awaitable[tuple[bool, Optional[dict]]] ] ON_CREATE_ROOM_CALLBACK = Callable[[Requester, dict, bool], Awaitable] CHECK_THREEPID_CAN_BE_INVITED_CALLBACK = Callable[ @@ -93,7 +93,7 @@ def async_wrapper(f: Optional[Callable]) -> Optional[Callable[..., Awaitable]]: async def wrap_check_event_allowed( event: EventBase, state_events: StateMap[EventBase], - ) -> Tuple[bool, Optional[dict]]: + ) -> tuple[bool, Optional[dict]]: # Assertion required because mypy can't prove we won't change # `f` back to `None`. 
See # https://mypy.readthedocs.io/en/latest/common_issues.html#narrowing-and-inner-functions @@ -159,30 +159,30 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main self._storage_controllers = hs.get_storage_controllers() - self._check_event_allowed_callbacks: List[CHECK_EVENT_ALLOWED_CALLBACK] = [] - self._on_create_room_callbacks: List[ON_CREATE_ROOM_CALLBACK] = [] - self._check_threepid_can_be_invited_callbacks: List[ + self._check_event_allowed_callbacks: list[CHECK_EVENT_ALLOWED_CALLBACK] = [] + self._on_create_room_callbacks: list[ON_CREATE_ROOM_CALLBACK] = [] + self._check_threepid_can_be_invited_callbacks: list[ CHECK_THREEPID_CAN_BE_INVITED_CALLBACK ] = [] - self._check_visibility_can_be_modified_callbacks: List[ + self._check_visibility_can_be_modified_callbacks: list[ CHECK_VISIBILITY_CAN_BE_MODIFIED_CALLBACK ] = [] - self._on_new_event_callbacks: List[ON_NEW_EVENT_CALLBACK] = [] - self._check_can_shutdown_room_callbacks: List[ + self._on_new_event_callbacks: list[ON_NEW_EVENT_CALLBACK] = [] + self._check_can_shutdown_room_callbacks: list[ CHECK_CAN_SHUTDOWN_ROOM_CALLBACK ] = [] - self._check_can_deactivate_user_callbacks: List[ + self._check_can_deactivate_user_callbacks: list[ CHECK_CAN_DEACTIVATE_USER_CALLBACK ] = [] - self._on_profile_update_callbacks: List[ON_PROFILE_UPDATE_CALLBACK] = [] - self._on_user_deactivation_status_changed_callbacks: List[ + self._on_profile_update_callbacks: list[ON_PROFILE_UPDATE_CALLBACK] = [] + self._on_user_deactivation_status_changed_callbacks: list[ ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK ] = [] - self._on_threepid_bind_callbacks: List[ON_THREEPID_BIND_CALLBACK] = [] - self._on_add_user_third_party_identifier_callbacks: List[ + self._on_threepid_bind_callbacks: list[ON_THREEPID_BIND_CALLBACK] = [] + self._on_add_user_third_party_identifier_callbacks: list[ ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK ] = [] - self._on_remove_user_third_party_identifier_callbacks: List[ + 
self._on_remove_user_third_party_identifier_callbacks: list[ ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK ] = [] @@ -261,7 +261,7 @@ async def check_event_allowed( self, event: EventBase, context: UnpersistedEventContextBase, - ) -> Tuple[bool, Optional[dict]]: + ) -> tuple[bool, Optional[dict]]: """Check if a provided event should be allowed in the given context. The module can return: diff --git a/synapse/notifier.py b/synapse/notifier.py index 9169f50c4d..4a75d07e37 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -25,14 +25,10 @@ Awaitable, Callable, Collection, - Dict, Iterable, - List, Literal, Mapping, Optional, - Set, - Tuple, TypeVar, Union, overload, @@ -148,7 +144,7 @@ def __init__( self.last_notified_ms = time_now_ms # Set of listeners that we need to wake up when there has been a change. - self.listeners: Set[Deferred[StreamToken]] = set() + self.listeners: set[Deferred[StreamToken]] = set() def update_and_fetch_deferreds( self, @@ -215,7 +211,7 @@ def new_listener(self, token: StreamToken) -> "Deferred[StreamToken]": @attr.s(slots=True, frozen=True, auto_attribs=True) class EventStreamResult: - events: List[Union[JsonDict, EventBase]] + events: list[Union[JsonDict, EventBase]] start_token: StreamToken end_token: StreamToken @@ -244,25 +240,25 @@ class Notifier: UNUSED_STREAM_EXPIRY_MS = 10 * 60 * 1000 def __init__(self, hs: "HomeServer"): - self.user_to_user_stream: Dict[str, _NotifierUserStream] = {} - self.room_to_user_streams: Dict[str, Set[_NotifierUserStream]] = {} + self.user_to_user_stream: dict[str, _NotifierUserStream] = {} + self.room_to_user_streams: dict[str, set[_NotifierUserStream]] = {} self.hs = hs self.server_name = hs.hostname self._storage_controllers = hs.get_storage_controllers() self.event_sources = hs.get_event_sources() self.store = hs.get_datastores().main - self.pending_new_room_events: List[_PendingRoomEventEntry] = [] + self.pending_new_room_events: list[_PendingRoomEventEntry] = [] self._replication_notifier 
= hs.get_replication_notifier() - self._new_join_in_room_callbacks: List[Callable[[str, str], None]] = [] + self._new_join_in_room_callbacks: list[Callable[[str, str], None]] = [] self._federation_client = hs.get_federation_http_client() self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules # List of callbacks to be notified when a lock is released - self._lock_released_callback: List[Callable[[str, str, str], None]] = [] + self._lock_released_callback: list[Callable[[str, str, str], None]] = [] self.reactor = hs.get_reactor() self.clock = hs.get_clock() @@ -283,10 +279,10 @@ def __init__(self, hs: "HomeServer"): # when rendering the metrics page, which is likely once per minute at # most when scraping it. # - # Ideally, we'd use `Mapping[Tuple[str], int]` here but mypy doesn't like it. + # Ideally, we'd use `Mapping[tuple[str], int]` here but mypy doesn't like it. # This is close enough and better than a type ignore. - def count_listeners() -> Mapping[Tuple[str, ...], int]: - all_user_streams: Set[_NotifierUserStream] = set() + def count_listeners() -> Mapping[tuple[str, ...], int]: + all_user_streams: set[_NotifierUserStream] = set() for streams in list(self.room_to_user_streams.values()): all_user_streams |= streams @@ -338,7 +334,7 @@ def add_new_join_in_room_callback(self, cb: Callable[[str, str], None]) -> None: async def on_new_room_events( self, - events_and_pos: List[Tuple[EventBase, PersistedEventPosition]], + events_and_pos: list[tuple[EventBase, PersistedEventPosition]], max_room_stream_token: RoomStreamToken, extra_users: Optional[Collection[UserID]] = None, ) -> None: @@ -373,7 +369,7 @@ async def on_un_partial_stated_room( time_now_ms = self.clock.time_msec() current_token = self.event_sources.get_current_token() - listeners: List["Deferred[StreamToken]"] = [] + listeners: list["Deferred[StreamToken]"] = [] for user_stream in user_streams: try: listeners.extend( @@ -397,7 +393,7 @@ async def on_un_partial_stated_room( async 
def notify_new_room_events( self, - event_entries: List[Tuple[_PendingRoomEventEntry, str]], + event_entries: list[tuple[_PendingRoomEventEntry, str]], max_room_stream_token: RoomStreamToken, ) -> None: """Used by handlers to inform the notifier something has happened @@ -453,8 +449,8 @@ def _notify_pending_new_room_events( pending = self.pending_new_room_events self.pending_new_room_events = [] - users: Set[UserID] = set() - rooms: Set[str] = set() + users: set[UserID] = set() + rooms: set[str] = set() for entry in pending: if entry.event_pos.persisted_after(max_room_stream_token): @@ -560,7 +556,7 @@ def on_new_event( users = users or [] rooms = rooms or [] - user_streams: Set[_NotifierUserStream] = set() + user_streams: set[_NotifierUserStream] = set() log_kv( { @@ -593,7 +589,7 @@ def on_new_event( time_now_ms = self.clock.time_msec() current_token = self.event_sources.get_current_token() - listeners: List["Deferred[StreamToken]"] = [] + listeners: list["Deferred[StreamToken]"] = [] for user_stream in user_streams: try: listeners.extend( @@ -771,7 +767,7 @@ async def check_for_updates( # The events fetched from each source are a JsonDict, EventBase, or # UserPresenceState, but see below for UserPresenceState being # converted to JsonDict. - events: List[Union[JsonDict, EventBase]] = [] + events: list[Union[JsonDict, EventBase]] = [] end_token = from_token for keyname, source in self.event_sources.sources.get_sources(): @@ -871,7 +867,7 @@ async def wait_for_stream_token(self, stream_token: StreamToken) -> bool: async def _get_room_ids( self, user: UserID, explicit_room_id: Optional[str] - ) -> Tuple[StrCollection, bool]: + ) -> tuple[StrCollection, bool]: joined_room_ids = await self.store.get_rooms_for_user(user.to_string()) if explicit_room_id: if explicit_room_id in joined_room_ids: @@ -960,7 +956,7 @@ class ReplicationNotifier: This is separate from the notifier to avoid circular dependencies. 
""" - _replication_callbacks: List[Callable[[], None]] = attr.Factory(list) + _replication_callbacks: list[Callable[[], None]] = attr.Factory(list) def add_replication_callback(self, cb: Callable[[], None]) -> None: """Add a callback that will be called when some new data is available. diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py index 7bc99bd785..552af8e14a 100644 --- a/synapse/push/__init__.py +++ b/synapse/push/__init__.py @@ -94,7 +94,7 @@ """ import abc -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Optional import attr @@ -131,7 +131,7 @@ class PusherConfig: # while the "set_device_id_for_pushers" background update is running. access_token: Optional[int] - def as_dict(self) -> Dict[str, Any]: + def as_dict(self) -> dict[str, Any]: """Information that can be retrieved about a pusher after creation.""" return { "app_display_name": self.app_display_name, diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index ea9169aef0..9fcd7fdc6e 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -24,13 +24,9 @@ TYPE_CHECKING, Any, Collection, - Dict, - FrozenSet, - List, Mapping, Optional, Sequence, - Tuple, Union, cast, ) @@ -237,7 +233,7 @@ async def _get_power_levels_and_sender_level( event: EventBase, context: EventContext, event_id_to_event: Mapping[str, EventBase], - ) -> Tuple[dict, Optional[int]]: + ) -> tuple[dict, Optional[int]]: """ Given an event and an event context, get the power level event relevant to the event and the power level of the sender of the event. @@ -309,13 +305,13 @@ async def _get_power_levels_and_sender_level( async def _related_events( self, event: EventBase - ) -> Dict[str, Dict[str, JsonValue]]: + ) -> dict[str, dict[str, JsonValue]]: """Fetches the related events for 'event'. 
Sets the im.vector.is_falling_back key if the event is from a fallback relation Returns: Mapping of relation type to flattened events. """ - related_events: Dict[str, Dict[str, JsonValue]] = {} + related_events: dict[str, dict[str, JsonValue]] = {} if self._related_event_match_enabled: related_event_id = event.content.get("m.relates_to", {}).get("event_id") relation_type = event.content.get("m.relates_to", {}).get("rel_type") @@ -352,7 +348,7 @@ async def _related_events( return related_events async def action_for_events_by_user( - self, events_and_context: List[EventPersistencePair] + self, events_and_context: list[EventPersistencePair] ) -> None: """Given a list of events and their associated contexts, evaluate the push rules for each event, check if the message should increment the unread count, and @@ -394,7 +390,7 @@ async def _action_for_event_by_user( count_as_unread = _should_count_as_unread(event, context) rules_by_user = await self._get_rules_for_event(event) - actions_by_user: Dict[str, Collection[Union[Mapping, str]]] = {} + actions_by_user: dict[str, Collection[Union[Mapping, str]]] = {} # Gather a bunch of info in parallel. 
# @@ -409,7 +405,7 @@ async def _action_for_event_by_user( profiles, ) = await make_deferred_yieldable( cast( - "Deferred[Tuple[int, Tuple[dict, Optional[int]], Dict[str, Dict[str, JsonValue]], Mapping[str, ProfileInfo]]]", + "Deferred[tuple[int, tuple[dict, Optional[int]], dict[str, dict[str, JsonValue]], Mapping[str, ProfileInfo]]]", gather_results( ( run_in_background( # type: ignore[call-overload] @@ -481,7 +477,7 @@ async def _action_for_event_by_user( self.hs.config.experimental.msc4306_enabled, ) - msc4306_thread_subscribers: Optional[FrozenSet[str]] = None + msc4306_thread_subscribers: Optional[frozenset[str]] = None if self.hs.config.experimental.msc4306_enabled and thread_id != MAIN_TIMELINE: # pull out, in batch, all local subscribers to this thread # (in the common case, they will all be getting processed for push @@ -556,9 +552,9 @@ async def _action_for_event_by_user( ) -MemberMap = Dict[str, Optional[EventIdMembership]] -Rule = Dict[str, dict] -RulesByUser = Dict[str, List[Rule]] +MemberMap = dict[str, Optional[EventIdMembership]] +Rule = dict[str, dict] +RulesByUser = dict[str, list[Rule]] StateGroup = Union[object, int] @@ -572,9 +568,9 @@ def _is_simple_value(value: Any) -> bool: def _flatten_dict( d: Union[EventBase, Mapping[str, Any]], - prefix: Optional[List[str]] = None, - result: Optional[Dict[str, JsonValue]] = None, -) -> Dict[str, JsonValue]: + prefix: Optional[list[str]] = None, + result: Optional[dict[str, JsonValue]] = None, +) -> dict[str, JsonValue]: """ Given a JSON dictionary (or event) which might contain sub dictionaries, flatten it into a single layer dictionary by combining the keys & sub-keys. 
diff --git a/synapse/push/clientformat.py b/synapse/push/clientformat.py index 4f647491f1..fd1758db9d 100644 --- a/synapse/push/clientformat.py +++ b/synapse/push/clientformat.py @@ -20,7 +20,7 @@ # import copy -from typing import Any, Dict, List, Optional +from typing import Any, Optional from synapse.push.rulekinds import PRIORITY_CLASS_INVERSE_MAP, PRIORITY_CLASS_MAP from synapse.synapse_rust.push import FilteredPushRules, PushRule @@ -29,11 +29,11 @@ def format_push_rules_for_user( user: UserID, ruleslist: FilteredPushRules -) -> Dict[str, Dict[str, List[Dict[str, Any]]]]: +) -> dict[str, dict[str, list[dict[str, Any]]]]: """Converts a list of rawrules and a enabled map into nested dictionaries to match the Matrix client-server format for push rules""" - rules: Dict[str, Dict[str, List[Dict[str, Any]]]] = {"global": {}} + rules: dict[str, dict[str, list[dict[str, Any]]]] = {"global": {}} rules["global"] = _add_empty_priority_class_arrays(rules["global"]) @@ -70,7 +70,7 @@ def format_push_rules_for_user( return rules -def _convert_type_to_value(rule_or_cond: Dict[str, Any], user: UserID) -> None: +def _convert_type_to_value(rule_or_cond: dict[str, Any], user: UserID) -> None: for type_key in ("pattern", "value"): type_value = rule_or_cond.pop(f"{type_key}_type", None) if type_value == "user_id": @@ -79,14 +79,14 @@ def _convert_type_to_value(rule_or_cond: Dict[str, Any], user: UserID) -> None: rule_or_cond[type_key] = user.localpart -def _add_empty_priority_class_arrays(d: Dict[str, list]) -> Dict[str, list]: +def _add_empty_priority_class_arrays(d: dict[str, list]) -> dict[str, list]: for pc in PRIORITY_CLASS_MAP.keys(): d[pc] = [] return d -def _rule_to_template(rule: PushRule) -> Optional[Dict[str, Any]]: - templaterule: Dict[str, Any] +def _rule_to_template(rule: PushRule) -> Optional[dict[str, Any]]: + templaterule: dict[str, Any] unscoped_rule_id = _rule_id_from_namespaced(rule.rule_id) diff --git a/synapse/push/emailpusher.py b/synapse/push/emailpusher.py 
index 1484bc8fc0..83823c2284 100644 --- a/synapse/push/emailpusher.py +++ b/synapse/push/emailpusher.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Dict, List, Optional +from typing import TYPE_CHECKING, Optional from twisted.internet.error import AlreadyCalled, AlreadyCancelled from twisted.internet.interfaces import IDelayedCall @@ -71,7 +71,7 @@ def __init__(self, hs: "HomeServer", pusher_config: PusherConfig, mailer: Mailer self.store = self.hs.get_datastores().main self.email = pusher_config.pushkey self.timed_call: Optional[IDelayedCall] = None - self.throttle_params: Dict[str, ThrottleParams] = {} + self.throttle_params: dict[str, ThrottleParams] = {} self._inited = False self._is_processing = False @@ -324,7 +324,7 @@ async def sent_notif_update_throttle( ) async def send_notification( - self, push_actions: List[EmailPushAction], reason: EmailReason + self, push_actions: list[EmailPushAction], reason: EmailReason ) -> None: logger.info("Sending notif email for user %r", self.user_id) diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index 5cac5de8cb..8df106b859 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -21,7 +21,7 @@ import logging import random import urllib.parse -from typing import TYPE_CHECKING, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Optional, Union from prometheus_client import Counter @@ -68,7 +68,7 @@ ) -def tweaks_for_actions(actions: List[Union[str, Dict]]) -> JsonMapping: +def tweaks_for_actions(actions: list[Union[str, dict]]) -> JsonMapping: """ Converts a list of actions into a `tweaks` dict (which can then be passed to the push gateway). 
@@ -396,7 +396,7 @@ async def dispatch_push( content: JsonDict, tweaks: Optional[JsonMapping] = None, default_payload: Optional[JsonMapping] = None, - ) -> Union[bool, List[str]]: + ) -> Union[bool, list[str]]: """Send a notification to the registered push gateway, with `content` being the content of the `notification` top property specified in the spec. Note that the `devices` property will be added with device-specific @@ -453,7 +453,7 @@ async def dispatch_push_event( event: EventBase, tweaks: JsonMapping, badge: int, - ) -> Union[bool, List[str]]: + ) -> Union[bool, list[str]]: """Send a notification to the registered push gateway by building it from an event. diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index d76cc8237b..3dac61aed5 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -21,7 +21,7 @@ import logging import urllib.parse -from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, TypeVar +from typing import TYPE_CHECKING, Iterable, Optional, TypeVar import bleach import jinja2 @@ -287,7 +287,7 @@ async def send_notification_mail( notif_events = await self.store.get_events([pa.event_id for pa in push_actions]) - notifs_by_room: Dict[str, List[EmailPushAction]] = {} + notifs_by_room: dict[str, list[EmailPushAction]] = {} for pa in push_actions: notifs_by_room.setdefault(pa.room_id, []).append(pa) @@ -317,7 +317,7 @@ async def _fetch_room_state(room_id: str) -> None: # actually sort our so-called rooms_in_order list, most recent room first rooms_in_order.sort(key=lambda r: -(notifs_by_room[r][-1].received_ts or 0)) - rooms: List[RoomVars] = [] + rooms: list[RoomVars] = [] for r in rooms_in_order: roomvars = await self._get_room_vars( @@ -417,7 +417,7 @@ async def _get_room_vars( room_id: str, user_id: str, notifs: Iterable[EmailPushAction], - notif_events: Dict[str, EventBase], + notif_events: dict[str, EventBase], room_state_ids: StateMap[str], ) -> RoomVars: """ @@ -665,9 +665,9 @@ def _add_image_message_vars( 
async def _make_summary_text_single_room( self, room_id: str, - notifs: List[EmailPushAction], + notifs: list[EmailPushAction], room_state_ids: StateMap[str], - notif_events: Dict[str, EventBase], + notif_events: dict[str, EventBase], user_id: str, ) -> str: """ @@ -781,9 +781,9 @@ async def _make_summary_text_single_room( async def _make_summary_text( self, - notifs_by_room: Dict[str, List[EmailPushAction]], - room_state_ids: Dict[str, StateMap[str]], - notif_events: Dict[str, EventBase], + notifs_by_room: dict[str, list[EmailPushAction]], + room_state_ids: dict[str, StateMap[str]], + notif_events: dict[str, EventBase], reason: EmailReason, ) -> str: """ @@ -814,9 +814,9 @@ async def _make_summary_text( async def _make_summary_text_from_member_events( self, room_id: str, - notifs: List[EmailPushAction], + notifs: list[EmailPushAction], room_state_ids: StateMap[str], - notif_events: Dict[str, EventBase], + notif_events: dict[str, EventBase], ) -> str: """ Make a summary text for the email when only a single room has notifications. 
@@ -995,7 +995,7 @@ def safe_text(raw_text: str) -> Markup: ) -def deduped_ordered_list(it: Iterable[T]) -> List[T]: +def deduped_ordered_list(it: Iterable[T]) -> list[T]: seen = set() ret = [] for item in it: diff --git a/synapse/push/presentable_names.py b/synapse/push/presentable_names.py index 1faa57e9f5..2f32e18b9a 100644 --- a/synapse/push/presentable_names.py +++ b/synapse/push/presentable_names.py @@ -21,7 +21,7 @@ import logging import re -from typing import TYPE_CHECKING, Dict, Iterable, Optional +from typing import TYPE_CHECKING, Iterable, Optional from synapse.api.constants import EventTypes, Membership from synapse.events import EventBase @@ -205,8 +205,8 @@ def name_from_member_event(member_event: EventBase) -> str: return member_event.state_key -def _state_as_two_level_dict(state: StateMap[str]) -> Dict[str, Dict[str, str]]: - ret: Dict[str, Dict[str, str]] = {} +def _state_as_two_level_dict(state: StateMap[str]) -> dict[str, dict[str, str]]: + ret: dict[str, dict[str, str]] = {} for k, v in state.items(): ret.setdefault(k[0], {})[k[1]] = v return ret diff --git a/synapse/push/push_tools.py b/synapse/push/push_tools.py index 3f3e4a9234..8e2ff2bcb4 100644 --- a/synapse/push/push_tools.py +++ b/synapse/push/push_tools.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict from synapse.api.constants import EventTypes, Membership from synapse.events import EventBase @@ -56,8 +55,8 @@ async def get_badge_count(store: DataStore, user_id: str, group_by_room: bool) - async def get_context_for_event( storage: StorageControllers, ev: EventBase, user_id: str -) -> Dict[str, str]: - ctx: Dict[str, str] = {} +) -> dict[str, str]: + ctx: dict[str, str] = {} if ev.internal_metadata.outlier: # We don't have state for outliers, so we can't compute the context diff --git a/synapse/push/push_types.py b/synapse/push/push_types.py index 57fa926a46..e1678cd717 100644 --- a/synapse/push/push_types.py +++ 
b/synapse/push/push_types.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional, TypedDict +from typing import Optional, TypedDict class EmailReason(TypedDict, total=False): @@ -91,7 +91,7 @@ class NotifVars(TypedDict): link: str ts: Optional[int] - messages: List[MessageVars] + messages: list[MessageVars] class RoomVars(TypedDict): @@ -110,7 +110,7 @@ class RoomVars(TypedDict): title: Optional[str] hash: int invite: bool - notifs: List[NotifVars] + notifs: list[NotifVars] link: str avatar_url: Optional[str] @@ -137,5 +137,5 @@ class TemplateVars(TypedDict, total=False): user_display_name: str unsubscribe_link: str summary_text: str - rooms: List[RoomVars] + rooms: list[RoomVars] reason: EmailReason diff --git a/synapse/push/pusher.py b/synapse/push/pusher.py index 9a5dd7a9d4..17238c95c0 100644 --- a/synapse/push/pusher.py +++ b/synapse/push/pusher.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Callable, Dict, Optional +from typing import TYPE_CHECKING, Callable, Optional from synapse.push import Pusher, PusherConfig from synapse.push.emailpusher import EmailPusher @@ -38,13 +38,13 @@ def __init__(self, hs: "HomeServer"): self.hs = hs self.config = hs.config - self.pusher_types: Dict[str, Callable[[HomeServer, PusherConfig], Pusher]] = { + self.pusher_types: dict[str, Callable[[HomeServer, PusherConfig], Pusher]] = { "http": HttpPusher } logger.info("email enable notifs: %r", hs.config.email.email_enable_notifs) if hs.config.email.email_enable_notifs: - self.mailers: Dict[str, Mailer] = {} + self.mailers: dict[str, Mailer] = {} self._notif_template_html = hs.config.email.email_notif_template_html self._notif_template_text = hs.config.email.email_notif_template_text diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 977c55b683..6b70de976a 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -20,7 +20,7 @@ # import logging 
-from typing import TYPE_CHECKING, Dict, Iterable, Optional +from typing import TYPE_CHECKING, Iterable, Optional from prometheus_client import Gauge @@ -100,7 +100,7 @@ def __init__(self, hs: "HomeServer"): self._last_room_stream_id_seen = self.store.get_room_max_stream_ordering() # map from user id to app_id:pushkey to pusher - self.pushers: Dict[str, Dict[str, Pusher]] = {} + self.pushers: dict[str, dict[str, Pusher]] = {} self._account_validity_handler = hs.get_account_validity_handler() diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index 0850a99e0c..d76b40cf39 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -23,7 +23,7 @@ import re import urllib.parse from inspect import signature -from typing import TYPE_CHECKING, Any, Awaitable, Callable, ClassVar, Dict, List, Tuple +from typing import TYPE_CHECKING, Any, Awaitable, Callable, ClassVar from prometheus_client import Counter, Gauge @@ -112,7 +112,7 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta): """ NAME: str = abc.abstractproperty() # type: ignore - PATH_ARGS: Tuple[str, ...] = abc.abstractproperty() # type: ignore + PATH_ARGS: tuple[str, ...] = abc.abstractproperty() # type: ignore METHOD = "POST" CACHE = True RETRY_ON_TIMEOUT = True @@ -187,7 +187,7 @@ async def _serialize_payload(**kwargs) -> JsonDict: @abc.abstractmethod async def _handle_request( self, request: Request, content: JsonDict, **kwargs: Any - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """Handle incoming request. This is called with the request object and PATH_ARGS. @@ -292,7 +292,7 @@ async def send_request( "/".join(url_args), ) - headers: Dict[bytes, List[bytes]] = {} + headers: dict[bytes, list[bytes]] = {} # Add an authorization header, if configured. 
if replication_secret: headers[b"Authorization"] = [b"Bearer " + replication_secret] @@ -403,7 +403,7 @@ def register(self, http_server: HttpServer) -> None: async def _check_auth_and_handle( self, request: SynapseRequest, **kwargs: Any - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """Called on new incoming requests when caching is enabled. Checks if there is a cached response for the request and returns that, otherwise calls `_handle_request` and caches its response. diff --git a/synapse/replication/http/account_data.py b/synapse/replication/http/account_data.py index b6eac153ba..560973b916 100644 --- a/synapse/replication/http/account_data.py +++ b/synapse/replication/http/account_data.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -68,7 +68,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str, account_data_type: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: max_stream_id = await self.handler.add_account_data_for_user( user_id, account_data_type, content["content"] ) @@ -106,7 +106,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str, account_data_type: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: max_stream_id = await self.handler.remove_account_data_for_user( user_id, account_data_type ) @@ -153,7 +153,7 @@ async def _handle_request( # type: ignore[override] user_id: str, room_id: str, account_data_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: max_stream_id = await self.handler.add_account_data_to_room( user_id, room_id, account_data_type, content["content"] ) @@ -196,7 +196,7 @@ async def _handle_request( # type: ignore[override] user_id: str, room_id: str, 
account_data_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: max_stream_id = await self.handler.remove_account_data_for_room( user_id, room_id, account_data_type ) @@ -238,7 +238,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str, room_id: str, tag: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: max_stream_id = await self.handler.add_tag_to_room( user_id, room_id, tag, content["content"] ) @@ -276,7 +276,7 @@ async def _serialize_payload(user_id: str, room_id: str, tag: str) -> JsonDict: async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str, room_id: str, tag: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: max_stream_id = await self.handler.remove_tag_from_room( user_id, room_id, diff --git a/synapse/replication/http/deactivate_account.py b/synapse/replication/http/deactivate_account.py index 89658350a5..82df1e1322 100644 --- a/synapse/replication/http/deactivate_account.py +++ b/synapse/replication/http/deactivate_account.py @@ -19,7 +19,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -69,7 +69,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: by_admin = content["by_admin"] await self.deactivate_account_handler.notify_account_deactivated( user_id, by_admin=by_admin diff --git a/synapse/replication/http/delayed_events.py b/synapse/replication/http/delayed_events.py index 229022070c..e448ac32bf 100644 --- a/synapse/replication/http/delayed_events.py +++ b/synapse/replication/http/delayed_events.py @@ -13,7 +13,7 @@ # import logging -from typing import TYPE_CHECKING, Dict, Optional, 
Tuple +from typing import TYPE_CHECKING, Optional from twisted.web.server import Request @@ -52,7 +52,7 @@ async def _serialize_payload(next_send_ts: int) -> JsonDict: # type: ignore[ove async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict - ) -> Tuple[int, Dict[str, Optional[JsonMapping]]]: + ) -> tuple[int, dict[str, Optional[JsonMapping]]]: self.handler.on_added(int(content["next_send_ts"])) return 200, {} diff --git a/synapse/replication/http/devices.py b/synapse/replication/http/devices.py index 94981e22eb..2fadee8a06 100644 --- a/synapse/replication/http/devices.py +++ b/synapse/replication/http/devices.py @@ -19,7 +19,7 @@ # import logging -from typing import TYPE_CHECKING, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from twisted.web.server import Request @@ -59,13 +59,13 @@ def __init__(self, hs: "HomeServer"): @staticmethod async def _serialize_payload( # type: ignore[override] - user_id: str, device_ids: List[str] + user_id: str, device_ids: list[str] ) -> JsonDict: return {"device_ids": device_ids} async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: device_ids = content["device_ids"] span = active_span() @@ -102,12 +102,12 @@ def __init__(self, hs: "HomeServer"): self.clock = hs.get_clock() @staticmethod - async def _serialize_payload(from_user_id: str, user_ids: List[str]) -> JsonDict: # type: ignore[override] + async def _serialize_payload(from_user_id: str, user_ids: list[str]) -> JsonDict: # type: ignore[override] return {"user_ids": user_ids} async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, from_user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: user_ids = content["user_ids"] span = active_span() @@ -165,13 +165,13 @@ def __init__(self, hs: "HomeServer"): self.clock = hs.get_clock() @staticmethod - async 
def _serialize_payload(user_ids: List[str]) -> JsonDict: # type: ignore[override] + async def _serialize_payload(user_ids: list[str]) -> JsonDict: # type: ignore[override] return {"user_ids": user_ids} async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict - ) -> Tuple[int, Dict[str, Optional[JsonMapping]]]: - user_ids: List[str] = content["user_ids"] + ) -> tuple[int, dict[str, Optional[JsonMapping]]]: + user_ids: list[str] = content["user_ids"] logger.info("Resync for %r", user_ids) span = active_span() @@ -210,7 +210,7 @@ async def _serialize_payload() -> JsonDict: # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.device_handler.handle_new_device_update() return 200, {} @@ -241,7 +241,7 @@ async def _serialize_payload(room_id: str) -> JsonDict: # type: ignore[override async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.device_handler.handle_room_un_partial_stated(room_id) return 200, {} diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index 1e302ef59f..448a1f8a71 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -19,7 +19,7 @@ # import logging -from typing import TYPE_CHECKING, List, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -86,7 +86,7 @@ def __init__(self, hs: "HomeServer"): async def _serialize_payload( # type: ignore[override] store: "DataStore", room_id: str, - event_and_contexts: List[EventPersistencePair], + event_and_contexts: list[EventPersistencePair], backfilled: bool, ) -> JsonDict: """ @@ -122,7 +122,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, 
request: Request, content: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: with Measure( self.clock, name="repl_fed_send_events_parse", server_name=self.server_name ): @@ -194,7 +194,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, edu_type: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: origin = content["origin"] edu_content = content["content"] @@ -243,7 +243,7 @@ async def _serialize_payload(query_type: str, args: JsonDict) -> JsonDict: # ty async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, query_type: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: args = content["args"] args["origin"] = content["origin"] @@ -285,7 +285,7 @@ async def _serialize_payload(room_id: str) -> JsonDict: # type: ignore[override async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.store.clean_room_for_join(room_id) return 200, {} @@ -320,7 +320,7 @@ async def _serialize_payload(room_id: str, room_version: RoomVersion) -> JsonDic async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: room_version = KNOWN_ROOM_VERSIONS[content["room_version"]] await self.store.maybe_store_room_on_outlier_membership(room_id, room_version) return 200, {} diff --git a/synapse/replication/http/login.py b/synapse/replication/http/login.py index 8b5b7f755a..0022e12eac 100644 --- a/synapse/replication/http/login.py +++ b/synapse/replication/http/login.py @@ -19,7 +19,7 @@ # import logging -from typing import TYPE_CHECKING, Optional, Tuple, cast +from typing import TYPE_CHECKING, Optional, cast from twisted.web.server import Request @@ -79,7 +79,7 @@ async def _serialize_payload( # type: 
ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: device_id = content["device_id"] initial_display_name = content["initial_display_name"] is_guest = content["is_guest"] diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py index fc66039b2f..0e588037b6 100644 --- a/synapse/replication/http/membership.py +++ b/synapse/replication/http/membership.py @@ -18,7 +18,7 @@ # # import logging -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from twisted.web.server import Request @@ -63,7 +63,7 @@ async def _serialize_payload( # type: ignore[override] requester: Requester, room_id: str, user_id: str, - remote_room_hosts: List[str], + remote_room_hosts: list[str], content: JsonDict, ) -> JsonDict: """ @@ -85,7 +85,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: SynapseRequest, content: JsonDict, room_id: str, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: remote_room_hosts = content["remote_room_hosts"] event_content = content["content"] @@ -130,7 +130,7 @@ async def _serialize_payload( # type: ignore[override] requester: Requester, room_id: str, user_id: str, - remote_room_hosts: List[str], + remote_room_hosts: list[str], content: JsonDict, ) -> JsonDict: """ @@ -149,7 +149,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: SynapseRequest, content: JsonDict, room_id: str, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: remote_room_hosts = content["remote_room_hosts"] event_content = content["content"] @@ -215,7 +215,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: SynapseRequest, 
content: JsonDict, invite_event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: txn_id = content["txn_id"] event_content = content["content"] @@ -279,7 +279,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: SynapseRequest, content: JsonDict, knock_event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: txn_id = content["txn_id"] event_content = content["content"] @@ -343,7 +343,7 @@ async def _handle_request( # type: ignore[override] room_id: str, user_id: str, change: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: logger.info("user membership change: %s in %s", user_id, room_id) user = UserID.from_string(user_id) diff --git a/synapse/replication/http/presence.py b/synapse/replication/http/presence.py index 8a3f3b0e67..4a894b0221 100644 --- a/synapse/replication/http/presence.py +++ b/synapse/replication/http/presence.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from twisted.web.server import Request @@ -63,7 +63,7 @@ async def _serialize_payload(user_id: str, device_id: Optional[str]) -> JsonDict async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self._presence_handler.bump_presence_active_time( UserID.from_string(user_id), content.get("device_id") ) @@ -116,7 +116,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self._presence_handler.set_state( UserID.from_string(user_id), content.get("device_id"), diff --git a/synapse/replication/http/push.py b/synapse/replication/http/push.py index 6e20a208b6..905414b5ee 100644 --- a/synapse/replication/http/push.py 
+++ b/synapse/replication/http/push.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -68,7 +68,7 @@ async def _serialize_payload(app_id: str, pushkey: str, user_id: str) -> JsonDic async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: app_id = content["app_id"] pushkey = content["pushkey"] @@ -110,7 +110,7 @@ async def _handle_request( # type: ignore[override] user_id: str, old_room_id: str, new_room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self._store.copy_push_rules_from_room_to_room_for_user( old_room_id, new_room_id, user_id ) @@ -144,7 +144,7 @@ async def _serialize_payload(user_id: str) -> JsonDict: # type: ignore[override async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self._store.delete_all_pushers_for_user(user_id) return 200, {} diff --git a/synapse/replication/http/register.py b/synapse/replication/http/register.py index 27d3504c3c..780fcc463a 100644 --- a/synapse/replication/http/register.py +++ b/synapse/replication/http/register.py @@ -19,7 +19,7 @@ # import logging -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from twisted.web.server import Request @@ -104,7 +104,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.registration_handler.check_registration_ratelimit(content["address"]) # Always default admin users to approved (since it means they were created by @@ -156,7 +156,7 @@ async def _serialize_payload( # type: ignore[override] async def 
_handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: auth_result = content["auth_result"] access_token = content["access_token"] diff --git a/synapse/replication/http/send_events.py b/synapse/replication/http/send_events.py index 6b1a5a9956..b020a0fe7c 100644 --- a/synapse/replication/http/send_events.py +++ b/synapse/replication/http/send_events.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, List, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -85,11 +85,11 @@ def __init__(self, hs: "HomeServer"): @staticmethod async def _serialize_payload( # type: ignore[override] - events_and_context: List[EventPersistencePair], + events_and_context: list[EventPersistencePair], store: "DataStore", requester: Requester, ratelimit: bool, - extra_users: List[UserID], + extra_users: list[UserID], ) -> JsonDict: """ Args: @@ -122,7 +122,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, payload: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: with Measure( self.clock, name="repl_send_events_parse", server_name=self.server_name ): diff --git a/synapse/replication/http/state.py b/synapse/replication/http/state.py index 3ec4ca5de3..823d330041 100644 --- a/synapse/replication/http/state.py +++ b/synapse/replication/http/state.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -65,7 +65,7 @@ async def _serialize_payload(room_id: str) -> JsonDict: # type: ignore[override async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: writer_instance = self._events_shard_config.get_instance(room_id) if writer_instance != 
self._instance_name: raise SynapseError( diff --git a/synapse/replication/http/streams.py b/synapse/replication/http/streams.py index 61f70d5790..42e78c976f 100644 --- a/synapse/replication/http/streams.py +++ b/synapse/replication/http/streams.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -79,7 +79,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, stream_name: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: stream = self.streams.get(stream_name) if stream is None: raise SynapseError(400, "Unknown stream") diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py index f2561bc0c5..f9605407af 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py @@ -21,7 +21,7 @@ """A replication client for use by synapse workers.""" import logging -from typing import TYPE_CHECKING, Dict, Iterable, Optional, Set, Tuple +from typing import TYPE_CHECKING, Iterable, Optional from sortedcontainers import SortedList @@ -95,8 +95,8 @@ def __init__(self, hs: "HomeServer"): # Map from stream and instance to list of deferreds waiting for the stream to # arrive at a particular position. The lists are sorted by stream position. - self._streams_to_waiters: Dict[ - Tuple[str, str], SortedList[Tuple[int, Deferred]] + self._streams_to_waiters: dict[ + tuple[str, str], SortedList[tuple[int, Deferred]] ] = {} async def on_rdata( @@ -113,7 +113,7 @@ async def on_rdata( token: stream token for this batch of rows rows: a list of Stream.ROW_TYPE objects as returned by Stream.parse_row. 
""" - all_room_ids: Set[str] = set() + all_room_ids: set[str] = set() if stream_name == DeviceListsStream.NAME: if any(not row.is_signature and not row.hosts_calculated for row in rows): # This only uses the minimum stream position on the device lists @@ -200,7 +200,7 @@ async def on_rdata( if row.data.rejected: continue - extra_users: Tuple[UserID, ...] = () + extra_users: tuple[UserID, ...] = () if row.data.type == EventTypes.Member and row.data.state_key: extra_users = (UserID.from_string(row.data.state_key),) diff --git a/synapse/replication/tcp/commands.py b/synapse/replication/tcp/commands.py index 8eec68c3dd..f115cc4db9 100644 --- a/synapse/replication/tcp/commands.py +++ b/synapse/replication/tcp/commands.py @@ -26,7 +26,7 @@ import abc import logging -from typing import List, Optional, Tuple, Type, TypeVar +from typing import Optional, TypeVar from synapse.replication.tcp.streams._base import StreamRow from synapse.util.json import json_decoder, json_encoder @@ -49,7 +49,7 @@ class Command(metaclass=abc.ABCMeta): @classmethod @abc.abstractmethod - def from_line(cls: Type[T], line: str) -> T: + def from_line(cls: type[T], line: str) -> T: """Deserialises a line from the wire into this command. `line` does not include the command. 
""" @@ -88,7 +88,7 @@ def __init__(self, data: str): self.data = data @classmethod - def from_line(cls: Type[SC], line: str) -> SC: + def from_line(cls: type[SC], line: str) -> SC: return cls(line) def to_line(self) -> str: @@ -145,7 +145,7 @@ def __init__( self.row = row @classmethod - def from_line(cls: Type["RdataCommand"], line: str) -> "RdataCommand": + def from_line(cls: type["RdataCommand"], line: str) -> "RdataCommand": stream_name, instance_name, token, row_json = line.split(" ", 3) return cls( stream_name, @@ -204,7 +204,7 @@ def __init__( self.new_token = new_token @classmethod - def from_line(cls: Type["PositionCommand"], line: str) -> "PositionCommand": + def from_line(cls: type["PositionCommand"], line: str) -> "PositionCommand": stream_name, instance_name, prev_token, new_token = line.split(" ", 3) return cls(stream_name, instance_name, int(prev_token), int(new_token)) @@ -249,7 +249,7 @@ class ReplicateCommand(Command): REPLICATE """ - __slots__: List[str] = [] + __slots__: list[str] = [] NAME = "REPLICATE" @@ -257,7 +257,7 @@ def __init__(self) -> None: pass @classmethod - def from_line(cls: Type[T], line: str) -> T: + def from_line(cls: type[T], line: str) -> T: return cls() def to_line(self) -> str: @@ -299,7 +299,7 @@ def __init__( self.last_sync_ms = last_sync_ms @classmethod - def from_line(cls: Type["UserSyncCommand"], line: str) -> "UserSyncCommand": + def from_line(cls: type["UserSyncCommand"], line: str) -> "UserSyncCommand": device_id: Optional[str] instance_id, user_id, device_id, state, last_sync_ms = line.split(" ", 4) @@ -343,7 +343,7 @@ def __init__(self, instance_id: str): @classmethod def from_line( - cls: Type["ClearUserSyncsCommand"], line: str + cls: type["ClearUserSyncsCommand"], line: str ) -> "ClearUserSyncsCommand": return cls(line) @@ -373,7 +373,7 @@ def __init__(self, instance_name: str, token: int): @classmethod def from_line( - cls: Type["FederationAckCommand"], line: str + cls: type["FederationAckCommand"], line: str ) 
-> "FederationAckCommand": instance_name, token = line.split(" ") return cls(instance_name, int(token)) @@ -418,7 +418,7 @@ def __init__( self.last_seen = last_seen @classmethod - def from_line(cls: Type["UserIpCommand"], line: str) -> "UserIpCommand": + def from_line(cls: type["UserIpCommand"], line: str) -> "UserIpCommand": user_id, jsn = line.split(" ", 1) access_token, ip, user_agent, device_id, last_seen = json_decoder.decode(jsn) @@ -485,7 +485,7 @@ def __init__( self.lock_key = lock_key @classmethod - def from_line(cls: Type["LockReleasedCommand"], line: str) -> "LockReleasedCommand": + def from_line(cls: type["LockReleasedCommand"], line: str) -> "LockReleasedCommand": instance_name, lock_name, lock_key = json_decoder.decode(line) return cls(instance_name, lock_name, lock_key) @@ -505,7 +505,7 @@ class NewActiveTaskCommand(_SimpleCommand): NAME = "NEW_ACTIVE_TASK" -_COMMANDS: Tuple[Type[Command], ...] = ( +_COMMANDS: tuple[type[Command], ...] = ( ServerCommand, RdataCommand, PositionCommand, diff --git a/synapse/replication/tcp/handler.py b/synapse/replication/tcp/handler.py index 4d0d3d44ab..bd1ee5ff9d 100644 --- a/synapse/replication/tcp/handler.py +++ b/synapse/replication/tcp/handler.py @@ -20,18 +20,14 @@ # # import logging +from collections import deque from typing import ( TYPE_CHECKING, Any, Awaitable, - Deque, - Dict, Iterable, Iterator, - List, Optional, - Set, - Tuple, TypeVar, Union, ) @@ -119,8 +115,8 @@ # the type of the entries in _command_queues_by_stream -_StreamCommandQueue = Deque[ - Tuple[Union[RdataCommand, PositionCommand], IReplicationConnection] +_StreamCommandQueue = deque[ + tuple[Union[RdataCommand, PositionCommand], IReplicationConnection] ] @@ -141,18 +137,18 @@ def __init__(self, hs: "HomeServer"): self._instance_name = hs.get_instance_name() # Additional Redis channel suffixes to subscribe to. 
- self._channels_to_subscribe_to: List[str] = [] + self._channels_to_subscribe_to: list[str] = [] self._is_presence_writer = ( hs.get_instance_name() in hs.config.worker.writers.presence ) - self._streams: Dict[str, Stream] = { + self._streams: dict[str, Stream] = { stream.NAME: stream(hs) for stream in STREAMS_MAP.values() } # List of streams that this instance is the source of - self._streams_to_replicate: List[Stream] = [] + self._streams_to_replicate: list[Stream] = [] for stream in self._streams.values(): if hs.config.redis.redis_enabled and stream.NAME == CachesStream.NAME: @@ -246,14 +242,14 @@ def __init__(self, hs: "HomeServer"): # Map of stream name to batched updates. See RdataCommand for info on # how batching works. - self._pending_batches: Dict[str, List[Any]] = {} + self._pending_batches: dict[str, list[Any]] = {} # The factory used to create connections. self._factory: Optional[ReconnectingClientFactory] = None # The currently connected connections. (The list of places we need to send # outgoing replication commands to.) - self._connections: List[IReplicationConnection] = [] + self._connections: list[IReplicationConnection] = [] tcp_resource_total_connections_gauge.register_hook( homeserver_instance_id=hs.get_instance_id(), @@ -264,7 +260,7 @@ def __init__(self, hs: "HomeServer"): # them in order in a separate background process. # the streams which are currently being processed by _unsafe_process_queue - self._processing_streams: Set[str] = set() + self._processing_streams: set[str] = set() # for each stream, a queue of commands that are awaiting processing, and the # connection that they arrived on. @@ -274,7 +270,7 @@ def __init__(self, hs: "HomeServer"): # For each connection, the incoming stream names that have received a POSITION # from that connection. 
- self._streams_by_connection: Dict[IReplicationConnection, Set[str]] = {} + self._streams_by_connection: dict[IReplicationConnection, set[str]] = {} tcp_command_queue_gauge.register_hook( homeserver_instance_id=hs.get_instance_id(), @@ -450,11 +446,11 @@ def start_replication(self, hs: "HomeServer") -> None: bindAddress=None, ) - def get_streams(self) -> Dict[str, Stream]: + def get_streams(self) -> dict[str, Stream]: """Get a map from stream name to all streams.""" return self._streams - def get_streams_to_replicate(self) -> List[Stream]: + def get_streams_to_replicate(self) -> list[Stream]: """Get a list of streams that this instances replicates.""" return self._streams_to_replicate @@ -902,8 +898,8 @@ def send_new_active_task(self, task_id: str) -> None: def _batch_updates( - updates: Iterable[Tuple[UpdateToken, UpdateRow]], -) -> Iterator[Tuple[UpdateToken, List[UpdateRow]]]: + updates: Iterable[tuple[UpdateToken, UpdateRow]], +) -> Iterator[tuple[UpdateToken, list[UpdateRow]]]: """Collect stream updates with the same token together Given a series of updates returned by Stream.get_updates_since(), collects diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index bcfc65c2c0..733643cb64 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -28,7 +28,7 @@ import logging import struct from inspect import isawaitable -from typing import TYPE_CHECKING, Any, Collection, List, Optional +from typing import TYPE_CHECKING, Any, Collection, Optional from prometheus_client import Counter from zope.interface import Interface, implementer @@ -82,7 +82,7 @@ # A list of all connected protocols. This allows us to send metrics about the # connections. 
-connected_connections: "List[BaseReplicationStreamProtocol]" = [] +connected_connections: "list[BaseReplicationStreamProtocol]" = [] logger = logging.getLogger(__name__) @@ -163,7 +163,7 @@ def __init__( self.conn_id = random_string(5) # To dedupe in case of name clashes. # List of pending commands to send once we've established the connection - self.pending_commands: List[Command] = [] + self.pending_commands: list[Command] = [] # The LoopingCall for sending pings. self._send_ping_loop: Optional[task.LoopingCall] = None diff --git a/synapse/replication/tcp/redis.py b/synapse/replication/tcp/redis.py index caffb2913e..4448117d62 100644 --- a/synapse/replication/tcp/redis.py +++ b/synapse/replication/tcp/redis.py @@ -21,7 +21,7 @@ import logging from inspect import isawaitable -from typing import TYPE_CHECKING, Any, Generic, List, Optional, Type, TypeVar, cast +from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar, cast import attr from txredisapi import ( @@ -72,7 +72,7 @@ class ConstantProperty(Generic[T, V]): constant: V = attr.ib() - def __get__(self, obj: Optional[T], objtype: Optional[Type[T]] = None) -> V: + def __get__(self, obj: Optional[T], objtype: Optional[type[T]] = None) -> V: return self.constant def __set__(self, obj: Optional[T], value: V) -> None: @@ -111,7 +111,7 @@ class RedisSubscriber(SubscriberProtocol): hs: "HomeServer" synapse_handler: "ReplicationCommandHandler" synapse_stream_prefix: str - synapse_channel_names: List[str] + synapse_channel_names: list[str] synapse_outbound_redis_connection: ConnectionHandler def __init__(self, *args: Any, **kwargs: Any): @@ -296,7 +296,7 @@ def __init__( dbid: Optional[int], poolsize: int, isLazy: bool = False, - handler: Type = ConnectionHandler, + handler: type = ConnectionHandler, charset: str = "utf-8", password: Optional[str] = None, replyTimeout: int = 30, @@ -381,7 +381,7 @@ def __init__( self, hs: "HomeServer", outbound_redis_connection: ConnectionHandler, - channel_names: List[str], + 
channel_names: list[str], ): super().__init__( hs, diff --git a/synapse/replication/tcp/resource.py b/synapse/replication/tcp/resource.py index ef72a0a532..8df0a3853f 100644 --- a/synapse/replication/tcp/resource.py +++ b/synapse/replication/tcp/resource.py @@ -22,7 +22,7 @@ import logging import random -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from prometheus_client import Counter @@ -320,8 +320,8 @@ async def _run_notifier_loop(self) -> None: def _batch_updates( - updates: List[Tuple[Token, StreamRow]], -) -> List[Tuple[Optional[Token], StreamRow]]: + updates: list[tuple[Token, StreamRow]], +) -> list[tuple[Optional[Token], StreamRow]]: """Takes a list of updates of form [(token, row)] and sets the token to None for all rows where the next row has the same token. This is used to implement batching. @@ -337,7 +337,7 @@ def _batch_updates( if not updates: return [] - new_updates: List[Tuple[Optional[Token], StreamRow]] = [] + new_updates: list[tuple[Optional[Token], StreamRow]] = [] for i, update in enumerate(updates[:-1]): if update[0] == updates[i + 1][0]: new_updates.append((None, update[1])) diff --git a/synapse/replication/tcp/streams/_base.py b/synapse/replication/tcp/streams/_base.py index ec7e935d6a..d80bdb9b35 100644 --- a/synapse/replication/tcp/streams/_base.py +++ b/synapse/replication/tcp/streams/_base.py @@ -26,9 +26,7 @@ Any, Awaitable, Callable, - List, Optional, - Tuple, TypeVar, ) @@ -56,7 +54,7 @@ # parsing with Stream.parse_row (which turns it into a `ROW_TYPE`). Normally it's # just a row from a database query, though this is dependent on the stream in question. # -StreamRow = TypeVar("StreamRow", bound=Tuple) +StreamRow = TypeVar("StreamRow", bound=tuple) # The type returned by the update_function of a stream, as well as get_updates(), # get_updates_since, etc. @@ -66,7 +64,7 @@ # * `new_last_token` is the new position in stream. 
# * `limited` is whether there are more updates to fetch. # -StreamUpdateResult = Tuple[List[Tuple[Token, StreamRow]], Token, bool] +StreamUpdateResult = tuple[list[tuple[Token, StreamRow]], Token, bool] # The type of an update_function for a stream # @@ -400,7 +398,7 @@ class TypingStreamRow: room_id: str # All the users that are 'typing' right now in the specified room. - user_ids: List[str] + user_ids: list[str] NAME = "typing" ROW_TYPE = TypingStreamRow @@ -410,7 +408,7 @@ def __init__(self, hs: "HomeServer"): # On the writer, query the typing handler typing_writer_handler = hs.get_typing_writer_handler() update_function: Callable[ - [str, int, int, int], Awaitable[Tuple[List[Tuple[int, Any]], int, bool]] + [str, int, int, int], Awaitable[tuple[list[tuple[int, Any]], int, bool]] ] = typing_writer_handler.get_all_typing_updates self.current_token_function = typing_writer_handler.get_current_token else: @@ -512,7 +510,7 @@ class CachesStreamRow: """ cache_func: str - keys: Optional[List[Any]] + keys: Optional[list[Any]] invalidation_ts: int NAME = "caches" diff --git a/synapse/replication/tcp/streams/events.py b/synapse/replication/tcp/streams/events.py index 05b55fb033..a6314b0c7d 100644 --- a/synapse/replication/tcp/streams/events.py +++ b/synapse/replication/tcp/streams/events.py @@ -20,7 +20,7 @@ # import heapq from collections import defaultdict -from typing import TYPE_CHECKING, Iterable, Optional, Tuple, Type, TypeVar, cast +from typing import TYPE_CHECKING, Iterable, Optional, TypeVar, cast import attr @@ -93,7 +93,7 @@ class BaseEventsStreamRow: TypeId: str @classmethod - def from_data(cls: Type[T], data: Iterable[Optional[str]]) -> T: + def from_data(cls: type[T], data: Iterable[Optional[str]]) -> T: """Parse the data from the replication stream into a row. 
By default we just call the constructor with the data list as arguments @@ -136,7 +136,7 @@ class EventsStreamAllStateRow(BaseEventsStreamRow): room_id: str -_EventRows: Tuple[Type[BaseEventsStreamRow], ...] = ( +_EventRows: tuple[type[BaseEventsStreamRow], ...] = ( EventsStreamEventRow, EventsStreamCurrentStateRow, EventsStreamAllStateRow, @@ -237,7 +237,7 @@ async def _update_function( # distinguish the row type). At the same time, we can limit the event_rows # to the max stream_id from state_rows. - event_updates: Iterable[Tuple[int, Tuple]] = ( + event_updates: Iterable[tuple[int, tuple]] = ( (stream_id, (EventsStreamEventRow.TypeId, rest)) for (stream_id, *rest) in event_rows if stream_id <= upper_limit @@ -254,20 +254,20 @@ async def _update_function( for room_id, stream_ids in state_updates_by_room.items() if len(stream_ids) >= _MAX_STATE_UPDATES_PER_ROOM ] - state_all_updates: Iterable[Tuple[int, Tuple]] = ( + state_all_updates: Iterable[tuple[int, tuple]] = ( (max_stream_id, (EventsStreamAllStateRow.TypeId, (room_id,))) for (max_stream_id, room_id) in state_all_rows ) # Any remaining state updates are sent individually. 
state_all_rooms = {room_id for _, room_id in state_all_rows} - state_updates: Iterable[Tuple[int, Tuple]] = ( + state_updates: Iterable[tuple[int, tuple]] = ( (stream_id, (EventsStreamCurrentStateRow.TypeId, rest)) for (stream_id, *rest) in state_rows if rest[0] not in state_all_rooms ) - ex_outliers_updates: Iterable[Tuple[int, Tuple]] = ( + ex_outliers_updates: Iterable[tuple[int, tuple]] = ( (stream_id, (EventsStreamEventRow.TypeId, rest)) for (stream_id, *rest) in ex_outliers_rows ) @@ -282,6 +282,6 @@ async def _update_function( @classmethod def parse_row(cls, row: StreamRow) -> "EventsStreamRow": - (typ, data) = cast(Tuple[str, Iterable[Optional[str]]], row) + (typ, data) = cast(tuple[str, Iterable[Optional[str]]], row) event_stream_row_data = TypeToRow[typ].from_data(data) return EventsStreamRow(typ, event_stream_row_data) diff --git a/synapse/replication/tcp/streams/federation.py b/synapse/replication/tcp/streams/federation.py index 1c2ffe86b7..c99e720381 100644 --- a/synapse/replication/tcp/streams/federation.py +++ b/synapse/replication/tcp/streams/federation.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Tuple +from typing import TYPE_CHECKING, Any, Awaitable, Callable import attr @@ -58,7 +58,7 @@ def __init__(self, hs: "HomeServer"): federation_sender.get_current_token ) update_function: Callable[ - [str, int, int, int], Awaitable[Tuple[List[Tuple[int, Any]], int, bool]] + [str, int, int, int], Awaitable[tuple[list[tuple[int, Any]], int, bool]] ] = federation_sender.get_replication_rows elif hs.should_send_federation(): @@ -88,5 +88,5 @@ def _stub_current_token(instance_name: str) -> int: @staticmethod async def _stub_update_function( instance_name: str, from_token: int, upto_token: int, limit: int - ) -> Tuple[list, int, bool]: + ) -> tuple[list, int, bool]: return [], upto_token, False diff --git a/synapse/rest/__init__.py 
b/synapse/rest/__init__.py index db3bd46542..ea0e47ded4 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Callable, Dict, Iterable, List, Optional, Tuple +from typing import TYPE_CHECKING, Callable, Iterable, Optional from synapse.http.server import HttpServer, JsonResource from synapse.rest import admin @@ -78,7 +78,7 @@ RegisterServletsFunc = Callable[["HomeServer", HttpServer], None] -CLIENT_SERVLET_FUNCTIONS: Tuple[RegisterServletsFunc, ...] = ( +CLIENT_SERVLET_FUNCTIONS: tuple[RegisterServletsFunc, ...] = ( versions.register_servlets, initial_sync.register_servlets, room.register_deprecated_servlets, @@ -128,7 +128,7 @@ thread_subscriptions.register_servlets, ) -SERVLET_GROUPS: Dict[str, Iterable[RegisterServletsFunc]] = { +SERVLET_GROUPS: dict[str, Iterable[RegisterServletsFunc]] = { "client": CLIENT_SERVLET_FUNCTIONS, } @@ -143,7 +143,7 @@ class ClientRestResource(JsonResource): * etc """ - def __init__(self, hs: "HomeServer", servlet_groups: Optional[List[str]] = None): + def __init__(self, hs: "HomeServer", servlet_groups: Optional[list[str]] = None): JsonResource.__init__(self, hs, canonical_json=False) if hs.config.media.can_load_media_repo: # This import is here to prevent a circular import failure diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 0386f8a34b..5e75dc4c00 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -35,7 +35,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse.api.errors import Codes, NotFoundError, SynapseError from synapse.handlers.pagination import PURGE_HISTORY_ACTION_NAME @@ -137,7 +137,7 @@ class VersionServlet(RestServlet): def __init__(self, hs: "HomeServer"): self.res = {"server_version": SYNAPSE_VERSION} - def on_GET(self, request: SynapseRequest) -> Tuple[int, 
JsonDict]: + def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: return HTTPStatus.OK, self.res @@ -153,7 +153,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_id: str, event_id: Optional[str] - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) body = parse_json_object_from_request(request, allow_empty_body=True) @@ -237,7 +237,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, purge_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) purge_task = await self.pagination_handler.get_delete_task(purge_id) diff --git a/synapse/rest/admin/background_updates.py b/synapse/rest/admin/background_updates.py index 6fba616d3a..96190c416d 100644 --- a/synapse/rest/admin/background_updates.py +++ b/synapse/rest/admin/background_updates.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import SynapseError from synapse.http.servlet import ( @@ -47,7 +47,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._data_stores = hs.get_datastores() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) # We need to check that all configured databases have updates enabled. 
@@ -56,7 +56,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: return HTTPStatus.OK, {"enabled": enabled} - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) body = parse_json_object_from_request(request) @@ -88,7 +88,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._data_stores = hs.get_datastores() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) # We need to check that all configured databases have updates enabled. @@ -121,7 +121,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._store = hs.get_datastores().main - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) body = parse_json_object_from_request(request) diff --git a/synapse/rest/admin/devices.py b/synapse/rest/admin/devices.py index c488bce58e..c8e9242ce8 100644 --- a/synapse/rest/admin/devices.py +++ b/synapse/rest/admin/devices.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import NotFoundError, SynapseError from synapse.http.servlet import ( @@ -56,7 +56,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str, device_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) @@ -76,7 +76,7 @@ async def on_GET( async def on_DELETE( self, request: SynapseRequest, user_id: str, device_id: str - ) -> Tuple[int, 
JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) @@ -92,7 +92,7 @@ async def on_DELETE( async def on_PUT( self, request: SynapseRequest, user_id: str, device_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) @@ -128,7 +128,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) @@ -157,7 +157,7 @@ async def on_GET( async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """Creates a new device for the user.""" await assert_requester_is_admin(self.auth, request) @@ -201,7 +201,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) diff --git a/synapse/rest/admin/event_reports.py b/synapse/rest/admin/event_reports.py index ff1abc0697..5e8f85de7e 100644 --- a/synapse/rest/admin/event_reports.py +++ b/synapse/rest/admin/event_reports.py @@ -21,7 +21,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import Direction from synapse.api.errors import Codes, NotFoundError, SynapseError @@ -65,7 +65,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) start = 
parse_integer(request, "from", default=0) @@ -123,7 +123,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, report_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) message = ( @@ -149,7 +149,7 @@ async def on_GET( async def on_DELETE( self, request: SynapseRequest, report_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) message = ( diff --git a/synapse/rest/admin/events.py b/synapse/rest/admin/events.py index 61b347f8f4..1c39d5caf3 100644 --- a/synapse/rest/admin/events.py +++ b/synapse/rest/admin/events.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import NotFoundError from synapse.events.utils import ( @@ -43,7 +43,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) await assert_user_is_admin(self._auth, requester) diff --git a/synapse/rest/admin/experimental_features.py b/synapse/rest/admin/experimental_features.py index 3d3015cef7..abdb937793 100644 --- a/synapse/rest/admin/experimental_features.py +++ b/synapse/rest/admin/experimental_features.py @@ -22,7 +22,7 @@ from enum import Enum from http import HTTPStatus -from typing import TYPE_CHECKING, Dict, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import SynapseError from synapse.http.servlet import RestServlet, parse_json_object_from_request @@ -74,7 +74,7 @@ async def on_GET( self, request: SynapseRequest, user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """ List which features are enabled for a given user """ @@ -99,7 +99,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, user_id: str - ) -> 
Tuple[HTTPStatus, Dict]: + ) -> tuple[HTTPStatus, dict]: """ Enable or disable the provided features for the requester """ diff --git a/synapse/rest/admin/federation.py b/synapse/rest/admin/federation.py index d85a04b825..e958ef9747 100644 --- a/synapse/rest/admin/federation.py +++ b/synapse/rest/admin/federation.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import Direction from synapse.api.errors import NotFoundError, SynapseError @@ -58,7 +58,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) start = parse_integer(request, "from", default=0) @@ -115,7 +115,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, destination: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) if not await self._store.is_destination_known(destination): @@ -175,7 +175,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, destination: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) if not await self._store.is_destination_known(destination): @@ -224,7 +224,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, destination: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) if not await self._store.is_destination_known(destination): diff --git a/synapse/rest/admin/media.py b/synapse/rest/admin/media.py index 8732c0bf9d..cfdb314b1a 100644 --- a/synapse/rest/admin/media.py +++ b/synapse/rest/admin/media.py @@ 
-20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional import attr @@ -67,7 +67,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, server_name: str, media_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -134,7 +134,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -161,7 +161,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -190,7 +190,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, server_name: str, media_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -219,7 +219,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, server_name: str, media_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) logger.info("Remove from quarantine media by ID: %s/%s", server_name, media_id) @@ -241,7 +241,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, media_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) logger.info("Protecting local media by ID: %s", media_id) @@ -263,7 +263,7 @@ def __init__(self, hs: 
"HomeServer"): async def on_POST( self, request: SynapseRequest, media_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) logger.info("Unprotecting local media by ID: %s", media_id) @@ -285,7 +285,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) local_mxcs, remote_mxcs = await self.store.get_media_mxcs_in_room(room_id) @@ -300,7 +300,7 @@ def __init__(self, hs: "HomeServer"): self.media_repository = hs.get_media_repository() self.auth = hs.get_auth() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) before_ts = parse_integer(request, "before_ts", required=True) @@ -338,7 +338,7 @@ def __init__(self, hs: "HomeServer"): async def on_DELETE( self, request: SynapseRequest, server_name: str, media_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if not self._is_mine_server_name(server_name): @@ -375,7 +375,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, server_name: Optional[str] = None - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) before_ts = parse_integer(request, "before_ts", required=True) @@ -433,7 +433,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # This will always be set by the time Twisted calls us. 
assert request.args is not None @@ -477,7 +477,7 @@ async def on_GET( async def on_DELETE( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # This will always be set by the time Twisted calls us. assert request.args is not None diff --git a/synapse/rest/admin/registration_tokens.py b/synapse/rest/admin/registration_tokens.py index bec2331590..ea266403a0 100644 --- a/synapse/rest/admin/registration_tokens.py +++ b/synapse/rest/admin/registration_tokens.py @@ -22,7 +22,7 @@ import logging import string from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import Codes, NotFoundError, SynapseError from synapse.http.servlet import ( @@ -80,7 +80,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) valid = parse_boolean(request, "valid") token_list = await self.store.get_registration_tokens(valid) @@ -133,7 +133,7 @@ def __init__(self, hs: "HomeServer"): self.allowed_chars = string.ascii_letters + string.digits + "._~-" self.allowed_chars_set = set(self.allowed_chars) - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) body = parse_json_object_from_request(request) @@ -282,7 +282,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest, token: str) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest, token: str) -> tuple[int, JsonDict]: """Retrieve a registration token.""" await assert_requester_is_admin(self.auth, request) 
token_info = await self.store.get_one_registration_token(token) @@ -293,7 +293,7 @@ async def on_GET(self, request: SynapseRequest, token: str) -> Tuple[int, JsonDi return HTTPStatus.OK, token_info - async def on_PUT(self, request: SynapseRequest, token: str) -> Tuple[int, JsonDict]: + async def on_PUT(self, request: SynapseRequest, token: str) -> tuple[int, JsonDict]: """Update a registration token.""" await assert_requester_is_admin(self.auth, request) body = parse_json_object_from_request(request) @@ -348,7 +348,7 @@ async def on_PUT(self, request: SynapseRequest, token: str) -> Tuple[int, JsonDi async def on_DELETE( self, request: SynapseRequest, token: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """Delete a registration token.""" await assert_requester_is_admin(self.auth, request) diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py index 5bed89c2c4..216af29f9b 100644 --- a/synapse/rest/admin/rooms.py +++ b/synapse/rest/admin/rooms.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, List, Optional, Tuple, cast +from typing import TYPE_CHECKING, Optional, cast import attr from immutabledict import immutabledict @@ -88,7 +88,7 @@ def __init__(self, hs: "HomeServer"): async def on_DELETE( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) await assert_user_is_admin(self._auth, requester) @@ -167,7 +167,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) if not RoomID.is_valid(room_id): @@ -198,7 +198,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, delete_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) 
delete_task = await self._pagination_handler.get_delete_task(delete_id) @@ -224,7 +224,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.admin_handler = hs.get_admin_handler() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) # Extract query parameters @@ -319,7 +319,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) ret = await self.store.get_room_with_stats(room_id) @@ -337,7 +337,7 @@ async def on_GET( async def on_DELETE( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: return await self._delete_room( request, room_id, @@ -353,7 +353,7 @@ async def _delete_room( auth: "Auth", room_shutdown_handler: "RoomShutdownHandler", pagination_handler: "PaginationHandler", - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await auth.get_user_by_req(request) await assert_user_is_admin(auth, requester) @@ -429,7 +429,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) room = await self.store.get_room(room_id) @@ -458,7 +458,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) room = await self.store.get_room(room_id) @@ -498,7 +498,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_identifier: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # This will always be set by the time Twisted calls 
us. assert request.args is not None @@ -521,7 +521,7 @@ async def on_POST( # Get the room ID from the identifier. try: - remote_room_hosts: Optional[List[str]] = [ + remote_room_hosts: Optional[list[str]] = [ x.decode("ascii") for x in request.args[b"server_name"] ] except Exception: @@ -591,7 +591,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_identifier: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) content = parse_json_object_from_request(request, allow_empty_body=True) @@ -756,7 +756,7 @@ def __init__(self, hs: "HomeServer"): async def on_DELETE( self, request: SynapseRequest, room_identifier: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) room_id, _ = await self.resolve_room_id(room_identifier) @@ -766,7 +766,7 @@ async def on_DELETE( async def on_GET( self, request: SynapseRequest, room_identifier: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) room_id, _ = await self.resolve_room_id(room_identifier) @@ -805,7 +805,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=False) await assert_user_is_admin(self.auth, requester) @@ -871,7 +871,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) if not RoomID.is_valid(room_id): @@ -891,7 +891,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await 
self._auth.get_user_by_req(request) await assert_user_is_admin(self._auth, requester) @@ -935,7 +935,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) await assert_user_is_admin(self._auth, requester) @@ -997,7 +997,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) await assert_user_is_admin(self._auth, requester) diff --git a/synapse/rest/admin/scheduled_tasks.py b/synapse/rest/admin/scheduled_tasks.py index 2ae13021b9..41c402b424 100644 --- a/synapse/rest/admin/scheduled_tasks.py +++ b/synapse/rest/admin/scheduled_tasks.py @@ -13,7 +13,7 @@ # # # -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.servlet import RestServlet, parse_integer, parse_string from synapse.http.site import SynapseRequest @@ -35,7 +35,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) # extract query params diff --git a/synapse/rest/admin/server_notice_servlet.py b/synapse/rest/admin/server_notice_servlet.py index f3150e88d7..0be04c0f90 100644 --- a/synapse/rest/admin/server_notice_servlet.py +++ b/synapse/rest/admin/server_notice_servlet.py @@ -18,7 +18,7 @@ # # from http import HTTPStatus -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse.api.constants import EventTypes from synapse.api.errors import NotFoundError, SynapseError @@ -81,7 +81,7 @@ async def _do( request: SynapseRequest, requester: Requester, 
txn_id: Optional[str], - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_user_is_admin(self.auth, requester) body = parse_json_object_from_request(request) assert_params_in_dict(body, ("user_id", "content")) @@ -118,13 +118,13 @@ async def _do( async def on_POST( self, request: SynapseRequest, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) return await self._do(request, requester, None) async def on_PUT( self, request: SynapseRequest, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) set_tag("txn_id", txn_id) return await self.txns.fetch_or_execute_request( diff --git a/synapse/rest/admin/statistics.py b/synapse/rest/admin/statistics.py index 0adc5b7005..3de1d4e9bd 100644 --- a/synapse/rest/admin/statistics.py +++ b/synapse/rest/admin/statistics.py @@ -21,7 +21,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import Direction from synapse.api.errors import Codes, SynapseError @@ -48,7 +48,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) order_by = parse_string( @@ -119,7 +119,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.stats_controller = hs.get_storage_controllers().stats - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) room_sizes = await self.stats_controller.get_room_db_size_estimate() diff --git a/synapse/rest/admin/username_available.py 
b/synapse/rest/admin/username_available.py index 2d642f7d6b..fb0cee42da 100644 --- a/synapse/rest/admin/username_available.py +++ b/synapse/rest/admin/username_available.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.servlet import RestServlet, parse_string from synapse.http.site import SynapseRequest @@ -50,7 +50,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.registration_handler = hs.get_registration_handler() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) username = parse_string(request, "username", required=True) diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index 25a38dc4ac..e29b0d36e0 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -23,7 +23,7 @@ import logging import secrets from http import HTTPStatus -from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Optional, Union import attr @@ -113,7 +113,7 @@ def __init__(self, hs: "HomeServer"): hs.config.mas.enabled or hs.config.experimental.msc3861.enabled ) - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) start = parse_integer(request, "from", default=0) @@ -164,7 +164,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: direction = parse_enum(request, "dir", Direction, default=Direction.FORWARDS) # twisted.web.server.Request.args is incorrectly defined as Optional[Any] - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore not_user_types = parse_strings_from_args(args, 
"not_user_type") users, total = await self.store.get_users_paginate( @@ -256,7 +256,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonMapping]: + ) -> tuple[int, JsonMapping]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) @@ -271,7 +271,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonMapping]: + ) -> tuple[int, JsonMapping]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -349,14 +349,14 @@ async def on_PUT( "'approved' parameter is not of type boolean", ) - # convert List[Dict[str, str]] into List[Tuple[str, str]] + # convert list[dict[str, str]] into list[tuple[str, str]] if external_ids is not None: new_external_ids = [ (external_id["auth_provider"], external_id["external_id"]) for external_id in external_ids ] - # convert List[Dict[str, str]] into Set[Tuple[str, str]] + # convert list[dict[str, str]] into set[tuple[str, str]] if threepids is not None: new_threepids = { (threepid["medium"], threepid["address"]) for threepid in threepids @@ -545,7 +545,7 @@ class UserRegisterServlet(RestServlet): def __init__(self, hs: "HomeServer"): self.auth_handler = hs.get_auth_handler() self.reactor = hs.get_reactor() - self.nonces: Dict[str, int] = {} + self.nonces: dict[str, int] = {} self.hs = hs self._all_user_types = hs.config.user_types.all_user_types @@ -559,7 +559,7 @@ def _clear_old_nonces(self) -> None: if now - v > self.NONCE_TIMEOUT: del self.nonces[k] - def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: """ Generate a new nonce. 
""" @@ -569,7 +569,7 @@ def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: self.nonces[nonce] = int(self.reactor.seconds()) return HTTPStatus.OK, {"nonce": nonce} - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: self._clear_old_nonces() if not self.hs.config.registration.registration_shared_secret: @@ -730,7 +730,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonMapping]: + ) -> tuple[int, JsonMapping]: target_user = UserID.from_string(user_id) requester = await self.auth.get_user_by_req(request) @@ -756,7 +756,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, target_user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -801,7 +801,7 @@ class PutBody(RequestBodyModel): async def on_PUT( self, request: SynapseRequest, target_user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -828,7 +828,7 @@ def __init__(self, hs: "HomeServer"): ) self.auth = hs.get_auth() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if self.account_validity_module_callbacks.on_legacy_admin_request_callback: @@ -878,7 +878,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, target_user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """Post request to allow an administrator reset password for a user. This needs user to have administrator access in Synapse. 
""" @@ -920,7 +920,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, target_user_id: str - ) -> Tuple[int, Optional[List[JsonDict]]]: + ) -> tuple[int, Optional[list[JsonDict]]]: """Get request to search user table for specific users according to search term. This needs user to have a administrator access in Synapse. @@ -989,7 +989,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) @@ -1006,7 +1006,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) auth_user = requester.user @@ -1047,7 +1047,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) room_ids = await self.store.get_rooms_for_user(user_id) @@ -1079,7 +1079,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if not self.is_mine(UserID.from_string(user_id)): @@ -1122,7 +1122,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) auth_user = requester.user @@ -1190,7 +1190,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await 
assert_requester_is_admin(self.auth, request) if not self.is_mine_id(user_id): @@ -1204,7 +1204,7 @@ async def on_POST( async def on_DELETE( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if not self.is_mine_id(user_id): @@ -1242,7 +1242,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if not self.is_mine_id(user_id): @@ -1273,7 +1273,7 @@ async def on_GET( async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if not self.is_mine_id(user_id): @@ -1321,7 +1321,7 @@ async def on_POST( async def on_DELETE( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if not self.is_mine_id(user_id): @@ -1349,7 +1349,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) if not self._is_mine_id(user_id): @@ -1390,7 +1390,7 @@ async def on_POST( self, request: SynapseRequest, user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) if user_id is None: @@ -1424,7 +1424,7 @@ async def on_GET( request: SynapseRequest, provider: str, external_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) user_id = await self._store.get_user_by_external_id(provider, external_id) @@ -1449,7 +1449,7 @@ async def on_GET( request: SynapseRequest, medium: str, address: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, 
JsonDict]: await assert_requester_is_admin(self._auth, request) user_id = await self._store.get_user_id_by_threepid(medium, address) @@ -1475,14 +1475,14 @@ def __init__(self, hs: "HomeServer"): self.admin_handler = hs.get_admin_handler() class PostBody(RequestBodyModel): - rooms: List[StrictStr] + rooms: list[StrictStr] reason: Optional[StrictStr] limit: Optional[StrictInt] use_admin: Optional[StrictBool] async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) await assert_user_is_admin(self._auth, requester) @@ -1531,7 +1531,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, redact_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) task = await self.admin_handler.get_redact_task(redact_id) @@ -1574,7 +1574,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) from_ts = parse_integer(request, "from_ts", required=True) @@ -1599,7 +1599,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) from_ts = parse_integer(request, "from_ts", required=True) diff --git a/synapse/rest/client/_base.py b/synapse/rest/client/_base.py index 6cf37869d8..fad7234718 100644 --- a/synapse/rest/client/_base.py +++ b/synapse/rest/client/_base.py @@ -23,7 +23,7 @@ import logging import re -from typing import Any, Awaitable, Callable, Iterable, Pattern, Tuple, TypeVar, cast +from typing import Any, Awaitable, Callable, Iterable, Pattern, TypeVar, cast from synapse.api.errors import InteractiveAuthIncompleteError from synapse.api.urls import 
CLIENT_API_PREFIX @@ -86,7 +86,7 @@ def set_timeline_upper_limit(filter_json: JsonDict, filter_timeline_limit: int) ) -C = TypeVar("C", bound=Callable[..., Awaitable[Tuple[int, JsonDict]]]) +C = TypeVar("C", bound=Callable[..., Awaitable[tuple[int, JsonDict]]]) def interactive_auth_handler(orig: C) -> C: @@ -104,7 +104,7 @@ async def on_POST(self, request): await self.auth_handler.check_auth """ - async def wrapped(*args: Any, **kwargs: Any) -> Tuple[int, JsonDict]: + async def wrapped(*args: Any, **kwargs: Any) -> tuple[int, JsonDict]: try: return await orig(*args, **kwargs) except InteractiveAuthIncompleteError as e: diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py index d9f0c169e8..8f2f54f750 100644 --- a/synapse/rest/client/account.py +++ b/synapse/rest/client/account.py @@ -21,7 +21,7 @@ # import logging import random -from typing import TYPE_CHECKING, List, Literal, Optional, Tuple +from typing import TYPE_CHECKING, Literal, Optional from urllib.parse import urlparse import attr @@ -89,7 +89,7 @@ def __init__(self, hs: "HomeServer"): template_text=self.config.email.email_password_reset_template_text, ) - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: if not self.config.email.can_verify_email: logger.warning( "User password resets have been disabled due to lack of email config" @@ -169,7 +169,7 @@ class PostBody(RequestBodyModel): new_password: Optional[constr(max_length=512, strict=True)] = None @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_and_validate_json_object_from_request(request, self.PostBody) # we do basic sanity checks here because the auth layer will store these @@ -296,7 +296,7 @@ class PostBody(RequestBodyModel): erase: StrictBool = False @interactive_auth_handler - async def 
on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_and_validate_json_object_from_request(request, self.PostBody) requester = await self.auth.get_user_by_req(request) @@ -341,7 +341,7 @@ def __init__(self, hs: "HomeServer"): template_text=self.config.email.email_add_threepid_template_text, ) - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: if not self.hs.config.registration.enable_3pid_changes: raise SynapseError( 400, "3PID changes are disabled on this server", Codes.FORBIDDEN @@ -418,7 +418,7 @@ def __init__(self, hs: "HomeServer"): self.store = self.hs.get_datastores().main self.identity_handler = hs.get_identity_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_and_validate_json_object_from_request( request, MsisdnRequestTokenBody ) @@ -567,7 +567,7 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main self.identity_handler = hs.get_identity_handler() - async def on_POST(self, request: Request) -> Tuple[int, JsonDict]: + async def on_POST(self, request: Request) -> tuple[int, JsonDict]: if not self.config.registration.account_threepid_delegate_msisdn: raise SynapseError( 400, @@ -601,7 +601,7 @@ def __init__(self, hs: "HomeServer"): self.auth_handler = hs.get_auth_handler() self.datastore = self.hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) threepids = await self.datastore.user_get_threepids(requester.user.to_string()) @@ -612,7 +612,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: # the endpoint is 
deprecated. (If you really want to, you could do this by reusing # ThreePidBindRestServelet.PostBody with an `alias_generator` to handle # `threePidCreds` versus `three_pid_creds`. - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: if self.hs.config.mas.enabled or self.hs.config.experimental.msc3861.enabled: raise NotFoundError(errcode=Codes.UNRECOGNIZED) @@ -669,7 +669,7 @@ class PostBody(RequestBodyModel): sid: StrictStr @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: if not self.hs.config.registration.enable_3pid_changes: raise SynapseError( 400, "3PID changes are disabled on this server", Codes.FORBIDDEN @@ -718,7 +718,7 @@ class PostBody(RequestBodyModel): id_server: StrictStr sid: StrictStr - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_and_validate_json_object_from_request(request, self.PostBody) requester = await self.auth.get_user_by_req(request) @@ -746,7 +746,7 @@ class PostBody(RequestBodyModel): id_server: Optional[StrictStr] = None medium: Literal["email", "msisdn"] - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: """Unbind the given 3pid from a specific identity server, or identity servers that are known to have this 3pid bound """ @@ -775,7 +775,7 @@ class PostBody(RequestBodyModel): id_server: Optional[StrictStr] = None medium: Literal["email", "msisdn"] - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: if not self.hs.config.registration.enable_3pid_changes: raise SynapseError( 400, "3PID changes are 
disabled on this server", Codes.FORBIDDEN @@ -859,7 +859,7 @@ def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) response = { @@ -889,9 +889,9 @@ def __init__(self, hs: "HomeServer"): class PostBody(RequestBodyModel): # TODO: we could validate that each user id is an mxid here, and/or parse it # as a UserID - user_ids: List[StrictStr] + user_ids: list[StrictStr] - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self._auth.get_user_by_req(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py index 734c9e992f..0800c0f5b8 100644 --- a/synapse/rest/client/account_data.py +++ b/synapse/rest/client/account_data.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse.api.constants import AccountDataTypes, ReceiptTypes from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError @@ -75,7 +75,7 @@ def __init__(self, hs: "HomeServer"): async def on_PUT( self, request: SynapseRequest, user_id: str, account_data_type: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add account data for other users.") @@ -101,7 +101,7 @@ async def on_PUT( async def on_GET( self, request: SynapseRequest, user_id: str, account_data_type: str - ) -> Tuple[int, JsonMapping]: + ) -> tuple[int, JsonMapping]: requester = await self.auth.get_user_by_req(request) if user_id != 
requester.user.to_string(): raise AuthError(403, "Cannot get account data for other users.") @@ -152,7 +152,7 @@ async def on_DELETE( request: SynapseRequest, user_id: str, account_data_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot delete account data for other users.") @@ -191,7 +191,7 @@ async def on_PUT( user_id: str, room_id: str, account_data_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add account data for other users.") @@ -230,7 +230,7 @@ async def on_GET( user_id: str, room_id: str, account_data_type: str, - ) -> Tuple[int, JsonMapping]: + ) -> tuple[int, JsonMapping]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot get account data for other users.") @@ -288,7 +288,7 @@ async def on_DELETE( user_id: str, room_id: str, account_data_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot delete account data for other users.") diff --git a/synapse/rest/client/account_validity.py b/synapse/rest/client/account_validity.py index ec7836b647..1c60539054 100644 --- a/synapse/rest/client/account_validity.py +++ b/synapse/rest/client/account_validity.py @@ -19,7 +19,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -90,7 +90,7 @@ def __init__(self, hs: "HomeServer"): hs.config.account_validity.account_validity_renew_by_email_enabled ) - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, 
JsonDict]: requester = await self.auth.get_user_by_req(request, allow_expired=True) user_id = requester.user.to_string() await self.account_activity_handler.send_renewal_email_to_user(user_id) diff --git a/synapse/rest/client/appservice_ping.py b/synapse/rest/client/appservice_ping.py index 1f9662a95a..7e2ac15783 100644 --- a/synapse/rest/client/appservice_ping.py +++ b/synapse/rest/client/appservice_ping.py @@ -22,7 +22,7 @@ import logging import time from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Dict, Tuple +from typing import TYPE_CHECKING, Any from synapse.api.errors import ( CodeMessageException, @@ -58,7 +58,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, appservice_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if not requester.app_service: @@ -97,7 +97,7 @@ async def on_POST( Codes.AS_PING_CONNECTION_TIMEOUT, ) except CodeMessageException as e: - additional_fields: Dict[str, Any] = {"status": e.code} + additional_fields: dict[str, Any] = {"status": e.code} if isinstance(e, HttpResponseException): try: additional_fields["body"] = e.response.decode("utf-8") diff --git a/synapse/rest/client/auth_metadata.py b/synapse/rest/client/auth_metadata.py index 4b5d997478..702f550906 100644 --- a/synapse/rest/client/auth_metadata.py +++ b/synapse/rest/client/auth_metadata.py @@ -13,7 +13,7 @@ # limitations under the License. 
import logging import typing -from typing import Tuple, cast +from typing import cast from synapse.api.auth.mas import MasDelegatedAuth from synapse.api.errors import Codes, SynapseError @@ -48,7 +48,7 @@ def __init__(self, hs: "HomeServer"): self._config = hs.config self._auth = hs.get_auth() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: if self._config.mas.enabled: assert isinstance(self._auth, MasDelegatedAuth) return 200, {"issuer": await self._auth.issuer()} @@ -93,7 +93,7 @@ def __init__(self, hs: "HomeServer"): self._config = hs.config self._auth = hs.get_auth() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: if self._config.mas.enabled: assert isinstance(self._auth, MasDelegatedAuth) return 200, await self._auth.auth_metadata() diff --git a/synapse/rest/client/capabilities.py b/synapse/rest/client/capabilities.py index a279db1cc5..baff999ab0 100644 --- a/synapse/rest/client/capabilities.py +++ b/synapse/rest/client/capabilities.py @@ -19,7 +19,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, MSC3244_CAPABILITIES from synapse.http.server import HttpServer @@ -48,7 +48,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.auth_handler = hs.get_auth_handler() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.auth.get_user_by_req(request, allow_guest=True) change_password = self.auth_handler.can_change_password() diff --git a/synapse/rest/client/delayed_events.py b/synapse/rest/client/delayed_events.py index 2dd5a60b2b..80abacbc9d 100644 --- a/synapse/rest/client/delayed_events.py +++ 
b/synapse/rest/client/delayed_events.py @@ -17,7 +17,7 @@ import logging from enum import Enum from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import Codes, SynapseError from synapse.http.server import HttpServer @@ -52,7 +52,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, delay_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) body = parse_json_object_from_request(request) @@ -95,7 +95,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.delayed_events_handler = hs.get_delayed_events_handler() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) # TODO: Support Pagination stream API ("from" query parameter) delayed_events = await self.delayed_events_handler.get_all_for_user(requester) diff --git a/synapse/rest/client/devices.py b/synapse/rest/client/devices.py index 37bc9ae513..092406b994 100644 --- a/synapse/rest/client/devices.py +++ b/synapse/rest/client/devices.py @@ -22,7 +22,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse._pydantic_compat import Extra, StrictStr from synapse.api import errors @@ -56,7 +56,7 @@ def __init__(self, hs: "HomeServer"): self.device_handler = hs.get_device_handler() self._msc3852_enabled = hs.config.experimental.msc3852_enabled - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) devices = await self.device_handler.get_devices_by_user( requester.user.to_string() @@ -95,10 +95,10 @@ def 
__init__(self, hs: "HomeServer"): class PostBody(RequestBodyModel): auth: Optional[AuthenticationData] - devices: List[StrictStr] + devices: list[StrictStr] @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) try: @@ -150,7 +150,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, device_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) device = await self.device_handler.get_device( requester.user.to_string(), device_id @@ -177,7 +177,7 @@ class DeleteBody(RequestBodyModel): @interactive_auth_handler async def on_DELETE( self, request: SynapseRequest, device_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) try: @@ -221,7 +221,7 @@ class PutBody(RequestBodyModel): async def on_PUT( self, request: SynapseRequest, device_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) body = parse_and_validate_json_object_from_request(request, self.PutBody) @@ -302,7 +302,7 @@ def __init__(self, hs: "HomeServer"): handler = hs.get_device_handler() self.device_handler = handler - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) dehydrated_device = await self.device_handler.get_dehydrated_device( requester.user.to_string() @@ -318,7 +318,7 @@ class PutBody(RequestBodyModel): device_data: DehydratedDeviceDataModel initial_device_display_name: Optional[StrictStr] - async def on_PUT(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_PUT(self, request: 
SynapseRequest) -> tuple[int, JsonDict]: submission = parse_and_validate_json_object_from_request(request, self.PutBody) requester = await self.auth.get_user_by_req(request) @@ -364,7 +364,7 @@ def __init__(self, hs: "HomeServer"): class PostBody(RequestBodyModel): device_id: StrictStr - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) submission = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -395,7 +395,7 @@ class PostBody(RequestBodyModel): async def on_POST( self, request: SynapseRequest, device_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) next_batch = parse_and_validate_json_object_from_request( @@ -501,7 +501,7 @@ def __init__(self, hs: "HomeServer"): self.e2e_keys_handler = hs.get_e2e_keys_handler() self.device_handler = handler - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) dehydrated_device = await self.device_handler.get_dehydrated_device( @@ -515,7 +515,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: else: raise errors.NotFoundError("No dehydrated device available") - async def on_DELETE(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_DELETE(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) dehydrated_device = await self.device_handler.get_dehydrated_device( @@ -543,7 +543,7 @@ class PutBody(RequestBodyModel): class Config: extra = Extra.allow - async def on_PUT(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_PUT(self, request: SynapseRequest) -> tuple[int, JsonDict]: submission = 
parse_and_validate_json_object_from_request(request, self.PutBody) requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() diff --git a/synapse/rest/client/directory.py b/synapse/rest/client/directory.py index 479f489623..eccada67be 100644 --- a/synapse/rest/client/directory.py +++ b/synapse/rest/client/directory.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, List, Literal, Optional, Tuple +from typing import TYPE_CHECKING, Literal, Optional from twisted.web.server import Request @@ -59,7 +59,7 @@ def __init__(self, hs: "HomeServer"): self.directory_handler = hs.get_directory_handler() self.auth = hs.get_auth() - async def on_GET(self, request: Request, room_alias: str) -> Tuple[int, JsonDict]: + async def on_GET(self, request: Request, room_alias: str) -> tuple[int, JsonDict]: if not RoomAlias.is_valid(room_alias): raise SynapseError(400, "Room alias invalid", errcode=Codes.INVALID_PARAM) room_alias_obj = RoomAlias.from_string(room_alias) @@ -72,11 +72,11 @@ class PutBody(RequestBodyModel): # TODO: get Pydantic to validate that this is a valid room id? 
room_id: StrictStr # `servers` is unspecced - servers: Optional[List[StrictStr]] = None + servers: Optional[list[StrictStr]] = None async def on_PUT( self, request: SynapseRequest, room_alias: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not RoomAlias.is_valid(room_alias): raise SynapseError(400, "Room alias invalid", errcode=Codes.INVALID_PARAM) room_alias_obj = RoomAlias.from_string(room_alias) @@ -103,7 +103,7 @@ async def on_PUT( async def on_DELETE( self, request: SynapseRequest, room_alias: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not RoomAlias.is_valid(room_alias): raise SynapseError(400, "Room alias invalid", errcode=Codes.INVALID_PARAM) room_alias_obj = RoomAlias.from_string(room_alias) @@ -141,7 +141,7 @@ def __init__(self, hs: "HomeServer"): self.directory_handler = hs.get_directory_handler() self.auth = hs.get_auth() - async def on_GET(self, request: Request, room_id: str) -> Tuple[int, JsonDict]: + async def on_GET(self, request: Request, room_id: str) -> tuple[int, JsonDict]: room = await self.store.get_room(room_id) if room is None: raise NotFoundError("Unknown room") @@ -153,7 +153,7 @@ class PutBody(RequestBodyModel): async def on_PUT( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) content = parse_and_validate_json_object_from_request(request, self.PutBody) @@ -181,13 +181,13 @@ class PutBody(RequestBodyModel): async def on_PUT( self, request: SynapseRequest, network_id: str, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: content = parse_and_validate_json_object_from_request(request, self.PutBody) return await self._edit(request, network_id, room_id, content.visibility) async def on_DELETE( self, request: SynapseRequest, network_id: str, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: return await self._edit(request, network_id, room_id, "private") 
async def _edit( @@ -196,7 +196,7 @@ async def _edit( network_id: str, room_id: str, visibility: Literal["public", "private"], - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if not requester.app_service: raise AuthError( diff --git a/synapse/rest/client/events.py b/synapse/rest/client/events.py index ad23cc76ce..082bacade6 100644 --- a/synapse/rest/client/events.py +++ b/synapse/rest/client/events.py @@ -22,7 +22,7 @@ """This module contains REST servlets to do with event streaming, /events.""" import logging -from typing import TYPE_CHECKING, Dict, List, Tuple, Union +from typing import TYPE_CHECKING, Union from synapse.api.errors import SynapseError from synapse.events.utils import SerializeEventConfig @@ -51,9 +51,9 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore if requester.is_guest: if b"room_id" not in args: raise SynapseError(400, "Guest users must specify room_id param") @@ -96,7 +96,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, event_id: str - ) -> Tuple[int, Union[str, JsonDict]]: + ) -> tuple[int, Union[str, JsonDict]]: requester = await self.auth.get_user_by_req(request) event = await self.event_handler.get_event(requester.user, None, event_id) diff --git a/synapse/rest/client/filter.py b/synapse/rest/client/filter.py index f1e881975f..cfe82e1473 100644 --- a/synapse/rest/client/filter.py +++ b/synapse/rest/client/filter.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from 
synapse.api.errors import AuthError, NotFoundError, StoreError, SynapseError from synapse.http.server import HttpServer @@ -48,7 +48,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str, filter_id: str - ) -> Tuple[int, JsonMapping]: + ) -> tuple[int, JsonMapping]: target_user = UserID.from_string(user_id) requester = await self.auth.get_user_by_req(request) @@ -87,7 +87,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: target_user = UserID.from_string(user_id) requester = await self.auth.get_user_by_req(request) diff --git a/synapse/rest/client/initial_sync.py b/synapse/rest/client/initial_sync.py index a2c50f5d58..c20e007c5b 100644 --- a/synapse/rest/client/initial_sync.py +++ b/synapse/rest/client/initial_sync.py @@ -19,7 +19,7 @@ # # -from typing import TYPE_CHECKING, Dict, List, Tuple +from typing import TYPE_CHECKING from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_boolean @@ -43,9 +43,9 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore as_client_event = b"raw" not in args pagination_config = await PaginationConfig.from_request( self.store, request, default_limit=10 diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index f8974e34a8..1f71359d55 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -24,7 +24,7 @@ import re from collections import Counter from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Dict, List, 
Mapping, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Mapping, Optional, Union from typing_extensions import Self @@ -129,7 +129,7 @@ class KeyUploadRequestBody(RequestBodyModel): """ class DeviceKeys(RequestBodyModel): - algorithms: List[StrictStr] + algorithms: list[StrictStr] """The encryption algorithms supported by this device.""" device_id: StrictStr @@ -225,7 +225,7 @@ def validate_one_time_keys(cls: Self, v: Any) -> Any: async def on_POST( self, request: SynapseRequest, device_id: Optional[str] - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) user_id = requester.user.to_string() @@ -343,7 +343,7 @@ def __init__(self, hs: "HomeServer"): self.e2e_keys_handler = hs.get_e2e_keys_handler() @cancellable - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) user_id = requester.user.to_string() device_id = requester.device_id @@ -388,7 +388,7 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main @cancellable - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) from_token_string = parse_string(request, "from", required=True) @@ -442,13 +442,13 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.e2e_keys_handler = hs.get_e2e_keys_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) timeout = parse_integer(request, "timeout", 10 * 1000) body = parse_json_object_from_request(request) # Generate a count for each algorithm, 
which is hard-coded to 1. - query: Dict[str, Dict[str, Dict[str, int]]] = {} + query: dict[str, dict[str, dict[str, int]]] = {} for user_id, one_time_keys in body.get("one_time_keys", {}).items(): for device_id, algorithm in one_time_keys.items(): query.setdefault(user_id, {})[device_id] = {algorithm: 1} @@ -490,13 +490,13 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.e2e_keys_handler = hs.get_e2e_keys_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) timeout = parse_integer(request, "timeout", 10 * 1000) body = parse_json_object_from_request(request) # Generate a count for each algorithm. - query: Dict[str, Dict[str, Dict[str, int]]] = {} + query: dict[str, dict[str, dict[str, int]]] = {} for user_id, one_time_keys in body.get("one_time_keys", {}).items(): for device_id, algorithms in one_time_keys.items(): query.setdefault(user_id, {})[device_id] = Counter(algorithms) @@ -526,7 +526,7 @@ def __init__(self, hs: "HomeServer"): self.auth_handler = hs.get_auth_handler() @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() body = parse_json_object_from_request(request) @@ -659,7 +659,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.e2e_keys_handler = hs.get_e2e_keys_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) user_id = requester.user.to_string() body = parse_json_object_from_request(request) diff --git a/synapse/rest/client/knock.py 
b/synapse/rest/client/knock.py index d7a17e1b35..5e96079b66 100644 --- a/synapse/rest/client/knock.py +++ b/synapse/rest/client/knock.py @@ -20,7 +20,7 @@ # # import logging -from typing import TYPE_CHECKING, Dict, List, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import Membership from synapse.api.errors import SynapseError @@ -58,7 +58,7 @@ async def on_POST( self, request: SynapseRequest, room_identifier: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) content = parse_json_object_from_request(request) @@ -70,7 +70,7 @@ async def on_POST( room_id = room_identifier # twisted.web.server.Request.args is incorrectly defined as Optional[Any] - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore # Prefer via over server_name (deprecated with MSC4156) remote_room_hosts = parse_strings_from_args(args, "via", required=False) if remote_room_hosts is None: diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index 921232a3ea..bba6944982 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -26,10 +26,7 @@ Any, Awaitable, Callable, - Dict, - List, Optional, - Tuple, TypedDict, Union, ) @@ -75,7 +72,7 @@ class LoginResponse(TypedDict, total=False): expires_in_ms: Optional[int] refresh_token: Optional[str] device_id: Optional[str] - well_known: Optional[Dict[str, Any]] + well_known: Optional[dict[str, Any]] class LoginRestServlet(RestServlet): @@ -142,8 +139,8 @@ def __init__(self, hs: "HomeServer"): # counters are initialised for the auth_provider_ids. 
_load_sso_handlers(hs) - def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: - flows: List[JsonDict] = [] + def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: + flows: list[JsonDict] = [] if self.jwt_enabled: flows.append({"type": LoginRestServlet.JWT_TYPE}) @@ -178,7 +175,7 @@ def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: # fall back to the fallback API if they don't understand one of the # login flow types returned. if support_login_token_flow: - tokenTypeFlow: Dict[str, Any] = {"type": LoginRestServlet.TOKEN_TYPE} + tokenTypeFlow: dict[str, Any] = {"type": LoginRestServlet.TOKEN_TYPE} # If the login token flow is enabled advertise the get_login_token flag. if self._get_login_token_enabled: tokenTypeFlow["get_login_token"] = True @@ -190,7 +187,7 @@ def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: return 200, {"flows": flows} - async def on_POST(self, request: SynapseRequest) -> Tuple[int, LoginResponse]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, LoginResponse]: login_submission = parse_json_object_from_request(request) # Check to see if the client requested a refresh token. 
@@ -602,7 +599,7 @@ def __init__(self, hs: "HomeServer"): ) self.refresh_token_lifetime = hs.config.registration.refresh_token_lifetime - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: refresh_submission = parse_json_object_from_request(request) assert_params_in_dict(refresh_submission, ["refresh_token"]) @@ -626,7 +623,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: token, access_valid_until_ms, refresh_valid_until_ms ) - response: Dict[str, Union[str, int]] = { + response: dict[str, Union[str, int]] = { "access_token": access_token, "refresh_token": refresh_token, } @@ -684,7 +681,7 @@ async def on_GET( finish_request(request) return - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore client_redirect_url = parse_bytes_from_args(args, "redirectUrl", required=True) sso_url = await self._sso_handler.handle_redirect_request( request, diff --git a/synapse/rest/client/login_token_request.py b/synapse/rest/client/login_token_request.py index a053db8e55..f455e9c0b7 100644 --- a/synapse/rest/client/login_token_request.py +++ b/synapse/rest/client/login_token_request.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.ratelimiting import Ratelimiter from synapse.config.ratelimiting import RatelimitSettings @@ -89,7 +89,7 @@ def __init__(self, hs: "HomeServer"): ) @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) body = parse_json_object_from_request(request) diff --git a/synapse/rest/client/logout.py b/synapse/rest/client/logout.py index 39c62b9e26..d804552a4a 100644 --- a/synapse/rest/client/logout.py +++ 
b/synapse/rest/client/logout.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet @@ -43,7 +43,7 @@ def __init__(self, hs: "HomeServer"): self._auth_handler = hs.get_auth_handler() self._device_handler = hs.get_device_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req( request, allow_expired=True, allow_locked=True ) @@ -70,7 +70,7 @@ def __init__(self, hs: "HomeServer"): self._auth_handler = hs.get_auth_handler() self._device_handler = hs.get_device_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req( request, allow_expired=True, allow_locked=True ) diff --git a/synapse/rest/client/matrixrtc.py b/synapse/rest/client/matrixrtc.py index afe4d4fa83..22f8498f2f 100644 --- a/synapse/rest/client/matrixrtc.py +++ b/synapse/rest/client/matrixrtc.py @@ -15,7 +15,7 @@ # # -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet @@ -37,7 +37,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._transports = hs.config.matrix_rtc.transports - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: # Require authentication for this endpoint. 
await self._auth.get_user_by_req(request) diff --git a/synapse/rest/client/mutual_rooms.py b/synapse/rest/client/mutual_rooms.py index abb1fab0a3..7d0570d0cb 100644 --- a/synapse/rest/client/mutual_rooms.py +++ b/synapse/rest/client/mutual_rooms.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Dict, List, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import Codes, SynapseError from synapse.http.server import HttpServer @@ -51,9 +51,9 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: # twisted.web.server.Request.args is incorrectly defined as Optional[Any] - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore user_ids = parse_strings_from_args(args, "user_id", required=True) diff --git a/synapse/rest/client/notifications.py b/synapse/rest/client/notifications.py index 168ce50d3f..2420e9fffb 100644 --- a/synapse/rest/client/notifications.py +++ b/synapse/rest/client/notifications.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import ReceiptTypes from synapse.events.utils import ( @@ -53,7 +53,7 @@ def __init__(self, hs: "HomeServer"): self.clock = hs.get_clock() self._event_serializer = hs.get_event_client_serializer() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() diff --git a/synapse/rest/client/openid.py b/synapse/rest/client/openid.py index a2c2faa199..e624a48ce7 100644 --- a/synapse/rest/client/openid.py +++ b/synapse/rest/client/openid.py 
@@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import AuthError from synapse.http.server import HttpServer @@ -80,7 +80,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot request tokens for other users.") diff --git a/synapse/rest/client/password_policy.py b/synapse/rest/client/password_policy.py index 7ec6dd3443..314c409fc2 100644 --- a/synapse/rest/client/password_policy.py +++ b/synapse/rest/client/password_policy.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -46,7 +46,7 @@ def __init__(self, hs: "HomeServer"): self.policy = hs.config.auth.password_policy self.enabled = hs.config.auth.password_policy_enabled - def on_GET(self, request: Request) -> Tuple[int, JsonDict]: + def on_GET(self, request: Request) -> tuple[int, JsonDict]: if not self.enabled or not self.policy: return 200, {} diff --git a/synapse/rest/client/presence.py b/synapse/rest/client/presence.py index 104d54cd89..de3ffdaa0b 100644 --- a/synapse/rest/client/presence.py +++ b/synapse/rest/client/presence.py @@ -22,7 +22,7 @@ """This module contains REST servlets to do with presence: /presence/""" import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import AuthError, Codes, LimitExceededError, SynapseError from synapse.api.ratelimiting import Ratelimiter @@ -60,7 +60,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user = UserID.from_string(user_id) 
@@ -84,7 +84,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user = UserID.from_string(user_id) diff --git a/synapse/rest/client/profile.py b/synapse/rest/client/profile.py index 8bc532c811..7f3128cb61 100644 --- a/synapse/rest/client/profile.py +++ b/synapse/rest/client/profile.py @@ -23,7 +23,7 @@ import re from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import ProfileFields from synapse.api.errors import Codes, SynapseError @@ -69,7 +69,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester_user = None if self.hs.config.server.require_auth_for_profile_requests: @@ -118,7 +118,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str, field_name: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester_user = None if self.hs.config.server.require_auth_for_profile_requests: @@ -156,7 +156,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, user_id: str, field_name: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not UserID.is_valid(user_id): raise SynapseError( HTTPStatus.BAD_REQUEST, "Invalid user id", Codes.INVALID_PARAM @@ -221,7 +221,7 @@ async def on_PUT( async def on_DELETE( self, request: SynapseRequest, user_id: str, field_name: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not UserID.is_valid(user_id): raise SynapseError( HTTPStatus.BAD_REQUEST, "Invalid user id", Codes.INVALID_PARAM diff --git a/synapse/rest/client/push_rule.py b/synapse/rest/client/push_rule.py index c1939a9b57..0a9b83af95 100644 --- a/synapse/rest/client/push_rule.py +++ b/synapse/rest/client/push_rule.py @@ -20,7 +20,7 @@ # 
from http import HTTPStatus -from typing import TYPE_CHECKING, List, Tuple, Union +from typing import TYPE_CHECKING, Union from synapse.api.errors import ( Codes, @@ -67,7 +67,7 @@ def __init__(self, hs: "HomeServer"): self._push_rules_handler = hs.get_push_rules_handler() self._push_rule_linearizer = Linearizer(name="push_rules", clock=hs.get_clock()) - async def on_PUT(self, request: SynapseRequest, path: str) -> Tuple[int, JsonDict]: + async def on_PUT(self, request: SynapseRequest, path: str) -> tuple[int, JsonDict]: if not self._is_push_worker: raise Exception("Cannot handle PUT /push_rules on worker") @@ -79,7 +79,7 @@ async def on_PUT(self, request: SynapseRequest, path: str) -> Tuple[int, JsonDic async def handle_put( self, request: SynapseRequest, path: str, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: spec = _rule_spec_from_path(path.split("/")) try: priority_class = _priority_class_from_spec(spec) @@ -140,7 +140,7 @@ async def handle_put( async def on_DELETE( self, request: SynapseRequest, path: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not self._is_push_worker: raise Exception("Cannot handle DELETE /push_rules on worker") @@ -155,7 +155,7 @@ async def handle_delete( request: SynapseRequest, path: str, user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: spec = _rule_spec_from_path(path.split("/")) namespaced_rule_id = f"global/{spec.template}/{spec.rule_id}" @@ -170,7 +170,7 @@ async def handle_delete( else: raise - async def on_GET(self, request: SynapseRequest, path: str) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest, path: str) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) requester.user.to_string() @@ -196,7 +196,7 @@ async def on_GET(self, request: SynapseRequest, path: str) -> Tuple[int, JsonDic raise UnrecognizedRequestError() -def _rule_spec_from_path(path: List[str]) -> RuleSpec: +def _rule_spec_from_path(path: 
list[str]) -> RuleSpec: """Turn a sequence of path components into a rule spec Args: @@ -240,7 +240,7 @@ def _rule_spec_from_path(path: List[str]) -> RuleSpec: def _rule_tuple_from_request_object( rule_template: str, rule_id: str, req_obj: JsonDict -) -> Tuple[List[JsonDict], List[Union[str, JsonDict]]]: +) -> tuple[list[JsonDict], list[Union[str, JsonDict]]]: if rule_template == "postcontent": # postcontent is from MSC4306, which says that clients # cannot create their own postcontent rules right now. @@ -279,7 +279,7 @@ def _rule_tuple_from_request_object( return conditions, actions -def _filter_ruleset_with_path(ruleset: JsonDict, path: List[str]) -> JsonDict: +def _filter_ruleset_with_path(ruleset: JsonDict, path: list[str]) -> JsonDict: if path == []: raise UnrecognizedRequestError( PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR diff --git a/synapse/rest/client/pusher.py b/synapse/rest/client/pusher.py index a455f95a26..66d7fec07e 100644 --- a/synapse/rest/client/pusher.py +++ b/synapse/rest/client/pusher.py @@ -21,7 +21,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import Codes, SynapseError from synapse.http.server import HttpServer @@ -52,7 +52,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self._store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() @@ -85,7 +85,7 @@ def __init__(self, hs: "HomeServer"): self.pusher_pool = self.hs.get_pusherpool() self._store = hs.get_datastores().main - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() diff --git 
a/synapse/rest/client/read_marker.py b/synapse/rest/client/read_marker.py index d3d3c7c41d..874e7487bf 100644 --- a/synapse/rest/client/read_marker.py +++ b/synapse/rest/client/read_marker.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import ReceiptTypes from synapse.http.server import HttpServer @@ -56,7 +56,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await self.presence_handler.bump_presence_active_time( diff --git a/synapse/rest/client/receipts.py b/synapse/rest/client/receipts.py index 4bf93f485c..d3a43537bb 100644 --- a/synapse/rest/client/receipts.py +++ b/synapse/rest/client/receipts.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import MAIN_TIMELINE, ReceiptTypes from synapse.api.errors import Codes, SynapseError @@ -59,7 +59,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_id: str, receipt_type: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if not RoomID.is_valid(room_id) or not event_id.startswith(EventID.SIGIL): diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index b42006e4ce..145dc6f569 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -21,7 +21,7 @@ # import logging import random -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from twisted.web.server import Request @@ -100,7 +100,7 @@ def __init__(self, hs: "HomeServer"): template_text=self.config.email.email_already_in_use_template_text, ) - async def on_POST(self, request: SynapseRequest) -> 
Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: if not self.hs.config.email.can_verify_email: logger.warning( "Email registration has been disabled due to lack of email config" @@ -183,7 +183,7 @@ def __init__(self, hs: "HomeServer"): self.server_name = hs.hostname self.identity_handler = hs.get_identity_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_json_object_from_request(request) assert_params_in_dict( @@ -352,7 +352,7 @@ def __init__(self, hs: "HomeServer"): hs.config.registration.inhibit_user_in_use_error ) - async def on_GET(self, request: Request) -> Tuple[int, JsonDict]: + async def on_GET(self, request: Request) -> tuple[int, JsonDict]: if not self.hs.config.registration.enable_registration: raise SynapseError( 403, "Registration has been disabled", errcode=Codes.FORBIDDEN @@ -402,7 +402,7 @@ def __init__(self, hs: "HomeServer"): cfg=hs.config.ratelimiting.rc_registration_token_validity, ) - async def on_GET(self, request: Request) -> Tuple[int, JsonDict]: + async def on_GET(self, request: Request) -> tuple[int, JsonDict]: await self.ratelimiter.ratelimit(None, (request.getClientAddress().host,)) if not self.hs.config.registration.enable_registration: @@ -453,7 +453,7 @@ def __init__(self, hs: "HomeServer"): ) @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_json_object_from_request(request) client_addr = request.getClientAddress().host @@ -853,7 +853,7 @@ async def _create_registration_details( async def _do_guest_registration( self, params: JsonDict, address: Optional[str] = None - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not self.hs.config.registration.allow_guest_access: raise SynapseError(403, "Guest access is 
disabled") user_id = await self.registration_handler.register_user( @@ -913,7 +913,7 @@ def __init__(self, hs: "HomeServer"): self.ratelimiter = hs.get_registration_ratelimiter() @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_json_object_from_request(request) client_addr = request.getClientAddress().host @@ -970,7 +970,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def _calculate_registration_flows( config: HomeServerConfig, auth_handler: AuthHandler -) -> List[List[str]]: +) -> list[list[str]]: """Get a suitable flows list for registration Args: diff --git a/synapse/rest/client/relations.py b/synapse/rest/client/relations.py index 49943cf0c3..d6c7411816 100644 --- a/synapse/rest/client/relations.py +++ b/synapse/rest/client/relations.py @@ -20,7 +20,7 @@ import logging import re -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse.api.constants import Direction from synapse.handlers.relations import ThreadsListInclude @@ -63,7 +63,7 @@ async def on_GET( parent_id: str, relation_type: Optional[str] = None, event_type: Optional[str] = None, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) pagination_config = await PaginationConfig.from_request( @@ -105,7 +105,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) limit = parse_integer(request, "limit", default=5) diff --git a/synapse/rest/client/reporting.py b/synapse/rest/client/reporting.py index 81faf38a7f..f11f6b7b77 100644 --- a/synapse/rest/client/reporting.py +++ b/synapse/rest/client/reporting.py @@ -21,7 +21,7 @@ import logging from http import 
HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse._pydantic_compat import StrictStr from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError @@ -57,7 +57,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_id: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() @@ -138,7 +138,7 @@ class PostBody(RequestBodyModel): async def on_POST( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() @@ -191,7 +191,7 @@ class PostBody(RequestBodyModel): async def on_POST( self, request: SynapseRequest, target_user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index 1084139df0..38e315d0e7 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -25,7 +25,7 @@ import re from enum import Enum from http import HTTPStatus -from typing import TYPE_CHECKING, Awaitable, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Awaitable, Optional from urllib import parse as urlparse from prometheus_client.core import Histogram @@ -166,20 +166,20 @@ def register(self, http_server: HttpServer) -> None: async def on_PUT( self, request: SynapseRequest, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) set_tag("txn_id", txn_id) return await self.txns.fetch_or_execute_request( request, requester, self._do, request, requester ) - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def 
on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) return await self._do(request, requester) async def _do( self, request: SynapseRequest, requester: Requester - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: room_id, _, _ = await self._room_creation_handler.create_room( requester, self.get_room_config(request) ) @@ -244,18 +244,18 @@ def register(self, http_server: HttpServer) -> None: @cancellable def on_GET_no_state_key( self, request: SynapseRequest, room_id: str, event_type: str - ) -> Awaitable[Tuple[int, JsonDict]]: + ) -> Awaitable[tuple[int, JsonDict]]: return self.on_GET(request, room_id, event_type, "") def on_PUT_no_state_key( self, request: SynapseRequest, room_id: str, event_type: str - ) -> Awaitable[Tuple[int, JsonDict]]: + ) -> Awaitable[tuple[int, JsonDict]]: return self.on_PUT(request, room_id, event_type, "") @cancellable async def on_GET( self, request: SynapseRequest, room_id: str, event_type: str, state_key: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) format = parse_string( request, "format", default="content", allowed_values=["content", "event"] @@ -295,7 +295,7 @@ async def on_PUT( event_type: str, state_key: str, txn_id: Optional[str] = None, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) if txn_id: @@ -408,7 +408,7 @@ async def _do( room_id: str, event_type: str, txn_id: Optional[str], - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: content = parse_json_object_from_request(request) origin_server_ts = None @@ -460,13 +460,13 @@ async def on_POST( request: SynapseRequest, room_id: str, event_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) return await self._do(request, requester, room_id, 
event_type, None) async def on_PUT( self, request: SynapseRequest, room_id: str, event_type: str, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) set_tag("txn_id", txn_id) @@ -545,11 +545,11 @@ async def _do( requester: Requester, room_identifier: str, txn_id: Optional[str], - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: content = parse_json_object_from_request(request, allow_empty_body=True) # twisted.web.server.Request.args is incorrectly defined as Optional[Any] - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore # Prefer via over server_name (deprecated with MSC4156) remote_room_hosts = parse_strings_from_args(args, "via", required=False) if remote_room_hosts is None: @@ -578,13 +578,13 @@ async def on_POST( self, request: SynapseRequest, room_identifier: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) return await self._do(request, requester, room_identifier, None) async def on_PUT( self, request: SynapseRequest, room_identifier: str, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) set_tag("txn_id", txn_id) @@ -603,7 +603,7 @@ def __init__(self, hs: "HomeServer"): self.hs = hs self.auth = hs.get_auth() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: server = parse_string(request, "server") try: @@ -652,7 +652,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: return 200, data - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.auth.get_user_by_req(request, allow_guest=True) 
server = parse_string(request, "server") @@ -726,7 +726,7 @@ def __init__(self, hs: "HomeServer"): @cancellable async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # TODO support Pagination stream API (limit/tokens) requester = await self.auth.get_user_by_req(request, allow_guest=True) handler = self.message_handler @@ -780,7 +780,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) users_with_profile = await self.message_handler.get_joined_members( @@ -809,7 +809,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: processing_start_time = self.clock.time_msec() # Fire off and hope that we get a result by the end. # @@ -870,7 +870,7 @@ def __init__(self, hs: "HomeServer"): @cancellable async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, List[JsonDict]]: + ) -> tuple[int, list[JsonDict]]: requester = await self.auth.get_user_by_req(request, allow_guest=True) # Get all the current state for this room events = await self.message_handler.get_state_events( @@ -893,7 +893,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) pagination_config = await PaginationConfig.from_request( self.store, request, default_limit=10 @@ -925,7 +925,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) include_unredacted_content = self.msc2815_enabled and ( 
@@ -1013,7 +1013,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) limit = parse_integer(request, "limit", default=10) @@ -1072,20 +1072,20 @@ def register(self, http_server: HttpServer) -> None: PATTERNS = "/rooms/(?P[^/]*)/forget" register_txn_path(self, PATTERNS, http_server) - async def _do(self, requester: Requester, room_id: str) -> Tuple[int, JsonDict]: + async def _do(self, requester: Requester, room_id: str) -> tuple[int, JsonDict]: await self.room_member_handler.forget(user=requester.user, room_id=room_id) return 200, {} async def on_POST( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=False) return await self._do(requester, room_id) async def on_PUT( self, request: SynapseRequest, room_id: str, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=False) set_tag("txn_id", txn_id) @@ -1119,7 +1119,7 @@ async def _do( room_id: str, membership_action: str, txn_id: Optional[str], - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if requester.is_guest and membership_action not in { Membership.JOIN, Membership.LEAVE, @@ -1196,13 +1196,13 @@ async def on_POST( request: SynapseRequest, room_id: str, membership_action: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) return await self._do(request, requester, room_id, membership_action, None) async def on_PUT( self, request: SynapseRequest, room_id: str, membership_action: str, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) 
set_tag("txn_id", txn_id) @@ -1242,7 +1242,7 @@ async def _do( room_id: str, event_id: str, txn_id: Optional[str], - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: content = parse_json_object_from_request(request) requester_suspended = await self._store.get_user_suspended_status( @@ -1328,13 +1328,13 @@ async def on_POST( request: SynapseRequest, room_id: str, event_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) return await self._do(request, requester, room_id, event_id, None) async def on_PUT( self, request: SynapseRequest, room_id: str, event_id: str, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) set_tag("txn_id", txn_id) @@ -1363,7 +1363,7 @@ def __init__(self, hs: "HomeServer"): async def on_PUT( self, request: SynapseRequest, room_id: str, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if not self._is_typing_writer: @@ -1419,7 +1419,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) alias_list = await self.directory_handler.get_aliases_for_room( @@ -1438,7 +1438,7 @@ def __init__(self, hs: "HomeServer"): self.search_handler = hs.get_search_handler() self.auth = hs.get_auth() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) content = parse_json_object_from_request(request) @@ -1458,7 +1458,7 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main self.auth = hs.get_auth() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: 
SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) room_ids = await self.store.get_rooms_for_user(requester.user.to_string()) @@ -1533,7 +1533,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) await self._auth.check_user_in_room_or_world_readable(room_id, requester) @@ -1566,7 +1566,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request, allow_guest=True) max_depth = parse_integer(request, "max_depth") @@ -1575,7 +1575,7 @@ async def on_GET( # twisted.web.server.Request.args is incorrectly defined as Optional[Any] remote_room_hosts = None if self.msc4235_enabled: - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore via_param = parse_strings_from_args( args, "org.matrix.msc4235.via", required=False ) @@ -1614,7 +1614,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_identifier: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: try: requester = await self._auth.get_user_by_req(request, allow_guest=True) requester_user_id: Optional[str] = requester.user.to_string() @@ -1623,7 +1623,7 @@ async def on_GET( requester_user_id = None # twisted.web.server.Request.args is incorrectly defined as Optional[Any] - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore remote_room_hosts = parse_strings_from_args(args, "via", required=False) room_id, remote_room_hosts = await self.resolve_room_id( room_identifier, diff --git a/synapse/rest/client/room_keys.py b/synapse/rest/client/room_keys.py index 
7be08ecb60..b2de591dc5 100644 --- a/synapse/rest/client/room_keys.py +++ b/synapse/rest/client/room_keys.py @@ -19,7 +19,7 @@ # import logging -from typing import TYPE_CHECKING, Optional, Tuple, cast +from typing import TYPE_CHECKING, Optional, cast from synapse.api.errors import Codes, NotFoundError, SynapseError from synapse.http.server import HttpServer @@ -52,7 +52,7 @@ def __init__(self, hs: "HomeServer"): async def on_PUT( self, request: SynapseRequest, room_id: Optional[str], session_id: Optional[str] - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """ Uploads one or more encrypted E2E room keys for backup purposes. room_id: the ID of the room the keys are for (optional) @@ -147,7 +147,7 @@ async def on_PUT( async def on_GET( self, request: SynapseRequest, room_id: Optional[str], session_id: Optional[str] - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """ Retrieves one or more encrypted E2E room keys for backup purposes. Symmetric with the PUT version of the API. @@ -234,7 +234,7 @@ async def on_GET( async def on_DELETE( self, request: SynapseRequest, room_id: Optional[str], session_id: Optional[str] - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """ Deletes one or more encrypted E2E room keys for a user for backup purposes. 
@@ -267,7 +267,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.e2e_room_keys_handler = hs.get_e2e_room_keys_handler() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: """ Retrieve the version information about the most current backup version (if any) @@ -293,7 +293,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: raise SynapseError(404, "No backup found", Codes.NOT_FOUND) return 200, info - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: """ Create a new backup version for this user's room_keys with the given info. The version is allocated by the server and returned to the user @@ -345,7 +345,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, version: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """ Retrieve the version information about a given version of the user's room_keys backup. @@ -374,7 +374,7 @@ async def on_GET( async def on_DELETE( self, request: SynapseRequest, version: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """ Delete the information about a given version of the user's room_keys backup. Doesn't delete the actual room data. @@ -391,7 +391,7 @@ async def on_DELETE( async def on_PUT( self, request: SynapseRequest, version: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """ Update the information about a given version of the user's room_keys backup. 
diff --git a/synapse/rest/client/room_upgrade_rest_servlet.py b/synapse/rest/client/room_upgrade_rest_servlet.py index a9717781b0..1c87b86ecb 100644 --- a/synapse/rest/client/room_upgrade_rest_servlet.py +++ b/synapse/rest/client/room_upgrade_rest_servlet.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import Codes, ShadowBanError, SynapseError from synapse.api.room_versions import KNOWN_ROOM_VERSIONS @@ -73,7 +73,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) content = parse_json_object_from_request(request) diff --git a/synapse/rest/client/sendtodevice.py b/synapse/rest/client/sendtodevice.py index 2a67514560..597cb1fecc 100644 --- a/synapse/rest/client/sendtodevice.py +++ b/synapse/rest/client/sendtodevice.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http import servlet from synapse.http.server import HttpServer @@ -53,7 +53,7 @@ def __init__(self, hs: "HomeServer"): async def on_PUT( self, request: SynapseRequest, message_type: str, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) set_tag("txn_id", txn_id) return await self.txns.fetch_or_execute_request( @@ -70,7 +70,7 @@ async def _put( request: SynapseRequest, requester: Requester, message_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: content = parse_json_object_from_request(request) assert_params_in_dict(content, ("messages",)) diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index 01868de60b..9c03eecea4 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -21,7 +21,7 @@ import itertools import logging from 
collections import defaultdict -from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Mapping, Optional, Union import attr @@ -138,7 +138,7 @@ def __init__(self, hs: "HomeServer"): cfg=hs.config.ratelimiting.rc_presence_per_user, ) - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: # This will always be set by the time Twisted calls us. assert request.args is not None @@ -380,7 +380,7 @@ async def encode_response( return response @staticmethod - def encode_presence(events: List[UserPresenceState], time_now: int) -> JsonDict: + def encode_presence(events: list[UserPresenceState], time_now: int) -> JsonDict: return { "events": [ { @@ -398,7 +398,7 @@ def encode_presence(events: List[UserPresenceState], time_now: int) -> JsonDict: async def encode_joined( self, sync_config: SyncConfig, - rooms: List[JoinedSyncResult], + rooms: list[JoinedSyncResult], time_now: int, serialize_options: SerializeEventConfig, ) -> JsonDict: @@ -428,7 +428,7 @@ async def encode_joined( @trace_with_opname("sync.encode_invited") async def encode_invited( self, - rooms: List[InvitedSyncResult], + rooms: list[InvitedSyncResult], time_now: int, serialize_options: SerializeEventConfig, ) -> JsonDict: @@ -464,10 +464,10 @@ async def encode_invited( @trace_with_opname("sync.encode_knocked") async def encode_knocked( self, - rooms: List[KnockedSyncResult], + rooms: list[KnockedSyncResult], time_now: int, serialize_options: SerializeEventConfig, - ) -> Dict[str, Dict[str, Any]]: + ) -> dict[str, dict[str, Any]]: """ Encode the rooms we've knocked on in a sync result. 
@@ -517,7 +517,7 @@ async def encode_knocked( async def encode_archived( self, sync_config: SyncConfig, - rooms: List[ArchivedSyncResult], + rooms: list[ArchivedSyncResult], time_now: int, serialize_options: SerializeEventConfig, ) -> JsonDict: @@ -768,7 +768,7 @@ def __init__(self, hs: "HomeServer"): self.sliding_sync_handler = hs.get_sliding_sync_handler() self.event_serializer = hs.get_event_client_serializer() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req_experimental_feature( request, allow_guest=True, feature=ExperimentalFeature.MSC3575 ) @@ -900,7 +900,7 @@ def encode_operation( async def encode_rooms( self, requester: Requester, - rooms: Dict[str, SlidingSyncResult.RoomResult], + rooms: dict[str, SlidingSyncResult.RoomResult], ) -> JsonDict: time_now = self.clock.time_msec() @@ -909,7 +909,7 @@ async def encode_rooms( requester=requester, ) - serialized_rooms: Dict[str, JsonDict] = {} + serialized_rooms: dict[str, JsonDict] = {} for room_id, room_result in rooms.items(): serialized_rooms[room_id] = { "notification_count": room_result.notification_count, diff --git a/synapse/rest/client/tags.py b/synapse/rest/client/tags.py index fb59efb11f..5699ff35c7 100644 --- a/synapse/rest/client/tags.py +++ b/synapse/rest/client/tags.py @@ -21,7 +21,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import AuthError, Codes, SynapseError from synapse.http.server import HttpServer @@ -56,7 +56,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot get tags for other users.") @@ -85,7 
+85,7 @@ def __init__(self, hs: "HomeServer"): async def on_PUT( self, request: SynapseRequest, user_id: str, room_id: str, tag: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add tags for other users.") @@ -114,7 +114,7 @@ async def on_PUT( async def on_DELETE( self, request: SynapseRequest, user_id: str, room_id: str, tag: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add tags for other users.") diff --git a/synapse/rest/client/thirdparty.py b/synapse/rest/client/thirdparty.py index f972591ebf..c17335eb48 100644 --- a/synapse/rest/client/thirdparty.py +++ b/synapse/rest/client/thirdparty.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Dict, List, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import ThirdPartyEntityKind from synapse.http.server import HttpServer @@ -45,7 +45,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.appservice_handler = hs.get_application_service_handler() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.auth.get_user_by_req(request, allow_guest=True) protocols = await self.appservice_handler.get_3pe_protocols() @@ -63,7 +63,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, protocol: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.auth.get_user_by_req(request, allow_guest=True) protocols = await self.appservice_handler.get_3pe_protocols( @@ -86,10 +86,10 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, protocol: str - ) -> Tuple[int, List[JsonDict]]: + ) -> tuple[int, 
list[JsonDict]]: await self.auth.get_user_by_req(request, allow_guest=True) - fields: Dict[bytes, List[bytes]] = request.args # type: ignore[assignment] + fields: dict[bytes, list[bytes]] = request.args # type: ignore[assignment] fields.pop(b"access_token", None) results = await self.appservice_handler.query_3pe( @@ -110,10 +110,10 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, protocol: str - ) -> Tuple[int, List[JsonDict]]: + ) -> tuple[int, list[JsonDict]]: await self.auth.get_user_by_req(request, allow_guest=True) - fields: Dict[bytes, List[bytes]] = request.args # type: ignore[assignment] + fields: dict[bytes, list[bytes]] = request.args # type: ignore[assignment] fields.pop(b"access_token", None) results = await self.appservice_handler.query_3pe( diff --git a/synapse/rest/client/thread_subscriptions.py b/synapse/rest/client/thread_subscriptions.py index 039aba1721..f879c7589c 100644 --- a/synapse/rest/client/thread_subscriptions.py +++ b/synapse/rest/client/thread_subscriptions.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import TYPE_CHECKING, Dict, Optional, Tuple +from typing import TYPE_CHECKING, Optional import attr from typing_extensions import TypeAlias @@ -59,7 +59,7 @@ class PutBody(RequestBodyModel): async def on_GET( self, request: SynapseRequest, room_id: str, thread_root_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: RoomID.from_string(room_id) if not thread_root_id.startswith("$"): raise SynapseError( @@ -80,7 +80,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, room_id: str, thread_root_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: RoomID.from_string(room_id) if not thread_root_id.startswith("$"): raise SynapseError( @@ -101,7 +101,7 @@ async def on_PUT( async def on_DELETE( self, request: SynapseRequest, room_id: str, thread_root_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: RoomID.from_string(room_id) if 
not thread_root_id.startswith("$"): raise SynapseError( @@ -134,7 +134,7 @@ def __init__(self, hs: "HomeServer"): self.is_mine = hs.is_mine self.store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) limit = min( @@ -204,8 +204,8 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: ) ) - subscribed_threads: Dict[str, Dict[str, JsonDict]] = {} - unsubscribed_threads: Dict[str, Dict[str, JsonDict]] = {} + subscribed_threads: dict[str, dict[str, JsonDict]] = {} + unsubscribed_threads: dict[str, dict[str, JsonDict]] = {} for stream_id, room_id, thread_root_id, subscribed, automatic in subscriptions: if subscribed: subscribed_threads.setdefault(room_id, {})[thread_root_id] = ( diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py index 571ba2fa62..4b3656a597 100644 --- a/synapse/rest/client/transactions.py +++ b/synapse/rest/client/transactions.py @@ -23,7 +23,7 @@ to ensure idempotency when performing PUTs using the REST API.""" import logging -from typing import TYPE_CHECKING, Awaitable, Callable, Dict, Hashable, Tuple +from typing import TYPE_CHECKING, Awaitable, Callable, Hashable from typing_extensions import ParamSpec @@ -51,8 +51,8 @@ def __init__(self, hs: "HomeServer"): self.hs = hs self.clock = self.hs.get_clock() # $txn_key: (ObservableDeferred<(res_code, res_json_body)>, timestamp) - self.transactions: Dict[ - Hashable, Tuple[ObservableDeferred[Tuple[int, JsonDict]], int] + self.transactions: dict[ + Hashable, tuple[ObservableDeferred[tuple[int, JsonDict]], int] ] = {} # Try to clean entries every 30 mins. This means entries will exist # for at *LEAST* 30 mins, and at *MOST* 60 mins. 
@@ -103,10 +103,10 @@ def fetch_or_execute_request( self, request: IRequest, requester: Requester, - fn: Callable[P, Awaitable[Tuple[int, JsonDict]]], + fn: Callable[P, Awaitable[tuple[int, JsonDict]]], *args: P.args, **kwargs: P.kwargs, - ) -> "Deferred[Tuple[int, JsonDict]]": + ) -> "Deferred[tuple[int, JsonDict]]": """Fetches the response for this transaction, or executes the given function to produce a response for this transaction. diff --git a/synapse/rest/client/user_directory.py b/synapse/rest/client/user_directory.py index 94fcb11c0c..0f561c2e61 100644 --- a/synapse/rest/client/user_directory.py +++ b/synapse/rest/client/user_directory.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import SynapseError from synapse.http.server import HttpServer @@ -46,7 +46,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.user_directory_handler = hs.get_user_directory_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonMapping]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonMapping]: """Searches for users in directory Returns: diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 20395430d7..dee2cdb637 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -23,7 +23,7 @@ import logging import re -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import RoomCreationPreset from synapse.http.server import HttpServer @@ -62,7 +62,7 @@ def __init__(self, hs: "HomeServer"): in self.config.room.encryption_enabled_by_default_for_room_presets ) - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: msc3881_enabled = self.config.experimental.msc3881_enabled msc3575_enabled = self.config.experimental.msc3575_enabled 
diff --git a/synapse/rest/client/voip.py b/synapse/rest/client/voip.py index fbed3a3bae..581829a790 100644 --- a/synapse/rest/client/voip.py +++ b/synapse/rest/client/voip.py @@ -22,7 +22,7 @@ import base64 import hashlib import hmac -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet @@ -43,7 +43,7 @@ def __init__(self, hs: "HomeServer"): self.hs = hs self.auth = hs.get_auth() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req( request, self.hs.config.voip.turn_allow_guests ) diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index 3961f82894..a1d2364bed 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -23,7 +23,7 @@ from hashlib import sha256 from http import HTTPStatus from os import path -from typing import TYPE_CHECKING, Any, Dict, List +from typing import TYPE_CHECKING, Any import jinja2 from jinja2 import TemplateNotFound @@ -121,7 +121,7 @@ async def _async_render_GET(self, request: Request) -> None: has_consented = False public_version = username == "" if not public_version: - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore userhmac_bytes = parse_bytes_from_args(args, "h", required=True) self._check_hash(username, userhmac_bytes) @@ -154,7 +154,7 @@ async def _async_render_GET(self, request: Request) -> None: async def _async_render_POST(self, request: Request) -> None: version = parse_string(request, "v", required=True) username = parse_string(request, "u", required=True) - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore userhmac = 
parse_bytes_from_args(args, "h", required=True) self._check_hash(username, userhmac) diff --git a/synapse/rest/key/v2/local_key_resource.py b/synapse/rest/key/v2/local_key_resource.py index 608da25a6c..f783acdb83 100644 --- a/synapse/rest/key/v2/local_key_resource.py +++ b/synapse/rest/key/v2/local_key_resource.py @@ -21,7 +21,7 @@ import logging import re -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from signedjson.sign import sign_json from unpaddedbase64 import encode_base64 @@ -108,7 +108,7 @@ def response_json_object(self) -> JsonDict: def on_GET( self, request: Request, key_id: Optional[str] = None - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # Matrix 1.6 drops support for passing the key_id, this is incompatible # with earlier versions and is allowed in order to support both. # A warning is issued to help determine when it is safe to drop this. diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index 94c679b9e7..51cb077496 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -21,7 +21,7 @@ import logging import re -from typing import TYPE_CHECKING, Dict, Mapping, Optional, Set, Tuple +from typing import TYPE_CHECKING, Mapping, Optional from signedjson.sign import sign_json @@ -113,7 +113,7 @@ class RemoteKey(RestServlet): CATEGORY = "Federation requests" class PostBody(RequestBodyModel): - server_keys: Dict[StrictStr, Dict[StrictStr, _KeyQueryCriteriaDataModel]] + server_keys: dict[StrictStr, dict[StrictStr, _KeyQueryCriteriaDataModel]] def __init__(self, hs: "HomeServer"): self.fetcher = ServerKeyFetcher(hs) @@ -144,7 +144,7 @@ def register(self, http_server: HttpServer) -> None: async def on_GET( self, request: Request, server: str, key_id: Optional[str] = None - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if server and key_id: # Matrix 1.6 drops support for passing the key_id, 
this is incompatible # with earlier versions and is allowed in order to support both. @@ -168,7 +168,7 @@ async def on_GET( return 200, await self.query_keys(query, query_remote_on_cache_miss=True) - async def on_POST(self, request: Request) -> Tuple[int, JsonDict]: + async def on_POST(self, request: Request) -> tuple[int, JsonDict]: content = parse_and_validate_json_object_from_request(request, self.PostBody) query = content.server_keys @@ -177,12 +177,12 @@ async def on_POST(self, request: Request) -> Tuple[int, JsonDict]: async def query_keys( self, - query: Dict[str, Dict[str, _KeyQueryCriteriaDataModel]], + query: dict[str, dict[str, _KeyQueryCriteriaDataModel]], query_remote_on_cache_miss: bool = False, ) -> JsonDict: logger.info("Handling query for keys %r", query) - server_keys: Dict[Tuple[str, str], Optional[FetchKeyResultForRemote]] = {} + server_keys: dict[tuple[str, str], Optional[FetchKeyResultForRemote]] = {} for server_name, key_ids in query.items(): if key_ids: results: Mapping[ @@ -199,13 +199,13 @@ async def query_keys( ((server_name, key_id), res) for key_id, res in results.items() ) - json_results: Set[bytes] = set() + json_results: set[bytes] = set() time_now_ms = self.clock.time_msec() # Map server_name->key_id->int. Note that the value of the int is unused. # XXX: why don't we just use a set? - cache_misses: Dict[str, Dict[str, int]] = {} + cache_misses: dict[str, dict[str, int]] = {} for (server_name, key_id), key_result in server_keys.items(): if not query[server_name]: # all keys were requested. 
Just return what we have without worrying diff --git a/synapse/rest/media/upload_resource.py b/synapse/rest/media/upload_resource.py index 74d8280582..484749dbe6 100644 --- a/synapse/rest/media/upload_resource.py +++ b/synapse/rest/media/upload_resource.py @@ -22,7 +22,7 @@ import logging import re -from typing import IO, TYPE_CHECKING, Dict, List, Optional, Tuple +from typing import IO, TYPE_CHECKING, Optional from synapse.api.errors import Codes, SynapseError from synapse.http.server import respond_with_json @@ -56,7 +56,7 @@ def __init__(self, hs: "HomeServer", media_repo: "MediaRepository"): async def _get_file_metadata( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, Optional[str], str]: + ) -> tuple[int, Optional[str], str]: raw_content_length = request.getHeader("Content-Length") if raw_content_length is None: raise SynapseError(msg="Request must specify a Content-Length", code=400) @@ -78,7 +78,7 @@ async def _get_file_metadata( code=413, errcode=Codes.TOO_LARGE, ) - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore upload_name_bytes = parse_bytes_from_args(args, "filename") if upload_name_bytes: try: diff --git a/synapse/rest/synapse/client/federation_whitelist.py b/synapse/rest/synapse/client/federation_whitelist.py index f59daf8428..0382fef1e2 100644 --- a/synapse/rest/synapse/client/federation_whitelist.py +++ b/synapse/rest/synapse/client/federation_whitelist.py @@ -13,7 +13,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.server import DirectServeJsonResource from synapse.http.site import SynapseRequest @@ -50,7 +50,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() - async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def _async_render_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self._auth.get_user_by_req(request) 
whitelist = [] diff --git a/synapse/rest/synapse/client/jwks.py b/synapse/rest/synapse/client/jwks.py index e9a7c24e3b..15ff6f47c1 100644 --- a/synapse/rest/synapse/client/jwks.py +++ b/synapse/rest/synapse/client/jwks.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.server import DirectServeJsonResource from synapse.http.site import SynapseRequest @@ -73,5 +73,5 @@ def __init__(self, hs: "HomeServer"): "keys": keys, } - async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def _async_render_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: return 200, self.res diff --git a/synapse/rest/synapse/client/password_reset.py b/synapse/rest/synapse/client/password_reset.py index 377578ef8a..1ccdf23da8 100644 --- a/synapse/rest/synapse/client/password_reset.py +++ b/synapse/rest/synapse/client/password_reset.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -65,7 +65,7 @@ def __init__(self, hs: "HomeServer"): # This resource should only be mounted if email validation is enabled assert hs.config.email.can_verify_email - async def _async_render_GET(self, request: Request) -> Tuple[int, bytes]: + async def _async_render_GET(self, request: Request) -> tuple[int, bytes]: sid = parse_string(request, "sid", required=True) token = parse_string(request, "token", required=True) client_secret = parse_string(request, "client_secret", required=True) @@ -83,7 +83,7 @@ async def _async_render_GET(self, request: Request) -> Tuple[int, bytes]: self._confirmation_email_template.render(**template_vars).encode("utf-8"), ) - async def _async_render_POST(self, request: Request) -> Tuple[int, bytes]: + async def _async_render_POST(self, request: Request) -> tuple[int, bytes]: sid = parse_string(request, "sid", required=True) token = parse_string(request, 
"token", required=True) client_secret = parse_string(request, "client_secret", required=True) diff --git a/synapse/rest/synapse/client/pick_username.py b/synapse/rest/synapse/client/pick_username.py index 1727bb63b7..867ea1866d 100644 --- a/synapse/rest/synapse/client/pick_username.py +++ b/synapse/rest/synapse/client/pick_username.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Generator, List, Tuple +from typing import TYPE_CHECKING, Generator from twisted.web.resource import Resource from twisted.web.server import Request @@ -65,7 +65,7 @@ def __init__(self, hs: "HomeServer"): super().__init__(clock=hs.get_clock()) self._sso_handler = hs.get_sso_handler() - async def _async_render_GET(self, request: Request) -> Tuple[int, JsonDict]: + async def _async_render_GET(self, request: Request) -> tuple[int, JsonDict]: localpart = parse_string(request, "username", required=True) session_id = get_username_mapping_session_cookie_from_request(request) @@ -138,7 +138,7 @@ async def _async_render_POST(self, request: SynapseRequest) -> None: use_avatar = parse_boolean(request, "use_avatar", default=False) try: - emails_to_use: List[str] = [ + emails_to_use: list[str] = [ val.decode("utf-8") for val in request.args.get(b"use_email", []) ] except ValueError: diff --git a/synapse/rest/synapse/client/rendezvous.py b/synapse/rest/synapse/client/rendezvous.py index 5278c35572..24c10dee82 100644 --- a/synapse/rest/synapse/client/rendezvous.py +++ b/synapse/rest/synapse/client/rendezvous.py @@ -14,7 +14,7 @@ # import logging -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING from synapse.api.errors import UnrecognizedRequestError from synapse.http.server import DirectServeJsonResource @@ -34,7 +34,7 @@ def __init__(self, hs: "HomeServer") -> None: self._handler = hs.get_rendezvous_handler() async def _async_render_GET(self, request: SynapseRequest) -> None: - postpath: List[bytes] = request.postpath # type: ignore + postpath: list[bytes] 
= request.postpath # type: ignore if len(postpath) != 1: raise UnrecognizedRequestError() session_id = postpath[0].decode("ascii") @@ -42,7 +42,7 @@ async def _async_render_GET(self, request: SynapseRequest) -> None: self._handler.handle_get(request, session_id) def _async_render_PUT(self, request: SynapseRequest) -> None: - postpath: List[bytes] = request.postpath # type: ignore + postpath: list[bytes] = request.postpath # type: ignore if len(postpath) != 1: raise UnrecognizedRequestError() session_id = postpath[0].decode("ascii") @@ -50,7 +50,7 @@ def _async_render_PUT(self, request: SynapseRequest) -> None: self._handler.handle_put(request, session_id) def _async_render_DELETE(self, request: SynapseRequest) -> None: - postpath: List[bytes] = request.postpath # type: ignore + postpath: list[bytes] = request.postpath # type: ignore if len(postpath) != 1: raise UnrecognizedRequestError() session_id = postpath[0].decode("ascii") diff --git a/synapse/rest/synapse/mas/devices.py b/synapse/rest/synapse/mas/devices.py index 6cc1153590..654fed8c03 100644 --- a/synapse/rest/synapse/mas/devices.py +++ b/synapse/rest/synapse/mas/devices.py @@ -15,7 +15,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse._pydantic_compat import StrictStr from synapse.api.errors import NotFoundError @@ -56,7 +56,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -97,7 +97,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -138,7 +138,7 @@ class 
PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -180,7 +180,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) diff --git a/synapse/rest/synapse/mas/users.py b/synapse/rest/synapse/mas/users.py index 09aa13bebb..a802887270 100644 --- a/synapse/rest/synapse/mas/users.py +++ b/synapse/rest/synapse/mas/users.py @@ -15,7 +15,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Optional, Tuple, TypedDict +from typing import TYPE_CHECKING, Any, Optional, TypedDict from synapse._pydantic_compat import StrictBool, StrictStr, root_validator from synapse.api.errors import NotFoundError, SynapseError @@ -58,7 +58,7 @@ class Response(TypedDict): async def _async_render_GET( self, request: "SynapseRequest" - ) -> Tuple[int, Response]: + ) -> tuple[int, Response]: self.assert_request_is_from_mas(request) localpart = parse_string(request, "localpart", required=True) @@ -128,7 +128,7 @@ def validate_exclusive(cls, values: Any) -> Any: async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -239,7 +239,7 @@ def __init__(self, hs: "HomeServer"): async def _async_render_GET( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) localpart = parse_string(request, "localpart") if localpart is None: @@ -272,7 +272,7 @@ class PostBody(RequestBodyModel): async def 
_async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -312,7 +312,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -350,7 +350,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -394,7 +394,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -440,7 +440,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py index ae8c6a8fc0..00965cfb82 100644 --- a/synapse/rest/well_known.py +++ b/synapse/rest/well_known.py @@ -18,7 +18,7 @@ # # import logging -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from twisted.web.resource import Resource from twisted.web.server import Request @@ -97,7 +97,7 @@ def __init__(self, hs: "HomeServer"): super().__init__(clock=hs.get_clock()) self._well_known_builder = WellKnownBuilder(hs) - async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: 
+ async def _async_render_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: r = await self._well_known_builder.get_well_known() if not r: raise NotFoundError(".well-known not available") diff --git a/synapse/server.py b/synapse/server.py index b63a11273a..2c252ce86f 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -34,11 +34,7 @@ Any, Awaitable, Callable, - Dict, - List, Optional, - Tuple, - Type, TypeVar, cast, ) @@ -278,7 +274,7 @@ class ShutdownInfo: func: Callable[..., Any] trigger_id: _SystemEventID - kwargs: Dict[str, object] + kwargs: dict[str, object] class HomeServer(metaclass=abc.ABCMeta): @@ -313,7 +309,7 @@ class HomeServer(metaclass=abc.ABCMeta): @property @abc.abstractmethod - def DATASTORE_CLASS(self) -> Type["SQLBaseStore"]: + def DATASTORE_CLASS(self) -> type["SQLBaseStore"]: # This is overridden in derived application classes # (such as synapse.app.homeserver.SynapseHomeServer) and gives the class to be # instantiated during setup() for future return by get_datastores() @@ -341,8 +337,8 @@ def __init__( # the key we use to sign events and requests self.signing_key = config.key.signing_key[0] self.config = config - self._listening_services: List[Port] = [] - self._metrics_listeners: List[Tuple[WSGIServer, Thread]] = [] + self._listening_services: list[Port] = [] + self._metrics_listeners: list[tuple[WSGIServer, Thread]] = [] self.start_time: Optional[int] = None self._instance_id = random_string(5) @@ -352,15 +348,15 @@ def __init__( self.datastores: Optional[Databases] = None - self._module_web_resources: Dict[str, Resource] = {} + self._module_web_resources: dict[str, Resource] = {} self._module_web_resources_consumed = False # This attribute is set by the free function `refresh_certificate`. 
self.tls_server_context_factory: Optional[IOpenSSLContextFactory] = None self._is_shutdown = False - self._async_shutdown_handlers: List[ShutdownInfo] = [] - self._sync_shutdown_handlers: List[ShutdownInfo] = [] + self._async_shutdown_handlers: list[ShutdownInfo] = [] + self._sync_shutdown_handlers: list[ShutdownInfo] = [] self._background_processes: set[defer.Deferred[Optional[Any]]] = set() def run_as_background_process( @@ -1108,7 +1104,7 @@ def get_replication_data_handler(self) -> ReplicationDataHandler: return ReplicationDataHandler(self) @cache_in_self - def get_replication_streams(self) -> Dict[str, Stream]: + def get_replication_streams(self) -> dict[str, Stream]: return {stream.NAME: stream(self) for stream in STREAMS_MAP.values()} @cache_in_self diff --git a/synapse/server_notices/consent_server_notices.py b/synapse/server_notices/consent_server_notices.py index d937a3034e..99b362f5ff 100644 --- a/synapse/server_notices/consent_server_notices.py +++ b/synapse/server_notices/consent_server_notices.py @@ -18,7 +18,7 @@ # # import logging -from typing import TYPE_CHECKING, Any, Set +from typing import TYPE_CHECKING, Any from synapse.api.errors import SynapseError from synapse.api.urls import ConsentURIBuilder @@ -40,7 +40,7 @@ def __init__(self, hs: "HomeServer"): self._server_notices_manager = hs.get_server_notices_manager() self._store = hs.get_datastores().main - self._users_in_progress: Set[str] = set() + self._users_in_progress: set[str] = set() self._current_consent_version = hs.config.consent.user_consent_version self._server_notice_content = ( diff --git a/synapse/server_notices/resource_limits_server_notices.py b/synapse/server_notices/resource_limits_server_notices.py index e88e8c9b45..493b8cb62b 100644 --- a/synapse/server_notices/resource_limits_server_notices.py +++ b/synapse/server_notices/resource_limits_server_notices.py @@ -18,7 +18,7 @@ # # import logging -from typing import TYPE_CHECKING, List, Tuple +from typing import TYPE_CHECKING from 
synapse.api.constants import ( EventTypes, @@ -127,7 +127,7 @@ async def maybe_send_server_notice_to_user(self, user_id: str) -> None: logger.error("Error sending resource limits server notice: %s", e) async def _remove_limit_block_notification( - self, user_id: str, ref_events: List[str] + self, user_id: str, ref_events: list[str] ) -> None: """Utility method to remove limit block notifications from the server notices room. @@ -170,7 +170,7 @@ async def _apply_limit_block_notification( user_id, content, EventTypes.Pinned, "" ) - async def _is_room_currently_blocked(self, room_id: str) -> Tuple[bool, List[str]]: + async def _is_room_currently_blocked(self, room_id: str) -> tuple[bool, list[str]]: """ Determines if the room is currently blocked @@ -198,7 +198,7 @@ async def _is_room_currently_blocked(self, room_id: str) -> Tuple[bool, List[str # The user has yet to join the server notices room pass - referenced_events: List[str] = [] + referenced_events: list[str] = [] if pinned_state_event is not None: referenced_events = list(pinned_state_event.content.get("pinned", [])) diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index 394dc72fa6..991e1f847a 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -26,15 +26,9 @@ Any, Awaitable, Callable, - DefaultDict, - Dict, - FrozenSet, - List, Mapping, Optional, Sequence, - Set, - Tuple, ) import attr @@ -246,7 +240,7 @@ async def compute_state_after_events( async def get_current_user_ids_in_room( self, room_id: str, latest_event_ids: StrCollection - ) -> Set[str]: + ) -> set[str]: """ Get the users IDs who are currently in a room. 
@@ -271,7 +265,7 @@ async def get_current_user_ids_in_room( async def get_hosts_in_room_at_events( self, room_id: str, event_ids: StrCollection - ) -> FrozenSet[str]: + ) -> frozenset[str]: """Get the hosts that were in a room at the given event ids Args: @@ -647,7 +641,7 @@ def __init__(self, hs: "HomeServer"): ) # dict of set of event_ids -> _StateCacheEntry. - self._state_cache: ExpiringCache[FrozenSet[int], _StateCacheEntry] = ( + self._state_cache: ExpiringCache[frozenset[int], _StateCacheEntry] = ( ExpiringCache( cache_name="state_cache", server_name=self.server_name, @@ -665,7 +659,7 @@ def __init__(self, hs: "HomeServer"): # # tracks the amount of work done on state res per room - self._state_res_metrics: DefaultDict[str, _StateResMetrics] = defaultdict( + self._state_res_metrics: defaultdict[str, _StateResMetrics] = defaultdict( _StateResMetrics ) @@ -676,7 +670,7 @@ async def resolve_state_groups( room_id: str, room_version: str, state_groups_ids: Mapping[int, StateMap[str]], - event_map: Optional[Dict[str, EventBase]], + event_map: Optional[dict[str, EventBase]], state_res_store: "StateResolutionStore", ) -> _StateCacheEntry: """Resolves conflicts between a set of state groups @@ -776,7 +770,7 @@ async def resolve_events_with_store( room_id: str, room_version: str, state_sets: Sequence[StateMap[str]], - event_map: Optional[Dict[str, EventBase]], + event_map: Optional[dict[str, EventBase]], state_res_store: "StateResolutionStore", ) -> StateMap[str]: """ @@ -884,7 +878,7 @@ def _report_biggest( items = self._state_res_metrics.items() # log the N biggest rooms - biggest: List[Tuple[str, _StateResMetrics]] = heapq.nlargest( + biggest: list[tuple[str, _StateResMetrics]] = heapq.nlargest( n_to_log, items, key=lambda i: extract_key(i[1]) ) metrics_logger.debug( @@ -975,7 +969,7 @@ class StateResolutionStore: def get_events( self, event_ids: StrCollection, allow_rejected: bool = False - ) -> Awaitable[Dict[str, EventBase]]: + ) -> Awaitable[dict[str, 
EventBase]]: """Get events from the database Args: @@ -996,9 +990,9 @@ def get_events( def get_auth_chain_difference( self, room_id: str, - state_sets: List[Set[str]], - conflicted_state: Optional[Set[str]], - additional_backwards_reachable_conflicted_events: Optional[Set[str]], + state_sets: list[set[str]], + conflicted_state: Optional[set[str]], + additional_backwards_reachable_conflicted_events: Optional[set[str]], ) -> Awaitable[StateDifference]: """ "Given sets of state events figure out the auth chain difference (as per state res v2 algorithm). diff --git a/synapse/state/v1.py b/synapse/state/v1.py index a2e9eb0a42..a219347264 100644 --- a/synapse/state/v1.py +++ b/synapse/state/v1.py @@ -23,13 +23,9 @@ from typing import ( Awaitable, Callable, - Dict, Iterable, - List, Optional, Sequence, - Set, - Tuple, ) from synapse import event_auth @@ -49,8 +45,8 @@ async def resolve_events_with_store( room_id: str, room_version: RoomVersion, state_sets: Sequence[StateMap[str]], - event_map: Optional[Dict[str, EventBase]], - state_map_factory: Callable[[StrCollection], Awaitable[Dict[str, EventBase]]], + event_map: Optional[dict[str, EventBase]], + state_map_factory: Callable[[StrCollection], Awaitable[dict[str, EventBase]]], ) -> StateMap[str]: """ Args: @@ -145,7 +141,7 @@ async def resolve_events_with_store( def _seperate( state_sets: Iterable[StateMap[str]], -) -> Tuple[MutableStateMap[str], MutableStateMap[Set[str]]]: +) -> tuple[MutableStateMap[str], MutableStateMap[set[str]]]: """Takes the state_sets and figures out which keys are conflicted and which aren't. i.e., which have multiple different event_ids associated with them in different state sets. 
@@ -166,7 +162,7 @@ def _seperate( """ state_set_iterator = iter(state_sets) unconflicted_state = dict(next(state_set_iterator)) - conflicted_state: MutableStateMap[Set[str]] = {} + conflicted_state: MutableStateMap[set[str]] = {} for state_set in state_set_iterator: for key, value in state_set.items(): @@ -196,8 +192,8 @@ def _seperate( def _create_auth_events_from_maps( room_version: RoomVersion, unconflicted_state: StateMap[str], - conflicted_state: StateMap[Set[str]], - state_map: Dict[str, EventBase], + conflicted_state: StateMap[set[str]], + state_map: dict[str, EventBase], ) -> StateMap[str]: """ @@ -228,9 +224,9 @@ def _create_auth_events_from_maps( def _resolve_with_state( room_version: RoomVersion, unconflicted_state_ids: MutableStateMap[str], - conflicted_state_ids: StateMap[Set[str]], + conflicted_state_ids: StateMap[set[str]], auth_event_ids: StateMap[str], - state_map: Dict[str, EventBase], + state_map: dict[str, EventBase], ) -> MutableStateMap[str]: conflicted_state = {} for key, event_ids in conflicted_state_ids.items(): @@ -263,7 +259,7 @@ def _resolve_with_state( def _resolve_state_events( room_version: RoomVersion, - conflicted_state: StateMap[List[EventBase]], + conflicted_state: StateMap[list[EventBase]], auth_events: MutableStateMap[EventBase], ) -> StateMap[EventBase]: """This is where we actually decide which of the conflicted state to @@ -312,7 +308,7 @@ def _resolve_state_events( def _resolve_auth_events( - room_version: RoomVersion, events: List[EventBase], auth_events: StateMap[EventBase] + room_version: RoomVersion, events: list[EventBase], auth_events: StateMap[EventBase] ) -> EventBase: reverse = list(reversed(_ordered_events(events))) @@ -347,7 +343,7 @@ def _resolve_auth_events( def _resolve_normal_events( - events: List[EventBase], auth_events: StateMap[EventBase] + events: list[EventBase], auth_events: StateMap[EventBase] ) -> EventBase: for event in _ordered_events(events): try: @@ -365,8 +361,8 @@ def _resolve_normal_events( 
return event -def _ordered_events(events: Iterable[EventBase]) -> List[EventBase]: - def key_func(e: EventBase) -> Tuple[int, str]: +def _ordered_events(events: Iterable[EventBase]) -> list[EventBase]: + def key_func(e: EventBase) -> tuple[int, str]: # we have to use utf-8 rather than ascii here because it turns out we allow # people to send us events with non-ascii event IDs :/ return -int(e.depth), hashlib.sha1(e.event_id.encode("utf-8")).hexdigest() diff --git a/synapse/state/v2.py b/synapse/state/v2.py index 8bf6706434..683f0c1dcc 100644 --- a/synapse/state/v2.py +++ b/synapse/state/v2.py @@ -25,16 +25,12 @@ Any, Awaitable, Callable, - Dict, Generator, Iterable, - List, Literal, Optional, Protocol, Sequence, - Set, - Tuple, overload, ) @@ -61,13 +57,13 @@ class StateResolutionStore(Protocol): # TestStateResolutionStore in tests. def get_events( self, event_ids: StrCollection, allow_rejected: bool = False - ) -> Awaitable[Dict[str, EventBase]]: ... + ) -> Awaitable[dict[str, EventBase]]: ... def get_auth_chain_difference( self, room_id: str, - state_sets: List[Set[str]], - conflicted_state: Optional[Set[str]], + state_sets: list[set[str]], + conflicted_state: Optional[set[str]], additional_backwards_reachable_conflicted_events: Optional[set[str]], ) -> Awaitable[StateDifference]: ... 
@@ -88,7 +84,7 @@ async def resolve_events_with_store( room_id: str, room_version: RoomVersion, state_sets: Sequence[StateMap[str]], - event_map: Optional[Dict[str, EventBase]], + event_map: Optional[dict[str, EventBase]], state_res_store: StateResolutionStore, ) -> StateMap[str]: """Resolves the state using the v2 state resolution algorithm @@ -128,7 +124,7 @@ async def resolve_events_with_store( logger.debug("%d conflicted state entries", len(conflicted_state)) logger.debug("Calculating auth chain difference") - conflicted_set: Optional[Set[str]] = None + conflicted_set: Optional[set[str]] = None if room_version.state_res == StateResolutionVersions.V2_1: # calculate the conflicted subgraph conflicted_set = set(itertools.chain.from_iterable(conflicted_state.values())) @@ -242,7 +238,7 @@ async def resolve_events_with_store( async def _get_power_level_for_sender( room_id: str, event_id: str, - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, ) -> int: """Return the power level of the sender of the given event according to @@ -315,10 +311,10 @@ async def _get_power_level_for_sender( async def _get_auth_chain_difference( room_id: str, state_sets: Sequence[StateMap[str]], - unpersisted_events: Dict[str, EventBase], + unpersisted_events: dict[str, EventBase], state_res_store: StateResolutionStore, - conflicted_state: Optional[Set[str]], -) -> Set[str]: + conflicted_state: Optional[set[str]], +) -> set[str]: """Compare the auth chains of each state set and return the set of events that only appear in some, but not all of the auth chains. @@ -356,10 +352,10 @@ async def _get_auth_chain_difference( # event IDs if they appear in the `unpersisted_events`. This is the intersection of # the event's auth chain with the events in `unpersisted_events` *plus* their # auth event IDs. 
- events_to_auth_chain: Dict[str, Set[str]] = {} + events_to_auth_chain: dict[str, set[str]] = {} # remember the forward links when doing the graph traversal, we'll need it for v2.1 checks # This is a map from an event to the set of events that contain it as an auth event. - event_to_next_event: Dict[str, Set[str]] = {} + event_to_next_event: dict[str, set[str]] = {} for event in unpersisted_events.values(): chain = {event.event_id} events_to_auth_chain[event.event_id] = chain @@ -379,8 +375,8 @@ async def _get_auth_chain_difference( # # Note: If there are no `unpersisted_events` (which is the common case), we can do a # much simpler calculation. - additional_backwards_reachable_conflicted_events: Set[str] = set() - unpersisted_conflicted_events: Set[str] = set() + additional_backwards_reachable_conflicted_events: set[str] = set() + unpersisted_conflicted_events: set[str] = set() if unpersisted_events: # The list of state sets to pass to the store, where each state set is a set # of the event ids making up the state. This is similar to `state_sets`, @@ -388,17 +384,17 @@ async def _get_auth_chain_difference( # ((type, state_key)->event_id) mappings; and (b) we have stripped out # unpersisted events and replaced them with the persisted events in # their auth chain. - state_sets_ids: List[Set[str]] = [] + state_sets_ids: list[set[str]] = [] # For each state set, the unpersisted event IDs reachable (by their auth # chain) from the events in that set. - unpersisted_set_ids: List[Set[str]] = [] + unpersisted_set_ids: list[set[str]] = [] for state_set in state_sets: - set_ids: Set[str] = set() + set_ids: set[str] = set() state_sets_ids.append(set_ids) - unpersisted_ids: Set[str] = set() + unpersisted_ids: set[str] = set() unpersisted_set_ids.append(unpersisted_ids) for event_id in state_set.values(): @@ -479,7 +475,7 @@ async def _get_auth_chain_difference( # but NOT the backwards conflicted set. 
This mirrors what the DB layer does but in reverse: # we supplied events which are backwards reachable to the DB and now the DB is providing # forwards reachable events from the DB. - forwards_conflicted_set: Set[str] = set() + forwards_conflicted_set: set[str] = set() # we include unpersisted conflicted events here to process exclusive unpersisted subgraphs search_queue = subgraph_frontier.union(unpersisted_conflicted_events) while search_queue: @@ -490,7 +486,7 @@ async def _get_auth_chain_difference( # we've already calculated the backwards form as this is the auth chain for each # unpersisted conflicted event. - backwards_conflicted_set: Set[str] = set() + backwards_conflicted_set: set[str] = set() for uce in unpersisted_conflicted_events: backwards_conflicted_set.update(events_to_auth_chain.get(uce, [])) @@ -526,7 +522,7 @@ async def _get_auth_chain_difference( def _seperate( state_sets: Iterable[StateMap[str]], -) -> Tuple[StateMap[str], StateMap[Set[str]]]: +) -> tuple[StateMap[str], StateMap[set[str]]]: """Return the unconflicted and conflicted state. This is different than in the original algorithm, as this defines a key to be conflicted if one of the state sets doesn't have that key. @@ -550,7 +546,7 @@ def _seperate( conflicted_state[key] = event_ids # mypy doesn't understand that discarding None above means that conflicted - # state is StateMap[Set[str]], not StateMap[Set[Optional[Str]]]. + # state is StateMap[set[str]], not StateMap[set[Optional[Str]]]. 
return unconflicted_state, conflicted_state # type: ignore[return-value] @@ -579,12 +575,12 @@ def _is_power_event(event: EventBase) -> bool: async def _add_event_and_auth_chain_to_graph( - graph: Dict[str, Set[str]], + graph: dict[str, set[str]], room_id: str, event_id: str, - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, - full_conflicted_set: Set[str], + full_conflicted_set: set[str], ) -> None: """Helper function for _reverse_topological_power_sort that add the event and its auth chain (that is in the auth diff) to the graph @@ -616,10 +612,10 @@ async def _reverse_topological_power_sort( clock: Clock, room_id: str, event_ids: Iterable[str], - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, - full_conflicted_set: Set[str], -) -> List[str]: + full_conflicted_set: set[str], +) -> list[str]: """Returns a list of the event_ids sorted by reverse topological ordering, and then by power level and origin_server_ts @@ -635,7 +631,7 @@ async def _reverse_topological_power_sort( The sorted list """ - graph: Dict[str, Set[str]] = {} + graph: dict[str, set[str]] = {} for idx, event_id in enumerate(event_ids, start=1): await _add_event_and_auth_chain_to_graph( graph, room_id, event_id, event_map, state_res_store, full_conflicted_set @@ -658,7 +654,7 @@ async def _reverse_topological_power_sort( if idx % _AWAIT_AFTER_ITERATIONS == 0: await clock.sleep(0) - def _get_power_order(event_id: str) -> Tuple[int, int, str]: + def _get_power_order(event_id: str) -> tuple[int, int, str]: ev = event_map[event_id] pl = event_to_pl[event_id] @@ -675,9 +671,9 @@ async def _iterative_auth_checks( clock: Clock, room_id: str, room_version: RoomVersion, - event_ids: List[str], + event_ids: list[str], base_state: StateMap[str], - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, ) -> MutableStateMap[str]: """Sequentially 
apply auth checks to each event in given list, updating the @@ -758,11 +754,11 @@ async def _iterative_auth_checks( async def _mainline_sort( clock: Clock, room_id: str, - event_ids: List[str], + event_ids: list[str], resolved_power_event_id: Optional[str], - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, -) -> List[str]: +) -> list[str]: """Returns a sorted list of event_ids sorted by mainline ordering based on the given event resolved_power_event_id @@ -829,8 +825,8 @@ async def _mainline_sort( async def _get_mainline_depth_for_event( clock: Clock, event: EventBase, - mainline_map: Dict[str, int], - event_map: Dict[str, EventBase], + mainline_map: dict[str, int], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, ) -> int: """Get the mainline depths for the given event based on the mainline map @@ -880,7 +876,7 @@ async def _get_mainline_depth_for_event( async def _get_event( room_id: str, event_id: str, - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, allow_none: Literal[False] = False, ) -> EventBase: ... @@ -890,7 +886,7 @@ async def _get_event( async def _get_event( room_id: str, event_id: str, - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, allow_none: Literal[True], ) -> Optional[EventBase]: ... @@ -899,7 +895,7 @@ async def _get_event( async def _get_event( room_id: str, event_id: str, - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, allow_none: bool = False, ) -> Optional[EventBase]: @@ -936,7 +932,7 @@ async def _get_event( def lexicographical_topological_sort( - graph: Dict[str, Set[str]], key: Callable[[str], Any] + graph: dict[str, set[str]], key: Callable[[str], Any] ) -> Generator[str, None, None]: """Performs a lexicographic reverse topological sort on the graph. 
@@ -960,7 +956,7 @@ def lexicographical_topological_sort( # outgoing edges, c.f. # https://en.wikipedia.org/wiki/Topological_sorting#Kahn's_algorithm outdegree_map = graph - reverse_graph: Dict[str, Set[str]] = {} + reverse_graph: dict[str, set[str]] = {} # Lists of nodes with zero out degree. Is actually a tuple of # `(key(node), node)` so that sorting does the right thing diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 1fddcc0799..b6958ef06b 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -21,7 +21,7 @@ # import logging from abc import ABCMeta -from typing import TYPE_CHECKING, Any, Collection, Dict, Iterable, Optional, Union +from typing import TYPE_CHECKING, Any, Collection, Iterable, Optional, Union from synapse.storage.database import ( DatabasePool, @@ -60,7 +60,7 @@ def __init__( self.database_engine = database.engine self.db_pool = database - self.external_cached_functions: Dict[str, CachedFunction] = {} + self.external_cached_functions: dict[str, CachedFunction] = {} def process_replication_rows( # noqa: B027 (no-op by design) self, diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index e3e793d5f5..ce213050a9 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -28,13 +28,9 @@ AsyncContextManager, Awaitable, Callable, - Dict, Iterable, - List, Optional, Sequence, - Tuple, - Type, cast, ) @@ -96,7 +92,7 @@ class ForeignKeyConstraint(Constraint): """ referenced_table: str - columns: Sequence[Tuple[str, str]] + columns: Sequence[tuple[str, str]] deferred: bool def make_check_clause(self, table: str) -> str: @@ -173,7 +169,7 @@ async def __aenter__(self) -> int: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> None: @@ -260,8 +256,8 @@ def __init__(self, hs: "HomeServer", database: "DatabasePool"): 
self._default_batch_size_callback: Optional[DEFAULT_BATCH_SIZE_CALLBACK] = None self._min_batch_size_callback: Optional[MIN_BATCH_SIZE_CALLBACK] = None - self._background_update_performance: Dict[str, BackgroundUpdatePerformance] = {} - self._background_update_handlers: Dict[str, _BackgroundUpdateHandler] = {} + self._background_update_performance: dict[str, BackgroundUpdatePerformance] = {} + self._background_update_handlers: dict[str, _BackgroundUpdateHandler] = {} # TODO: all these bool flags make me feel icky---can we combine into a status # enum? self._all_done = False @@ -530,14 +526,14 @@ async def do_next_background_update(self, sleep: bool = True) -> bool: True if we have finished running all the background updates, otherwise False """ - def get_background_updates_txn(txn: Cursor) -> List[Tuple[str, Optional[str]]]: + def get_background_updates_txn(txn: Cursor) -> list[tuple[str, Optional[str]]]: txn.execute( """ SELECT update_name, depends_on FROM background_updates ORDER BY ordering, update_name """ ) - return cast(List[Tuple[str, Optional[str]]], txn.fetchall()) + return cast(list[tuple[str, Optional[str]]], txn.fetchall()) if not self._current_background_update: all_pending_updates = await self.db_pool.runInteraction( @@ -965,7 +961,7 @@ async def validate_constraint_and_delete_in_background( order_columns = ", ".join(unique_columns) where_clause = "" - args: List[Any] = [] + args: list[Any] = [] if parsed_progress.lower_bound: where_clause = f"""WHERE ({order_columns}) > ({", ".join("?" 
for _ in unique_columns)})""" args.extend(parsed_progress.lower_bound) diff --git a/synapse/storage/controllers/persist_events.py b/synapse/storage/controllers/persist_events.py index 646e2cf115..0daf4830d9 100644 --- a/synapse/storage/controllers/persist_events.py +++ b/synapse/storage/controllers/persist_events.py @@ -31,15 +31,10 @@ Callable, ClassVar, Collection, - Deque, - Dict, Generator, Generic, Iterable, - List, Optional, - Set, - Tuple, TypeVar, Union, ) @@ -143,7 +138,7 @@ class _PersistEventsTask: name: ClassVar[str] = "persist_event_batch" # used for opentracing - events_and_contexts: List[EventPersistencePair] + events_and_contexts: list[EventPersistencePair] backfilled: bool def try_merge(self, task: "_EventPersistQueueTask") -> bool: @@ -178,7 +173,7 @@ class _EventPersistQueueItem(Generic[_PersistResult]): task: _EventPersistQueueTask deferred: ObservableDeferred[_PersistResult] - parent_opentracing_span_contexts: List = attr.ib(factory=list) + parent_opentracing_span_contexts: list = attr.ib(factory=list) """A list of opentracing spans waiting for this batch""" opentracing_span_context: Any = None @@ -208,8 +203,8 @@ def __init__( """ self.server_name = server_name self.hs = hs - self._event_persist_queues: Dict[str, Deque[_EventPersistQueueItem]] = {} - self._currently_persisting_rooms: Set[str] = set() + self._event_persist_queues: dict[str, deque[_EventPersistQueueItem]] = {} + self._currently_persisting_rooms: set[str] = set() self._per_item_callback = per_item_callback async def add_to_queue( @@ -365,7 +360,7 @@ async def _process_event_persist_queue_task( self, room_id: str, task: _EventPersistQueueTask, - ) -> Dict[str, str]: + ) -> dict[str, str]: """Callback for the _event_persist_queue Returns: @@ -394,7 +389,7 @@ async def persist_events( self, events_and_contexts: Iterable[EventPersistencePair], backfilled: bool = False, - ) -> Tuple[List[EventBase], RoomStreamToken]: + ) -> tuple[list[EventBase], RoomStreamToken]: """ Write events to 
the database Args: @@ -414,8 +409,8 @@ async def persist_events( PartialStateConflictError: if attempting to persist a partial state event in a room that has been un-partial stated. """ - event_ids: List[str] = [] - partitioned: Dict[str, List[EventPersistencePair]] = {} + event_ids: list[str] = [] + partitioned: dict[str, list[EventPersistencePair]] = {} for event, ctx in events_and_contexts: partitioned.setdefault(event.room_id, []).append((event, ctx)) event_ids.append(event.event_id) @@ -431,8 +426,8 @@ async def persist_events( set_tag(SynapseTags.FUNC_ARG_PREFIX + "backfilled", str(backfilled)) async def enqueue( - item: Tuple[str, List[EventPersistencePair]], - ) -> Dict[str, str]: + item: tuple[str, list[EventPersistencePair]], + ) -> dict[str, str]: room_id, evs_ctxs = item return await self._event_persist_queue.add_to_queue( room_id, @@ -447,7 +442,7 @@ async def enqueue( # # Since we use `yieldable_gather_results` we need to merge the returned list # of dicts into one. - replaced_events: Dict[str, str] = {} + replaced_events: dict[str, str] = {} for d in ret_vals: replaced_events.update(d) @@ -469,7 +464,7 @@ async def enqueue( @trace async def persist_event( self, event: EventBase, context: EventContext, backfilled: bool = False - ) -> Tuple[EventBase, PersistedEventPosition, RoomStreamToken]: + ) -> tuple[EventBase, PersistedEventPosition, RoomStreamToken]: """ Returns: The event, stream ordering of `event`, and the stream ordering of the @@ -573,7 +568,7 @@ async def _calculate_current_state(self, room_id: str) -> StateMap[str]: async def _persist_event_batch( self, room_id: str, task: _PersistEventsTask - ) -> Dict[str, str]: + ) -> dict[str, str]: """Callback for the _event_persist_queue Calculates the change to current state and forward extremities, and @@ -592,7 +587,7 @@ async def _persist_event_batch( events_and_contexts = task.events_and_contexts backfilled = task.backfilled - replaced_events: Dict[str, str] = {} + replaced_events: dict[str, 
str] = {} if not events_and_contexts: return replaced_events @@ -678,8 +673,8 @@ async def _persist_event_batch( return replaced_events async def _calculate_new_forward_extremities_and_state_delta( - self, room_id: str, ev_ctx_rm: List[EventPersistencePair] - ) -> Tuple[Optional[Set[str]], Optional[DeltaState]]: + self, room_id: str, ev_ctx_rm: list[EventPersistencePair] + ) -> tuple[Optional[set[str]], Optional[DeltaState]]: """Calculates the new forward extremities and state delta for a room given events to persist. @@ -803,9 +798,9 @@ async def _calculate_new_forward_extremities_and_state_delta( async def _calculate_new_extremities( self, room_id: str, - event_contexts: List[EventPersistencePair], + event_contexts: list[EventPersistencePair], latest_event_ids: AbstractSet[str], - ) -> Set[str]: + ) -> set[str]: """Calculates the new forward extremities for a room given events to persist. @@ -863,10 +858,10 @@ async def _calculate_new_extremities( async def _get_new_state_after_events( self, room_id: str, - events_context: List[EventPersistencePair], + events_context: list[EventPersistencePair], old_latest_event_ids: AbstractSet[str], - new_latest_event_ids: Set[str], - ) -> Tuple[Optional[StateMap[str]], Optional[StateMap[str]], Set[str]]: + new_latest_event_ids: set[str], + ) -> tuple[Optional[StateMap[str]], Optional[StateMap[str]], set[str]]: """Calculate the current state dict after adding some new events to a room @@ -1037,11 +1032,11 @@ async def _get_new_state_after_events( async def _prune_extremities( self, room_id: str, - new_latest_event_ids: Set[str], + new_latest_event_ids: set[str], resolved_state_group: int, - event_id_to_state_group: Dict[str, int], - events_context: List[EventPersistencePair], - ) -> Set[str]: + event_id_to_state_group: dict[str, int], + events_context: list[EventPersistencePair], + ) -> set[str]: """See if we can prune any of the extremities after calculating the resolved state. 
""" @@ -1108,7 +1103,7 @@ async def _prune_extremities( # as a first cut. events_to_check: Collection[EventBase] = [event] while events_to_check: - new_events: Set[str] = set() + new_events: set[str] = set() for event_to_check in events_to_check: if self.is_mine_id(event_to_check.sender): if event_to_check.type != EventTypes.Dummy: @@ -1177,7 +1172,7 @@ async def _calculate_state_delta( async def _is_server_still_joined( self, room_id: str, - ev_ctx_rm: List[EventPersistencePair], + ev_ctx_rm: list[EventPersistencePair], delta: DeltaState, ) -> bool: """Check if the server will still be joined after the given events have diff --git a/synapse/storage/controllers/purge_events.py b/synapse/storage/controllers/purge_events.py index ded9cb0567..6606fdcc30 100644 --- a/synapse/storage/controllers/purge_events.py +++ b/synapse/storage/controllers/purge_events.py @@ -26,7 +26,6 @@ Collection, Mapping, Optional, - Set, ) from synapse.logging.context import nested_logging_context @@ -99,7 +98,7 @@ async def purge_history( async def _find_unreferenced_groups( self, state_groups: Collection[int], - ) -> Set[int]: + ) -> set[int]: """Used when purging history to figure out which state groups can be deleted. @@ -316,7 +315,7 @@ async def _find_unreferenced_groups_for_background_deletion( self, last_checked_state_group: int, batch_size: int, - ) -> tuple[Set[int], int, bool]: + ) -> tuple[set[int], int, bool]: """Used when deleting unreferenced state groups in the background to figure out which state groups can be deleted. 
To avoid increased DB usage due to de-deltaing state groups, this returns only diff --git a/synapse/storage/controllers/state.py b/synapse/storage/controllers/state.py index 76978402b9..690a0dde2e 100644 --- a/synapse/storage/controllers/state.py +++ b/synapse/storage/controllers/state.py @@ -25,13 +25,9 @@ AbstractSet, Callable, Collection, - Dict, - FrozenSet, Iterable, - List, Mapping, Optional, - Tuple, Union, ) @@ -95,7 +91,7 @@ def notify_room_un_partial_stated(self, room_id: str) -> None: @tag_args async def get_state_group_delta( self, state_group: int - ) -> Tuple[Optional[int], Optional[StateMap[str]]]: + ) -> tuple[Optional[int], Optional[StateMap[str]]]: """Given a state group try to return a previous group and a delta between the old and the new. @@ -114,7 +110,7 @@ async def get_state_group_delta( @tag_args async def get_state_groups_ids( self, _room_id: str, event_ids: Collection[str], await_full_state: bool = True - ) -> Dict[int, MutableStateMap[str]]: + ) -> dict[int, MutableStateMap[str]]: """Get the event IDs of all the state for the state groups for the given events Args: @@ -164,7 +160,7 @@ async def get_state_ids_for_group( @tag_args async def get_state_groups( self, room_id: str, event_ids: Collection[str] - ) -> Dict[int, List[EventBase]]: + ) -> dict[int, list[EventBase]]: """Get the state groups for the given list of event_ids Args: @@ -200,8 +196,8 @@ async def get_state_groups( @trace @tag_args async def _get_state_groups_from_groups( - self, groups: List[int], state_filter: StateFilter - ) -> Dict[int, StateMap[str]]: + self, groups: list[int], state_filter: StateFilter + ) -> dict[int, StateMap[str]]: """Returns the state groups for a given set of groups, filtering on types of state events. 
@@ -222,7 +218,7 @@ async def _get_state_groups_from_groups( @tag_args async def get_state_for_events( self, event_ids: Collection[str], state_filter: Optional[StateFilter] = None - ) -> Dict[str, StateMap[EventBase]]: + ) -> dict[str, StateMap[EventBase]]: """Given a list of event_ids and type tuples, return a list of state dicts for each event. @@ -277,7 +273,7 @@ async def get_state_ids_for_events( event_ids: Collection[str], state_filter: Optional[StateFilter] = None, await_full_state: bool = True, - ) -> Dict[str, StateMap[str]]: + ) -> dict[str, StateMap[str]]: """ Get the room states after each of a list of events. @@ -505,7 +501,7 @@ async def get_state_at( @tag_args async def get_state_for_groups( self, groups: Iterable[int], state_filter: Optional[StateFilter] = None - ) -> Dict[int, MutableStateMap[str]]: + ) -> dict[int, MutableStateMap[str]]: """Gets the state at each of a list of state groups, optionally filtering by type/state_key @@ -671,7 +667,7 @@ async def get_server_acl_for_room( @tag_args async def get_current_state_deltas( self, prev_stream_id: int, max_stream_id: int - ) -> Tuple[int, List[StateDelta]]: + ) -> tuple[int, list[StateDelta]]: """Fetch a list of room state changes since the given stream id Args: @@ -745,7 +741,7 @@ async def get_current_hosts_in_room(self, room_id: str) -> AbstractSet[str]: @trace @tag_args - async def get_current_hosts_in_room_ordered(self, room_id: str) -> Tuple[str, ...]: + async def get_current_hosts_in_room_ordered(self, room_id: str) -> tuple[str, ...]: """Get current hosts in room based on current state. Blocks until we have full state for the given room. 
This only happens for rooms @@ -807,7 +803,7 @@ async def get_users_in_room_with_profiles( async def get_joined_hosts( self, room_id: str, state_entry: "_StateCacheEntry" - ) -> FrozenSet[str]: + ) -> frozenset[str]: state_group: Union[object, int] = state_entry.state_group if not state_group: # If state_group is None it means it has yet to be assigned a @@ -828,7 +824,7 @@ async def _get_joined_hosts( room_id: str, state_group: Union[object, int], state_entry: "_StateCacheEntry", - ) -> FrozenSet[str]: + ) -> frozenset[str]: # We don't use `state_group`, it's there so that we can cache based on # it. However, its important that its never None, since two # current_state's with a state_group of None are likely to be different. diff --git a/synapse/storage/controllers/stats.py b/synapse/storage/controllers/stats.py index 9445a86240..18e27e0878 100644 --- a/synapse/storage/controllers/stats.py +++ b/synapse/storage/controllers/stats.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Collection, Counter, List, Tuple +from typing import TYPE_CHECKING, Collection, Counter from synapse.api.errors import SynapseError from synapse.storage.database import LoggingTransaction @@ -39,7 +39,7 @@ class StatsController: def __init__(self, hs: "HomeServer", stores: Databases): self.stores = stores - async def get_room_db_size_estimate(self) -> List[Tuple[str, int]]: + async def get_room_db_size_estimate(self) -> list[tuple[str, int]]: """Get an estimate of the largest rooms and how much database space they use, in bytes. 
diff --git a/synapse/storage/database.py b/synapse/storage/database.py index a4b2b26795..764ca9f229 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -31,16 +31,12 @@ Awaitable, Callable, Collection, - Dict, Iterable, Iterator, - List, Literal, Mapping, Optional, Sequence, - Tuple, - Type, TypeVar, cast, overload, @@ -218,9 +214,9 @@ def cursor( self, *, txn_name: Optional[str] = None, - after_callbacks: Optional[List["_CallbackListEntry"]] = None, - async_after_callbacks: Optional[List["_AsyncCallbackListEntry"]] = None, - exception_callbacks: Optional[List["_CallbackListEntry"]] = None, + after_callbacks: Optional[list["_CallbackListEntry"]] = None, + async_after_callbacks: Optional[list["_AsyncCallbackListEntry"]] = None, + exception_callbacks: Optional[list["_CallbackListEntry"]] = None, ) -> "LoggingTransaction": if not txn_name: txn_name = self.default_txn_name @@ -250,7 +246,7 @@ def __enter__(self) -> "LoggingDatabaseConnection": def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[types.TracebackType], ) -> Optional[bool]: @@ -262,9 +258,9 @@ def __getattr__(self, name: str) -> Any: # The type of entry which goes on our after_callbacks and exception_callbacks lists. 
-_CallbackListEntry = Tuple[Callable[..., object], Tuple[object, ...], Dict[str, object]] -_AsyncCallbackListEntry = Tuple[ - Callable[..., Awaitable], Tuple[object, ...], Dict[str, object] +_CallbackListEntry = tuple[Callable[..., object], tuple[object, ...], dict[str, object]] +_AsyncCallbackListEntry = tuple[ + Callable[..., Awaitable], tuple[object, ...], dict[str, object] ] P = ParamSpec("P") @@ -311,9 +307,9 @@ def __init__( name: str, server_name: str, database_engine: BaseDatabaseEngine, - after_callbacks: Optional[List[_CallbackListEntry]] = None, - async_after_callbacks: Optional[List[_AsyncCallbackListEntry]] = None, - exception_callbacks: Optional[List[_CallbackListEntry]] = None, + after_callbacks: Optional[list[_CallbackListEntry]] = None, + async_after_callbacks: Optional[list[_AsyncCallbackListEntry]] = None, + exception_callbacks: Optional[list[_CallbackListEntry]] = None, ): self.txn = txn self.name = name @@ -383,16 +379,16 @@ def call_on_exception( assert self.exception_callbacks is not None self.exception_callbacks.append((callback, args, kwargs)) - def fetchone(self) -> Optional[Tuple]: + def fetchone(self) -> Optional[tuple]: return self.txn.fetchone() - def fetchmany(self, size: Optional[int] = None) -> List[Tuple]: + def fetchmany(self, size: Optional[int] = None) -> list[tuple]: return self.txn.fetchmany(size=size) - def fetchall(self) -> List[Tuple]: + def fetchall(self) -> list[tuple]: return self.txn.fetchall() - def __iter__(self) -> Iterator[Tuple]: + def __iter__(self) -> Iterator[tuple]: return self.txn.__iter__() @property @@ -435,7 +431,7 @@ def execute_values( values: Iterable[Iterable[Any]], template: Optional[str] = None, fetch: bool = True, - ) -> List[Tuple]: + ) -> list[tuple]: """Corresponds to psycopg2.extras.execute_values. Only available when using postgres. 
@@ -540,7 +536,7 @@ def __enter__(self) -> "LoggingTransaction": def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[types.TracebackType], ) -> None: @@ -549,8 +545,8 @@ def __exit__( class PerformanceCounters: def __init__(self) -> None: - self.current_counters: Dict[str, Tuple[int, float]] = {} - self.previous_counters: Dict[str, Tuple[int, float]] = {} + self.current_counters: dict[str, tuple[int, float]] = {} + self.previous_counters: dict[str, tuple[int, float]] = {} def update(self, key: str, duration_secs: float) -> None: count, cum_time = self.current_counters.get(key, (0, 0.0)) @@ -616,7 +612,7 @@ def __init__( self._previous_loop_ts = 0.0 # Transaction counter: key is the twisted thread id, value is the current count - self._txn_counters: Dict[int, int] = defaultdict(int) + self._txn_counters: dict[int, int] = defaultdict(int) # TODO(paul): These can eventually be removed once the metrics code # is running in mainline, and we have some nice monitoring frontends @@ -666,7 +662,7 @@ async def _check_safe_to_upsert(self) -> None: If the background updates have not completed, wait 15 sec and check again. 
""" updates = cast( - List[Tuple[str]], + list[tuple[str]], await self.simple_select_list( "background_updates", keyvalues=None, @@ -717,9 +713,9 @@ def new_transaction( self, conn: LoggingDatabaseConnection, desc: str, - after_callbacks: List[_CallbackListEntry], - async_after_callbacks: List[_AsyncCallbackListEntry], - exception_callbacks: List[_CallbackListEntry], + after_callbacks: list[_CallbackListEntry], + async_after_callbacks: list[_AsyncCallbackListEntry], + exception_callbacks: list[_CallbackListEntry], func: Callable[Concatenate[LoggingTransaction, P], R], *args: P.args, **kwargs: P.kwargs, @@ -956,9 +952,9 @@ async def runInteraction( """ async def _runInteraction() -> R: - after_callbacks: List[_CallbackListEntry] = [] - async_after_callbacks: List[_AsyncCallbackListEntry] = [] - exception_callbacks: List[_CallbackListEntry] = [] + after_callbacks: list[_CallbackListEntry] = [] + async_after_callbacks: list[_AsyncCallbackListEntry] = [] + exception_callbacks: list[_CallbackListEntry] = [] if not current_context(): logger.warning("Starting db txn '%s' from sentinel context", desc) @@ -1105,7 +1101,7 @@ def inner_func(conn: _PoolConnection, *args: P.args, **kwargs: P.kwargs) -> R: self._db_pool.runWithConnection(inner_func, *args, **kwargs) ) - async def execute(self, desc: str, query: str, *args: Any) -> List[Tuple[Any, ...]]: + async def execute(self, desc: str, query: str, *args: Any) -> list[tuple[Any, ...]]: """Runs a single query for a result set. Args: @@ -1116,7 +1112,7 @@ async def execute(self, desc: str, query: str, *args: Any) -> List[Tuple[Any, .. 
The result of decoder(results) """ - def interaction(txn: LoggingTransaction) -> List[Tuple[Any, ...]]: + def interaction(txn: LoggingTransaction) -> list[tuple[Any, ...]]: txn.execute(query, args) return txn.fetchall() @@ -1128,7 +1124,7 @@ def interaction(txn: LoggingTransaction) -> List[Tuple[Any, ...]]: async def simple_insert( self, table: str, - values: Dict[str, Any], + values: dict[str, Any], desc: str = "simple_insert", ) -> None: """Executes an INSERT query on the named table. @@ -1142,7 +1138,7 @@ async def simple_insert( @staticmethod def simple_insert_txn( - txn: LoggingTransaction, table: str, values: Dict[str, Any] + txn: LoggingTransaction, table: str, values: dict[str, Any] ) -> None: keys, vals = zip(*values.items()) @@ -1158,9 +1154,9 @@ def simple_insert_txn( def simple_insert_returning_txn( txn: LoggingTransaction, table: str, - values: Dict[str, Any], + values: dict[str, Any], returning: StrCollection, - ) -> Tuple[Any, ...]: + ) -> tuple[Any, ...]: """Executes a `INSERT INTO... RETURNING...` statement (or equivalent for SQLite versions that don't support it). """ @@ -1261,9 +1257,9 @@ def simple_insert_many_txn( async def simple_upsert( self, table: str, - keyvalues: Dict[str, Any], - values: Dict[str, Any], - insertion_values: Optional[Dict[str, Any]] = None, + keyvalues: dict[str, Any], + values: dict[str, Any], + insertion_values: Optional[dict[str, Any]] = None, where_clause: Optional[str] = None, desc: str = "simple_upsert", ) -> bool: @@ -1463,7 +1459,7 @@ def _getwhere(key: str) -> str: return True # We didn't find any existing rows, so insert a new one - allvalues: Dict[str, Any] = {} + allvalues: dict[str, Any] = {} allvalues.update(keyvalues) allvalues.update(values) allvalues.update(insertion_values) @@ -1500,7 +1496,7 @@ def simple_upsert_txn_native_upsert( Returns True if a row was inserted or updated (i.e. 
if `values` is not empty then this always returns True) """ - allvalues: Dict[str, Any] = {} + allvalues: dict[str, Any] = {} allvalues.update(keyvalues) allvalues.update(insertion_values or {}) @@ -1694,7 +1690,7 @@ def simple_upsert_many_txn_native_upsert( value_values: A list of each row's value column values. Ignored if value_names is empty. """ - allnames: List[str] = [] + allnames: list[str] = [] allnames.extend(key_names) allnames.extend(value_names) @@ -1737,30 +1733,30 @@ def simple_upsert_many_txn_native_upsert( async def simple_select_one( self, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcols: Collection[str], allow_none: Literal[False] = False, desc: str = "simple_select_one", - ) -> Tuple[Any, ...]: ... + ) -> tuple[Any, ...]: ... @overload async def simple_select_one( self, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcols: Collection[str], allow_none: Literal[True] = True, desc: str = "simple_select_one", - ) -> Optional[Tuple[Any, ...]]: ... + ) -> Optional[tuple[Any, ...]]: ... async def simple_select_one( self, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcols: Collection[str], allow_none: bool = False, desc: str = "simple_select_one", - ) -> Optional[Tuple[Any, ...]]: + ) -> Optional[tuple[Any, ...]]: """Executes a SELECT query on the named table, which is expected to return a single row, returning multiple columns from it. 
@@ -1786,7 +1782,7 @@ async def simple_select_one( async def simple_select_one_onecol( self, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, allow_none: Literal[False] = False, desc: str = "simple_select_one_onecol", @@ -1796,7 +1792,7 @@ async def simple_select_one_onecol( async def simple_select_one_onecol( self, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, allow_none: Literal[True] = True, desc: str = "simple_select_one_onecol", @@ -1805,7 +1801,7 @@ async def simple_select_one_onecol( async def simple_select_one_onecol( self, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, allow_none: bool = False, desc: str = "simple_select_one_onecol", @@ -1837,7 +1833,7 @@ def simple_select_one_onecol_txn( cls, txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, allow_none: Literal[False] = False, ) -> Any: ... @@ -1848,7 +1844,7 @@ def simple_select_one_onecol_txn( cls, txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, allow_none: Literal[True] = True, ) -> Optional[Any]: ... 
@@ -1858,7 +1854,7 @@ def simple_select_one_onecol_txn( cls, txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, allow_none: bool = False, ) -> Optional[Any]: @@ -1878,9 +1874,9 @@ def simple_select_one_onecol_txn( def simple_select_onecol_txn( txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, - ) -> List[Any]: + ) -> list[Any]: sql = ("SELECT %(retcol)s FROM %(table)s") % {"retcol": retcol, "table": table} if keyvalues: @@ -1894,10 +1890,10 @@ def simple_select_onecol_txn( async def simple_select_onecol( self, table: str, - keyvalues: Optional[Dict[str, Any]], + keyvalues: Optional[dict[str, Any]], retcol: str, desc: str = "simple_select_onecol", - ) -> List[Any]: + ) -> list[Any]: """Executes a SELECT query on the named table, which returns a list comprising of the values of the named column from the selected rows. @@ -1922,10 +1918,10 @@ async def simple_select_onecol( async def simple_select_list( self, table: str, - keyvalues: Optional[Dict[str, Any]], + keyvalues: Optional[dict[str, Any]], retcols: Collection[str], desc: str = "simple_select_list", - ) -> List[Tuple[Any, ...]]: + ) -> list[tuple[Any, ...]]: """Executes a SELECT query on the named table, which may return zero or more rows, returning the result as a list of tuples. @@ -1954,9 +1950,9 @@ def simple_select_list_txn( cls, txn: LoggingTransaction, table: str, - keyvalues: Optional[Dict[str, Any]], + keyvalues: Optional[dict[str, Any]], retcols: Iterable[str], - ) -> List[Tuple[Any, ...]]: + ) -> list[tuple[Any, ...]]: """Executes a SELECT query on the named table, which may return zero or more rows, returning the result as a list of tuples. 
@@ -1990,10 +1986,10 @@ async def simple_select_many_batch( column: str, iterable: Iterable[Any], retcols: Collection[str], - keyvalues: Optional[Dict[str, Any]] = None, + keyvalues: Optional[dict[str, Any]] = None, desc: str = "simple_select_many_batch", batch_size: int = 100, - ) -> List[Tuple[Any, ...]]: + ) -> list[tuple[Any, ...]]: """Executes a SELECT query on the named table, which may return zero or more rows. @@ -2013,7 +2009,7 @@ async def simple_select_many_batch( """ keyvalues = keyvalues or {} - results: List[Tuple[Any, ...]] = [] + results: list[tuple[Any, ...]] = [] for chunk in batch_iter(iterable, batch_size): rows = await self.runInteraction( @@ -2038,9 +2034,9 @@ def simple_select_many_txn( table: str, column: str, iterable: Collection[Any], - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcols: Iterable[str], - ) -> List[Tuple[Any, ...]]: + ) -> list[tuple[Any, ...]]: """Executes a SELECT query on the named table, which may return zero or more rows. @@ -2080,8 +2076,8 @@ def simple_select_many_txn( async def simple_update( self, table: str, - keyvalues: Dict[str, Any], - updatevalues: Dict[str, Any], + keyvalues: dict[str, Any], + updatevalues: dict[str, Any], desc: str, ) -> int: """ @@ -2217,8 +2213,8 @@ def simple_update_many_txn( async def simple_update_one( self, table: str, - keyvalues: Dict[str, Any], - updatevalues: Dict[str, Any], + keyvalues: dict[str, Any], + updatevalues: dict[str, Any], desc: str = "simple_update_one", ) -> None: """Executes an UPDATE query on the named table, setting new values for @@ -2244,8 +2240,8 @@ def simple_update_one_txn( cls, txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], - updatevalues: Dict[str, Any], + keyvalues: dict[str, Any], + updatevalues: dict[str, Any], ) -> None: rowcount = cls.simple_update_txn(txn, table, keyvalues, updatevalues) @@ -2259,29 +2255,29 @@ def simple_update_one_txn( def simple_select_one_txn( txn: LoggingTransaction, table: str, - keyvalues: 
Dict[str, Any], + keyvalues: dict[str, Any], retcols: Collection[str], allow_none: Literal[False] = False, - ) -> Tuple[Any, ...]: ... + ) -> tuple[Any, ...]: ... @overload @staticmethod def simple_select_one_txn( txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcols: Collection[str], allow_none: Literal[True] = True, - ) -> Optional[Tuple[Any, ...]]: ... + ) -> Optional[tuple[Any, ...]]: ... @staticmethod def simple_select_one_txn( txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcols: Collection[str], allow_none: bool = False, - ) -> Optional[Tuple[Any, ...]]: + ) -> Optional[tuple[Any, ...]]: select_sql = "SELECT %s FROM %s" % (", ".join(retcols), table) if keyvalues: @@ -2302,7 +2298,7 @@ def simple_select_one_txn( return row async def simple_delete_one( - self, table: str, keyvalues: Dict[str, Any], desc: str = "simple_delete_one" + self, table: str, keyvalues: dict[str, Any], desc: str = "simple_delete_one" ) -> None: """Executes a DELETE query on the named table, expecting to delete a single row. @@ -2322,7 +2318,7 @@ async def simple_delete_one( @staticmethod def simple_delete_one_txn( - txn: LoggingTransaction, table: str, keyvalues: Dict[str, Any] + txn: LoggingTransaction, table: str, keyvalues: dict[str, Any] ) -> None: """Executes a DELETE query on the named table, expecting to delete a single row. @@ -2343,7 +2339,7 @@ def simple_delete_one_txn( raise StoreError(500, "More than one row matched (%s)" % (table,)) async def simple_delete( - self, table: str, keyvalues: Dict[str, Any], desc: str + self, table: str, keyvalues: dict[str, Any], desc: str ) -> int: """Executes a DELETE query on the named table. 
@@ -2363,7 +2359,7 @@ async def simple_delete( @staticmethod def simple_delete_txn( - txn: LoggingTransaction, table: str, keyvalues: Dict[str, Any] + txn: LoggingTransaction, table: str, keyvalues: dict[str, Any] ) -> int: """Executes a DELETE query on the named table. @@ -2389,7 +2385,7 @@ async def simple_delete_many( table: str, column: str, iterable: Collection[Any], - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], desc: str, ) -> int: """Executes a DELETE query on the named table. @@ -2423,7 +2419,7 @@ def simple_delete_many_txn( table: str, column: str, values: Collection[Any], - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], ) -> int: """Executes a DELETE query on the named table. @@ -2503,7 +2499,7 @@ def get_cache_dict( stream_column: str, max_value: int, limit: int = 100000, - ) -> Tuple[Dict[Any, int], int]: + ) -> tuple[dict[Any, int], int]: """Gets roughly the last N changes in the given stream table as a map from entity to the stream ID of the most recent change. @@ -2528,7 +2524,7 @@ def get_cache_dict( # The rows come out in reverse stream ID order, so we want to keep the # stream ID of the first row for each entity. 
- cache: Dict[Any, int] = {} + cache: dict[Any, int] = {} for row in txn: cache.setdefault(row[0], int(row[1])) @@ -2552,11 +2548,11 @@ def simple_select_list_paginate_txn( start: int, limit: int, retcols: Iterable[str], - filters: Optional[Dict[str, Any]] = None, - keyvalues: Optional[Dict[str, Any]] = None, - exclude_keyvalues: Optional[Dict[str, Any]] = None, + filters: Optional[dict[str, Any]] = None, + keyvalues: Optional[dict[str, Any]] = None, + exclude_keyvalues: Optional[dict[str, Any]] = None, order_direction: str = "ASC", - ) -> List[Tuple[Any, ...]]: + ) -> list[tuple[Any, ...]]: """ Executes a SELECT query on the named table with start and limit, of row numbers, which may return zero or number of rows from start to limit, @@ -2591,7 +2587,7 @@ def simple_select_list_paginate_txn( raise ValueError("order_direction must be one of 'ASC' or 'DESC'.") where_clause = "WHERE " if filters or keyvalues or exclude_keyvalues else "" - arg_list: List[Any] = [] + arg_list: list[Any] = [] if filters: where_clause += " AND ".join("%s LIKE ?" % (k,) for k in filters) arg_list += list(filters.values()) @@ -2621,7 +2617,7 @@ def make_in_list_sql_clause( iterable: Collection[Any], *, negative: bool = False, -) -> Tuple[str, list]: +) -> tuple[str, list]: """Returns an SQL clause that checks the given column is in the iterable. On SQLite this expands to `column IN (?, ?, ...)`, whereas on Postgres @@ -2661,24 +2657,24 @@ def make_in_list_sql_clause( @overload def make_tuple_in_list_sql_clause( database_engine: BaseDatabaseEngine, - columns: Tuple[str, str], - iterable: Collection[Tuple[Any, Any]], -) -> Tuple[str, list]: ... + columns: tuple[str, str], + iterable: Collection[tuple[Any, Any]], +) -> tuple[str, list]: ... @overload def make_tuple_in_list_sql_clause( database_engine: BaseDatabaseEngine, - columns: Tuple[str, str, str], - iterable: Collection[Tuple[Any, Any, Any]], -) -> Tuple[str, list]: ... 
+ columns: tuple[str, str, str], + iterable: Collection[tuple[Any, Any, Any]], +) -> tuple[str, list]: ... def make_tuple_in_list_sql_clause( database_engine: BaseDatabaseEngine, - columns: Tuple[str, ...], - iterable: Collection[Tuple[Any, ...]], -) -> Tuple[str, list]: + columns: tuple[str, ...], + iterable: Collection[tuple[Any, ...]], +) -> tuple[str, list]: """Returns an SQL clause that checks the given tuple of columns is in the iterable. Args: @@ -2726,7 +2722,7 @@ def make_tuple_in_list_sql_clause( KV = TypeVar("KV") -def make_tuple_comparison_clause(keys: List[Tuple[str, KV]]) -> Tuple[str, List[KV]]: +def make_tuple_comparison_clause(keys: list[tuple[str, KV]]) -> tuple[str, list[KV]]: """Returns a tuple comparison SQL clause Builds a SQL clause that looks like "(a, b) > (?, ?)" diff --git a/synapse/storage/databases/__init__.py b/synapse/storage/databases/__init__.py index a4aba96686..f145d21096 100644 --- a/synapse/storage/databases/__init__.py +++ b/synapse/storage/databases/__init__.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Generic, List, Optional, Type, TypeVar +from typing import TYPE_CHECKING, Generic, Optional, TypeVar from synapse.metrics import SERVER_NAME_LABEL, LaterGauge from synapse.storage._base import SQLBaseStore @@ -61,13 +61,13 @@ class Databases(Generic[DataStoreT]): state_deletion """ - databases: List[DatabasePool] + databases: list[DatabasePool] main: "DataStore" # FIXME: https://github.com/matrix-org/synapse/issues/11165: actually an instance of `main_store_class` state: StateGroupDataStore persist_events: Optional[PersistEventsStore] state_deletion: StateDeletionDataStore - def __init__(self, main_store_class: Type[DataStoreT], hs: "HomeServer"): + def __init__(self, main_store_class: type[DataStoreT], hs: "HomeServer"): # Note we pass in the main store class here as workers use a different main # store. 
diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py index 83b480adaf..9f23c1a4e0 100644 --- a/synapse/storage/databases/main/__init__.py +++ b/synapse/storage/databases/main/__init__.py @@ -20,7 +20,7 @@ # # import logging -from typing import TYPE_CHECKING, List, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Optional, Union, cast import attr @@ -188,9 +188,9 @@ async def get_users_paginate( order_by: str = UserSortOrder.NAME.value, direction: Direction = Direction.FORWARDS, approved: bool = True, - not_user_types: Optional[List[str]] = None, + not_user_types: Optional[list[str]] = None, locked: bool = False, - ) -> Tuple[List[UserPaginateResponse], int]: + ) -> tuple[list[UserPaginateResponse], int]: """Function to retrieve a paginated list of users from users list. This will return a json list of users and the total number of users matching the filter criteria. @@ -216,7 +216,7 @@ async def get_users_paginate( def get_users_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[List[UserPaginateResponse], int]: + ) -> tuple[list[UserPaginateResponse], int]: filters = [] args: list = [] @@ -311,7 +311,7 @@ def get_users_paginate_txn( """ sql = "SELECT COUNT(*) as total_users " + sql_base txn.execute(sql, args) - count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] sql = f""" SELECT name, user_type, is_guest, admin, deactivated, shadow_banned, @@ -351,8 +351,8 @@ def get_users_paginate_txn( async def search_users( self, term: str - ) -> List[ - Tuple[str, Optional[str], Union[int, bool], Union[int, bool], Optional[str]] + ) -> list[ + tuple[str, Optional[str], Union[int, bool], Union[int, bool], Optional[str]] ]: """Function to search users list for one or more users with the matched term. 
@@ -366,8 +366,8 @@ async def search_users( def search_users( txn: LoggingTransaction, - ) -> List[ - Tuple[str, Optional[str], Union[int, bool], Union[int, bool], Optional[str]] + ) -> list[ + tuple[str, Optional[str], Union[int, bool], Union[int, bool], Optional[str]] ]: search_term = "%%" + term + "%%" @@ -379,8 +379,8 @@ def search_users( txn.execute(sql, (search_term,)) return cast( - List[ - Tuple[ + list[ + tuple[ str, Optional[str], Union[int, bool], diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py index 16876e5461..f1fb5fe188 100644 --- a/synapse/storage/databases/main/account_data.py +++ b/synapse/storage/databases/main/account_data.py @@ -23,13 +23,9 @@ from typing import ( TYPE_CHECKING, Any, - Dict, - FrozenSet, Iterable, - List, Mapping, Optional, - Tuple, cast, ) @@ -140,7 +136,7 @@ async def get_global_account_data_for_user( def get_global_account_data_for_user( txn: LoggingTransaction, - ) -> Dict[str, JsonDict]: + ) -> dict[str, JsonDict]: # The 'content != '{}' condition below prevents us from using # `simple_select_list_txn` here, as it doesn't support conditions # other than 'equals'. @@ -185,7 +181,7 @@ async def get_room_account_data_for_user( def get_room_account_data_for_user_txn( txn: LoggingTransaction, - ) -> Dict[str, Dict[str, JsonMapping]]: + ) -> dict[str, dict[str, JsonMapping]]: # The 'content != '{}' condition below prevents us from using # `simple_select_list_txn` here, as it doesn't support conditions # other than 'equals'. 
@@ -202,7 +198,7 @@ def get_room_account_data_for_user_txn( txn.execute(sql, (user_id,)) - by_room: Dict[str, Dict[str, JsonMapping]] = {} + by_room: dict[str, dict[str, JsonMapping]] = {} for room_id, account_data_type, content in txn: room_data = by_room.setdefault(room_id, {}) @@ -281,9 +277,9 @@ async def get_account_data_for_room( def get_account_data_for_room_txn( txn: LoggingTransaction, - ) -> Dict[str, JsonMapping]: + ) -> dict[str, JsonMapping]: rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], self.db_pool.simple_select_list_txn( txn, table="room_account_data", @@ -338,7 +334,7 @@ def get_account_data_for_room_and_type_txn( async def get_updated_global_account_data( self, last_id: int, current_id: int, limit: int - ) -> List[Tuple[int, str, str]]: + ) -> list[tuple[int, str, str]]: """Get the global account_data that has changed, for the account_data stream Args: @@ -355,14 +351,14 @@ async def get_updated_global_account_data( def get_updated_global_account_data_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str]]: + ) -> list[tuple[int, str, str]]: sql = ( "SELECT stream_id, user_id, account_data_type" " FROM account_data WHERE ? < stream_id AND stream_id <= ?" " ORDER BY stream_id ASC LIMIT ?" 
) txn.execute(sql, (last_id, current_id, limit)) - return cast(List[Tuple[int, str, str]], txn.fetchall()) + return cast(list[tuple[int, str, str]], txn.fetchall()) return await self.db_pool.runInteraction( "get_updated_global_account_data", get_updated_global_account_data_txn @@ -370,7 +366,7 @@ def get_updated_global_account_data_txn( async def get_updated_room_account_data( self, last_id: int, current_id: int, limit: int - ) -> List[Tuple[int, str, str, str]]: + ) -> list[tuple[int, str, str, str]]: """Get the global account_data that has changed, for the account_data stream Args: @@ -387,14 +383,14 @@ async def get_updated_room_account_data( def get_updated_room_account_data_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str, str]]: + ) -> list[tuple[int, str, str, str]]: sql = ( "SELECT stream_id, user_id, room_id, account_data_type" " FROM room_account_data WHERE ? < stream_id AND stream_id <= ?" " ORDER BY stream_id ASC LIMIT ?" ) txn.execute(sql, (last_id, current_id, limit)) - return cast(List[Tuple[int, str, str, str]], txn.fetchall()) + return cast(list[tuple[int, str, str, str]], txn.fetchall()) return await self.db_pool.runInteraction( "get_updated_room_account_data", get_updated_room_account_data_txn @@ -402,7 +398,7 @@ def get_updated_room_account_data_txn( async def get_updated_global_account_data_for_user( self, user_id: str, stream_id: int - ) -> Dict[str, JsonMapping]: + ) -> dict[str, JsonMapping]: """Get all the global account_data that's changed for a user. Args: @@ -415,7 +411,7 @@ async def get_updated_global_account_data_for_user( def get_updated_global_account_data_for_user( txn: LoggingTransaction, - ) -> Dict[str, JsonMapping]: + ) -> dict[str, JsonMapping]: sql = """ SELECT account_data_type, content FROM account_data WHERE user_id = ? AND stream_id > ? 
@@ -437,7 +433,7 @@ def get_updated_global_account_data_for_user( async def get_updated_room_account_data_for_user( self, user_id: str, stream_id: int - ) -> Dict[str, Dict[str, JsonMapping]]: + ) -> dict[str, dict[str, JsonMapping]]: """Get all the room account_data that's changed for a user. Args: @@ -450,14 +446,14 @@ async def get_updated_room_account_data_for_user( def get_updated_room_account_data_for_user_txn( txn: LoggingTransaction, - ) -> Dict[str, Dict[str, JsonMapping]]: + ) -> dict[str, dict[str, JsonMapping]]: sql = """ SELECT room_id, account_data_type, content FROM room_account_data WHERE user_id = ? AND stream_id > ? """ txn.execute(sql, (user_id, stream_id)) - account_data_by_room: Dict[str, Dict[str, JsonMapping]] = {} + account_data_by_room: dict[str, dict[str, JsonMapping]] = {} for row in txn: room_account_data = account_data_by_room.setdefault(row[0], {}) room_account_data[row[1]] = db_to_json(row[2]) @@ -484,7 +480,7 @@ async def get_updated_room_account_data_for_user_for_room( room_id: str, from_stream_id: int, to_stream_id: int, - ) -> Dict[str, JsonMapping]: + ) -> dict[str, JsonMapping]: """Get the room account_data that's changed for a user in a room. (> `from_stream_id` and <= `to_stream_id`) @@ -501,14 +497,14 @@ async def get_updated_room_account_data_for_user_for_room( def get_updated_room_account_data_for_user_for_room_txn( txn: LoggingTransaction, - ) -> Dict[str, JsonMapping]: + ) -> dict[str, JsonMapping]: sql = """ SELECT account_data_type, content FROM room_account_data WHERE user_id = ? AND room_id = ? AND stream_id > ? AND stream_id <= ? 
""" txn.execute(sql, (user_id, room_id, from_stream_id, to_stream_id)) - room_account_data: Dict[str, JsonMapping] = {} + room_account_data: dict[str, JsonMapping] = {} for row in txn: room_account_data[row[0]] = db_to_json(row[1]) @@ -526,7 +522,7 @@ def get_updated_room_account_data_for_user_for_room_txn( ) @cached(max_entries=5000, iterable=True) - async def ignored_by(self, user_id: str) -> FrozenSet[str]: + async def ignored_by(self, user_id: str) -> frozenset[str]: """ Get users which ignore the given user. @@ -546,7 +542,7 @@ async def ignored_by(self, user_id: str) -> FrozenSet[str]: ) @cached(max_entries=5000, iterable=True) - async def ignored_users(self, user_id: str) -> FrozenSet[str]: + async def ignored_users(self, user_id: str) -> frozenset[str]: """ Get users which the given user ignores. diff --git a/synapse/storage/databases/main/appservice.py b/synapse/storage/databases/main/appservice.py index 97dbbb1493..7558672905 100644 --- a/synapse/storage/databases/main/appservice.py +++ b/synapse/storage/databases/main/appservice.py @@ -20,7 +20,7 @@ # import logging import re -from typing import TYPE_CHECKING, List, Optional, Pattern, Sequence, Tuple, cast +from typing import TYPE_CHECKING, Optional, Pattern, Sequence, cast from synapse.appservice import ( ApplicationService, @@ -52,7 +52,7 @@ def _make_exclusive_regex( - services_cache: List[ApplicationService], + services_cache: list[ApplicationService], ) -> Optional[Pattern]: # We precompile a regex constructed from all the regexes that the AS's # have registered for exclusive users. 
@@ -93,7 +93,7 @@ def get_max_as_txn_id(txn: Cursor) -> int: txn.execute( "SELECT COALESCE(max(txn_id), 0) FROM application_services_txns" ) - return cast(Tuple[int], txn.fetchone())[0] + return cast(tuple[int], txn.fetchone())[0] self._as_txn_seq_gen = build_sequence_generator( db_conn, @@ -106,7 +106,7 @@ def get_max_as_txn_id(txn: Cursor) -> int: super().__init__(database, db_conn, hs) - def get_app_services(self) -> List[ApplicationService]: + def get_app_services(self) -> list[ApplicationService]: return self.services_cache def get_if_app_services_interested_in_user(self, user_id: str) -> bool: @@ -199,7 +199,7 @@ class ApplicationServiceTransactionWorkerStore( ): async def get_appservices_by_state( self, state: ApplicationServiceState - ) -> List[ApplicationService]: + ) -> list[ApplicationService]: """Get a list of application services based on their state. Args: @@ -208,7 +208,7 @@ async def get_appservices_by_state( A list of ApplicationServices, which may be empty. """ results = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_list( table="application_services_state", keyvalues={"state": state.value}, @@ -273,8 +273,8 @@ async def create_appservice_txn( self, service: ApplicationService, events: Sequence[EventBase], - ephemeral: List[JsonMapping], - to_device_messages: List[JsonMapping], + ephemeral: list[JsonMapping], + to_device_messages: list[JsonMapping], one_time_keys_count: TransactionOneTimeKeysCount, unused_fallback_keys: TransactionUnusedFallbackKeys, device_list_summary: DeviceListUpdates, @@ -358,7 +358,7 @@ async def get_oldest_unsent_txn( def _get_oldest_unsent_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[int, str]]: + ) -> Optional[tuple[int, str]]: # Monotonically increasing txn ids, so just select the smallest # one in the txns table (we delete them when they are sent) txn.execute( @@ -366,7 +366,7 @@ def _get_oldest_unsent_txn( " ORDER BY txn_id ASC LIMIT 1", (service.id,), ) - return 
cast(Optional[Tuple[int, str]], txn.fetchone()) + return cast(Optional[tuple[int, str]], txn.fetchone()) entry = await self.db_pool.runInteraction( "get_oldest_unsent_appservice_txn", _get_oldest_unsent_txn diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py index 674c6b921e..5a96510b13 100644 --- a/synapse/storage/databases/main/cache.py +++ b/synapse/storage/databases/main/cache.py @@ -23,7 +23,7 @@ import itertools import json import logging -from typing import TYPE_CHECKING, Any, Collection, Iterable, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Collection, Iterable, Optional from synapse.api.constants import EventTypes from synapse.config._base import Config @@ -145,7 +145,7 @@ def __init__( async def get_all_updated_caches( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: """Get updates for caches replication stream. Args: @@ -172,7 +172,7 @@ async def get_all_updated_caches( def get_all_updated_caches_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: # We purposefully don't bound by the current token, as we want to # send across cache invalidations as quickly as possible. Cache # invalidations are idempotent, so duplicates are fine. @@ -597,7 +597,7 @@ def _invalidate_caches_for_room(self, room_id: str) -> None: self._invalidate_state_caches_all(room_id) async def invalidate_cache_and_stream( - self, cache_name: str, keys: Tuple[Any, ...] + self, cache_name: str, keys: tuple[Any, ...] ) -> None: """Invalidates the cache and adds it to the cache stream so other workers will know to invalidate their caches. 
@@ -620,7 +620,7 @@ def _invalidate_cache_and_stream( self, txn: LoggingTransaction, cache_func: CachedFunction, - keys: Tuple[Any, ...], + keys: tuple[Any, ...], ) -> None: """Invalidates the cache and adds it to the cache stream so other workers will know to invalidate their caches. @@ -636,7 +636,7 @@ def _invalidate_cache_and_stream_bulk( self, txn: LoggingTransaction, cache_func: CachedFunction, - key_tuples: Collection[Tuple[Any, ...]], + key_tuples: Collection[tuple[Any, ...]], ) -> None: """A bulk version of _invalidate_cache_and_stream. @@ -759,7 +759,7 @@ def _send_invalidation_to_replication_bulk( self, txn: LoggingTransaction, cache_name: str, - key_tuples: Collection[Tuple[Any, ...]], + key_tuples: Collection[tuple[Any, ...]], ) -> None: """Announce the invalidation of multiple (but not all) cache entries. diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py index dc6ab99a6c..1033d85a40 100644 --- a/synapse/storage/databases/main/client_ips.py +++ b/synapse/storage/databases/main/client_ips.py @@ -22,11 +22,8 @@ import logging from typing import ( TYPE_CHECKING, - Dict, - List, Mapping, Optional, - Tuple, TypedDict, Union, cast, @@ -190,7 +187,7 @@ def get_last_seen(txn: LoggingTransaction) -> Optional[int]: """, (begin_last_seen, batch_size), ) - row = cast(Optional[Tuple[int]], txn.fetchone()) + row = cast(Optional[tuple[int]], txn.fetchone()) if row: return row[0] else: @@ -222,7 +219,7 @@ def remove(txn: LoggingTransaction) -> None: # Define the search space, which requires handling the last batch in # a different way - args: Tuple[int, ...] + args: tuple[int, ...] if last: clause = "? 
<= last_seen" args = (begin_last_seen,) @@ -251,7 +248,7 @@ def remove(txn: LoggingTransaction) -> None: args, ) res = cast( - List[Tuple[str, str, str, Optional[str], str, int, int]], txn.fetchall() + list[tuple[str, str, str, Optional[str], str, int, int]], txn.fetchall() ) # We've got some duplicates @@ -361,7 +358,7 @@ def _devices_last_seen_update_txn(txn: LoggingTransaction) -> int: # we'll just end up updating the same device row multiple # times, which is fine. - where_args: List[Union[str, int]] + where_args: list[Union[str, int]] where_clause, where_args = make_tuple_comparison_clause( [("user_id", last_user_id), ("device_id", last_device_id)], ) @@ -383,7 +380,7 @@ def _devices_last_seen_update_txn(txn: LoggingTransaction) -> int: """ % {"where_clause": where_clause} txn.execute(sql, where_args + [batch_size]) - rows = cast(List[Tuple[int, str, str, str, str]], txn.fetchall()) + rows = cast(list[tuple[int, str, str, str, str]], txn.fetchall()) if not rows: return 0 @@ -434,7 +431,7 @@ def __init__( self.user_ips_max_age = hs.config.server.user_ips_max_age # (user_id, access_token, ip,) -> last_seen - self.client_ip_last_seen = LruCache[Tuple[str, str, str], int]( + self.client_ip_last_seen = LruCache[tuple[str, str, str], int]( cache_name="client_ip_last_seen", server_name=self.server_name, max_size=50000, @@ -449,8 +446,8 @@ def __init__( # tables. 
# (user_id, access_token, ip,) -> (user_agent, device_id, last_seen) - self._batch_row_update: Dict[ - Tuple[str, str, str], Tuple[str, Optional[str], int] + self._batch_row_update: dict[ + tuple[str, str, str], tuple[str, Optional[str], int] ] = {} self.clock.looping_call(self._update_client_ips_batch, 5 * 1000) @@ -504,7 +501,7 @@ def _prune_old_user_ips_txn(txn: LoggingTransaction) -> None: async def _get_last_client_ip_by_device_from_database( self, user_id: str, device_id: Optional[str] - ) -> Dict[Tuple[str, str], DeviceLastConnectionInfo]: + ) -> dict[tuple[str, str], DeviceLastConnectionInfo]: """For each device_id listed, give the user_ip it was last seen on. The result might be slightly out of date as client IPs are inserted in batches. @@ -522,7 +519,7 @@ async def _get_last_client_ip_by_device_from_database( keyvalues["device_id"] = device_id res = cast( - List[Tuple[str, Optional[str], Optional[str], str, Optional[int]]], + list[tuple[str, Optional[str], Optional[str], str, Optional[int]]], await self.db_pool.simple_select_list( table="devices", keyvalues=keyvalues, @@ -543,7 +540,7 @@ async def _get_last_client_ip_by_device_from_database( async def _get_user_ip_and_agents_from_database( self, user: UserID, since_ts: int = 0 - ) -> List[LastConnectionInfo]: + ) -> list[LastConnectionInfo]: """Fetch the IPs and user agents for a user since the given timestamp. The result might be slightly out of date as client IPs are inserted in batches. 
@@ -567,7 +564,7 @@ async def _get_user_ip_and_agents_from_database( """ user_id = user.to_string() - def get_recent(txn: LoggingTransaction) -> List[Tuple[str, str, str, int]]: + def get_recent(txn: LoggingTransaction) -> list[tuple[str, str, str, int]]: txn.execute( """ SELECT access_token, ip, user_agent, last_seen FROM user_ips @@ -577,7 +574,7 @@ def get_recent(txn: LoggingTransaction) -> List[Tuple[str, str, str, int]]: """, (since_ts, user_id), ) - return cast(List[Tuple[str, str, str, int]], txn.fetchall()) + return cast(list[tuple[str, str, str, int]], txn.fetchall()) rows = await self.db_pool.runInteraction( desc="get_user_ip_and_agents", func=get_recent @@ -673,7 +670,7 @@ async def _update_client_ips_batch(self) -> None: def _update_client_ips_batch_txn( self, txn: LoggingTransaction, - to_update: Mapping[Tuple[str, str, str], Tuple[str, Optional[str], int]], + to_update: Mapping[tuple[str, str, str], tuple[str, Optional[str], int]], ) -> None: assert self._update_on_this_worker, ( "This worker is not designated to update client IPs" @@ -719,7 +716,7 @@ def _update_client_ips_batch_txn( async def get_last_client_ip_by_device( self, user_id: str, device_id: Optional[str] - ) -> Dict[Tuple[str, str], DeviceLastConnectionInfo]: + ) -> dict[tuple[str, str], DeviceLastConnectionInfo]: """For each device_id listed, give the user_ip it was last seen on Args: @@ -759,7 +756,7 @@ async def get_last_client_ip_by_device( async def get_user_ip_and_agents( self, user: UserID, since_ts: int = 0 - ) -> List[LastConnectionInfo]: + ) -> list[LastConnectionInfo]: """Fetch the IPs and user agents for a user since the given timestamp. 
Args: @@ -786,7 +783,7 @@ async def get_user_ip_and_agents( # the result return rows_from_db - results: Dict[Tuple[str, str], LastConnectionInfo] = { + results: dict[tuple[str, str], LastConnectionInfo] = { (connection["access_token"], connection["ip"]): connection for connection in rows_from_db } diff --git a/synapse/storage/databases/main/delayed_events.py b/synapse/storage/databases/main/delayed_events.py index 78f55b983f..33101327f5 100644 --- a/synapse/storage/databases/main/delayed_events.py +++ b/synapse/storage/databases/main/delayed_events.py @@ -13,7 +13,7 @@ # import logging -from typing import List, NewType, Optional, Tuple +from typing import NewType, Optional import attr @@ -93,7 +93,7 @@ async def add_delayed_event( origin_server_ts: Optional[int], content: JsonDict, delay: int, - ) -> Tuple[DelayID, Timestamp]: + ) -> tuple[DelayID, Timestamp]: """ Inserts a new delayed event in the DB. @@ -201,7 +201,7 @@ def _get_count_of_delayed_events(txn: LoggingTransaction) -> int: async def get_all_delayed_events_for_user( self, user_localpart: str, - ) -> List[JsonDict]: + ) -> list[JsonDict]: """Returns all pending delayed events owned by the given user.""" # TODO: Support Pagination stream API ("next_batch" field) rows = await self.db_pool.execute( @@ -236,8 +236,8 @@ async def get_all_delayed_events_for_user( async def process_timeout_delayed_events( self, current_ts: Timestamp - ) -> Tuple[ - List[DelayedEventDetails], + ) -> tuple[ + list[DelayedEventDetails], Optional[Timestamp], ]: """ @@ -250,8 +250,8 @@ async def process_timeout_delayed_events( def process_timeout_delayed_events_txn( txn: LoggingTransaction, - ) -> Tuple[ - List[DelayedEventDetails], + ) -> tuple[ + list[DelayedEventDetails], Optional[Timestamp], ]: sql_cols = ", ".join( @@ -322,7 +322,7 @@ async def process_target_delayed_event( *, delay_id: str, user_localpart: str, - ) -> Tuple[ + ) -> tuple[ EventDetails, Optional[Timestamp], ]: @@ -343,7 +343,7 @@ async def 
process_target_delayed_event( def process_target_delayed_event_txn( txn: LoggingTransaction, - ) -> Tuple[ + ) -> tuple[ EventDetails, Optional[Timestamp], ]: diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py index a66e11f738..49a82b98d3 100644 --- a/synapse/storage/databases/main/deviceinbox.py +++ b/synapse/storage/databases/main/deviceinbox.py @@ -24,12 +24,8 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, Iterable, - List, Optional, - Set, - Tuple, cast, ) @@ -92,7 +88,7 @@ def __init__( # Map of (user_id, device_id) to the last stream_id that has been # deleted up to. This is so that we can no op deletions. self._last_device_delete_cache: ExpiringCache[ - Tuple[str, Optional[str]], int + tuple[str, Optional[str]], int ] = ExpiringCache( cache_name="last_device_delete_cache", server_name=self.server_name, @@ -203,7 +199,7 @@ async def get_messages_for_user_devices( user_ids: Collection[str], from_stream_id: int, to_stream_id: int, - ) -> Dict[Tuple[str, str], List[JsonDict]]: + ) -> dict[tuple[str, str], list[JsonDict]]: """ Retrieve to-device messages for a given set of users. @@ -242,7 +238,7 @@ async def get_messages_for_device( from_stream_id: int, to_stream_id: int, limit: int = 100, - ) -> Tuple[List[JsonDict], int]: + ) -> tuple[list[JsonDict], int]: """ Retrieve to-device messages for a single user device. @@ -271,7 +267,7 @@ async def get_messages_for_device( def get_device_messages_txn( txn: LoggingTransaction, - ) -> Tuple[List[JsonDict], int]: + ) -> tuple[list[JsonDict], int]: sql = """ SELECT stream_id, message_json FROM device_inbox WHERE user_id = ? AND device_id = ? @@ -284,7 +280,7 @@ def get_device_messages_txn( # Create and fill a dictionary of (user ID, device ID) -> list of messages # intended for each device. 
last_processed_stream_pos = to_stream_id - to_device_messages: List[JsonDict] = [] + to_device_messages: list[JsonDict] = [] rowcount = 0 for row in txn: rowcount += 1 @@ -331,7 +327,7 @@ async def _get_device_messages( user_ids: Collection[str], from_stream_id: int, to_stream_id: int, - ) -> Tuple[Dict[Tuple[str, str], List[JsonDict]], int]: + ) -> tuple[dict[tuple[str, str], list[JsonDict]], int]: """ Retrieve pending to-device messages for a collection of user devices. @@ -363,7 +359,7 @@ async def _get_device_messages( logger.warning("No users provided upon querying for device IDs") return {}, to_stream_id - user_ids_to_query: Set[str] = set() + user_ids_to_query: set[str] = set() # Determine which users have devices with pending messages for user_id in user_ids: @@ -378,7 +374,7 @@ async def _get_device_messages( def get_device_messages_txn( txn: LoggingTransaction, - ) -> Tuple[Dict[Tuple[str, str], List[JsonDict]], int]: + ) -> tuple[dict[tuple[str, str], list[JsonDict]], int]: # Build a query to select messages from any of the given devices that # are between the given stream id bounds. @@ -389,7 +385,7 @@ def get_device_messages_txn( # since device_inbox has an index on `(user_id, device_id, stream_id)` user_device_dicts = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn, table="devices", @@ -436,7 +432,7 @@ def get_device_messages_txn( # Create and fill a dictionary of (user ID, device ID) -> list of messages # intended for each device. 
- recipient_device_to_messages: Dict[Tuple[str, str], List[JsonDict]] = {} + recipient_device_to_messages: dict[tuple[str, str], list[JsonDict]] = {} rowcount = 0 for row in txn: rowcount += 1 @@ -535,7 +531,7 @@ async def delete_messages_for_device_between( from_stream_id: Optional[int], to_stream_id: int, limit: int, - ) -> Tuple[Optional[int], int]: + ) -> tuple[Optional[int], int]: """Delete N device messages between the stream IDs, returning the highest stream ID deleted (or None if all messages in the range have been deleted) and the number of messages deleted. @@ -555,7 +551,7 @@ async def delete_messages_for_device_between( def delete_messages_for_device_between_txn( txn: LoggingTransaction, - ) -> Tuple[Optional[int], int]: + ) -> tuple[Optional[int], int]: txn.execute( """ SELECT MAX(stream_id) FROM ( @@ -598,7 +594,7 @@ def delete_messages_for_device_between_txn( @trace async def get_new_device_msgs_for_remote( self, destination: str, last_stream_id: int, current_stream_id: int, limit: int - ) -> Tuple[List[JsonDict], int]: + ) -> tuple[list[JsonDict], int]: """ Args: destination: The name of the remote server. @@ -628,7 +624,7 @@ async def get_new_device_msgs_for_remote( @trace def get_new_messages_for_remote_destination_txn( txn: LoggingTransaction, - ) -> Tuple[List[JsonDict], int]: + ) -> tuple[list[JsonDict], int]: sql = ( "SELECT stream_id, messages_json FROM device_federation_outbox" " WHERE destination = ?" @@ -684,7 +680,7 @@ def delete_messages_for_remote_destination_txn(txn: LoggingTransaction) -> None: async def get_all_new_device_messages( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: """Get updates for to device replication stream. 
Args: @@ -711,7 +707,7 @@ async def get_all_new_device_messages( def get_all_new_device_messages_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: # We limit like this as we might have multiple rows per stream_id, and # we want to make sure we always get all entries for any stream_id # we return. @@ -746,8 +742,8 @@ def get_all_new_device_messages_txn( @trace async def add_messages_to_device_inbox( self, - local_messages_by_user_then_device: Dict[str, Dict[str, JsonDict]], - remote_messages_by_destination: Dict[str, JsonDict], + local_messages_by_user_then_device: dict[str, dict[str, JsonDict]], + remote_messages_by_destination: dict[str, JsonDict], ) -> int: """Used to send messages from this server. @@ -844,7 +840,7 @@ async def add_messages_from_remote_to_device_inbox( self, origin: str, message_id: str, - local_messages_by_user_then_device: Dict[str, Dict[str, JsonDict]], + local_messages_by_user_then_device: dict[str, dict[str, JsonDict]], ) -> int: assert self._can_write_to_device @@ -898,7 +894,7 @@ def _add_messages_to_local_device_inbox_txn( self, txn: LoggingTransaction, stream_id: int, - messages_by_user_then_device: Dict[str, Dict[str, JsonDict]], + messages_by_user_then_device: dict[str, dict[str, JsonDict]], ) -> None: assert self._can_write_to_device @@ -929,7 +925,7 @@ def _add_messages_to_local_device_inbox_txn( # We exclude hidden devices (such as cross-signing keys) here as they are # not expected to receive to-device messages. 
rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn, table="devices", @@ -1055,7 +1051,7 @@ def get_devices_with_messages_txn( txn.execute(sql, args) return {row[0] for row in txn} - results: Set[str] = set() + results: set[str] = set() for batch_device_ids in batch_iter(device_ids, 1000): batch_results = await self.db_pool.runInteraction( "get_devices_with_messages", @@ -1143,7 +1139,7 @@ async def _remove_dead_devices_from_device_inbox( def _remove_dead_devices_from_device_inbox_txn( txn: LoggingTransaction, - ) -> Tuple[int, bool]: + ) -> tuple[int, bool]: if "max_stream_id" in progress: max_stream_id = progress["max_stream_id"] else: @@ -1151,7 +1147,7 @@ def _remove_dead_devices_from_device_inbox_txn( # There's a type mismatch here between how we want to type the row and # what fetchone says it returns, but we silence it because we know that # res can't be None. - res = cast(Tuple[Optional[int]], txn.fetchone()) + res = cast(tuple[Optional[int]], txn.fetchone()) if res[0] is None: # this can only happen if the `device_inbox` table is empty, in which # case we have no work to do. @@ -1214,7 +1210,7 @@ def _cleanup_device_federation_outbox_txn( max_stream_id = progress["max_stream_id"] else: txn.execute("SELECT max(stream_id) FROM device_federation_outbox") - res = cast(Tuple[Optional[int]], txn.fetchone()) + res = cast(tuple[Optional[int]], txn.fetchone()) if res[0] is None: # this can only happen if the `device_inbox` table is empty, in which # case we have no work to do. 
diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index d4b9ce0ea0..bf5e05ea51 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -24,13 +24,9 @@ TYPE_CHECKING, Any, Collection, - Dict, Iterable, - List, Mapping, Optional, - Set, - Tuple, cast, ) @@ -284,7 +280,7 @@ def count_devices_by_users_txn( ) txn.execute(sql + clause, args) - return cast(Tuple[int], txn.fetchone())[0] + return cast(tuple[int], txn.fetchone())[0] if not user_ids: return 0 @@ -381,7 +377,7 @@ async def delete_devices(self, user_id: str, device_ids: StrCollection) -> None: device_ids: The IDs of the devices to delete """ - def _delete_devices_txn(txn: LoggingTransaction, device_ids: List[str]) -> None: + def _delete_devices_txn(txn: LoggingTransaction, device_ids: list[str]) -> None: self.db_pool.simple_delete_many_txn( txn, table="devices", @@ -497,7 +493,7 @@ async def get_device( async def get_devices_by_user( self, user_id: str - ) -> Dict[str, Dict[str, Optional[str]]]: + ) -> dict[str, dict[str, Optional[str]]]: """Retrieve all of a user's registered devices. Only returns devices that are not marked as hidden. @@ -508,7 +504,7 @@ async def get_devices_by_user( and "display_name" for each device. Display name may be null. """ devices = cast( - List[Tuple[str, str, Optional[str]]], + list[tuple[str, str, Optional[str]]], await self.db_pool.simple_select_list( table="devices", keyvalues={"user_id": user_id, "hidden": False}, @@ -524,7 +520,7 @@ async def get_devices_by_user( async def get_devices_by_auth_provider_session_id( self, auth_provider_id: str, auth_provider_session_id: str - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: """Retrieve the list of devices associated with a SSO IdP session ID. 
Args: @@ -534,7 +530,7 @@ async def get_devices_by_auth_provider_session_id( A list of dicts containing the device_id and the user_id of each device """ return cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="device_auth_providers", keyvalues={ @@ -549,7 +545,7 @@ async def get_devices_by_auth_provider_session_id( @trace async def get_device_updates_by_remote( self, destination: str, from_stream_id: int, limit: int - ) -> Tuple[int, List[Tuple[str, JsonDict]]]: + ) -> tuple[int, list[tuple[str, JsonDict]]]: """Get a stream of device updates to send to the given remote server. Args: @@ -659,8 +655,8 @@ async def get_device_updates_by_remote( last_processed_stream_id = from_stream_id # A map of (user ID, device ID) to (stream ID, context). - query_map: Dict[Tuple[str, str], Tuple[int, Optional[str]]] = {} - cross_signing_keys_by_user: Dict[str, Dict[str, object]] = {} + query_map: dict[tuple[str, str], tuple[int, Optional[str]]] = {} + cross_signing_keys_by_user: dict[str, dict[str, object]] = {} for user_id, device_id, update_stream_id, update_context in updates: # Calculate the remaining length budget. 
# Note that, for now, each entry in `cross_signing_keys_by_user` @@ -766,7 +762,7 @@ def _get_device_updates_by_remote_txn( from_stream_id: int, now_stream_id: int, limit: int, - ) -> List[Tuple[str, str, int, Optional[str]]]: + ) -> list[tuple[str, str, int, Optional[str]]]: """Return device update information for a given remote destination Args: @@ -792,14 +788,14 @@ def _get_device_updates_by_remote_txn( """ txn.execute(sql, (destination, from_stream_id, now_stream_id, limit)) - return cast(List[Tuple[str, str, int, Optional[str]]], txn.fetchall()) + return cast(list[tuple[str, str, int, Optional[str]]], txn.fetchall()) async def _get_device_update_edus_by_remote( self, destination: str, from_stream_id: int, - query_map: Dict[Tuple[str, str], Tuple[int, Optional[str]]], - ) -> List[Tuple[str, dict]]: + query_map: dict[tuple[str, str], tuple[int, Optional[str]]], + ) -> list[tuple[str, dict]]: """Returns a list of device update EDUs as well as E2EE keys Args: @@ -933,7 +929,7 @@ def _mark_as_sent_devices_by_remote_txn( txn.execute(sql, (destination, stream_id)) async def add_user_signature_change_to_streams( - self, from_user_id: str, user_ids: List[str] + self, from_user_id: str, user_ids: list[str] ) -> int: """Persist that a user has made new signatures @@ -962,7 +958,7 @@ def _add_user_signature_change_txn( self, txn: LoggingTransaction, from_user_id: str, - user_ids: List[str], + user_ids: list[str], stream_id: int, ) -> None: txn.call_after( @@ -984,8 +980,8 @@ def _add_user_signature_change_txn( @trace @cancellable async def get_user_devices_from_cache( - self, user_ids: Set[str], user_and_device_ids: List[Tuple[str, str]] - ) -> Tuple[Set[str], Dict[str, Mapping[str, JsonMapping]]]: + self, user_ids: set[str], user_and_device_ids: list[tuple[str, str]] + ) -> tuple[set[str], dict[str, Mapping[str, JsonMapping]]]: """Get the devices (and keys if any) for remote users from the cache. 
Args: @@ -1005,13 +1001,13 @@ async def get_user_devices_from_cache( user_ids_not_in_cache = unique_user_ids - user_ids_in_cache # First fetch all the users which all devices are to be returned. - results: Dict[str, Mapping[str, JsonMapping]] = {} + results: dict[str, Mapping[str, JsonMapping]] = {} for user_id in user_ids: if user_id in user_ids_in_cache: results[user_id] = await self.get_cached_devices_for_user(user_id) # Then fetch all device-specific requests, but skip users we've already # fetched all devices for. - device_specific_results: Dict[str, Dict[str, JsonMapping]] = {} + device_specific_results: dict[str, dict[str, JsonMapping]] = {} for user_id, device_id in user_and_device_ids: if user_id in user_ids_in_cache and user_id not in user_ids: device = await self._get_cached_user_device(user_id, device_id) @@ -1025,7 +1021,7 @@ async def get_user_devices_from_cache( async def get_users_whose_devices_are_cached( self, user_ids: StrCollection - ) -> Set[str]: + ) -> set[str]: """Checks which of the given users we have cached the devices for.""" user_map = await self.get_device_list_last_stream_id_for_remotes(user_ids) @@ -1056,7 +1052,7 @@ async def get_cached_devices_for_user( self, user_id: str ) -> Mapping[str, JsonMapping]: devices = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="device_lists_remote_cache", keyvalues={"user_id": user_id}, @@ -1071,7 +1067,7 @@ async def get_all_devices_changed( self, from_key: MultiWriterStreamToken, to_key: MultiWriterStreamToken, - ) -> Set[str]: + ) -> set[str]: """Get all users whose devices have changed in the given range. Args: @@ -1131,7 +1127,7 @@ async def get_users_whose_devices_changed( from_key: MultiWriterStreamToken, user_ids: Collection[str], to_key: Optional[MultiWriterStreamToken] = None, - ) -> Set[str]: + ) -> set[str]: """Get set of users whose devices have changed since `from_key` that are in the given list of user_ids. 
@@ -1164,14 +1160,14 @@ def _get_users_whose_devices_changed_txn( txn: LoggingTransaction, from_key: MultiWriterStreamToken, to_key: MultiWriterStreamToken, - ) -> Set[str]: + ) -> set[str]: sql = """ SELECT user_id, stream_id, instance_name FROM device_lists_stream WHERE ? < stream_id AND stream_id <= ? AND %s """ - changes: Set[str] = set() + changes: set[str] = set() # Query device changes with a batch of users at a time for chunk in batch_iter(user_ids_to_check, 100): @@ -1204,7 +1200,7 @@ def _get_users_whose_devices_changed_txn( async def get_users_whose_signatures_changed( self, user_id: str, from_key: MultiWriterStreamToken - ) -> Set[str]: + ) -> set[str]: """Get the users who have new cross-signing signatures made by `user_id` since `from_key`. @@ -1243,7 +1239,7 @@ async def get_users_whose_signatures_changed( async def get_all_device_list_changes_for_remotes( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: """Get updates for device lists replication stream. Args: @@ -1270,7 +1266,7 @@ async def get_all_device_list_changes_for_remotes( def _get_all_device_list_changes_for_remotes( txn: Cursor, - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: # This query Does The Right Thing where it'll correctly apply the # bounds to the inner queries. 
sql = """ @@ -1322,7 +1318,7 @@ async def get_device_list_last_stream_id_for_remotes( self, user_ids: Iterable[str] ) -> Mapping[str, Optional[str]]: rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_many_batch( table="device_lists_remote_extremeties", column="user_id", @@ -1332,7 +1328,7 @@ async def get_device_list_last_stream_id_for_remotes( ), ) - results: Dict[str, Optional[str]] = dict.fromkeys(user_ids) + results: dict[str, Optional[str]] = dict.fromkeys(user_ids) results.update(rows) return results @@ -1340,7 +1336,7 @@ async def get_device_list_last_stream_id_for_remotes( async def get_user_ids_requiring_device_list_resync( self, user_ids: Optional[Collection[str]] = None, - ) -> Set[str]: + ) -> set[str]: """Given a list of remote users return the list of users that we should resync the device lists for. If None is given instead of a list, return every user that we should resync the device lists for. @@ -1350,7 +1346,7 @@ async def get_user_ids_requiring_device_list_resync( """ if user_ids: rows = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_many_batch( table="device_lists_remote_resync", column="user_id", @@ -1361,7 +1357,7 @@ async def get_user_ids_requiring_device_list_resync( ) else: rows = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_list( table="device_lists_remote_resync", keyvalues=None, @@ -1406,7 +1402,7 @@ async def mark_remote_user_device_cache_as_valid(self, user_id: str) -> None: desc="mark_remote_user_device_cache_as_valid", ) - async def handle_potentially_left_users(self, user_ids: Set[str]) -> None: + async def handle_potentially_left_users(self, user_ids: set[str]) -> None: """Given a set of remote users check if the server still shares a room with them. If not then mark those users' device cache as stale. 
""" @@ -1423,7 +1419,7 @@ async def handle_potentially_left_users(self, user_ids: Set[str]) -> None: def handle_potentially_left_users_txn( self, txn: LoggingTransaction, - user_ids: Set[str], + user_ids: set[str], ) -> None: """Given a set of remote users check if the server still shares a room with them. If not then mark those users' device cache as stale. @@ -1463,7 +1459,7 @@ def mark_remote_user_device_list_as_unsubscribed_txn( async def get_dehydrated_device( self, user_id: str - ) -> Optional[Tuple[str, JsonDict]]: + ) -> Optional[tuple[str, JsonDict]]: """Retrieve the information for a dehydrated device. Args: @@ -1672,7 +1668,7 @@ def _prune_txn(txn: LoggingTransaction) -> None: async def get_local_devices_not_accessed_since( self, since_ms: int - ) -> Dict[str, List[str]]: + ) -> dict[str, list[str]]: """Retrieves local devices that haven't been accessed since a given date. Args: @@ -1687,20 +1683,20 @@ async def get_local_devices_not_accessed_since( def get_devices_not_accessed_since_txn( txn: LoggingTransaction, - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: sql = """ SELECT user_id, device_id FROM devices WHERE last_seen < ? AND hidden = FALSE """ txn.execute(sql, (since_ms,)) - return cast(List[Tuple[str, str]], txn.fetchall()) + return cast(list[tuple[str, str]], txn.fetchall()) rows = await self.db_pool.runInteraction( "get_devices_not_accessed_since", get_devices_not_accessed_since_txn, ) - devices: Dict[str, List[str]] = {} + devices: dict[str, list[str]] = {} for user_id, device_id in rows: # Remote devices are never stale from our point of view. if self.hs.is_mine_id(user_id): @@ -1728,7 +1724,7 @@ async def get_device_list_changes_in_rooms( room_ids: Collection[str], from_token: MultiWriterStreamToken, to_token: MultiWriterStreamToken, - ) -> Optional[Set[str]]: + ) -> Optional[set[str]]: """Return the set of users whose devices have changed in the given rooms since the given stream ID. 
@@ -1759,7 +1755,7 @@ async def get_device_list_changes_in_rooms( def _get_device_list_changes_in_rooms_txn( txn: LoggingTransaction, chunk: list[str], - ) -> Set[str]: + ) -> set[str]: clause, args = make_in_list_sql_clause( self.database_engine, "room_id", chunk ) @@ -1788,7 +1784,7 @@ def _get_device_list_changes_in_rooms_txn( return changes - async def get_all_device_list_changes(self, from_id: int, to_id: int) -> Set[str]: + async def get_all_device_list_changes(self, from_id: int, to_id: int) -> set[str]: """Return the set of rooms where devices have changed since the given stream ID. @@ -1807,7 +1803,7 @@ async def get_all_device_list_changes(self, from_id: int, to_id: int) -> Set[str def _get_all_device_list_changes_txn( txn: LoggingTransaction, - ) -> Set[str]: + ) -> set[str]: txn.execute(sql, (from_id, to_id)) return {room_id for (room_id,) in txn} @@ -1818,7 +1814,7 @@ def _get_all_device_list_changes_txn( async def get_device_list_changes_in_room( self, room_id: str, min_stream_id: int - ) -> Collection[Tuple[str, str]]: + ) -> Collection[tuple[str, str]]: """Get all device list changes that happened in the room since the given stream ID. @@ -1834,9 +1830,9 @@ async def get_device_list_changes_in_room( def get_device_list_changes_in_room_txn( txn: LoggingTransaction, - ) -> Collection[Tuple[str, str]]: + ) -> Collection[tuple[str, str]]: txn.execute(sql, (room_id, min_stream_id)) - return cast(Collection[Tuple[str, str]], txn.fetchall()) + return cast(Collection[tuple[str, str]], txn.fetchall()) return await self.db_pool.runInteraction( "get_device_list_changes_in_room", @@ -1911,7 +1907,7 @@ def _update_remote_device_list_cache_entry_txn( ) async def update_remote_device_list_cache( - self, user_id: str, devices: List[dict], stream_id: int + self, user_id: str, devices: list[dict], stream_id: int ) -> None: """Replace the entire cache of the remote user's devices. 
@@ -1932,7 +1928,7 @@ async def update_remote_device_list_cache( ) def _update_remote_device_list_cache_txn( - self, txn: LoggingTransaction, user_id: str, devices: List[dict], stream_id: int + self, txn: LoggingTransaction, user_id: str, devices: list[dict], stream_id: int ) -> None: """Replace the list of cached devices for this user with the given list.""" self.db_pool.simple_delete_txn( @@ -2031,7 +2027,7 @@ def _add_device_change_to_stream_txn( txn: LoggingTransaction, user_id: str, device_ids: Collection[str], - stream_ids: List[int], + stream_ids: list[int], ) -> None: txn.call_after( self._device_list_stream_cache.entity_has_changed, @@ -2076,7 +2072,7 @@ def _add_device_outbound_poke_to_stream_txn( device_id: str, hosts: Collection[str], stream_id: int, - context: Optional[Dict[str, str]], + context: Optional[dict[str, str]], ) -> None: if self._device_list_federation_stream_cache: for host in hosts: @@ -2163,8 +2159,8 @@ def _add_device_outbound_room_poke_txn( user_id: str, device_ids: StrCollection, room_ids: StrCollection, - stream_ids: List[int], - context: Dict[str, str], + stream_ids: list[int], + context: dict[str, str], ) -> None: """Record the user in the room has updated their device.""" @@ -2208,7 +2204,7 @@ def _add_device_outbound_room_poke_txn( async def get_uncoverted_outbound_room_pokes( self, start_stream_id: int, start_room_id: str, limit: int = 10 - ) -> List[Tuple[str, str, str, int, Optional[Dict[str, str]]]]: + ) -> list[tuple[str, str, str, int, Optional[dict[str, str]]]]: """Get device list changes by room that have not yet been handled and written to `device_lists_outbound_pokes`. 
@@ -2236,7 +2232,7 @@ async def get_uncoverted_outbound_room_pokes( def get_uncoverted_outbound_room_pokes_txn( txn: LoggingTransaction, - ) -> List[Tuple[str, str, str, int, Optional[Dict[str, str]]]]: + ) -> list[tuple[str, str, str, int, Optional[dict[str, str]]]]: txn.execute( sql, ( @@ -2270,7 +2266,7 @@ async def add_device_list_outbound_pokes( device_id: str, room_id: str, hosts: Collection[str], - context: Optional[Dict[str, str]], + context: Optional[dict[str, str]], ) -> None: """Queue the device update to be sent to the given set of hosts, calculated from the room ID. @@ -2327,7 +2323,7 @@ async def add_remote_device_list_to_pending( async def get_pending_remote_device_list_updates_for_room( self, room_id: str - ) -> Collection[Tuple[str, str]]: + ) -> Collection[tuple[str, str]]: """Get the set of remote device list updates from the pending table for the room. """ @@ -2361,16 +2357,16 @@ async def get_pending_remote_device_list_updates_for_room( def get_pending_remote_device_list_updates_for_room_txn( txn: LoggingTransaction, - ) -> Collection[Tuple[str, str]]: + ) -> Collection[tuple[str, str]]: txn.execute(sql, (room_id, min_device_stream_id)) - return cast(Collection[Tuple[str, str]], txn.fetchall()) + return cast(Collection[tuple[str, str]], txn.fetchall()) return await self.db_pool.runInteraction( "get_pending_remote_device_list_updates_for_room", get_pending_remote_device_list_updates_for_room_txn, ) - async def get_device_change_last_converted_pos(self) -> Tuple[int, str]: + async def get_device_change_last_converted_pos(self) -> tuple[int, str]: """ Get the position of the last row in `device_list_changes_in_room` that has been converted to `device_lists_outbound_pokes`. 
@@ -2388,7 +2384,7 @@ async def get_device_change_last_converted_pos(self) -> Tuple[int, str]: retcols=["stream_id", "room_id"], desc="get_device_change_last_converted_pos", ) - return cast(Tuple[int, str], min(rows)) + return cast(tuple[int, str], min(rows)) async def set_device_change_last_converted_pos( self, diff --git a/synapse/storage/databases/main/directory.py b/synapse/storage/databases/main/directory.py index 49c0575aca..99a951ca4a 100644 --- a/synapse/storage/databases/main/directory.py +++ b/synapse/storage/databases/main/directory.py @@ -19,7 +19,7 @@ # # -from typing import Iterable, List, Optional, Sequence, Tuple +from typing import Iterable, Optional, Sequence import attr @@ -34,7 +34,7 @@ class RoomAliasMapping: room_id: str room_alias: str - servers: List[str] + servers: list[str] class DirectoryWorkerStore(CacheInvalidationWorkerStore): @@ -187,7 +187,7 @@ async def update_aliases_for_room( def _update_aliases_for_room_txn(txn: LoggingTransaction) -> None: update_creator_sql = "" - sql_params: Tuple[str, ...] = (new_room_id, old_room_id) + sql_params: tuple[str, ...] = (new_room_id, old_room_id) if creator: update_creator_sql = ", creator = ?" sql_params = (new_room_id, creator, old_room_id) diff --git a/synapse/storage/databases/main/e2e_room_keys.py b/synapse/storage/databases/main/e2e_room_keys.py index d978e115e4..a4d03d1d90 100644 --- a/synapse/storage/databases/main/e2e_room_keys.py +++ b/synapse/storage/databases/main/e2e_room_keys.py @@ -21,13 +21,10 @@ from typing import ( TYPE_CHECKING, - Dict, Iterable, - List, Literal, Mapping, Optional, - Tuple, TypedDict, cast, ) @@ -195,7 +192,7 @@ async def update_e2e_room_key( ) async def add_e2e_room_keys( - self, user_id: str, version: str, room_keys: Iterable[Tuple[str, str, RoomKey]] + self, user_id: str, version: str, room_keys: Iterable[tuple[str, str, RoomKey]] ) -> None: """Bulk add room keys to a given backup. 
@@ -257,8 +254,8 @@ async def get_e2e_room_keys( version: str, room_id: Optional[str] = None, session_id: Optional[str] = None, - ) -> Dict[ - Literal["rooms"], Dict[str, Dict[Literal["sessions"], Dict[str, RoomKey]]] + ) -> dict[ + Literal["rooms"], dict[str, dict[Literal["sessions"], dict[str, RoomKey]]] ]: """Bulk get the E2E room keys for a given backup, optionally filtered to a given room, or a given session. @@ -290,7 +287,7 @@ async def get_e2e_room_keys( keyvalues["session_id"] = session_id rows = cast( - List[Tuple[str, str, int, int, int, str]], + list[tuple[str, str, int, int, int, str]], await self.db_pool.simple_select_list( table="e2e_room_keys", keyvalues=keyvalues, @@ -306,8 +303,8 @@ async def get_e2e_room_keys( ), ) - sessions: Dict[ - Literal["rooms"], Dict[str, Dict[Literal["sessions"], Dict[str, RoomKey]]] + sessions: dict[ + Literal["rooms"], dict[str, dict[Literal["sessions"], dict[str, RoomKey]]] ] = {"rooms": {}} for ( room_id, @@ -333,7 +330,7 @@ async def get_e2e_room_keys_multi( user_id: str, version: str, room_keys: Mapping[str, Mapping[Literal["sessions"], Iterable[str]]], - ) -> Dict[str, Dict[str, RoomKey]]: + ) -> dict[str, dict[str, RoomKey]]: """Get multiple room keys at a time. 
The difference between this function and get_e2e_room_keys is that this function can be used to retrieve multiple specific keys at a time, whereas get_e2e_room_keys is used for @@ -370,7 +367,7 @@ def _get_e2e_room_keys_multi_txn( user_id: str, version: int, room_keys: Mapping[str, Mapping[Literal["sessions"], Iterable[str]]], - ) -> Dict[str, Dict[str, RoomKey]]: + ) -> dict[str, dict[str, RoomKey]]: if not room_keys: return {} @@ -400,7 +397,7 @@ def _get_e2e_room_keys_multi_txn( txn.execute(sql, params) - ret: Dict[str, Dict[str, RoomKey]] = {} + ret: dict[str, dict[str, RoomKey]] = {} for row in txn: room_id = row[0] @@ -483,7 +480,7 @@ def _get_current_version(txn: LoggingTransaction, user_id: str) -> int: ) # `SELECT MAX() FROM ...` will always return 1 row. The value in that row will # be `NULL` when there are no available versions. - row = cast(Tuple[Optional[int]], txn.fetchone()) + row = cast(tuple[Optional[int]], txn.fetchone()) if row[0] is None: raise StoreError(404, "No current backup version") return row[0] @@ -559,7 +556,7 @@ def _create_e2e_room_keys_version_txn(txn: LoggingTransaction) -> str: "SELECT MAX(version) FROM e2e_room_keys_versions WHERE user_id=?", (user_id,), ) - current_version = cast(Tuple[Optional[int]], txn.fetchone())[0] + current_version = cast(tuple[Optional[int]], txn.fetchone())[0] if current_version is None: current_version = 0 @@ -600,7 +597,7 @@ async def update_e2e_room_keys_version( version_etag: etag of the keys in the backup. If None, then the etag is not updated. 
""" - updatevalues: Dict[str, object] = {} + updatevalues: dict[str, object] = {} if info is not None and "auth_data" in info: updatevalues["auth_data"] = json_encoder.encode(info["auth_data"]) diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py index 2d3d0c0036..991d64db44 100644 --- a/synapse/storage/databases/main/end_to_end_keys.py +++ b/synapse/storage/databases/main/end_to_end_keys.py @@ -25,15 +25,11 @@ TYPE_CHECKING, Any, Collection, - Dict, Iterable, - List, Literal, Mapping, Optional, Sequence, - Set, - Tuple, Union, cast, overload, @@ -155,7 +151,7 @@ def process_replication_rows( async def get_e2e_device_keys_for_federation_query( self, user_id: str - ) -> Tuple[int, Sequence[JsonMapping]]: + ) -> tuple[int, Sequence[JsonMapping]]: """Get all devices (with any device keys) for a user Returns: @@ -241,9 +237,9 @@ async def _get_e2e_device_keys_for_federation_query_inner( @cancellable async def get_e2e_device_keys_for_cs_api( self, - query_list: Collection[Tuple[str, Optional[str]]], + query_list: Collection[tuple[str, Optional[str]]], include_displaynames: bool = True, - ) -> Dict[str, Dict[str, JsonDict]]: + ) -> dict[str, dict[str, JsonDict]]: """Fetch a list of device keys, formatted suitably for the C/S API. Args: query_list: List of pairs of user_ids and device_ids. 
@@ -262,7 +258,7 @@ async def get_e2e_device_keys_for_cs_api( # Build the result structure, un-jsonify the results, and add the # "unsigned" section - rv: Dict[str, Dict[str, JsonDict]] = {} + rv: dict[str, dict[str, JsonDict]] = {} for user_id, device_keys in results.items(): rv[user_id] = {} for device_id, device_info in device_keys.items(): @@ -284,36 +280,36 @@ async def get_e2e_device_keys_for_cs_api( @overload async def get_e2e_device_keys_and_signatures( self, - query_list: Collection[Tuple[str, Optional[str]]], + query_list: Collection[tuple[str, Optional[str]]], include_all_devices: Literal[False] = False, - ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ... + ) -> dict[str, dict[str, DeviceKeyLookupResult]]: ... @overload async def get_e2e_device_keys_and_signatures( self, - query_list: Collection[Tuple[str, Optional[str]]], + query_list: Collection[tuple[str, Optional[str]]], include_all_devices: bool = False, include_deleted_devices: Literal[False] = False, - ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ... + ) -> dict[str, dict[str, DeviceKeyLookupResult]]: ... @overload async def get_e2e_device_keys_and_signatures( self, - query_list: Collection[Tuple[str, Optional[str]]], + query_list: Collection[tuple[str, Optional[str]]], include_all_devices: Literal[True], include_deleted_devices: Literal[True], - ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]: ... + ) -> dict[str, dict[str, Optional[DeviceKeyLookupResult]]]: ... 
@trace @cancellable async def get_e2e_device_keys_and_signatures( self, - query_list: Collection[Tuple[str, Optional[str]]], + query_list: Collection[tuple[str, Optional[str]]], include_all_devices: bool = False, include_deleted_devices: bool = False, ) -> Union[ - Dict[str, Dict[str, DeviceKeyLookupResult]], - Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]], + dict[str, dict[str, DeviceKeyLookupResult]], + dict[str, dict[str, Optional[DeviceKeyLookupResult]]], ]: """Fetch a list of device keys @@ -388,18 +384,18 @@ async def get_e2e_device_keys_and_signatures( async def _get_e2e_device_keys( self, - query_list: Collection[Tuple[str, Optional[str]]], + query_list: Collection[tuple[str, Optional[str]]], include_all_devices: bool = False, include_deleted_devices: bool = False, - ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]: + ) -> dict[str, dict[str, Optional[DeviceKeyLookupResult]]]: """Get information on devices from the database The results include the device's keys and self-signatures, but *not* any cross-signing signatures which have been added subsequently (for which, see get_e2e_device_keys_and_signatures) """ - query_clauses: List[str] = [] - query_params_list: List[List[object]] = [] + query_clauses: list[str] = [] + query_params_list: list[list[object]] = [] if include_all_devices is False: include_deleted_devices = False @@ -437,7 +433,7 @@ async def _get_e2e_device_keys( query_clauses.append(user_device_id_in_list_clause) query_params_list.append(user_device_args) - result: Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]] = {} + result: dict[str, dict[str, Optional[DeviceKeyLookupResult]]] = {} def get_e2e_device_keys_txn( txn: LoggingTransaction, query_clause: str, query_params: list @@ -490,8 +486,8 @@ def get_e2e_device_keys_txn( @cached() def _get_e2e_cross_signing_signatures_for_device( self, - user_id_and_device_id: Tuple[str, str], - ) -> Sequence[Tuple[str, str]]: + user_id_and_device_id: tuple[str, str], + ) -> 
Sequence[tuple[str, str]]: """ The single-item version of `_get_e2e_cross_signing_signatures_for_devices`. See @cachedList for why a separate method is needed. @@ -503,8 +499,8 @@ def _get_e2e_cross_signing_signatures_for_device( list_name="device_query", ) async def _get_e2e_cross_signing_signatures_for_devices( - self, device_query: Iterable[Tuple[str, str]] - ) -> Mapping[Tuple[str, str], Sequence[Tuple[str, str]]]: + self, device_query: Iterable[tuple[str, str]] + ) -> Mapping[tuple[str, str], Sequence[tuple[str, str]]]: """Get cross-signing signatures for a given list of user IDs and devices. Args: @@ -524,8 +520,8 @@ async def _get_e2e_cross_signing_signatures_for_devices( """ def _get_e2e_cross_signing_signatures_for_devices_txn( - txn: LoggingTransaction, device_query: Iterable[Tuple[str, str]] - ) -> Mapping[Tuple[str, str], Sequence[Tuple[str, str]]]: + txn: LoggingTransaction, device_query: Iterable[tuple[str, str]] + ) -> Mapping[tuple[str, str], Sequence[tuple[str, str]]]: where_clause_sql, where_clause_params = make_tuple_in_list_sql_clause( self.database_engine, columns=("target_user_id", "target_device_id", "user_id"), @@ -541,7 +537,7 @@ def _get_e2e_cross_signing_signatures_for_devices_txn( txn.execute(signature_sql, where_clause_params) - devices_and_signatures: Dict[Tuple[str, str], List[Tuple[str, str]]] = {} + devices_and_signatures: dict[tuple[str, str], list[tuple[str, str]]] = {} # `@cachedList` requires we return one key for every item in `device_query`. # Pre-populate `devices_and_signatures` with each key so that none are missing. 
@@ -567,8 +563,8 @@ def _get_e2e_cross_signing_signatures_for_devices_txn( ) async def get_e2e_one_time_keys( - self, user_id: str, device_id: str, key_ids: List[str] - ) -> Dict[Tuple[str, str], str]: + self, user_id: str, device_id: str, key_ids: list[str] + ) -> dict[tuple[str, str], str]: """Retrieve a number of one-time keys for a user Args: @@ -581,7 +577,7 @@ async def get_e2e_one_time_keys( """ rows = cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], await self.db_pool.simple_select_many_batch( table="e2e_one_time_keys_json", column="key_id", @@ -600,7 +596,7 @@ async def add_e2e_one_time_keys( user_id: str, device_id: str, time_now: int, - new_keys: Iterable[Tuple[str, str, str]], + new_keys: Iterable[tuple[str, str, str]], ) -> None: """Insert some new one time keys for a device. Errors if any of the keys already exist. @@ -627,7 +623,7 @@ def _add_e2e_one_time_keys_txn( user_id: str, device_id: str, time_now: int, - new_keys: Iterable[Tuple[str, str, str]], + new_keys: Iterable[tuple[str, str, str]], ) -> None: """Insert some new one time keys for a device. Errors if any of the keys already exist. @@ -674,7 +670,7 @@ async def count_e2e_one_time_keys( A mapping from algorithm to number of keys for that algorithm. """ - def _count_e2e_one_time_keys(txn: LoggingTransaction) -> Dict[str, int]: + def _count_e2e_one_time_keys(txn: LoggingTransaction) -> dict[str, int]: sql = ( "SELECT algorithm, COUNT(key_id) FROM e2e_one_time_keys_json" " WHERE user_id = ? AND device_id = ?" @@ -962,7 +958,7 @@ def _get_bare_e2e_cross_signing_keys_bulk_txn( self, txn: LoggingTransaction, user_ids: Iterable[str], - ) -> Dict[str, Dict[str, JsonDict]]: + ) -> dict[str, dict[str, JsonDict]]: """Returns the cross-signing keys for a set of users. The output of this function should be passed to _get_e2e_cross_signing_signatures_txn if the signatures for the calling user need to be fetched. @@ -977,7 +973,7 @@ def _get_bare_e2e_cross_signing_keys_bulk_txn( the dict. 
""" - result: Dict[str, Dict[str, JsonDict]] = {} + result: dict[str, dict[str, JsonDict]] = {} for user_chunk in batch_iter(user_ids, 100): clause, params = make_in_list_sql_clause( @@ -1017,9 +1013,9 @@ def _get_bare_e2e_cross_signing_keys_bulk_txn( def _get_e2e_cross_signing_signatures_txn( self, txn: LoggingTransaction, - keys: Dict[str, Optional[Dict[str, JsonDict]]], + keys: dict[str, Optional[dict[str, JsonDict]]], from_user_id: str, - ) -> Dict[str, Optional[Dict[str, JsonDict]]]: + ) -> dict[str, Optional[dict[str, JsonDict]]]: """Returns the cross-signing signatures made by a user on a set of keys. Args: @@ -1037,7 +1033,7 @@ def _get_e2e_cross_signing_signatures_txn( # find out what cross-signing keys (a.k.a. devices) we need to get # signatures for. This is a map of (user_id, device_id) to key type # (device_id is the key's public part). - devices: Dict[Tuple[str, str], str] = {} + devices: dict[tuple[str, str], str] = {} for user_id, user_keys in keys.items(): if user_keys is None: @@ -1100,7 +1096,7 @@ def _get_e2e_cross_signing_signatures_txn( @cancellable async def get_e2e_cross_signing_keys_bulk( - self, user_ids: List[str], from_user_id: Optional[str] = None + self, user_ids: list[str], from_user_id: Optional[str] = None ) -> Mapping[str, Optional[Mapping[str, JsonMapping]]]: """Returns the cross-signing keys for a set of users. 
@@ -1118,7 +1114,7 @@ async def get_e2e_cross_signing_keys_bulk( if from_user_id: result = cast( - Dict[str, Optional[Mapping[str, JsonMapping]]], + dict[str, Optional[Mapping[str, JsonMapping]]], await self.db_pool.runInteraction( "get_e2e_cross_signing_signatures", self._get_e2e_cross_signing_signatures_txn, @@ -1131,7 +1127,7 @@ async def get_e2e_cross_signing_keys_bulk( async def get_all_user_signature_changes_for_remotes( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: """Get updates for groups replication stream. Note that the user signature stream represents when a user signs their @@ -1163,7 +1159,7 @@ async def get_all_user_signature_changes_for_remotes( def _get_all_user_signature_changes_for_remotes_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: sql = """ SELECT stream_id, from_user_id AS user_id FROM user_signature_stream @@ -1194,9 +1190,9 @@ def get_device_stream_token(self) -> MultiWriterStreamToken: ... async def claim_e2e_one_time_keys( - self, query_list: Collection[Tuple[str, str, str, int]] - ) -> Tuple[ - Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str, int]] + self, query_list: Collection[tuple[str, str, str, int]] + ) -> tuple[ + dict[str, dict[str, dict[str, JsonDict]]], list[tuple[str, str, str, int]] ]: """Take a list of one time keys out of the database. @@ -1211,12 +1207,12 @@ async def claim_e2e_one_time_keys( may be less than the input counts. In this case, the returned counts are the number of claims that were not fulfilled. 
""" - results: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} - missing: List[Tuple[str, str, str, int]] = [] + results: dict[str, dict[str, dict[str, JsonDict]]] = {} + missing: list[tuple[str, str, str, int]] = [] if isinstance(self.database_engine, PostgresEngine): # If we can use execute_values we can use a single batch query # in autocommit mode. - unfulfilled_claim_counts: Dict[Tuple[str, str, str], int] = {} + unfulfilled_claim_counts: dict[tuple[str, str, str], int] = {} for user_id, device_id, algorithm, count in query_list: unfulfilled_claim_counts[user_id, device_id, algorithm] = count @@ -1265,8 +1261,8 @@ async def claim_e2e_one_time_keys( return results, missing async def claim_e2e_fallback_keys( - self, query_list: Iterable[Tuple[str, str, str, bool]] - ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: + self, query_list: Iterable[tuple[str, str, str, bool]] + ) -> dict[str, dict[str, dict[str, JsonDict]]]: """Take a list of fallback keys out of the database. Args: @@ -1293,13 +1289,13 @@ async def claim_e2e_fallback_keys( def _claim_e2e_fallback_keys_bulk_txn( self, txn: LoggingTransaction, - query_list: Iterable[Tuple[str, str, str, bool]], - ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: + query_list: Iterable[tuple[str, str, str, bool]], + ) -> dict[str, dict[str, dict[str, JsonDict]]]: """Efficient implementation of claim_e2e_fallback_keys for Postgres. Safe to autocommit: this is a single query. 
""" - results: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + results: dict[str, dict[str, dict[str, JsonDict]]] = {} sql = """ WITH claims(user_id, device_id, algorithm, mark_as_used) AS ( @@ -1312,11 +1308,11 @@ def _claim_e2e_fallback_keys_bulk_txn( RETURNING k.user_id, k.device_id, k.algorithm, k.key_id, k.key_json; """ claimed_keys = cast( - List[Tuple[str, str, str, str, str]], + list[tuple[str, str, str, str, str]], txn.execute_values(sql, query_list), ) - seen_user_device: Set[Tuple[str, str]] = set() + seen_user_device: set[tuple[str, str]] = set() for user_id, device_id, algorithm, key_id, key_json in claimed_keys: device_results = results.setdefault(user_id, {}).setdefault(device_id, {}) device_results[f"{algorithm}:{key_id}"] = json_decoder.decode(key_json) @@ -1330,10 +1326,10 @@ def _claim_e2e_fallback_keys_bulk_txn( async def _claim_e2e_fallback_keys_simple( self, - query_list: Iterable[Tuple[str, str, str, bool]], - ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: + query_list: Iterable[tuple[str, str, str, bool]], + ) -> dict[str, dict[str, dict[str, JsonDict]]]: """Naive, inefficient implementation of claim_e2e_fallback_keys for SQLite.""" - results: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + results: dict[str, dict[str, dict[str, JsonDict]]] = {} for user_id, device_id, algorithm, mark_as_used in query_list: row = await self.db_pool.simple_select_one( table="e2e_fallback_keys_json", @@ -1381,7 +1377,7 @@ def _claim_e2e_one_time_key_simple( device_id: str, algorithm: str, count: int, - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: """Claim OTK for device for DBs that don't support RETURNING. 
Returns: @@ -1426,8 +1422,8 @@ def _claim_e2e_one_time_key_simple( def _claim_e2e_one_time_keys_bulk( self, txn: LoggingTransaction, - query_list: Iterable[Tuple[str, str, str, int]], - ) -> List[Tuple[str, str, str, str, str]]: + query_list: Iterable[tuple[str, str, str, int]], + ) -> list[tuple[str, str, str, str, str]]: """Bulk claim OTKs, for DBs that support DELETE FROM... RETURNING. Args: @@ -1466,7 +1462,7 @@ def _claim_e2e_one_time_keys_bulk( RETURNING user_id, device_id, algorithm, key_id, key_json; """ otk_rows = cast( - List[Tuple[str, str, str, str, str]], txn.execute_values(sql, query_list) + list[tuple[str, str, str, str, str]], txn.execute_values(sql, query_list) ) seen_user_device = { @@ -1482,7 +1478,7 @@ def _claim_e2e_one_time_keys_bulk( async def get_master_cross_signing_key_updatable_before( self, user_id: str - ) -> Tuple[bool, Optional[int]]: + ) -> tuple[bool, Optional[int]]: """Get time before which a master cross-signing key may be replaced without UIA. (UIA means "User-Interactive Auth".) 
@@ -1503,7 +1499,7 @@ async def get_master_cross_signing_key_updatable_before( """ - def impl(txn: LoggingTransaction) -> Tuple[bool, Optional[int]]: + def impl(txn: LoggingTransaction) -> tuple[bool, Optional[int]]: # We want to distinguish between three cases: txn.execute( """ @@ -1515,7 +1511,7 @@ def impl(txn: LoggingTransaction) -> Tuple[bool, Optional[int]]: """, (user_id,), ) - row = cast(Optional[Tuple[Optional[int]]], txn.fetchone()) + row = cast(Optional[tuple[Optional[int]]], txn.fetchone()) if row is None: return False, None return True, row[0] @@ -1527,7 +1523,7 @@ def impl(txn: LoggingTransaction) -> Tuple[bool, Optional[int]]: async def delete_old_otks_for_next_user_batch( self, after_user_id: str, number_of_users: int - ) -> Tuple[List[str], int]: + ) -> tuple[list[str], int]: """Deletes old OTKs belonging to the next batch of users Returns: @@ -1536,7 +1532,7 @@ async def delete_old_otks_for_next_user_batch( * `rows` is the number of deleted rows """ - def impl(txn: LoggingTransaction) -> Tuple[List[str], int]: + def impl(txn: LoggingTransaction) -> tuple[list[str], int]: # Find a batch of users txn.execute( """ diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index d77420ff47..d889e8eceb 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -25,15 +25,10 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, - FrozenSet, Generator, Iterable, - List, Optional, Sequence, - Set, - Tuple, cast, ) @@ -132,9 +127,9 @@ class BackfillQueueNavigationItem: @attr.s(frozen=True, slots=True, auto_attribs=True) class StateDifference: # The event IDs in the auth difference. - auth_difference: Set[str] + auth_difference: set[str] # The event IDs in the conflicted state subgraph. Used in v2.1 only. 
- conflicted_subgraph: Optional[Set[str]] + conflicted_subgraph: Optional[set[str]] class _NoChainCoverIndex(Exception): @@ -165,7 +160,7 @@ def __init__( ) # Cache of event ID to list of auth event IDs and their depths. - self._event_auth_cache: LruCache[str, List[Tuple[str, int]]] = LruCache( + self._event_auth_cache: LruCache[str, list[tuple[str, int]]] = LruCache( max_size=500000, clock=self.hs.get_clock(), server_name=self.server_name, @@ -199,7 +194,7 @@ def __init__( async def get_auth_chain( self, room_id: str, event_ids: Collection[str], include_given: bool = False - ) -> List[EventBase]: + ) -> list[EventBase]: """Get auth events for given event_ids. The events *must* be state events. Args: @@ -222,7 +217,7 @@ async def get_auth_chain_ids( room_id: str, event_ids: Collection[str], include_given: bool = False, - ) -> Set[str]: + ) -> set[str]: """Get auth events for given event_ids. The events *must* be state events. Args: @@ -267,7 +262,7 @@ def _get_auth_chain_ids_using_cover_index_txn( room_id: str, event_ids: Collection[str], include_given: bool, - ) -> Set[str]: + ) -> set[str]: """Calculates the auth chain IDs using the chain index.""" # First we look up the chain ID/sequence numbers for the given events. @@ -275,10 +270,10 @@ def _get_auth_chain_ids_using_cover_index_txn( initial_events = set(event_ids) # All the events that we've found that are reachable from the events. - seen_events: Set[str] = set() + seen_events: set[str] = set() # A map from chain ID to max sequence number of the given events. - event_chains: Dict[int, int] = {} + event_chains: dict[int, int] = {} sql = """ SELECT event_id, chain_id, sequence_number @@ -313,7 +308,7 @@ def _get_auth_chain_ids_using_cover_index_txn( # are reachable from any event. # A map from chain ID to max sequence number *reachable* from any event ID. 
- chains: Dict[int, int] = {} + chains: dict[int, int] = {} for links in self._get_chain_links(txn, set(event_chains.keys())): for chain_id in links: if chain_id not in event_chains: @@ -366,8 +361,8 @@ def _get_auth_chain_ids_using_cover_index_txn( @classmethod def _get_chain_links( - cls, txn: LoggingTransaction, chains_to_fetch: Set[int] - ) -> Generator[Dict[int, List[Tuple[int, int, int]]], None, None]: + cls, txn: LoggingTransaction, chains_to_fetch: set[int] + ) -> Generator[dict[int, list[tuple[int, int, int]]], None, None]: """Fetch all auth chain links from the given set of chains, and all links from those chains, recursively. @@ -410,7 +405,7 @@ def _get_chain_links( ) txn.execute(sql % (clause,), args) - links: Dict[int, List[Tuple[int, int, int]]] = {} + links: dict[int, list[tuple[int, int, int]]] = {} for ( origin_chain_id, @@ -428,7 +423,7 @@ def _get_chain_links( def _get_auth_chain_ids_txn( self, txn: LoggingTransaction, event_ids: Collection[str], include_given: bool - ) -> Set[str]: + ) -> set[str]: """Calculates the auth chain IDs. This is used when we don't have a cover index for the room. @@ -449,10 +444,10 @@ def _get_auth_chain_ids_txn( front = set(event_ids) while front: - new_front: Set[str] = set() + new_front: set[str] = set() for chunk in batch_iter(front, 100): # Pull the auth events either from the cache or DB. - to_fetch: List[str] = [] # Event IDs to fetch from DB + to_fetch: list[str] = [] # Event IDs to fetch from DB for event_id in chunk: res = self._event_auth_cache.get(event_id) if res is None: @@ -468,7 +463,7 @@ def _get_auth_chain_ids_txn( # Note we need to batch up the results by event ID before # adding to the cache. 
- to_cache: Dict[str, List[Tuple[str, int]]] = {} + to_cache: dict[str, list[tuple[str, int]]] = {} for event_id, auth_event_id, auth_event_depth in txn: to_cache.setdefault(event_id, []).append( (auth_event_id, auth_event_depth) @@ -488,8 +483,8 @@ def _get_auth_chain_ids_txn( async def get_auth_chain_difference( self, room_id: str, - state_sets: List[Set[str]], - ) -> Set[str]: + state_sets: list[set[str]], + ) -> set[str]: state_diff = await self.get_auth_chain_difference_extended( room_id, state_sets, None, None ) @@ -498,9 +493,9 @@ async def get_auth_chain_difference( async def get_auth_chain_difference_extended( self, room_id: str, - state_sets: List[Set[str]], - conflicted_set: Optional[Set[str]], - additional_backwards_reachable_conflicted_events: Optional[Set[str]], + state_sets: list[set[str]], + conflicted_set: Optional[set[str]], + additional_backwards_reachable_conflicted_events: Optional[set[str]], ) -> StateDifference: """ "Given sets of state events figure out the auth chain difference (as per state res v2 algorithm). @@ -560,9 +555,9 @@ def _get_auth_chain_difference_using_cover_index_txn( self, txn: LoggingTransaction, room_id: str, - state_sets: List[Set[str]], - conflicted_set: Optional[Set[str]] = None, - additional_backwards_reachable_conflicted_events: Optional[Set[str]] = None, + state_sets: list[set[str]], + conflicted_set: Optional[set[str]] = None, + additional_backwards_reachable_conflicted_events: Optional[set[str]] = None, ) -> StateDifference: """Calculates the auth chain difference using the chain index. @@ -587,14 +582,14 @@ def _get_auth_chain_difference_using_cover_index_txn( ) # Map from event_id -> (chain ID, seq no) - chain_info: Dict[str, Tuple[int, int]] = {} + chain_info: dict[str, tuple[int, int]] = {} # Map from chain ID -> seq no -> event Id - chain_to_event: Dict[int, Dict[int, str]] = {} + chain_to_event: dict[int, dict[int, str]] = {} # All the chains that we've found that are reachable from the state # sets. 
- seen_chains: Set[int] = set() + seen_chains: set[int] = set() # Fetch the chain cover index for the initial set of events we're # considering. @@ -621,7 +616,7 @@ def fetch_chain_info(events_to_fetch: Collection[str]) -> None: events_missing_chain_info = initial_events.difference(chain_info) # The result set to return, i.e. the auth chain difference. - auth_difference_result: Set[str] = set() + auth_difference_result: set[str] = set() if events_missing_chain_info: # For some reason we have events we haven't calculated the chain @@ -652,21 +647,21 @@ def fetch_chain_info(events_to_fetch: Collection[str]) -> None: # A subset of chain_info for conflicted events only, as we need to # loop all conflicted chain positions. Map from event_id -> (chain ID, seq no) - conflicted_chain_positions: Dict[str, Tuple[int, int]] = {} + conflicted_chain_positions: dict[str, tuple[int, int]] = {} # For each chain, remember the positions where conflicted events are. # We need this for calculating the forward reachable events. - conflicted_chain_to_seq: Dict[int, Set[int]] = {} # chain_id => {seq_num} + conflicted_chain_to_seq: dict[int, set[int]] = {} # chain_id => {seq_num} # A subset of chain_info for additional backwards reachable events only, as we need to # loop all additional backwards reachable events for calculating backwards reachable events. - additional_backwards_reachable_positions: Dict[ - str, Tuple[int, int] + additional_backwards_reachable_positions: dict[ + str, tuple[int, int] ] = {} # event_id => (chain_id, seq_num) # These next two fields are critical as the intersection of them is the conflicted subgraph. # We'll populate them when we walk the chain links. 
# chain_id => max(seq_num) backwards reachable (e.g 4 means 1,2,3,4 are backwards reachable) - conflicted_backwards_reachable: Dict[int, int] = {} + conflicted_backwards_reachable: dict[int, int] = {} # chain_id => min(seq_num) forwards reachable (e.g 4 means 4,5,6..n are forwards reachable) - conflicted_forwards_reachable: Dict[int, int] = {} + conflicted_forwards_reachable: dict[int, int] = {} # populate the v2.1 data structures if is_state_res_v21: @@ -688,9 +683,9 @@ def fetch_chain_info(events_to_fetch: Collection[str]) -> None: # Corresponds to `state_sets`, except as a map from chain ID to max # sequence number reachable from the state set. - set_to_chain: List[Dict[int, int]] = [] + set_to_chain: list[dict[int, int]] = [] for state_set in state_sets: - chains: Dict[int, int] = {} + chains: dict[int, int] = {} set_to_chain.append(chains) for state_id in state_set: @@ -802,7 +797,7 @@ def fetch_chain_info(events_to_fetch: Collection[str]) -> None: # Mapping from chain ID to the range of sequence numbers that should be # pulled from the database. - auth_diff_chain_to_gap: Dict[int, Tuple[int, int]] = {} + auth_diff_chain_to_gap: dict[int, tuple[int, int]] = {} for chain_id in seen_chains: min_seq_no = min(chains.get(chain_id, 0) for chains in set_to_chain) @@ -820,10 +815,10 @@ def fetch_chain_info(events_to_fetch: Collection[str]) -> None: auth_diff_chain_to_gap[chain_id] = (min_seq_no, max_seq_no) break - conflicted_subgraph_result: Set[str] = set() + conflicted_subgraph_result: set[str] = set() # Mapping from chain ID to the range of sequence numbers that should be # pulled from the database. - conflicted_subgraph_chain_to_gap: Dict[int, Tuple[int, int]] = {} + conflicted_subgraph_chain_to_gap: dict[int, tuple[int, int]] = {} if is_state_res_v21: # also include the conflicted subgraph using backward/forward reachability info from all # the conflicted events. 
To calculate this, we want to extract the intersection between @@ -882,9 +877,9 @@ def fetch_chain_info(events_to_fetch: Collection[str]) -> None: ) def _fetch_event_ids_from_chains_txn( - self, txn: LoggingTransaction, chains: Dict[int, Tuple[int, int]] - ) -> Set[str]: - result: Set[str] = set() + self, txn: LoggingTransaction, chains: dict[int, tuple[int, int]] + ) -> set[str]: + result: set[str] = set() if isinstance(self.database_engine, PostgresEngine): # We can use `execute_values` to efficiently fetch the gaps when # using postgres. @@ -918,10 +913,10 @@ def _fixup_auth_chain_difference_sets( self, txn: LoggingTransaction, room_id: str, - state_sets: List[Set[str]], - events_missing_chain_info: Set[str], + state_sets: list[set[str]], + events_missing_chain_info: set[str], events_that_have_chain_index: Collection[str], - ) -> Set[str]: + ) -> set[str]: """Helper for `_get_auth_chain_difference_using_cover_index_txn` to handle the case where we haven't calculated the chain cover index for all events. @@ -962,7 +957,7 @@ def _fixup_auth_chain_difference_sets( WHERE tc.room_id = ? """ txn.execute(sql, (room_id,)) - event_to_auth_ids: Dict[str, Set[str]] = {} + event_to_auth_ids: dict[str, set[str]] = {} events_that_have_chain_index = set(events_that_have_chain_index) for event_id, auth_id, auth_id_has_chain in txn: s = event_to_auth_ids.setdefault(event_id, set()) @@ -982,7 +977,7 @@ def _fixup_auth_chain_difference_sets( raise _NoChainCoverIndex(room_id) # Create a map from event IDs we care about to their partial auth chain. - event_id_to_partial_auth_chain: Dict[str, Set[str]] = {} + event_id_to_partial_auth_chain: dict[str, set[str]] = {} for event_id, auth_ids in event_to_auth_ids.items(): if not any(event_id in state_set for state_set in state_sets): continue @@ -1005,7 +1000,7 @@ def _fixup_auth_chain_difference_sets( # 1. Update the state sets to only include indexed events; and # 2. 
Create a new list containing the auth chains of the un-indexed # events - unindexed_state_sets: List[Set[str]] = [] + unindexed_state_sets: list[set[str]] = [] for state_set in state_sets: unindexed_state_set = set() for event_id, auth_chain in event_id_to_partial_auth_chain.items(): @@ -1031,8 +1026,8 @@ def _fixup_auth_chain_difference_sets( return union - intersection def _get_auth_chain_difference_txn( - self, txn: LoggingTransaction, state_sets: List[Set[str]] - ) -> Set[str]: + self, txn: LoggingTransaction, state_sets: list[set[str]] + ) -> set[str]: """Calculates the auth chain difference using a breadth first search. This is used when we don't have a cover index for the room. @@ -1087,7 +1082,7 @@ def _get_auth_chain_difference_txn( } # The sorted list of events whose auth chains we should walk. - search: List[Tuple[int, str]] = [] + search: list[tuple[int, str]] = [] # We need to get the depth of the initial events for sorting purposes. sql = """ @@ -1104,13 +1099,13 @@ def _get_auth_chain_difference_txn( # I think building a temporary list with fetchall is more efficient than # just `search.extend(txn)`, but this is unconfirmed - search.extend(cast(List[Tuple[int, str]], txn.fetchall())) + search.extend(cast(list[tuple[int, str]], txn.fetchall())) # sort by depth search.sort() # Map from event to its auth events - event_to_auth_events: Dict[str, Set[str]] = {} + event_to_auth_events: dict[str, set[str]] = {} base_sql = """ SELECT a.event_id, auth_id, depth @@ -1129,8 +1124,8 @@ def _get_auth_chain_difference_txn( # currently walking, either from cache or DB. 
search, chunk = search[:-100], search[-100:] - found: List[Tuple[str, str, int]] = [] # Results found - to_fetch: List[str] = [] # Event IDs to fetch from DB + found: list[tuple[str, str, int]] = [] # Results found + to_fetch: list[str] = [] # Event IDs to fetch from DB for _, event_id in chunk: res = self._event_auth_cache.get(event_id) if res is None: @@ -1147,7 +1142,7 @@ def _get_auth_chain_difference_txn( # We parse the results and add the to the `found` set and the # cache (note we need to batch up the results by event ID before # adding to the cache). - to_cache: Dict[str, List[Tuple[str, int]]] = {} + to_cache: dict[str, list[tuple[str, int]]] = {} for event_id, auth_event_id, auth_event_depth in txn: to_cache.setdefault(event_id, []).append( (auth_event_id, auth_event_depth) @@ -1204,7 +1199,7 @@ async def get_backfill_points_in_room( room_id: str, current_depth: int, limit: int, - ) -> List[Tuple[str, int]]: + ) -> list[tuple[str, int]]: """ Get the backward extremities to backfill from in the room along with the approximate depth. @@ -1235,7 +1230,7 @@ async def get_backfill_points_in_room( def get_backfill_points_in_room_txn( txn: LoggingTransaction, room_id: str - ) -> List[Tuple[str, int]]: + ) -> list[tuple[str, int]]: # Assemble a tuple lookup of event_id -> depth for the oldest events # we know of in the room. 
Backwards extremeties are the oldest # events we know of in the room but we only know of them because @@ -1336,7 +1331,7 @@ def get_backfill_points_in_room_txn( ), ) - return cast(List[Tuple[str, int]], txn.fetchall()) + return cast(list[tuple[str, int]], txn.fetchall()) return await self.db_pool.runInteraction( "get_backfill_points_in_room", @@ -1346,14 +1341,14 @@ def get_backfill_points_in_room_txn( async def get_max_depth_of( self, event_ids: Collection[str] - ) -> Tuple[Optional[str], int]: + ) -> tuple[Optional[str], int]: """Returns the event ID and depth for the event that has the max depth from a set of event IDs Args: event_ids: The event IDs to calculate the max depth of. """ rows = cast( - List[Tuple[str, int]], + list[tuple[str, int]], await self.db_pool.simple_select_many_batch( table="events", column="event_id", @@ -1378,14 +1373,14 @@ async def get_max_depth_of( return max_depth_event_id, current_max_depth - async def get_min_depth_of(self, event_ids: List[str]) -> Tuple[Optional[str], int]: + async def get_min_depth_of(self, event_ids: list[str]) -> tuple[Optional[str], int]: """Returns the event ID and depth for the event that has the min depth from a set of event IDs Args: event_ids: The event IDs to calculate the max depth of. """ rows = cast( - List[Tuple[str, int]], + list[tuple[str, int]], await self.db_pool.simple_select_many_batch( table="events", column="event_id", @@ -1410,7 +1405,7 @@ async def get_min_depth_of(self, event_ids: List[str]) -> Tuple[Optional[str], i return min_depth_event_id, current_min_depth - async def get_prev_events_for_room(self, room_id: str) -> List[str]: + async def get_prev_events_for_room(self, room_id: str) -> list[str]: """ Gets a subset of the current forward extremities in the given room. 
@@ -1431,7 +1426,7 @@ async def get_prev_events_for_room(self, room_id: str) -> List[str]: def _get_prev_events_for_room_txn( self, txn: LoggingTransaction, room_id: str - ) -> List[str]: + ) -> list[str]: # we just use the 10 newest events. Older events will become # prev_events of future events. @@ -1449,7 +1444,7 @@ def _get_prev_events_for_room_txn( async def get_rooms_with_many_extremities( self, min_count: int, limit: int, room_id_filter: Iterable[str] - ) -> List[str]: + ) -> list[str]: """Get the top rooms with at least N extremities. Args: @@ -1462,7 +1457,7 @@ async def get_rooms_with_many_extremities( sorted by extremity count. """ - def _get_rooms_with_many_extremities_txn(txn: LoggingTransaction) -> List[str]: + def _get_rooms_with_many_extremities_txn(txn: LoggingTransaction) -> list[str]: where_clause = "1=1" if room_id_filter: where_clause = "room_id NOT IN (%s)" % ( @@ -1487,7 +1482,7 @@ def _get_rooms_with_many_extremities_txn(txn: LoggingTransaction) -> List[str]: ) @cached(max_entries=5000, iterable=True) - async def get_latest_event_ids_in_room(self, room_id: str) -> FrozenSet[str]: + async def get_latest_event_ids_in_room(self, room_id: str) -> frozenset[str]: event_ids = await self.db_pool.simple_select_onecol( table="event_forward_extremities", keyvalues={"room_id": room_id}, @@ -1610,7 +1605,7 @@ async def _get_forward_extremeties_for_room( WHERE room_id = ? 
""" - def get_forward_extremeties_for_room_txn(txn: LoggingTransaction) -> List[str]: + def get_forward_extremeties_for_room_txn(txn: LoggingTransaction) -> list[str]: txn.execute(sql, (stream_ordering, room_id)) return [event_id for (event_id,) in txn] @@ -1627,7 +1622,7 @@ def get_forward_extremeties_for_room_txn(txn: LoggingTransaction) -> List[str]: def _get_connected_prev_event_backfill_results_txn( self, txn: LoggingTransaction, event_id: str, limit: int - ) -> List[BackfillQueueNavigationItem]: + ) -> list[BackfillQueueNavigationItem]: """ Find any events connected by prev_event the specified event_id. @@ -1675,8 +1670,8 @@ def _get_connected_prev_event_backfill_results_txn( ] async def get_backfill_events( - self, room_id: str, seed_event_id_list: List[str], limit: int - ) -> List[EventBase]: + self, room_id: str, seed_event_id_list: list[str], limit: int + ) -> list[EventBase]: """Get a list of Events for a given topic that occurred before (and including) the events in seed_event_id_list. Return a list of max size `limit` @@ -1704,9 +1699,9 @@ def _get_backfill_events( self, txn: LoggingTransaction, room_id: str, - seed_event_id_list: List[str], + seed_event_id_list: list[str], limit: int, - ) -> Set[str]: + ) -> set[str]: """ We want to make sure that we do a breadth-first, "depth" ordered search. We also handle navigating historical branches of history connected by @@ -1719,7 +1714,7 @@ def _get_backfill_events( limit, ) - event_id_results: Set[str] = set() + event_id_results: set[str] = set() # In a PriorityQueue, the lowest valued entries are retrieved first. # We're using depth as the priority in the queue and tie-break based on @@ -1727,7 +1722,7 @@ def _get_backfill_events( # highest and newest-in-time message. We add events to the queue with a # negative depth so that we process the newest-in-time messages first # going backwards in time. stream_ordering follows the same pattern. 
- queue: "PriorityQueue[Tuple[int, int, str, str]]" = PriorityQueue() + queue: "PriorityQueue[tuple[int, int, str, str]]" = PriorityQueue() for seed_event_id in seed_event_id_list: event_lookup_result = self.db_pool.simple_select_one_txn( @@ -1847,7 +1842,7 @@ def _record_event_failed_pull_attempt_upsert_txn( @trace async def get_event_ids_with_failed_pull_attempts( self, event_ids: StrCollection - ) -> Set[str]: + ) -> set[str]: """ Filter the given list of `event_ids` and return events which have any failed pull attempts. @@ -1860,7 +1855,7 @@ async def get_event_ids_with_failed_pull_attempts( """ rows = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_many_batch( table="event_failed_pull_attempts", column="event_id", @@ -1877,7 +1872,7 @@ async def get_event_ids_to_not_pull_from_backoff( self, room_id: str, event_ids: Collection[str], - ) -> Dict[str, int]: + ) -> dict[str, int]: """ Filter down the events to ones that we've failed to pull before recently. Uses exponential backoff. @@ -1891,7 +1886,7 @@ async def get_event_ids_to_not_pull_from_backoff( next timestamp at which we may try pulling them again. 
""" event_failed_pull_attempts = cast( - List[Tuple[str, int, int]], + list[tuple[str, int, int]], await self.db_pool.simple_select_many_batch( table="event_failed_pull_attempts", column="event_id", @@ -1932,10 +1927,10 @@ async def get_event_ids_to_not_pull_from_backoff( async def get_missing_events( self, room_id: str, - earliest_events: List[str], - latest_events: List[str], + earliest_events: list[str], + latest_events: list[str], limit: int, - ) -> List[EventBase]: + ) -> list[EventBase]: ids = await self.db_pool.runInteraction( "get_missing_events", self._get_missing_events, @@ -1950,13 +1945,13 @@ def _get_missing_events( self, txn: LoggingTransaction, room_id: str, - earliest_events: List[str], - latest_events: List[str], + earliest_events: list[str], + latest_events: list[str], limit: int, - ) -> List[str]: + ) -> list[str]: seen_events = set(earliest_events) front = set(latest_events) - seen_events - event_results: List[str] = [] + event_results: list[str] = [] query = ( "SELECT prev_event_id FROM event_edges " @@ -1983,7 +1978,7 @@ def _get_missing_events( @trace @tag_args - async def get_successor_events(self, event_id: str) -> List[str]: + async def get_successor_events(self, event_id: str) -> list[str]: """Fetch all events that have the given event as a prev event Args: @@ -2057,7 +2052,7 @@ def _remove_received_event_from_staging_txn( """ txn.execute(sql, (origin, event_id)) - row = cast(Optional[Tuple[int]], txn.fetchone()) + row = cast(Optional[tuple[int]], txn.fetchone()) if row is None: return None @@ -2104,7 +2099,7 @@ def _remove_received_event_from_staging_txn( async def get_next_staged_event_id_for_room( self, room_id: str, - ) -> Optional[Tuple[str, str]]: + ) -> Optional[tuple[str, str]]: """ Get the next event ID in the staging area for the given room. 
@@ -2114,7 +2109,7 @@ async def get_next_staged_event_id_for_room( def _get_next_staged_event_id_for_room_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[str, str]]: + ) -> Optional[tuple[str, str]]: sql = """ SELECT origin, event_id FROM federation_inbound_events_staging @@ -2125,7 +2120,7 @@ def _get_next_staged_event_id_for_room_txn( txn.execute(sql, (room_id,)) - return cast(Optional[Tuple[str, str]], txn.fetchone()) + return cast(Optional[tuple[str, str]], txn.fetchone()) return await self.db_pool.runInteraction( "get_next_staged_event_id_for_room", _get_next_staged_event_id_for_room_txn @@ -2135,12 +2130,12 @@ async def get_next_staged_event_for_room( self, room_id: str, room_version: RoomVersion, - ) -> Optional[Tuple[str, EventBase]]: + ) -> Optional[tuple[str, EventBase]]: """Get the next event in the staging area for the given room.""" def _get_next_staged_event_for_room_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[str, str, str]]: + ) -> Optional[tuple[str, str, str]]: sql = """ SELECT event_json, internal_metadata, origin FROM federation_inbound_events_staging @@ -2150,7 +2145,7 @@ def _get_next_staged_event_for_room_txn( """ txn.execute(sql, (room_id,)) - return cast(Optional[Tuple[str, str, str]], txn.fetchone()) + return cast(Optional[tuple[str, str, str]], txn.fetchone()) row = await self.db_pool.runInteraction( "get_next_staged_event_for_room", _get_next_staged_event_for_room_txn @@ -2199,7 +2194,7 @@ async def prune_staged_events_in_room( # by other events in the queue). We do this so that we can always # backpaginate in all the events we have dropped. rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="federation_inbound_events_staging", keyvalues={"room_id": room_id}, @@ -2210,8 +2205,8 @@ async def prune_staged_events_in_room( # Find the set of events referenced by those in the queue, as well as # collecting all the event IDs in the queue. 
- referenced_events: Set[str] = set() - seen_events: Set[str] = set() + referenced_events: set[str] = set() + seen_events: set[str] = set() for event_id, event_json in rows: seen_events.add(event_id) event_d = db_to_json(event_json) @@ -2272,7 +2267,7 @@ async def prune_staged_events_in_room( return True - async def get_all_rooms_with_staged_incoming_events(self) -> List[str]: + async def get_all_rooms_with_staged_incoming_events(self) -> list[str]: """Get the room IDs of all events currently staged.""" return await self.db_pool.simple_select_onecol( table="federation_inbound_events_staging", @@ -2287,15 +2282,15 @@ async def _get_stats_for_federation_staging(self) -> None: def _get_stats_for_federation_staging_txn( txn: LoggingTransaction, - ) -> Tuple[int, int]: + ) -> tuple[int, int]: txn.execute("SELECT count(*) FROM federation_inbound_events_staging") - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) txn.execute( "SELECT min(received_ts) FROM federation_inbound_events_staging" ) - (received_ts,) = cast(Tuple[Optional[int]], txn.fetchone()) + (received_ts,) = cast(tuple[Optional[int]], txn.fetchone()) # If there is nothing in the staging area default it to 0. age = 0 @@ -2409,8 +2404,8 @@ def delete_event_auth(txn: LoggingTransaction) -> bool: def _materialize( origin_chain_id: int, origin_sequence_number: int, - links: Dict[int, List[Tuple[int, int, int]]], - materialized: Dict[int, int], + links: dict[int, list[tuple[int, int, int]]], + materialized: dict[int, int], backwards: bool = True, ) -> None: """Helper function for fetching auth chain links. 
For a given origin chain @@ -2468,10 +2463,10 @@ def _materialize( def _generate_forward_links( - links: Dict[int, List[Tuple[int, int, int]]], -) -> Dict[int, List[Tuple[int, int, int]]]: + links: dict[int, list[tuple[int, int, int]]], +) -> dict[int, list[tuple[int, int, int]]]: """Reverse the input links from the given backwards links""" - new_links: Dict[int, List[Tuple[int, int, int]]] = {} + new_links: dict[int, list[tuple[int, int, int]]] = {} for origin_chain_id, chain_links in links.items(): for origin_seq_num, target_chain_id, target_seq_num in chain_links: new_links.setdefault(target_chain_id, []).append( @@ -2481,9 +2476,9 @@ def _generate_forward_links( def accumulate_forwards_reachable_events( - conflicted_forwards_reachable: Dict[int, int], - back_links: Dict[int, List[Tuple[int, int, int]]], - conflicted_chain_positions: Dict[str, Tuple[int, int]], + conflicted_forwards_reachable: dict[int, int], + back_links: dict[int, list[tuple[int, int, int]]], + conflicted_chain_positions: dict[str, tuple[int, int]], ) -> None: """Accumulate new forwards reachable events using the back_links provided. 
diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py index ec26aedc6b..d65ab82fff 100644 --- a/synapse/storage/databases/main/event_push_actions.py +++ b/synapse/storage/databases/main/event_push_actions.py @@ -84,11 +84,8 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, - List, Mapping, Optional, - Tuple, Union, cast, ) @@ -118,11 +115,11 @@ logger = logging.getLogger(__name__) -DEFAULT_NOTIF_ACTION: List[Union[dict, str]] = [ +DEFAULT_NOTIF_ACTION: list[Union[dict, str]] = [ "notify", {"set_tweak": "highlight", "value": False}, ] -DEFAULT_HIGHLIGHT_ACTION: List[Union[dict, str]] = [ +DEFAULT_HIGHLIGHT_ACTION: list[Union[dict, str]] = [ "notify", {"set_tweak": "sound", "value": "default"}, {"set_tweak": "highlight"}, @@ -138,7 +135,7 @@ class _RoomReceipt: unthreaded_stream_ordering: int = 0 # threaded_stream_ordering includes the main pseudo-thread. - threaded_stream_ordering: Dict[str, int] = attr.Factory(dict) + threaded_stream_ordering: dict[str, int] = attr.Factory(dict) def is_unread(self, thread_id: str, stream_ordering: int) -> bool: """Returns True if the stream ordering is unread according to the receipt information.""" @@ -165,7 +162,7 @@ class HttpPushAction: event_id: str room_id: str stream_ordering: int - actions: List[Union[dict, str]] + actions: list[Union[dict, str]] @attr.s(slots=True, frozen=True, auto_attribs=True) @@ -244,7 +241,7 @@ def _serialize_action( return json_encoder.encode(actions) -def _deserialize_action(actions: str, is_highlight: bool) -> List[Union[dict, str]]: +def _deserialize_action(actions: str, is_highlight: bool) -> list[Union[dict, str]]: """Custom deserializer for actions. 
This allows us to "compress" common actions""" if actions: return db_to_json(actions) @@ -256,7 +253,7 @@ def _deserialize_action(actions: str, is_highlight: bool) -> List[Union[dict, st class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBaseStore): - _background_tasks: List[LoopingCall] = [] + _background_tasks: list[LoopingCall] = [] def __init__( self, @@ -351,7 +348,7 @@ def drop_null_thread_id_indexes_txn(txn: LoggingTransaction) -> None: ) return 0 - async def get_unread_counts_by_room_for_user(self, user_id: str) -> Dict[str, int]: + async def get_unread_counts_by_room_for_user(self, user_id: str) -> dict[str, int]: """Get the notification count by room for a user. Only considers notifications, not highlight or unread counts, and threads are currently aggregated under their room. @@ -373,7 +370,7 @@ async def get_unread_counts_by_room_for_user(self, user_id: str) -> Dict[str, in def _get_unread_counts_by_room_for_user_txn( self, txn: LoggingTransaction, user_id: str - ) -> Dict[str, int]: + ) -> dict[str, int]: receipt_types_clause, args = make_in_list_sql_clause( self.database_engine, "receipt_type", @@ -440,7 +437,7 @@ def _get_unread_counts_by_room_for_user_txn( txn.execute(sql, args) seen_thread_ids = set() - room_to_count: Dict[str, int] = defaultdict(int) + room_to_count: dict[str, int] = defaultdict(int) for room_id, thread_id, notif_count in txn: room_to_count[room_id] += notif_count @@ -585,7 +582,7 @@ def _get_unread_counts_by_pos_txn( """ main_counts = NotifCounts() - thread_counts: Dict[str, NotifCounts] = {} + thread_counts: dict[str, NotifCounts] = {} def _get_thread(thread_id: str) -> NotifCounts: if thread_id == MAIN_TIMELINE: @@ -778,7 +775,7 @@ def _get_notif_unread_count_for_user_room( stream_ordering: int, max_stream_ordering: Optional[int] = None, thread_id: Optional[str] = None, - ) -> List[Tuple[int, int, str]]: + ) -> list[tuple[int, int, str]]: """Returns the notify and unread counts from 
`event_push_actions` for the given user/room in the given range. @@ -840,12 +837,12 @@ def _get_notif_unread_count_for_user_room( """ txn.execute(sql, args) - return cast(List[Tuple[int, int, str]], txn.fetchall()) + return cast(list[tuple[int, int, str]], txn.fetchall()) async def get_push_action_users_in_range( self, min_stream_ordering: int, max_stream_ordering: int - ) -> List[str]: - def f(txn: LoggingTransaction) -> List[str]: + ) -> list[str]: + def f(txn: LoggingTransaction) -> list[str]: sql = ( "SELECT DISTINCT(user_id) FROM event_push_actions WHERE" " stream_ordering >= ? AND stream_ordering <= ? AND notif = 1" @@ -861,7 +858,7 @@ def _get_receipts_for_room_and_threads_txn( user_id: str, room_ids: StrCollection, thread_ids: StrCollection, - ) -> Dict[str, _RoomReceipt]: + ) -> dict[str, _RoomReceipt]: """ Get (private) read receipts for a user in each of the given room IDs and thread IDs. @@ -936,7 +933,7 @@ def _get_receipts_for_room_and_threads_txn( txn.execute(sql, args) - result: Dict[str, _RoomReceipt] = {} + result: dict[str, _RoomReceipt] = {} for room_id, thread_id, stream_ordering in txn: room_receipt = result.setdefault(room_id, _RoomReceipt()) if thread_id is None: @@ -952,7 +949,7 @@ async def get_unread_push_actions_for_user_in_range_for_http( min_stream_ordering: int, max_stream_ordering: int, limit: int = 20, - ) -> List[HttpPushAction]: + ) -> list[HttpPushAction]: """Get a list of the most recent unread push actions for a given user, within the given stream ordering range. Called by the httppusher. @@ -971,7 +968,7 @@ async def get_unread_push_actions_for_user_in_range_for_http( def get_push_actions_txn( txn: LoggingTransaction, - ) -> List[Tuple[str, str, str, int, str, bool]]: + ) -> list[tuple[str, str, str, int, str, bool]]: sql = """ SELECT ep.event_id, ep.room_id, ep.thread_id, ep.stream_ordering, ep.actions, ep.highlight @@ -984,7 +981,7 @@ def get_push_actions_txn( ORDER BY ep.stream_ordering ASC LIMIT ? 
""" txn.execute(sql, (user_id, min_stream_ordering, max_stream_ordering, limit)) - return cast(List[Tuple[str, str, str, int, str, bool]], txn.fetchall()) + return cast(list[tuple[str, str, str, int, str, bool]], txn.fetchall()) push_actions = await self.db_pool.runInteraction( "get_unread_push_actions_for_user_in_range_http", get_push_actions_txn @@ -1040,7 +1037,7 @@ async def get_unread_push_actions_for_user_in_range_for_email( min_stream_ordering: int, max_stream_ordering: int, limit: int = 20, - ) -> List[EmailPushAction]: + ) -> list[EmailPushAction]: """Get a list of the most recent unread push actions for a given user, within the given stream ordering range. Called by the emailpusher @@ -1059,7 +1056,7 @@ async def get_unread_push_actions_for_user_in_range_for_email( def get_push_actions_txn( txn: LoggingTransaction, - ) -> List[Tuple[str, str, str, int, str, bool, int]]: + ) -> list[tuple[str, str, str, int, str, bool, int]]: sql = """ SELECT ep.event_id, ep.room_id, ep.thread_id, ep.stream_ordering, ep.actions, ep.highlight, e.received_ts @@ -1073,7 +1070,7 @@ def get_push_actions_txn( ORDER BY ep.stream_ordering DESC LIMIT ? """ txn.execute(sql, (user_id, min_stream_ordering, max_stream_ordering, limit)) - return cast(List[Tuple[str, str, str, int, str, bool, int]], txn.fetchall()) + return cast(list[tuple[str, str, str, int, str, bool, int]], txn.fetchall()) push_actions = await self.db_pool.runInteraction( "get_unread_push_actions_for_user_in_range_email", get_push_actions_txn @@ -1159,7 +1156,7 @@ def _get_if_maybe_push_in_range_for_user_txn(txn: LoggingTransaction) -> bool: async def add_push_actions_to_staging( self, event_id: str, - user_id_actions: Dict[str, Collection[Union[Mapping, str]]], + user_id_actions: dict[str, Collection[Union[Mapping, str]]], count_as_unread: bool, thread_id: str, ) -> None: @@ -1179,7 +1176,7 @@ async def add_push_actions_to_staging( # can be used to insert into the `event_push_actions_staging` table. 
def _gen_entry( user_id: str, actions: Collection[Union[Mapping, str]] - ) -> Tuple[str, str, str, int, int, int, str, int]: + ) -> tuple[str, str, str, int, int, int, str, int]: is_highlight = 1 if _action_has_highlight(actions) else 0 notif = 1 if "notify" in actions else 0 return ( @@ -1296,7 +1293,7 @@ def _find_first_stream_ordering_after_ts_txn( The stream ordering """ txn.execute("SELECT MAX(stream_ordering) FROM events") - max_stream_ordering = cast(Tuple[Optional[int]], txn.fetchone())[0] + max_stream_ordering = cast(tuple[Optional[int]], txn.fetchone())[0] if max_stream_ordering is None: return 0 @@ -1355,7 +1352,7 @@ def _find_first_stream_ordering_after_ts_txn( async def get_time_of_last_push_action_before( self, stream_ordering: int ) -> Optional[int]: - def f(txn: LoggingTransaction) -> Optional[Tuple[int]]: + def f(txn: LoggingTransaction) -> Optional[tuple[int]]: sql = """ SELECT e.received_ts FROM event_push_actions AS ep @@ -1365,7 +1362,7 @@ def f(txn: LoggingTransaction) -> Optional[Tuple[int]]: LIMIT 1 """ txn.execute(sql, (stream_ordering,)) - return cast(Optional[Tuple[int]], txn.fetchone()) + return cast(Optional[tuple[int]], txn.fetchone()) result = await self.db_pool.runInteraction( "get_time_of_last_push_action_before", f @@ -1457,7 +1454,7 @@ def _handle_new_receipts_for_notifs_txn(self, txn: LoggingTransaction) -> bool: limit, ), ) - rows = cast(List[Tuple[int, str, str, Optional[str], int]], txn.fetchall()) + rows = cast(list[tuple[int, str, str, Optional[str], int]], txn.fetchall()) # For each new read receipt we delete push actions from before it and # recalculate the summary. @@ -1469,7 +1466,7 @@ def _handle_new_receipts_for_notifs_txn(self, txn: LoggingTransaction) -> bool: continue thread_clause = "" - thread_args: Tuple = () + thread_args: tuple = () if thread_id is not None: thread_clause = "AND thread_id = ?" 
thread_args = (thread_id,) @@ -1654,7 +1651,7 @@ def _rotate_notifs_before_txn( # object because we might not have the same amount of rows in each of them. To do # this, we use a dict indexed on the user ID and room ID to make it easier to # populate. - summaries: Dict[Tuple[str, str, str], _EventPushSummary] = {} + summaries: dict[tuple[str, str, str], _EventPushSummary] = {} for row in txn: summaries[(row[0], row[1], row[2])] = _EventPushSummary( unread_count=row[3], @@ -1832,10 +1829,10 @@ async def get_push_actions_for_user( before: Optional[int] = None, limit: int = 50, only_highlight: bool = False, - ) -> List[UserPushAction]: + ) -> list[UserPushAction]: def f( txn: LoggingTransaction, - ) -> List[Tuple[str, str, int, int, str, bool, str, int]]: + ) -> list[tuple[str, str, int, int, str, bool, str, int]]: before_clause = "" if before: before_clause = "AND epa.stream_ordering < ?" @@ -1863,7 +1860,7 @@ def f( """ % (before_clause,) txn.execute(sql, args) return cast( - List[Tuple[str, str, int, int, str, bool, str, int]], txn.fetchall() + list[tuple[str, str, int, int, str, bool, str, int]], txn.fetchall() ) push_actions = await self.db_pool.runInteraction("get_push_actions_for_user", f) diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index b6037468b3..da9ecfbdb9 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -27,14 +27,10 @@ TYPE_CHECKING, Any, Collection, - Dict, Generator, Iterable, - List, Optional, Sequence, - Set, - Tuple, TypedDict, cast, ) @@ -129,7 +125,7 @@ class DeltaState: should e.g. be removed from `current_state_events` table. 
""" - to_delete: List[Tuple[str, str]] + to_delete: list[tuple[str, str]] to_insert: StateMap[str] no_longer_in_room: bool = False @@ -207,9 +203,9 @@ class SlidingSyncTableChanges: SlidingSyncMembershipSnapshotSharedInsertValues ) # List of membership to insert into `sliding_sync_membership_snapshots` - to_insert_membership_snapshots: List[SlidingSyncMembershipInfo] + to_insert_membership_snapshots: list[SlidingSyncMembershipInfo] # List of user_id to delete from `sliding_sync_membership_snapshots` - to_delete_membership_snapshots: List[str] + to_delete_membership_snapshots: list[str] @attr.s(slots=True, auto_attribs=True) @@ -226,7 +222,7 @@ class NewEventChainLinks: chain_id: int sequence_number: int - links: List[Tuple[int, int]] = attr.Factory(list) + links: list[tuple[int, int]] = attr.Factory(list) class PersistEventsStore: @@ -274,11 +270,11 @@ def __init__( async def _persist_events_and_state_updates( self, room_id: str, - events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], *, state_delta_for_room: Optional[DeltaState], - new_forward_extremities: Optional[Set[str]], - new_event_links: Dict[str, NewEventChainLinks], + new_forward_extremities: Optional[set[str]], + new_event_links: dict[str, NewEventChainLinks], use_negative_stream_ordering: bool = False, inhibit_local_membership_updates: bool = False, ) -> None: @@ -585,21 +581,21 @@ async def _calculate_sliding_sync_table_changes( ] membership_snapshot_shared_insert_values: SlidingSyncMembershipSnapshotSharedInsertValues = {} - membership_infos_to_insert_membership_snapshots: List[ + membership_infos_to_insert_membership_snapshots: list[ SlidingSyncMembershipInfo ] = [] if to_insert: - membership_event_id_to_user_id_map: Dict[str, str] = {} + membership_event_id_to_user_id_map: dict[str, str] = {} for state_key, event_id in to_insert.items(): if state_key[0] == EventTypes.Member and self.is_mine_id(state_key[1]): membership_event_id_to_user_id_map[event_id] = 
state_key[1] - membership_event_map: Dict[str, EventBase] = {} + membership_event_map: dict[str, EventBase] = {} # In normal event persist scenarios, we should be able to find the # membership events in the `events_and_contexts` given to us but it's # possible a state reset happened which added us to the room without a # corresponding new membership event (reset back to a previous membership). - missing_membership_event_ids: Set[str] = set() + missing_membership_event_ids: set[str] = set() for membership_event_id in membership_event_id_to_user_id_map.keys(): membership_event = event_map.get(membership_event_id) if membership_event: @@ -668,7 +664,7 @@ async def _calculate_sliding_sync_table_changes( # these state events in `events_and_contexts` since we don't generally # batch up local membership changes with other events, but it can # happen. - missing_state_event_ids: Set[str] = set() + missing_state_event_ids: set[str] = set() for state_key, event_id in current_state_ids_map.items(): event = event_map.get(event_id) if event: @@ -780,7 +776,7 @@ async def _calculate_sliding_sync_table_changes( # events in the `events_and_contexts` given to us but it's possible a state # reset happened which that reset back to a previous state. current_state_map = {} - missing_event_ids: Set[str] = set() + missing_event_ids: set[str] = set() for state_key, event_id in current_state_ids_map.items(): event = event_map.get(event_id) if event: @@ -826,7 +822,7 @@ async def _calculate_sliding_sync_table_changes( async def calculate_chain_cover_index_for_events( self, room_id: str, events: Collection[EventBase] - ) -> Dict[str, NewEventChainLinks]: + ) -> dict[str, NewEventChainLinks]: # Filter to state events, and ensure there are no duplicates. 
state_events = [] seen_events = set() @@ -849,7 +845,7 @@ async def calculate_chain_cover_index_for_events( def calculate_chain_cover_index_for_events_txn( self, txn: LoggingTransaction, room_id: str, state_events: Collection[EventBase] - ) -> Dict[str, NewEventChainLinks]: + ) -> dict[str, NewEventChainLinks]: # We now calculate chain ID/sequence numbers for any state events we're # persisting. We ignore out of band memberships as we're not in the room # and won't have their auth chain (we'll fix it up later if we join the @@ -905,7 +901,7 @@ def calculate_chain_cover_index_for_events_txn( event_to_auth_chain, ) - async def _get_events_which_are_prevs(self, event_ids: Iterable[str]) -> List[str]: + async def _get_events_which_are_prevs(self, event_ids: Iterable[str]) -> list[str]: """Filter the supplied list of event_ids to get those which are prev_events of existing (non-outlier/rejected) events. @@ -915,7 +911,7 @@ async def _get_events_which_are_prevs(self, event_ids: Iterable[str]) -> List[st Returns: Filtered event ids """ - results: List[str] = [] + results: list[str] = [] def _get_events_which_are_prevs_txn( txn: LoggingTransaction, batch: Collection[str] @@ -946,7 +942,7 @@ def _get_events_which_are_prevs_txn( return results - async def _get_prevs_before_rejected(self, event_ids: Iterable[str]) -> Set[str]: + async def _get_prevs_before_rejected(self, event_ids: Iterable[str]) -> set[str]: """Get soft-failed ancestors to remove from the extremities. Given a set of events, find all those that have been soft-failed or @@ -967,7 +963,7 @@ async def _get_prevs_before_rejected(self, event_ids: Iterable[str]) -> Set[str] # The set of event_ids to return. This includes all soft-failed events # and their prev events. 
- existing_prevs: Set[str] = set() + existing_prevs: set[str] = set() def _get_prevs_before_rejected_txn( txn: LoggingTransaction, batch: Collection[str] @@ -1016,11 +1012,11 @@ def _persist_events_txn( txn: LoggingTransaction, *, room_id: str, - events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], inhibit_local_membership_updates: bool, state_delta_for_room: Optional[DeltaState], - new_forward_extremities: Optional[Set[str]], - new_event_links: Dict[str, NewEventChainLinks], + new_forward_extremities: Optional[set[str]], + new_event_links: dict[str, NewEventChainLinks], sliding_sync_table_changes: Optional[SlidingSyncTableChanges], ) -> None: """Insert some number of room events into the necessary database tables. @@ -1178,8 +1174,8 @@ def _persist_events_txn( def _persist_event_auth_chain_txn( self, txn: LoggingTransaction, - events: List[EventBase], - new_event_links: Dict[str, NewEventChainLinks], + events: list[EventBase], + new_event_links: dict[str, NewEventChainLinks], ) -> None: if new_event_links: self._persist_chain_cover_index(txn, self.db_pool, new_event_links) @@ -1212,9 +1208,9 @@ def _add_chain_cover_index( txn: LoggingTransaction, db_pool: DatabasePool, event_chain_id_gen: SequenceGenerator, - event_to_room_id: Dict[str, str], - event_to_types: Dict[str, Tuple[str, str]], - event_to_auth_chain: Dict[str, StrCollection], + event_to_room_id: dict[str, str], + event_to_types: dict[str, tuple[str, str]], + event_to_auth_chain: dict[str, StrCollection], ) -> None: """Calculate and persist the chain cover index for the given events. 
@@ -1241,10 +1237,10 @@ def _calculate_chain_cover_index( txn: LoggingTransaction, db_pool: DatabasePool, event_chain_id_gen: SequenceGenerator, - event_to_room_id: Dict[str, str], - event_to_types: Dict[str, Tuple[str, str]], - event_to_auth_chain: Dict[str, StrCollection], - ) -> Dict[str, NewEventChainLinks]: + event_to_room_id: dict[str, str], + event_to_types: dict[str, tuple[str, str]], + event_to_auth_chain: dict[str, StrCollection], + ) -> dict[str, NewEventChainLinks]: """Calculate the chain cover index for the given events. Args: @@ -1259,7 +1255,7 @@ def _calculate_chain_cover_index( """ # Map from event ID to chain ID/sequence number. - chain_map: Dict[str, Tuple[int, int]] = {} + chain_map: dict[str, tuple[int, int]] = {} # Set of event IDs to calculate chain ID/seq numbers for. events_to_calc_chain_id_for = set(event_to_room_id) @@ -1268,7 +1264,7 @@ def _calculate_chain_cover_index( # we're looking at. These should just be out of band memberships, where # we didn't have the auth chain when we first persisted. 
auth_chain_to_calc_rows = cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], db_pool.simple_select_many_txn( txn, table="event_auth_chain_to_calculate", @@ -1490,7 +1486,7 @@ def _persist_chain_cover_index( cls, txn: LoggingTransaction, db_pool: DatabasePool, - new_event_links: Dict[str, NewEventChainLinks], + new_event_links: dict[str, NewEventChainLinks], ) -> None: db_pool.simple_insert_many_txn( txn, @@ -1536,12 +1532,12 @@ def _allocate_chain_ids( txn: LoggingTransaction, db_pool: DatabasePool, event_chain_id_gen: SequenceGenerator, - event_to_room_id: Dict[str, str], - event_to_types: Dict[str, Tuple[str, str]], - event_to_auth_chain: Dict[str, StrCollection], - events_to_calc_chain_id_for: Set[str], - chain_map: Dict[str, Tuple[int, int]], - ) -> Dict[str, Tuple[int, int]]: + event_to_room_id: dict[str, str], + event_to_types: dict[str, tuple[str, str]], + event_to_auth_chain: dict[str, StrCollection], + events_to_calc_chain_id_for: set[str], + chain_map: dict[str, tuple[int, int]], + ) -> dict[str, tuple[int, int]]: """Allocates, but does not persist, chain ID/sequence numbers for the events in `events_to_calc_chain_id_for`. (c.f. _add_chain_cover_index for info on args) @@ -1573,8 +1569,8 @@ def _allocate_chain_ids( # new chain if the sequence number has already been allocated. # - existing_chains: Set[int] = set() - tree: List[Tuple[str, Optional[str]]] = [] + existing_chains: set[int] = set() + tree: list[tuple[str, Optional[str]]] = [] # We need to do this in a topologically sorted order as we want to # generate chain IDs/sequence numbers of an event's auth events before @@ -1604,7 +1600,7 @@ def _allocate_chain_ids( ) txn.execute(sql % (clause,), args) - chain_to_max_seq_no: Dict[Any, int] = {row[0]: row[1] for row in txn} + chain_to_max_seq_no: dict[Any, int] = {row[0]: row[1] for row in txn} # Allocate the new events chain ID/sequence numbers. 
# @@ -1614,8 +1610,8 @@ def _allocate_chain_ids( # number of new chain IDs in one call, replacing all temporary # objects with real allocated chain IDs. - unallocated_chain_ids: Set[object] = set() - new_chain_tuples: Dict[str, Tuple[Any, int]] = {} + unallocated_chain_ids: set[object] = set() + new_chain_tuples: dict[str, tuple[Any, int]] = {} for event_id, auth_event_id in tree: # If we reference an auth_event_id we fetch the allocated chain ID, # either from the existing `chain_map` or the newly generated @@ -1626,7 +1622,7 @@ def _allocate_chain_ids( if not existing_chain_id: existing_chain_id = chain_map[auth_event_id] - new_chain_tuple: Optional[Tuple[Any, int]] = None + new_chain_tuple: Optional[tuple[Any, int]] = None if existing_chain_id: # We found a chain ID/sequence number candidate, check its # not already taken. @@ -1653,7 +1649,7 @@ def _allocate_chain_ids( ) # Map from potentially temporary chain ID to real chain ID - chain_id_to_allocated_map: Dict[Any, int] = dict( + chain_id_to_allocated_map: dict[Any, int] = dict( zip(unallocated_chain_ids, newly_allocated_chain_ids) ) chain_id_to_allocated_map.update((c, c) for c in existing_chains) @@ -1666,12 +1662,12 @@ def _allocate_chain_ids( def _persist_transaction_ids_txn( self, txn: LoggingTransaction, - events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], ) -> None: """Persist the mapping from transaction IDs to event IDs (if defined).""" inserted_ts = self._clock.time_msec() - to_insert_device_id: List[Tuple[str, str, str, str, str, int]] = [] + to_insert_device_id: list[tuple[str, str, str, str, str, int]] = [] for event, _ in events_and_contexts: txn_id = getattr(event.internal_metadata, "txn_id", None) device_id = getattr(event.internal_metadata, "device_id", None) @@ -1899,7 +1895,7 @@ def _update_current_state_txn( sliding_sync_table_changes.joined_room_updates.values() ) - args: List[Any] = [ + args: list[Any] = [ room_id, room_id, 
sliding_sync_table_changes.joined_room_bump_stamp_to_fully_insert, @@ -2316,7 +2312,7 @@ def _update_sliding_sync_tables_with_new_persisted_events_txn( self, txn: LoggingTransaction, room_id: str, - events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], ) -> None: """ Update the latest `event_stream_ordering`/`bump_stamp` columns in the @@ -2427,7 +2423,7 @@ def _update_forward_extremities_txn( self, txn: LoggingTransaction, room_id: str, - new_forward_extremities: Set[str], + new_forward_extremities: set[str], max_stream_order: int, ) -> None: self.db_pool.simple_delete_txn( @@ -2456,8 +2452,8 @@ def _update_forward_extremities_txn( @classmethod def _filter_events_and_contexts_for_duplicates( - cls, events_and_contexts: List[EventPersistencePair] - ) -> List[EventPersistencePair]: + cls, events_and_contexts: list[EventPersistencePair] + ) -> list[EventPersistencePair]: """Ensure that we don't have the same event twice. Pick the earliest non-outlier if there is one, else the earliest one. @@ -2486,7 +2482,7 @@ def _update_room_depths_txn( self, txn: LoggingTransaction, room_id: str, - events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], ) -> None: """Update min_depth for each room @@ -2528,8 +2524,8 @@ def _update_room_depths_txn( def _update_outliers_txn( self, txn: LoggingTransaction, - events_and_contexts: List[EventPersistencePair], - ) -> List[EventPersistencePair]: + events_and_contexts: list[EventPersistencePair], + ) -> list[EventPersistencePair]: """Update any outliers with new event info. This turns outliers into ex-outliers (unless the new event was rejected), and @@ -2547,7 +2543,7 @@ def _update_outliers_txn( a room that has been un-partial stated. 
""" rows = cast( - List[Tuple[str, bool]], + list[tuple[str, bool]], self.db_pool.simple_select_many_txn( txn, "events", @@ -2740,8 +2736,8 @@ def event_dict(event: EventBase) -> JsonDict: def _store_rejected_events_txn( self, txn: LoggingTransaction, - events_and_contexts: List[EventPersistencePair], - ) -> List[EventPersistencePair]: + events_and_contexts: list[EventPersistencePair], + ) -> list[EventPersistencePair]: """Add rows to the 'rejections' table for received events which were rejected @@ -2768,8 +2764,8 @@ def _update_metadata_tables_txn( self, txn: LoggingTransaction, *, - events_and_contexts: List[EventPersistencePair], - all_events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], + all_events_and_contexts: list[EventPersistencePair], inhibit_local_membership_updates: bool = False, ) -> None: """Update all the miscellaneous tables for new events @@ -2863,9 +2859,9 @@ def _update_metadata_tables_txn( def _add_to_cache( self, txn: LoggingTransaction, - events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], ) -> None: - to_prefill: List[EventCacheEntry] = [] + to_prefill: list[EventCacheEntry] = [] ev_map = {e.event_id: e for e, _ in events_and_contexts} if not ev_map: @@ -2925,7 +2921,7 @@ def insert_labels_for_event_txn( self, txn: LoggingTransaction, event_id: str, - labels: List[str], + labels: list[str], room_id: str, topological_ordering: int, ) -> None: @@ -2967,7 +2963,7 @@ def _insert_event_expiry_txn( def _store_room_members_txn( self, txn: LoggingTransaction, - events: List[EventBase], + events: list[EventBase], *, inhibit_local_membership_updates: bool = False, ) -> None: @@ -3336,8 +3332,8 @@ def store_event_search_txn( def _set_push_actions_for_event_and_users_txn( self, txn: LoggingTransaction, - events_and_contexts: List[EventPersistencePair], - all_events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], + 
all_events_and_contexts: list[EventPersistencePair], ) -> None: """Handles moving push actions from staging table to main event_push_actions table for all events in `events_and_contexts`. @@ -3517,7 +3513,7 @@ def _update_min_depth_for_room_txn( ) def _handle_mult_prev_events( - self, txn: LoggingTransaction, events: List[EventBase] + self, txn: LoggingTransaction, events: list[EventBase] ) -> None: """ For the given event, update the event edges table and forward and @@ -3535,7 +3531,7 @@ def _handle_mult_prev_events( self._update_backward_extremeties(txn, events) def _update_backward_extremeties( - self, txn: LoggingTransaction, events: List[EventBase] + self, txn: LoggingTransaction, events: list[EventBase] ) -> None: """Updates the event_backward_extremities tables based on the new/updated events being persisted. @@ -3637,16 +3633,16 @@ class _LinkMap: # Stores the set of links as nested maps: source chain ID -> target chain ID # -> source sequence number -> target sequence number. - maps: Dict[int, Dict[int, Dict[int, int]]] = attr.Factory(dict) + maps: dict[int, dict[int, dict[int, int]]] = attr.Factory(dict) # Stores the links that have been added (with new set to true), as tuples of # `(source chain ID, source sequence no, target chain ID, target sequence no.)` - additions: Set[Tuple[int, int, int, int]] = attr.Factory(set) + additions: set[tuple[int, int, int, int]] = attr.Factory(set) def add_link( self, - src_tuple: Tuple[int, int], - target_tuple: Tuple[int, int], + src_tuple: tuple[int, int], + target_tuple: tuple[int, int], new: bool = True, ) -> bool: """Add a new link between two chains, ensuring no redundant links are added. @@ -3701,7 +3697,7 @@ def add_link( current_links[src_seq] = target_seq return True - def get_additions(self) -> Generator[Tuple[int, int, int, int], None, None]: + def get_additions(self) -> Generator[tuple[int, int, int, int], None, None]: """Gets any newly added links. 
Yields: @@ -3715,8 +3711,8 @@ def get_additions(self) -> Generator[Tuple[int, int, int, int], None, None]: def exists_path_from( self, - src_tuple: Tuple[int, int], - target_tuple: Tuple[int, int], + src_tuple: tuple[int, int], + target_tuple: tuple[int, int], ) -> bool: """Checks if there is a path between the source chain ID/sequence and target chain ID/sequence. @@ -3728,7 +3724,7 @@ def exists_path_from( return target_seq <= src_seq # We have to graph traverse the links to check for indirect paths. - visited_chains: Dict[int, int] = collections.Counter() + visited_chains: dict[int, int] = collections.Counter() search = [(src_chain, src_seq)] while search: chain, seq = search.pop() diff --git a/synapse/storage/databases/main/events_bg_updates.py b/synapse/storage/databases/main/events_bg_updates.py index 37dd8e48d5..637b9104c0 100644 --- a/synapse/storage/databases/main/events_bg_updates.py +++ b/synapse/storage/databases/main/events_bg_updates.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Optional, cast import attr @@ -97,7 +97,7 @@ class _CalculateChainCover: # Map from room_id to last depth/stream processed for each room that we have # processed all events for (i.e. 
the rooms we can flip the # `has_auth_chain_index` for) - finished_room_map: Dict[str, Tuple[int, int]] + finished_room_map: dict[str, tuple[int, int]] @attr.s(slots=True, frozen=True, auto_attribs=True) @@ -451,7 +451,7 @@ def reindex_search_txn(txn: LoggingTransaction) -> int: chunks = [event_ids[i : i + 100] for i in range(0, len(event_ids), 100)] for chunk in chunks: ev_rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], self.db_pool.simple_select_many_txn( txn, table="event_json", @@ -527,8 +527,8 @@ def _cleanup_extremities_bg_update_txn(txn: LoggingTransaction) -> int: # The set of extremity event IDs that we're checking this round original_set = set() - # A dict[str, Set[str]] of event ID to their prev events. - graph: Dict[str, Set[str]] = {} + # A dict[str, set[str]] of event ID to their prev events. + graph: dict[str, set[str]] = {} # The set of descendants of the original set that are not rejected # nor soft-failed. Ancestors of these events should be removed @@ -647,7 +647,7 @@ def _cleanup_extremities_bg_update_txn(txn: LoggingTransaction) -> int: if deleted: # We now need to invalidate the caches of these rooms rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn, table="events", @@ -851,7 +851,7 @@ async def _rejected_events_metadata(self, progress: dict, batch_size: int) -> in def get_rejected_events( txn: Cursor, - ) -> List[Tuple[str, str, JsonDict, bool, bool]]: + ) -> list[tuple[str, str, JsonDict, bool, bool]]: # Fetch rejected event json, their room version and whether we have # inserted them into the state_events or auth_events tables. # @@ -883,7 +883,7 @@ def get_rejected_events( ) return cast( - List[Tuple[str, str, JsonDict, bool, bool]], + list[tuple[str, str, JsonDict, bool, bool]], [(row[0], row[1], db_to_json(row[2]), row[3], row[4]) for row in txn], ) @@ -1126,7 +1126,7 @@ def _calculate_chain_cover_txn( # We also need to fetch the auth events for them. 
auth_events = cast( - List[Tuple[str, str]], + list[tuple[str, str]], self.db_pool.simple_select_many_txn( txn, table="event_auth", @@ -1137,7 +1137,7 @@ def _calculate_chain_cover_txn( ), ) - event_to_auth_chain: Dict[str, List[str]] = {} + event_to_auth_chain: dict[str, list[str]] = {} for event_id, auth_id in auth_events: event_to_auth_chain.setdefault(event_id, []).append(auth_id) @@ -1151,7 +1151,7 @@ def _calculate_chain_cover_txn( self.event_chain_id_gen, event_to_room_id, event_to_types, - cast(Dict[str, StrCollection], event_to_auth_chain), + cast(dict[str, StrCollection], event_to_auth_chain), ) return _CalculateChainCover( @@ -1256,7 +1256,7 @@ def _event_arbitrary_relations_txn(txn: LoggingTransaction) -> int: results = list(txn) # (event_id, parent_id, rel_type) for each relation - relations_to_insert: List[Tuple[str, str, str, str]] = [] + relations_to_insert: list[tuple[str, str, str, str]] = [] for event_id, event_json_raw in results: try: event_json = db_to_json(event_json_raw) @@ -1636,7 +1636,7 @@ async def _sliding_sync_joined_rooms_bg_update( # We don't need to fetch any progress state because we just grab the next N # events in `sliding_sync_joined_rooms_to_recalculate` - def _get_rooms_to_update_txn(txn: LoggingTransaction) -> List[Tuple[str]]: + def _get_rooms_to_update_txn(txn: LoggingTransaction) -> list[tuple[str]]: """ Returns: A list of room ID's to update along with the progress value @@ -1658,7 +1658,7 @@ def _get_rooms_to_update_txn(txn: LoggingTransaction) -> List[Tuple[str]]: (batch_size,), ) - rooms_to_update_rows = cast(List[Tuple[str]], txn.fetchall()) + rooms_to_update_rows = cast(list[tuple[str]], txn.fetchall()) return rooms_to_update_rows @@ -1674,9 +1674,9 @@ def _get_rooms_to_update_txn(txn: LoggingTransaction) -> List[Tuple[str]]: return 0 # Map from room_id to insert/update state values in the `sliding_sync_joined_rooms` table. 
- joined_room_updates: Dict[str, SlidingSyncStateInsertValues] = {} + joined_room_updates: dict[str, SlidingSyncStateInsertValues] = {} # Map from room_id to stream_ordering/bump_stamp, etc values - joined_room_stream_ordering_updates: Dict[ + joined_room_stream_ordering_updates: dict[ str, _JoinedRoomStreamOrderingUpdate ] = {} # As long as we get this value before we fetch the current state, we can use it @@ -1886,8 +1886,8 @@ async def _sliding_sync_membership_snapshots_bg_update( def _find_memberships_to_update_txn( txn: LoggingTransaction, - ) -> List[ - Tuple[ + ) -> list[ + tuple[ str, Optional[str], Optional[str], @@ -1979,8 +1979,8 @@ def _find_memberships_to_update_txn( raise Exception("last_event_stream_ordering should not be None") memberships_to_update_rows = cast( - List[ - Tuple[ + list[ + tuple[ str, Optional[str], Optional[str], @@ -2023,7 +2023,7 @@ def _find_memberships_to_update_txn( def _find_previous_invite_or_knock_membership_txn( txn: LoggingTransaction, room_id: str, user_id: str, event_id: str - ) -> Optional[Tuple[str, str]]: + ) -> Optional[tuple[str, str]]: # Find the previous invite/knock event before the leave event # # Here are some notes on how we landed on this query: @@ -2085,11 +2085,11 @@ def _find_previous_invite_or_knock_membership_txn( return event_id, membership # Map from (room_id, user_id) to ... 
- to_insert_membership_snapshots: Dict[ - Tuple[str, str], SlidingSyncMembershipSnapshotSharedInsertValues + to_insert_membership_snapshots: dict[ + tuple[str, str], SlidingSyncMembershipSnapshotSharedInsertValues ] = {} - to_insert_membership_infos: Dict[ - Tuple[str, str], SlidingSyncMembershipInfoWithEventPos + to_insert_membership_infos: dict[ + tuple[str, str], SlidingSyncMembershipInfoWithEventPos ] = {} for ( room_id, @@ -2510,7 +2510,7 @@ def _txn( ) memberships_to_update_rows = cast( - List[Tuple[str, str, str, int, int]], + list[tuple[str, str, str, int, int]], txn.fetchall(), ) if not memberships_to_update_rows: @@ -2519,9 +2519,9 @@ def _txn( # Assemble the values to update # # (room_id, user_id) - key_values: List[Tuple[str, str]] = [] + key_values: list[tuple[str, str]] = [] # (forgotten,) - value_values: List[Tuple[int]] = [] + value_values: list[tuple[int]] = [] for ( room_id, user_id, @@ -2585,7 +2585,7 @@ async def fixup_max_depth_cap_bg_update( room_id_bound = progress.get("room_id", "") - def redo_max_depth_bg_update_txn(txn: LoggingTransaction) -> Tuple[bool, int]: + def redo_max_depth_bg_update_txn(txn: LoggingTransaction) -> tuple[bool, int]: txn.execute( """ SELECT room_id, room_version FROM rooms @@ -2597,7 +2597,7 @@ def redo_max_depth_bg_update_txn(txn: LoggingTransaction) -> Tuple[bool, int]: ) # Find the next room ID to process, with a relevant room version. 
- room_ids: List[str] = [] + room_ids: list[str] = [] max_room_id: Optional[str] = None for room_id, room_version_str in txn: max_room_id = room_id @@ -2704,7 +2704,7 @@ def _resolve_stale_data_in_sliding_sync_joined_rooms_table( # If we have nothing written to the `sliding_sync_joined_rooms` table, there is # nothing to clean up - row = cast(Optional[Tuple[int]], txn.fetchone()) + row = cast(Optional[tuple[int]], txn.fetchone()) max_stream_ordering_sliding_sync_joined_rooms_table = None depends_on = None if row is not None: @@ -2830,7 +2830,7 @@ def _resolve_stale_data_in_sliding_sync_membership_snapshots_table( # If we have nothing written to the `sliding_sync_membership_snapshots` table, # there is nothing to clean up - row = cast(Optional[Tuple[int]], txn.fetchone()) + row = cast(Optional[tuple[int]], txn.fetchone()) max_stream_ordering_sliding_sync_membership_snapshots_table = None if row is not None: (max_stream_ordering_sliding_sync_membership_snapshots_table,) = row diff --git a/synapse/storage/databases/main/events_forward_extremities.py b/synapse/storage/databases/main/events_forward_extremities.py index bd763885d7..d43fb443fd 100644 --- a/synapse/storage/databases/main/events_forward_extremities.py +++ b/synapse/storage/databases/main/events_forward_extremities.py @@ -20,7 +20,7 @@ # import logging -from typing import List, Optional, Tuple, cast +from typing import Optional, cast from synapse.api.errors import SynapseError from synapse.storage.database import LoggingTransaction @@ -98,7 +98,7 @@ def delete_forward_extremities_for_room_txn(txn: LoggingTransaction) -> int: async def get_forward_extremities_for_room( self, room_id: str - ) -> List[Tuple[str, int, int, Optional[int]]]: + ) -> list[tuple[str, int, int, Optional[int]]]: """ Get list of forward extremities for a room. 
@@ -108,7 +108,7 @@ async def get_forward_extremities_for_room( def get_forward_extremities_for_room_txn( txn: LoggingTransaction, - ) -> List[Tuple[str, int, int, Optional[int]]]: + ) -> list[tuple[str, int, int, Optional[int]]]: sql = """ SELECT event_id, state_group, depth, received_ts FROM event_forward_extremities @@ -118,7 +118,7 @@ def get_forward_extremities_for_room_txn( """ txn.execute(sql, (room_id,)) - return cast(List[Tuple[str, int, int, Optional[int]]], txn.fetchall()) + return cast(list[tuple[str, int, int, Optional[int]]], txn.fetchall()) return await self.db_pool.runInteraction( "get_forward_extremities_for_room", diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 4f9a1a4f78..005f75a2d8 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -27,15 +27,11 @@ TYPE_CHECKING, Any, Collection, - Dict, Iterable, - List, Literal, Mapping, MutableMapping, Optional, - Set, - Tuple, cast, overload, ) @@ -191,7 +187,7 @@ class _EventRow: format_version: Optional[int] room_version_id: Optional[str] rejected_reason: Optional[str] - redactions: List[str] + redactions: list[str] outlier: bool @@ -286,7 +282,7 @@ def __init__( 5 * 60 * 1000, ) - self._get_event_cache: AsyncLruCache[Tuple[str], EventCacheEntry] = ( + self._get_event_cache: AsyncLruCache[tuple[str], EventCacheEntry] = ( AsyncLruCache( clock=hs.get_clock(), server_name=self.server_name, @@ -300,8 +296,8 @@ def __init__( # Map from event ID to a deferred that will result in a map from event # ID to cache entry. Note that the returned dict may not have the # requested event in it if the event isn't in the DB. 
- self._current_event_fetches: Dict[ - str, ObservableDeferred[Dict[str, EventCacheEntry]] + self._current_event_fetches: dict[ + str, ObservableDeferred[dict[str, EventCacheEntry]] ] = {} # We keep track of the events we have currently loaded in memory so that @@ -311,8 +307,8 @@ def __init__( self._event_ref: MutableMapping[str, EventBase] = weakref.WeakValueDictionary() self._event_fetch_lock = threading.Condition() - self._event_fetch_list: List[ - Tuple[Iterable[str], "defer.Deferred[Dict[str, _EventRow]]"] + self._event_fetch_list: list[ + tuple[Iterable[str], "defer.Deferred[dict[str, _EventRow]]"] ] = [] self._event_fetch_ongoing = 0 event_fetch_ongoing_gauge.labels(**{SERVER_NAME_LABEL: self.server_name}).set( @@ -323,7 +319,7 @@ def __init__( # the DataStore and PersistEventStore. def get_chain_id_txn(txn: Cursor) -> int: txn.execute("SELECT COALESCE(max(chain_id), 0) FROM event_auth_chains") - return cast(Tuple[int], txn.fetchone())[0] + return cast(tuple[int], txn.fetchone())[0] self.event_chain_id_gen = build_sequence_generator( db_conn, @@ -387,7 +383,7 @@ def get_un_partial_stated_events_token(self, instance_name: str) -> int: async def get_un_partial_stated_events_from_stream( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, Tuple[str, bool]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str, bool]]], int, bool]: """Get updates for the un-partial-stated events replication stream. 
Args: @@ -414,7 +410,7 @@ async def get_un_partial_stated_events_from_stream( def get_un_partial_stated_events_from_stream_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, Tuple[str, bool]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str, bool]]], int, bool]: sql = """ SELECT stream_id, event_id, rejection_status_changed FROM un_partial_stated_event_stream @@ -585,7 +581,7 @@ async def get_events( redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.redact, get_prev_content: bool = False, allow_rejected: bool = False, - ) -> Dict[str, EventBase]: + ) -> dict[str, EventBase]: """Get events from the database Unknown events will be omitted from the response. @@ -633,7 +629,7 @@ async def get_events_as_list( redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.redact, get_prev_content: bool = False, allow_rejected: bool = False, - ) -> List[EventBase]: + ) -> list[EventBase]: """Get events from the database and return in a list in the same order as given by `event_ids` arg. @@ -792,7 +788,7 @@ async def get_unredacted_events_from_cache_or_db( self, event_ids: Collection[str], allow_rejected: bool = False, - ) -> Dict[str, EventCacheEntry]: + ) -> dict[str, EventCacheEntry]: """Fetch a bunch of events from the cache or the database. Note that the events pulled by this function will not have any redactions @@ -836,9 +832,9 @@ async def get_unredacted_events_from_cache_or_db( # avoid extraneous work (if we don't do this we can end up in a n^2 mode # when we wait on the same Deferred N times, then try and merge the # same dict into itself N times). 
- already_fetching_ids: Set[str] = set() - already_fetching_deferreds: Set[ - ObservableDeferred[Dict[str, EventCacheEntry]] + already_fetching_ids: set[str] = set() + already_fetching_deferreds: set[ + ObservableDeferred[dict[str, EventCacheEntry]] ] = set() for event_id in missing_events_ids: @@ -853,7 +849,7 @@ async def get_unredacted_events_from_cache_or_db( if missing_events_ids: - async def get_missing_events_from_cache_or_db() -> Dict[ + async def get_missing_events_from_cache_or_db() -> dict[ str, EventCacheEntry ]: """Fetches the events in `missing_event_ids` from the database. @@ -869,7 +865,7 @@ async def get_missing_events_from_cache_or_db() -> Dict[ # to all the events we pulled from the DB (this will result in this # function returning more events than requested, but that can happen # already due to `_get_events_from_db`). - fetching_deferred: ObservableDeferred[Dict[str, EventCacheEntry]] = ( + fetching_deferred: ObservableDeferred[dict[str, EventCacheEntry]] = ( ObservableDeferred(defer.Deferred(), consumeErrors=True) ) for event_id in missing_events_ids: @@ -908,7 +904,7 @@ async def get_missing_events_from_cache_or_db() -> Dict[ # We must allow the database fetch to complete in the presence of # cancellations, since multiple `_get_events_from_cache_or_db` calls can # reuse the same fetch. - missing_events: Dict[str, EventCacheEntry] = await delay_cancellation( + missing_events: dict[str, EventCacheEntry] = await delay_cancellation( get_missing_events_from_cache_or_db() ) event_entry_map.update(missing_events) @@ -999,7 +995,7 @@ def _invalidate_async_get_event_cache_room_id(self, room_id: str) -> None: async def _get_events_from_cache( self, events: Iterable[str], update_metrics: bool = True - ) -> Dict[str, EventCacheEntry]: + ) -> dict[str, EventCacheEntry]: """Fetch events from the caches, both in memory and any external. May return rejected events. 
@@ -1025,7 +1021,7 @@ async def _get_events_from_cache( @trace async def _get_events_from_external_cache( self, events: Collection[str], update_metrics: bool = True - ) -> Dict[str, EventCacheEntry]: + ) -> dict[str, EventCacheEntry]: """Fetch events from any configured external cache. May return rejected events. @@ -1051,7 +1047,7 @@ async def _get_events_from_external_cache( def _get_events_from_local_cache( self, events: Iterable[str], update_metrics: bool = True - ) -> Dict[str, EventCacheEntry]: + ) -> dict[str, EventCacheEntry]: """Fetch events from the local, in memory, caches. May return rejected events. @@ -1095,7 +1091,7 @@ async def get_stripped_room_state_from_event_context( context: EventContext, state_keys_to_include: StateFilter, membership_user_id: Optional[str] = None, - ) -> List[JsonDict]: + ) -> list[JsonDict]: """ Retrieve the stripped state from a room, given an event context to retrieve state from as well as the state types to include. Optionally, include the membership @@ -1257,7 +1253,7 @@ def _fetch_loop(self, conn: LoggingDatabaseConnection) -> None: def _fetch_event_list( self, conn: LoggingDatabaseConnection, - event_list: List[Tuple[Iterable[str], "defer.Deferred[Dict[str, _EventRow]]"]], + event_list: list[tuple[Iterable[str], "defer.Deferred[dict[str, _EventRow]]"]], ) -> None: """Handle a load of requests from the _event_fetch_list queue @@ -1312,7 +1308,7 @@ def fire_errback(exc: Exception) -> None: @trace async def _get_events_from_db( self, event_ids: Collection[str] - ) -> Dict[str, EventCacheEntry]: + ) -> dict[str, EventCacheEntry]: """Fetch a bunch of events from the database. May return rejected events. 
@@ -1333,8 +1329,8 @@ async def _get_events_from_db( str(len(event_ids)), ) - fetched_event_ids: Set[str] = set() - fetched_events: Dict[str, _EventRow] = {} + fetched_event_ids: set[str] = set() + fetched_events: dict[str, _EventRow] = {} @trace async def _fetch_event_ids_and_get_outstanding_redactions( @@ -1351,7 +1347,7 @@ async def _fetch_event_ids_and_get_outstanding_redactions( row_map = await self._enqueue_events(event_ids_to_fetch) # we need to recursively fetch any redactions of those events - redaction_ids: Set[str] = set() + redaction_ids: set[str] = set() for event_id in event_ids_to_fetch: row = row_map.get(event_id) fetched_event_ids.add(event_id) @@ -1378,7 +1374,7 @@ async def _fetch_event_ids_and_get_outstanding_redactions( ) # build a map from event_id to EventBase - event_map: Dict[str, EventBase] = {} + event_map: dict[str, EventBase] = {} for event_id, row in fetched_events.items(): assert row.event_id == event_id @@ -1491,7 +1487,7 @@ async def _fetch_event_ids_and_get_outstanding_redactions( # finally, we can decide whether each one needs redacting, and build # the cache entries. - result_map: Dict[str, EventCacheEntry] = {} + result_map: dict[str, EventCacheEntry] = {} for event_id, original_ev in event_map.items(): redactions = fetched_events[event_id].redactions redacted_event = self._maybe_redact_event_row( @@ -1511,7 +1507,7 @@ async def _fetch_event_ids_and_get_outstanding_redactions( return result_map - async def _enqueue_events(self, events: Collection[str]) -> Dict[str, _EventRow]: + async def _enqueue_events(self, events: Collection[str]) -> dict[str, _EventRow]: """Fetches events from the database using the _event_fetch_list. This allows batch and bulk fetching of events - it allows us to fetch events without having to create a new transaction for each request for events. @@ -1524,7 +1520,7 @@ async def _enqueue_events(self, events: Collection[str]) -> Dict[str, _EventRow] that weren't requested. 
""" - events_d: "defer.Deferred[Dict[str, _EventRow]]" = defer.Deferred() + events_d: "defer.Deferred[dict[str, _EventRow]]" = defer.Deferred() with self._event_fetch_lock: self._event_fetch_list.append((events, events_d)) self._event_fetch_lock.notify() @@ -1540,7 +1536,7 @@ async def _enqueue_events(self, events: Collection[str]) -> Dict[str, _EventRow] def _fetch_event_rows( self, txn: LoggingTransaction, event_ids: Iterable[str] - ) -> Dict[str, _EventRow]: + ) -> dict[str, _EventRow]: """Fetch event rows from the database Events which are not found are omitted from the result. @@ -1607,7 +1603,7 @@ def _fetch_event_rows( # check for MSC4932 redactions to_check = [] - events: List[_EventRow] = [] + events: list[_EventRow] = [] for e in evs: event = event_dict.get(e) if not event: @@ -1656,7 +1652,7 @@ def _maybe_redact_event_row( self, original_ev: EventBase, redactions: Iterable[str], - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], ) -> Optional[EventBase]: """Given an event object and a list of possible redacting event ids, determine whether to honour any of those redactions and if so return a redacted @@ -1727,12 +1723,12 @@ def _maybe_redact_event_row( # no valid redaction found for this event return None - async def have_events_in_timeline(self, event_ids: Iterable[str]) -> Set[str]: + async def have_events_in_timeline(self, event_ids: Iterable[str]) -> set[str]: """Given a list of event ids, check if we have already processed and stored them as non outliers. """ rows = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_many_batch( table="events", retcols=("event_id",), @@ -1749,7 +1745,7 @@ async def have_events_in_timeline(self, event_ids: Iterable[str]) -> Set[str]: @tag_args async def have_seen_events( self, room_id: str, event_ids: Iterable[str] - ) -> Set[str]: + ) -> set[str]: """Given a list of event ids, check if we have already processed them. 
The room_id is only used to structure the cache (so that it can later be @@ -1768,7 +1764,7 @@ async def have_seen_events( # we break it down. However, each batch requires its own index scan, so we make # the batches as big as possible. - results: Set[str] = set() + results: set[str] = set() for event_ids_chunk in batch_iter(event_ids, 500): events_seen_dict = await self._have_seen_events_dict( room_id, event_ids_chunk @@ -1798,7 +1794,7 @@ async def _have_seen_events_dict( # not being invalidated when purging events from a room. The optimisation can # be re-added after https://github.com/matrix-org/synapse/issues/13476 - def have_seen_events_txn(txn: LoggingTransaction) -> Dict[str, bool]: + def have_seen_events_txn(txn: LoggingTransaction) -> dict[str, bool]: # we deliberately do *not* query the database for room_id, to make the # query an index-only lookup on `events_event_id_key`. # @@ -1850,7 +1846,7 @@ async def get_current_state_event_counts(self, room_id: str) -> int: room_id, ) - async def get_room_complexity(self, room_id: str) -> Dict[str, float]: + async def get_room_complexity(self, room_id: str) -> dict[str, float]: """ Get a rough approximation of the complexity of the room. This is used by remote servers to decide whether they wish to join the room or not. 
@@ -1873,7 +1869,7 @@ async def get_room_complexity(self, room_id: str) -> Dict[str, float]: async def get_all_new_forward_event_rows( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> List[Tuple[int, str, str, str, str, str, str, str, bool, bool]]: + ) -> list[tuple[int, str, str, str, str, str, str, str, bool, bool]]: """Returns new events, for the Events replication stream Args: @@ -1889,7 +1885,7 @@ async def get_all_new_forward_event_rows( def get_all_new_forward_event_rows( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str, str, str, str, str, str, bool, bool]]: + ) -> list[tuple[int, str, str, str, str, str, str, str, bool, bool]]: sql = ( "SELECT e.stream_ordering, e.event_id, e.room_id, e.type," " se.state_key, redacts, relates_to_id, membership, rejections.reason IS NOT NULL," @@ -1907,7 +1903,7 @@ def get_all_new_forward_event_rows( ) txn.execute(sql, (last_id, current_id, instance_name, limit)) return cast( - List[Tuple[int, str, str, str, str, str, str, str, bool, bool]], + list[tuple[int, str, str, str, str, str, str, str, bool, bool]], txn.fetchall(), ) @@ -1917,7 +1913,7 @@ def get_all_new_forward_event_rows( async def get_ex_outlier_stream_rows( self, instance_name: str, last_id: int, current_id: int - ) -> List[Tuple[int, str, str, str, str, str, str, str, bool, bool]]: + ) -> list[tuple[int, str, str, str, str, str, str, str, bool, bool]]: """Returns de-outliered events, for the Events replication stream Args: @@ -1932,7 +1928,7 @@ async def get_ex_outlier_stream_rows( def get_ex_outlier_stream_rows_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str, str, str, str, str, str, bool, bool]]: + ) -> list[tuple[int, str, str, str, str, str, str, str, bool, bool]]: sql = ( "SELECT out.event_stream_ordering, e.event_id, e.room_id, e.type," " se.state_key, redacts, relates_to_id, membership, rejections.reason IS NOT NULL," @@ -1954,7 +1950,7 @@ def get_ex_outlier_stream_rows_txn( txn.execute(sql, (last_id, 
current_id, instance_name)) return cast( - List[Tuple[int, str, str, str, str, str, str, str, bool, bool]], + list[tuple[int, str, str, str, str, str, str, str, bool, bool]], txn.fetchall(), ) @@ -1964,7 +1960,7 @@ def get_ex_outlier_stream_rows_txn( async def get_all_new_backfill_event_rows( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, Tuple[str, str, str, str, str, str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str, str, str, str, str, str]]], int, bool]: """Get updates for backfill replication stream, including all new backfilled events and events that have gone from being outliers to not. @@ -1994,7 +1990,7 @@ async def get_all_new_backfill_event_rows( def get_all_new_backfill_event_rows( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, Tuple[str, str, str, str, str, str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str, str, str, str, str, str]]], int, bool]: sql = ( "SELECT -e.stream_ordering, e.event_id, e.room_id, e.type," " se.state_key, redacts, relates_to_id" @@ -2008,10 +2004,10 @@ def get_all_new_backfill_event_rows( " LIMIT ?" ) txn.execute(sql, (-last_id, -current_id, instance_name, limit)) - new_event_updates: List[ - Tuple[int, Tuple[str, str, str, str, str, str]] + new_event_updates: list[ + tuple[int, tuple[str, str, str, str, str, str]] ] = [] - row: Tuple[int, str, str, str, str, str, str] + row: tuple[int, str, str, str, str, str, str] # Type safety: iterating over `txn` yields `Tuple`, i.e. # `Tuple[Any, ...]` of arbitrary length. Mypy detects assigning a # variadic tuple to a fixed length tuple and flags it up as an error. 
@@ -2057,7 +2053,7 @@ def get_all_new_backfill_event_rows( async def get_all_updated_current_state_deltas( self, instance_name: str, from_token: int, to_token: int, target_row_count: int - ) -> Tuple[List[Tuple[int, str, str, str, str]], int, bool]: + ) -> tuple[list[tuple[int, str, str, str, str]], int, bool]: """Fetch updates from current_state_delta_stream Args: @@ -2079,7 +2075,7 @@ async def get_all_updated_current_state_deltas( def get_all_updated_current_state_deltas_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str, str, str]]: + ) -> list[tuple[int, str, str, str, str]]: sql = """ SELECT stream_id, room_id, type, state_key, event_id FROM current_state_delta_stream @@ -2088,23 +2084,23 @@ def get_all_updated_current_state_deltas_txn( ORDER BY stream_id ASC LIMIT ? """ txn.execute(sql, (from_token, to_token, instance_name, target_row_count)) - return cast(List[Tuple[int, str, str, str, str]], txn.fetchall()) + return cast(list[tuple[int, str, str, str, str]], txn.fetchall()) def get_deltas_for_stream_id_txn( txn: LoggingTransaction, stream_id: int - ) -> List[Tuple[int, str, str, str, str]]: + ) -> list[tuple[int, str, str, str, str]]: sql = """ SELECT stream_id, room_id, type, state_key, event_id FROM current_state_delta_stream WHERE stream_id = ? """ txn.execute(sql, [stream_id]) - return cast(List[Tuple[int, str, str, str, str]], txn.fetchall()) + return cast(list[tuple[int, str, str, str, str]], txn.fetchall()) # we need to make sure that, for every stream id in the results, we get *all* # the rows with that stream id. 
- rows: List[Tuple[int, str, str, str, str]] = await self.db_pool.runInteraction( + rows: list[tuple[int, str, str, str, str]] = await self.db_pool.runInteraction( "get_all_updated_current_state_deltas", get_all_updated_current_state_deltas_txn, ) @@ -2135,7 +2131,7 @@ def get_deltas_for_stream_id_txn( async def get_senders_for_event_ids( self, event_ids: Collection[str] - ) -> Dict[str, Optional[str]]: + ) -> dict[str, Optional[str]]: """ Given a sequence of event IDs, return the sender associated with each. @@ -2151,7 +2147,7 @@ async def get_senders_for_event_ids( def _get_senders_for_event_ids( txn: LoggingTransaction, - ) -> Dict[str, Optional[str]]: + ) -> dict[str, Optional[str]]: rows = self.db_pool.simple_select_many_txn( txn=txn, table="events", @@ -2167,7 +2163,7 @@ def _get_senders_for_event_ids( ) @cached(max_entries=5000) - async def get_event_ordering(self, event_id: str, room_id: str) -> Tuple[int, int]: + async def get_event_ordering(self, event_id: str, room_id: str) -> tuple[int, int]: res = await self.db_pool.simple_select_one( table="events", retcols=["topological_ordering", "stream_ordering"], @@ -2182,7 +2178,7 @@ async def get_event_ordering(self, event_id: str, room_id: str) -> Tuple[int, in return int(res[0]), int(res[1]) - async def get_next_event_to_expire(self) -> Optional[Tuple[str, int]]: + async def get_next_event_to_expire(self) -> Optional[tuple[str, int]]: """Retrieve the entry with the lowest expiry timestamp in the event_expiry table, or None if there's no more event to expire. 
@@ -2194,7 +2190,7 @@ async def get_next_event_to_expire(self) -> Optional[Tuple[str, int]]: def get_next_event_to_expire_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[str, int]]: + ) -> Optional[tuple[str, int]]: txn.execute( """ SELECT event_id, expiry_ts FROM event_expiry @@ -2202,7 +2198,7 @@ def get_next_event_to_expire_txn( """ ) - return cast(Optional[Tuple[str, int]], txn.fetchone()) + return cast(Optional[tuple[str, int]], txn.fetchone()) return await self.db_pool.runInteraction( desc="get_next_event_to_expire", func=get_next_event_to_expire_txn @@ -2229,7 +2225,7 @@ async def get_event_id_from_transaction_id_and_device_id( async def get_already_persisted_events( self, events: Iterable[EventBase] - ) -> Dict[str, str]: + ) -> dict[str, str]: """Look up if we have already persisted an event for the transaction ID, returning a mapping from event ID in the given list to the event ID of an existing event. @@ -2239,7 +2235,7 @@ async def get_already_persisted_events( """ mapping = {} - txn_id_to_event: Dict[Tuple[str, str, str, str], str] = {} + txn_id_to_event: dict[tuple[str, str, str, str], str] = {} for event in events: device_id = getattr(event.internal_metadata, "device_id", None) @@ -2516,7 +2512,7 @@ async def get_partial_state_events( any of the events which are unknown (or are outliers). 
""" result = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_many_batch( table="partial_state_events", column="event_id", @@ -2541,7 +2537,7 @@ async def is_partial_state_event(self, event_id: str) -> bool: ) return result is not None - async def get_partial_state_events_batch(self, room_id: str) -> List[str]: + async def get_partial_state_events_batch(self, room_id: str) -> list[str]: """ Get a list of events in the given room that: - have partial state; and @@ -2560,7 +2556,7 @@ async def get_partial_state_events_batch(self, room_id: str) -> List[str]: @staticmethod def _get_partial_state_events_batch_txn( txn: LoggingTransaction, room_id: str - ) -> List[str]: + ) -> list[str]: # we want to work through the events from oldest to newest, so # we only want events whose prev_events do *not* have partial state - hence # the 'NOT EXISTS' clause in the below. @@ -2644,8 +2640,8 @@ def mark_event_rejected_txn( self.invalidate_get_event_cache_after_txn(txn, event_id) async def get_events_sent_by_user_in_room( - self, user_id: str, room_id: str, limit: int, filter: Optional[List[str]] = None - ) -> Optional[List[str]]: + self, user_id: str, room_id: str, limit: int, filter: Optional[list[str]] = None + ) -> Optional[list[str]]: """ Get a list of event ids of events sent by the user in the specified room @@ -2660,10 +2656,10 @@ def _get_events_by_user_in_room_txn( txn: LoggingTransaction, user_id: str, room_id: str, - filter: Optional[List[str]], + filter: Optional[list[str]], batch_size: int, offset: int, - ) -> Tuple[Optional[List[str]], int]: + ) -> tuple[Optional[list[str]], int]: if filter: base_clause, args = make_in_list_sql_clause( txn.database_engine, "type", filter @@ -2696,7 +2692,7 @@ def _get_events_by_user_in_room_txn( if batch_size > limit: batch_size = limit - selected_ids: List[str] = [] + selected_ids: list[str] = [] while offset < limit: res, offset = await self.db_pool.runInteraction( "get_events_by_user", diff --git 
a/synapse/storage/databases/main/experimental_features.py b/synapse/storage/databases/main/experimental_features.py index d980c57fa8..77b6c36884 100644 --- a/synapse/storage/databases/main/experimental_features.py +++ b/synapse/storage/databases/main/experimental_features.py @@ -19,7 +19,7 @@ # # -from typing import TYPE_CHECKING, Dict, FrozenSet, List, Tuple, cast +from typing import TYPE_CHECKING, cast from synapse.storage.database import ( DatabasePool, @@ -44,7 +44,7 @@ def __init__( super().__init__(database, db_conn, hs) @cached() - async def list_enabled_features(self, user_id: str) -> FrozenSet[str]: + async def list_enabled_features(self, user_id: str) -> frozenset[str]: """ Checks to see what features are enabled for a given user Args: @@ -54,7 +54,7 @@ async def list_enabled_features(self, user_id: str) -> FrozenSet[str]: the features currently enabled for the user """ enabled = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_list( table="per_user_experimental_features", keyvalues={"user_id": user_id, "enabled": True}, @@ -67,7 +67,7 @@ async def list_enabled_features(self, user_id: str) -> FrozenSet[str]: async def set_features_for_user( self, user: str, - features: Dict["ExperimentalFeature", bool], + features: dict["ExperimentalFeature", bool], ) -> None: """ Enables or disables features for a given user diff --git a/synapse/storage/databases/main/filtering.py b/synapse/storage/databases/main/filtering.py index af9634bad4..4b3bc69d20 100644 --- a/synapse/storage/databases/main/filtering.py +++ b/synapse/storage/databases/main/filtering.py @@ -20,7 +20,7 @@ # # -from typing import TYPE_CHECKING, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Optional, Union, cast from canonicaljson import encode_canonical_json @@ -187,7 +187,7 @@ def _do_txn(txn: LoggingTransaction) -> int: sql = "SELECT MAX(filter_id) FROM user_filters WHERE full_user_id = ?" 
txn.execute(sql, (user_id.to_string(),)) - max_id = cast(Tuple[Optional[int]], txn.fetchone())[0] + max_id = cast(tuple[Optional[int]], txn.fetchone())[0] if max_id is None: filter_id = 0 else: diff --git a/synapse/storage/databases/main/keys.py b/synapse/storage/databases/main/keys.py index 2a99a97dd6..9833565095 100644 --- a/synapse/storage/databases/main/keys.py +++ b/synapse/storage/databases/main/keys.py @@ -22,7 +22,7 @@ import itertools import json import logging -from typing import Dict, Iterable, List, Mapping, Optional, Tuple, Union, cast +from typing import Iterable, Mapping, Optional, Union, cast from canonicaljson import encode_canonical_json from signedjson.key import decode_verify_key_bytes @@ -50,7 +50,7 @@ async def store_server_keys_response( server_name: str, from_server: str, ts_added_ms: int, - verify_keys: Dict[str, FetchKeyResult], + verify_keys: dict[str, FetchKeyResult], response_json: JsonDict, ) -> None: """Stores the keys for the given server that we got from `from_server`. 
@@ -130,7 +130,7 @@ def store_server_keys_response_txn(txn: LoggingTransaction) -> None: @cached() def _get_server_keys_json( - self, server_name_and_key_id: Tuple[str, str] + self, server_name_and_key_id: tuple[str, str] ) -> FetchKeyResult: raise NotImplementedError() @@ -138,8 +138,8 @@ def _get_server_keys_json( cached_method_name="_get_server_keys_json", list_name="server_name_and_key_ids" ) async def get_server_keys_json( - self, server_name_and_key_ids: Iterable[Tuple[str, str]] - ) -> Mapping[Tuple[str, str], FetchKeyResult]: + self, server_name_and_key_ids: Iterable[tuple[str, str]] + ) -> Mapping[tuple[str, str], FetchKeyResult]: """ Args: server_name_and_key_ids: @@ -151,7 +151,7 @@ async def get_server_keys_json( """ keys = {} - def _get_keys(txn: Cursor, batch: Tuple[Tuple[str, str], ...]) -> None: + def _get_keys(txn: Cursor, batch: tuple[tuple[str, str], ...]) -> None: """Processes a batch of keys to fetch, and adds the result to `keys`.""" # batch_iter always returns tuples so it's safe to do len(batch) @@ -189,7 +189,7 @@ def _get_keys(txn: Cursor, batch: Tuple[Tuple[str, str], ...]) -> None: valid_until_ts=ts_valid_until_ms, ) - def _txn(txn: Cursor) -> Dict[Tuple[str, str], FetchKeyResult]: + def _txn(txn: Cursor) -> dict[tuple[str, str], FetchKeyResult]: for batch in batch_iter(server_name_and_key_ids, 50): _get_keys(txn, batch) return keys @@ -215,7 +215,7 @@ async def get_server_keys_json_for_remote( If we have multiple entries for a given key ID, returns the most recent. 
""" rows = cast( - List[Tuple[str, str, int, int, Union[bytes, memoryview]]], + list[tuple[str, str, int, int, Union[bytes, memoryview]]], await self.db_pool.simple_select_many_batch( table="server_keys_json", column="key_id", @@ -252,13 +252,13 @@ async def get_server_keys_json_for_remote( async def get_all_server_keys_json_for_remote( self, server_name: str, - ) -> Dict[str, FetchKeyResultForRemote]: + ) -> dict[str, FetchKeyResultForRemote]: """Fetch the cached keys for the given server. If we have multiple entries for a given key ID, returns the most recent. """ rows = cast( - List[Tuple[str, str, int, int, Union[bytes, memoryview]]], + list[tuple[str, str, int, int, Union[bytes, memoryview]]], await self.db_pool.simple_select_list( table="server_keys_json", keyvalues={"server_name": server_name}, diff --git a/synapse/storage/databases/main/lock.py b/synapse/storage/databases/main/lock.py index e2b15eaf6a..9dd2cae344 100644 --- a/synapse/storage/databases/main/lock.py +++ b/synapse/storage/databases/main/lock.py @@ -21,7 +21,7 @@ import logging from contextlib import AsyncExitStack from types import TracebackType -from typing import TYPE_CHECKING, Collection, Optional, Set, Tuple, Type +from typing import TYPE_CHECKING, Collection, Optional from weakref import WeakValueDictionary from twisted.internet import defer @@ -82,7 +82,7 @@ def __init__( # A map from `(lock_name, lock_key)` to lock that we think we # currently hold. - self._live_lock_tokens: WeakValueDictionary[Tuple[str, str], Lock] = ( + self._live_lock_tokens: WeakValueDictionary[tuple[str, str], Lock] = ( WeakValueDictionary() ) @@ -91,7 +91,7 @@ def __init__( # multiple read locks at a time but only one write lock (no mixing read # and write locks at the same time). self._live_read_write_lock_tokens: WeakValueDictionary[ - Tuple[str, str, str], Lock + tuple[str, str, str], Lock ] = WeakValueDictionary() # When we shut down we want to remove the locks. 
Technically this can @@ -104,7 +104,7 @@ def __init__( shutdown_func=self._on_shutdown, ) - self._acquiring_locks: Set[Tuple[str, str]] = set() + self._acquiring_locks: set[tuple[str, str]] = set() self.clock.looping_call( self._reap_stale_read_write_locks, _LOCK_TIMEOUT_MS / 10.0 @@ -288,7 +288,7 @@ def set_lock() -> None: async def try_acquire_multi_read_write_lock( self, - lock_names: Collection[Tuple[str, str]], + lock_names: Collection[tuple[str, str]], write: bool, ) -> Optional[AsyncExitStack]: """Try to acquire multiple locks for the given names/keys. Will return @@ -318,7 +318,7 @@ async def try_acquire_multi_read_write_lock( def _try_acquire_multi_read_write_lock_txn( self, txn: LoggingTransaction, - lock_names: Collection[Tuple[str, str]], + lock_names: Collection[tuple[str, str]], write: bool, ) -> Collection["Lock"]: locks = [] @@ -497,7 +497,7 @@ async def __aenter__(self) -> None: async def __aexit__( self, - _exctype: Optional[Type[BaseException]], + _exctype: Optional[type[BaseException]], _excinst: Optional[BaseException], _exctb: Optional[TracebackType], ) -> bool: diff --git a/synapse/storage/databases/main/media_repository.py b/synapse/storage/databases/main/media_repository.py index b8bd0042d7..b9f882662e 100644 --- a/synapse/storage/databases/main/media_repository.py +++ b/synapse/storage/databases/main/media_repository.py @@ -25,9 +25,7 @@ TYPE_CHECKING, Collection, Iterable, - List, Optional, - Tuple, Union, cast, ) @@ -275,7 +273,7 @@ async def get_local_media_by_user_paginate( user_id: str, order_by: str = MediaSortOrder.CREATED_TS.value, direction: Direction = Direction.FORWARDS, - ) -> Tuple[List[LocalMedia], int]: + ) -> tuple[list[LocalMedia], int]: """Get a paginated list of metadata for a local piece of media which an user_id has uploaded @@ -292,7 +290,7 @@ async def get_local_media_by_user_paginate( def get_local_media_by_user_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[List[LocalMedia], int]: + ) -> 
tuple[list[LocalMedia], int]: # Set ordering order_by_column = MediaSortOrder(order_by).value @@ -301,14 +299,14 @@ def get_local_media_by_user_paginate_txn( else: order = "ASC" - args: List[Union[str, int]] = [user_id] + args: list[Union[str, int]] = [user_id] sql = """ SELECT COUNT(*) as total_media FROM local_media_repository WHERE user_id = ? """ txn.execute(sql, args) - count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] sql = """ SELECT @@ -365,7 +363,7 @@ async def get_local_media_ids( keep_profiles: bool, include_quarantined_media: bool, include_protected_media: bool, - ) -> List[str]: + ) -> list[str]: """ Retrieve a list of media IDs from the local media store. @@ -437,7 +435,7 @@ async def get_local_media_ids( AND NOT safe_from_quarantine """ - def _get_local_media_ids_txn(txn: LoggingTransaction) -> List[str]: + def _get_local_media_ids_txn(txn: LoggingTransaction) -> list[str]: txn.execute(sql, (before_ts, before_ts, size_gt)) return [row[0] for row in txn] @@ -544,7 +542,7 @@ async def mark_local_media_as_safe(self, media_id: str, safe: bool = True) -> No desc="mark_local_media_as_safe", ) - async def count_pending_media(self, user_id: UserID) -> Tuple[int, int]: + async def count_pending_media(self, user_id: UserID) -> tuple[int, int]: """Count the number of pending media for a user. Returns: @@ -552,7 +550,7 @@ async def count_pending_media(self, user_id: UserID) -> Tuple[int, int]: expiration timestamp. 
""" - def get_pending_media_txn(txn: LoggingTransaction) -> Tuple[int, int]: + def get_pending_media_txn(txn: LoggingTransaction) -> tuple[int, int]: sql = """ SELECT COUNT(*), MIN(created_ts) FROM local_media_repository @@ -637,9 +635,9 @@ async def store_url_cache( desc="store_url_cache", ) - async def get_local_media_thumbnails(self, media_id: str) -> List[ThumbnailInfo]: + async def get_local_media_thumbnails(self, media_id: str) -> list[ThumbnailInfo]: rows = cast( - List[Tuple[int, int, str, str, int]], + list[tuple[int, int, str, str, int]], await self.db_pool.simple_select_list( "local_media_repository_thumbnails", {"media_id": media_id}, @@ -755,7 +753,7 @@ async def store_cached_remote_media( async def update_cached_last_access_time( self, local_media: Iterable[str], - remote_media: Iterable[Tuple[str, str]], + remote_media: Iterable[tuple[str, str]], time_ms: int, ) -> None: """Updates the last access time of the given media @@ -793,9 +791,9 @@ def update_cache_txn(txn: LoggingTransaction) -> None: async def get_remote_media_thumbnails( self, origin: str, media_id: str - ) -> List[ThumbnailInfo]: + ) -> list[ThumbnailInfo]: rows = cast( - List[Tuple[int, int, str, str, int]], + list[tuple[int, int, str, str, int]], await self.db_pool.simple_select_list( "remote_media_cache_thumbnails", {"media_origin": origin, "media_id": media_id}, @@ -881,7 +879,7 @@ async def store_remote_media_thumbnail( async def get_remote_media_ids( self, before_ts: int, include_quarantined_media: bool - ) -> List[Tuple[str, str, str]]: + ) -> list[tuple[str, str, str]]: """ Retrieve a list of server name, media ID tuples from the remote media cache. 
@@ -911,7 +909,7 @@ async def get_remote_media_ids( """ return cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], await self.db_pool.execute("get_remote_media_ids", sql, before_ts), ) @@ -932,7 +930,7 @@ def delete_remote_media_txn(txn: LoggingTransaction) -> None: "delete_remote_media", delete_remote_media_txn ) - async def get_expired_url_cache(self, now_ts: int) -> List[str]: + async def get_expired_url_cache(self, now_ts: int) -> list[str]: sql = ( "SELECT media_id FROM local_media_repository_url_cache" " WHERE expires_ts < ?" @@ -940,7 +938,7 @@ async def get_expired_url_cache(self, now_ts: int) -> List[str]: " LIMIT 500" ) - def _get_expired_url_cache_txn(txn: LoggingTransaction) -> List[str]: + def _get_expired_url_cache_txn(txn: LoggingTransaction) -> list[str]: txn.execute(sql, (now_ts,)) return [row[0] for row in txn] @@ -959,7 +957,7 @@ def _delete_url_cache_txn(txn: LoggingTransaction) -> None: await self.db_pool.runInteraction("delete_url_cache", _delete_url_cache_txn) - async def get_url_cache_media_before(self, before_ts: int) -> List[str]: + async def get_url_cache_media_before(self, before_ts: int) -> list[str]: sql = ( "SELECT media_id FROM local_media_repository" " WHERE created_ts < ? 
AND url_cache IS NOT NULL" @@ -967,7 +965,7 @@ async def get_url_cache_media_before(self, before_ts: int) -> List[str]: " LIMIT 500" ) - def _get_url_cache_media_before_txn(txn: LoggingTransaction) -> List[str]: + def _get_url_cache_media_before_txn(txn: LoggingTransaction) -> list[str]: txn.execute(sql, (before_ts,)) return [row[0] for row in txn] diff --git a/synapse/storage/databases/main/metrics.py b/synapse/storage/databases/main/metrics.py index 49411ed034..dc8e2c1616 100644 --- a/synapse/storage/databases/main/metrics.py +++ b/synapse/storage/databases/main/metrics.py @@ -21,7 +21,7 @@ import calendar import logging import time -from typing import TYPE_CHECKING, Dict, List, Tuple, cast +from typing import TYPE_CHECKING, cast from synapse.metrics import SERVER_NAME_LABEL, GaugeBucketCollector from synapse.metrics.background_process_metrics import wrap_as_background_process @@ -85,7 +85,7 @@ def __init__( @wrap_as_background_process("read_forward_extremities") async def _read_forward_extremities(self) -> None: - def fetch(txn: LoggingTransaction) -> List[Tuple[int, int]]: + def fetch(txn: LoggingTransaction) -> list[tuple[int, int]]: txn.execute( """ SELECT t1.c, t2.c @@ -98,7 +98,7 @@ def fetch(txn: LoggingTransaction) -> List[Tuple[int, int]]: ) t2 ON t1.room_id = t2.room_id """ ) - return cast(List[Tuple[int, int]], txn.fetchall()) + return cast(list[tuple[int, int]], txn.fetchall()) res = await self.db_pool.runInteraction("read_forward_extremities", fetch) @@ -125,7 +125,7 @@ def _count_messages(txn: LoggingTransaction) -> int: AND stream_ordering > ? 
""" txn.execute(sql, (self.stream_ordering_day_ago,)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction("count_e2ee_messages", _count_messages) @@ -144,7 +144,7 @@ def _count_messages(txn: LoggingTransaction) -> int: """ txn.execute(sql, (like_clause, self.stream_ordering_day_ago)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction( @@ -159,7 +159,7 @@ def _count(txn: LoggingTransaction) -> int: AND stream_ordering > ? """ txn.execute(sql, (self.stream_ordering_day_ago,)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction( @@ -181,7 +181,7 @@ def _count_messages(txn: LoggingTransaction) -> int: AND stream_ordering > ? """ txn.execute(sql, (self.stream_ordering_day_ago,)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction("count_messages", _count_messages) @@ -200,7 +200,7 @@ def _count_messages(txn: LoggingTransaction) -> int: """ txn.execute(sql, (like_clause, self.stream_ordering_day_ago)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction( @@ -215,7 +215,7 @@ def _count(txn: LoggingTransaction) -> int: AND stream_ordering > ? """ txn.execute(sql, (self.stream_ordering_day_ago,)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction("count_daily_active_rooms", _count) @@ -256,10 +256,10 @@ def _count_users(self, txn: LoggingTransaction, time_from: int) -> int: # Mypy knows that fetchone() might return None if there are no rows. # We know better: "SELECT COUNT(...) FROM ..." 
without any GROUP BY always # returns exactly one row. - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count - async def count_r30v2_users(self) -> Dict[str, int]: + async def count_r30v2_users(self) -> dict[str, int]: """ Counts the number of 30 day retained users, defined as users that: - Appear more than once in the past 60 days @@ -279,7 +279,7 @@ async def count_r30v2_users(self) -> Dict[str, int]: - "web" (any web application -- it's not possible to distinguish Element Web here) """ - def _count_r30v2_users(txn: LoggingTransaction) -> Dict[str, int]: + def _count_r30v2_users(txn: LoggingTransaction) -> dict[str, int]: thirty_days_in_secs = 86400 * 30 now = int(self.clock.time()) sixty_days_ago_in_secs = now - 2 * thirty_days_in_secs @@ -376,7 +376,7 @@ def _count_r30v2_users(txn: LoggingTransaction) -> Dict[str, int]: thirty_days_in_secs * 1000, ), ) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) results["all"] = count return results diff --git a/synapse/storage/databases/main/monthly_active_users.py b/synapse/storage/databases/main/monthly_active_users.py index 86744f616c..bf8e540ffb 100644 --- a/synapse/storage/databases/main/monthly_active_users.py +++ b/synapse/storage/databases/main/monthly_active_users.py @@ -18,7 +18,7 @@ # # import logging -from typing import TYPE_CHECKING, Dict, List, Mapping, Optional, Tuple, cast +from typing import TYPE_CHECKING, Mapping, Optional, cast from synapse.metrics.background_process_metrics import wrap_as_background_process from synapse.storage.database import ( @@ -94,7 +94,7 @@ def _count_users(txn: LoggingTransaction) -> int: WHERE (users.appservice_id IS NULL OR users.appservice_id = ''); """ txn.execute(sql) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction("count_users", _count_users) @@ -112,7 +112,7 @@ async def 
get_monthly_active_count_by_service(self) -> Mapping[str, int]: """ - def _count_users_by_service(txn: LoggingTransaction) -> Dict[str, int]: + def _count_users_by_service(txn: LoggingTransaction) -> dict[str, int]: sql = """ SELECT COALESCE(appservice_id, 'native'), COUNT(*) FROM monthly_active_users @@ -121,7 +121,7 @@ def _count_users_by_service(txn: LoggingTransaction) -> Dict[str, int]: """ txn.execute(sql) - result = cast(List[Tuple[str, int]], txn.fetchall()) + result = cast(list[tuple[str, int]], txn.fetchall()) return dict(result) return await self.db_pool.runInteraction( @@ -130,7 +130,7 @@ def _count_users_by_service(txn: LoggingTransaction) -> Dict[str, int]: async def get_monthly_active_users_by_service( self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: """Generates list of monthly active users and their services. Please see "get_monthly_active_count_by_service" docstring for more details about services. @@ -160,7 +160,7 @@ async def get_monthly_active_users_by_service( where_clause = "" query_params = [] - def _list_users(txn: LoggingTransaction) -> List[Tuple[str, str]]: + def _list_users(txn: LoggingTransaction) -> list[tuple[str, str]]: sql = f""" SELECT COALESCE(appservice_id, 'native'), user_id FROM monthly_active_users @@ -169,11 +169,11 @@ def _list_users(txn: LoggingTransaction) -> List[Tuple[str, str]]: """ txn.execute(sql, query_params) - return cast(List[Tuple[str, str]], txn.fetchall()) + return cast(list[tuple[str, str]], txn.fetchall()) return await self.db_pool.runInteraction("list_users", _list_users) - async def get_registered_reserved_users(self) -> List[str]: + async def get_registered_reserved_users(self) -> list[str]: """Of the reserved threepids defined in config, retrieve those that are associated with registered users @@ -219,7 +219,7 @@ async def reap_monthly_active_users(self) -> None: entries exist. 
""" - def _reap_users(txn: LoggingTransaction, reserved_users: List[str]) -> None: + def _reap_users(txn: LoggingTransaction, reserved_users: list[str]) -> None: """ Args: reserved_users: reserved users to preserve @@ -294,7 +294,7 @@ def _reap_users(txn: LoggingTransaction, reserved_users: List[str]) -> None: ) def _initialise_reserved_users( - self, txn: LoggingTransaction, threepids: List[dict] + self, txn: LoggingTransaction, threepids: list[dict] ) -> None: """Ensures that reserved threepids are accounted for in the MAU table, should be called on start up. diff --git a/synapse/storage/databases/main/presence.py b/synapse/storage/databases/main/presence.py index 587f51df2c..fec94f4e5a 100644 --- a/synapse/storage/databases/main/presence.py +++ b/synapse/storage/databases/main/presence.py @@ -21,12 +21,9 @@ from typing import ( TYPE_CHECKING, Any, - Dict, Iterable, - List, Mapping, Optional, - Tuple, Union, cast, ) @@ -116,8 +113,8 @@ def __init__( ) async def update_presence( - self, presence_states: List[UserPresenceState] - ) -> Tuple[int, int]: + self, presence_states: list[UserPresenceState] + ) -> tuple[int, int]: assert self._can_persist_presence stream_ordering_manager = self._presence_id_gen.get_next_mult( @@ -142,8 +139,8 @@ async def update_presence( def _update_presence_txn( self, txn: LoggingTransaction, - stream_orderings: List[int], - presence_states: List[UserPresenceState], + stream_orderings: list[int], + presence_states: list[UserPresenceState], ) -> None: for stream_id, state in zip(stream_orderings, presence_states): txn.call_after( @@ -193,7 +190,7 @@ def _update_presence_txn( async def get_all_presence_updates( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, list]], int, bool]: + ) -> tuple[list[tuple[int, list]], int, bool]: """Get updates for presence replication stream. 
Args: @@ -220,7 +217,7 @@ async def get_all_presence_updates( def get_all_presence_updates_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, list]], int, bool]: + ) -> tuple[list[tuple[int, list]], int, bool]: sql = """ SELECT stream_id, user_id, state, last_active_ts, last_federation_update_ts, last_user_sync_ts, @@ -232,7 +229,7 @@ def get_all_presence_updates_txn( """ txn.execute(sql, (last_id, current_id, limit)) updates = cast( - List[Tuple[int, list]], + list[tuple[int, list]], [(row[0], row[1:]) for row in txn], ) @@ -263,7 +260,7 @@ async def get_presence_for_users( # TODO All these columns are nullable, but we don't expect that: # https://github.com/matrix-org/synapse/issues/16467 rows = cast( - List[Tuple[str, str, int, int, int, Optional[str], Union[int, bool]]], + list[tuple[str, str, int, int, int, Optional[str], Union[int, bool]]], await self.db_pool.simple_select_many_batch( table="presence_stream", column="user_id", @@ -375,7 +372,7 @@ def _add_users_to_send_full_presence_to(txn: LoggingTransaction) -> None: async def get_presence_for_all_users( self, include_offline: bool = True, - ) -> Dict[str, UserPresenceState]: + ) -> dict[str, UserPresenceState]: """Retrieve the current presence state for all users. 
Note that the presence_stream table is culled frequently, so it should only @@ -402,7 +399,7 @@ async def get_presence_for_all_users( # TODO All these columns are nullable, but we don't expect that: # https://github.com/matrix-org/synapse/issues/16467 rows = cast( - List[Tuple[str, str, int, int, int, Optional[str], Union[int, bool]]], + list[tuple[str, str, int, int, int, Optional[str], Union[int, bool]]], await self.db_pool.runInteraction( "get_presence_for_all_users", self.db_pool.simple_select_list_paginate_txn, @@ -457,7 +454,7 @@ def get_current_presence_token(self) -> int: def get_presence_stream_id_gen(self) -> MultiWriterIdGenerator: return self._presence_id_gen - def _get_active_presence(self, db_conn: Connection) -> List[UserPresenceState]: + def _get_active_presence(self, db_conn: Connection) -> list[UserPresenceState]: """Fetch non-offline presence from the database so that we can register the appropriate time outs. """ @@ -488,7 +485,7 @@ def _get_active_presence(self, db_conn: Connection) -> List[UserPresenceState]: for user_id, state, last_active_ts, last_federation_update_ts, last_user_sync_ts, status_msg, currently_active in rows ] - def take_presence_startup_info(self) -> List[UserPresenceState]: + def take_presence_startup_info(self) -> list[UserPresenceState]: active_on_startup = self._presence_on_startup self._presence_on_startup = [] return active_on_startup diff --git a/synapse/storage/databases/main/profile.py b/synapse/storage/databases/main/profile.py index 30d8a58d96..71f01a597b 100644 --- a/synapse/storage/databases/main/profile.py +++ b/synapse/storage/databases/main/profile.py @@ -19,7 +19,7 @@ # # import json -from typing import TYPE_CHECKING, Dict, Optional, Tuple, cast +from typing import TYPE_CHECKING, Optional, cast from canonicaljson import encode_canonical_json @@ -240,7 +240,7 @@ def get_profile_field(txn: LoggingTransaction) -> JsonValue: # Test exists first since value being None is used for both # missing and a null JSON 
value. - exists, value = cast(Tuple[bool, JsonValue], txn.fetchone()) + exists, value = cast(tuple[bool, JsonValue], txn.fetchone()) if not exists: raise StoreError(404, "No row found") return value @@ -258,7 +258,7 @@ def get_profile_field(txn: LoggingTransaction) -> JsonValue: # If value_type is None, then the value did not exist. value_type, value = cast( - Tuple[Optional[str], JsonValue], txn.fetchone() + tuple[Optional[str], JsonValue], txn.fetchone() ) if not value_type: raise StoreError(404, "No row found") @@ -271,7 +271,7 @@ def get_profile_field(txn: LoggingTransaction) -> JsonValue: return await self.db_pool.runInteraction("get_profile_field", get_profile_field) - async def get_profile_fields(self, user_id: UserID) -> Dict[str, str]: + async def get_profile_fields(self, user_id: UserID) -> dict[str, str]: """ Get all custom profile fields for a user. @@ -346,7 +346,7 @@ def _check_profile_size( # possible due to the grammar. (f'$."{new_field_name}"', user_id.localpart), ) - row = cast(Tuple[Optional[int], Optional[int], Optional[int]], txn.fetchone()) + row = cast(tuple[Optional[int], Optional[int], Optional[int]], txn.fetchone()) # The values return null if the column is null. total_bytes = ( diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py index d4642a1309..10de1b35a6 100644 --- a/synapse/storage/databases/main/purge_events.py +++ b/synapse/storage/databases/main/purge_events.py @@ -20,7 +20,7 @@ # import logging -from typing import Any, Set, Tuple, cast +from typing import Any, cast from synapse.api.errors import SynapseError from synapse.storage.database import LoggingTransaction @@ -103,7 +103,7 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore): async def purge_history( self, room_id: str, token: str, delete_local_events: bool - ) -> Set[int]: + ) -> set[int]: """Deletes room history before a certain point. 
Note that only a single purge can occur at once, this is guaranteed via @@ -137,7 +137,7 @@ def _purge_history_txn( room_id: str, token: RoomStreamToken, delete_local_events: bool, - ) -> Set[int]: + ) -> set[int]: # Tables that should be pruned: # event_auth # event_backward_extremities @@ -204,7 +204,7 @@ def _purge_history_txn( logger.info("[purge] looking for events to delete") should_delete_expr = "state_events.state_key IS NULL" - should_delete_params: Tuple[Any, ...] = () + should_delete_params: tuple[Any, ...] = () if not delete_local_events: should_delete_expr += " AND event_id NOT LIKE ?" @@ -355,7 +355,7 @@ def _purge_history_txn( """, (room_id,), ) - (min_depth,) = cast(Tuple[int], txn.fetchone()) + (min_depth,) = cast(tuple[int], txn.fetchone()) logger.info("[purge] updating room_depth to %d", min_depth) diff --git a/synapse/storage/databases/main/push_rule.py b/synapse/storage/databases/main/push_rule.py index 1860be1713..ecab19eb2e 100644 --- a/synapse/storage/databases/main/push_rule.py +++ b/synapse/storage/databases/main/push_rule.py @@ -23,13 +23,10 @@ TYPE_CHECKING, Any, Collection, - Dict, Iterable, - List, Mapping, Optional, Sequence, - Tuple, Union, cast, ) @@ -69,8 +66,8 @@ def _load_rules( - rawrules: List[Tuple[str, int, str, str]], - enabled_map: Dict[str, bool], + rawrules: list[tuple[str, int, str, str]], + enabled_map: dict[str, bool], experimental_config: ExperimentalConfig, ) -> FilteredPushRules: """Take the DB rows returned from the DB and convert them into a full @@ -206,7 +203,7 @@ def process_replication_position( @cached(max_entries=5000) async def get_push_rules_for_user(self, user_id: str) -> FilteredPushRules: rows = cast( - List[Tuple[str, int, int, str, str]], + list[tuple[str, int, int, str, str]], await self.db_pool.simple_select_list( table="push_rules", keyvalues={"user_name": user_id}, @@ -232,9 +229,9 @@ async def get_push_rules_for_user(self, user_id: str) -> FilteredPushRules: self.hs.config.experimental, ) - async 
def get_push_rules_enabled_for_user(self, user_id: str) -> Dict[str, bool]: + async def get_push_rules_enabled_for_user(self, user_id: str) -> dict[str, bool]: results = cast( - List[Tuple[str, Optional[Union[int, bool]]]], + list[tuple[str, Optional[Union[int, bool]]]], await self.db_pool.simple_select_list( table="push_rules_enable", keyvalues={"user_name": user_id}, @@ -257,7 +254,7 @@ def have_push_rules_changed_txn(txn: LoggingTransaction) -> bool: " WHERE user_id = ? AND ? < stream_id" ) txn.execute(sql, (user_id, last_id)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return bool(count) return await self.db_pool.runInteraction( @@ -271,7 +268,7 @@ async def bulk_get_push_rules( if not user_ids: return {} - raw_rules: Dict[str, List[Tuple[str, int, str, str]]] = { + raw_rules: dict[str, list[tuple[str, int, str, str]]] = { user_id: [] for user_id in user_ids } @@ -280,7 +277,7 @@ async def bulk_get_push_rules( gather_results( ( cast( - "defer.Deferred[List[Tuple[str, str, int, int, str, str]]]", + "defer.Deferred[list[tuple[str, str, int, int, str, str]]]", run_in_background( self.db_pool.simple_select_many_batch, table="push_rules", @@ -312,7 +309,7 @@ async def bulk_get_push_rules( (rule_id, priority_class, conditions, actions) ) - results: Dict[str, FilteredPushRules] = {} + results: dict[str, FilteredPushRules] = {} for user_id, rules in raw_rules.items(): results[user_id] = _load_rules( @@ -323,14 +320,14 @@ async def bulk_get_push_rules( async def bulk_get_push_rules_enabled( self, user_ids: Collection[str] - ) -> Dict[str, Dict[str, bool]]: + ) -> dict[str, dict[str, bool]]: if not user_ids: return {} - results: Dict[str, Dict[str, bool]] = {user_id: {} for user_id in user_ids} + results: dict[str, dict[str, bool]] = {user_id: {} for user_id in user_ids} rows = cast( - List[Tuple[str, str, Optional[int]]], + list[tuple[str, str, Optional[int]]], await self.db_pool.simple_select_many_batch( 
table="push_rules_enable", column="user_name", @@ -346,7 +343,7 @@ async def bulk_get_push_rules_enabled( async def get_all_push_rule_updates( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, Tuple[str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str]]], int, bool]: """Get updates for push_rules replication stream. Args: @@ -373,7 +370,7 @@ async def get_all_push_rule_updates( def get_all_push_rule_updates_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, Tuple[str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str]]], int, bool]: sql = """ SELECT stream_id, user_id FROM push_rules_stream @@ -383,7 +380,7 @@ def get_all_push_rule_updates_txn( """ txn.execute(sql, (last_id, current_id, limit)) updates = cast( - List[Tuple[int, Tuple[str]]], + list[tuple[int, tuple[str]]], [(stream_id, (user_id,)) for stream_id, user_id in txn], ) @@ -794,7 +791,7 @@ async def set_push_rule_actions( self, user_id: str, rule_id: str, - actions: List[Union[dict, str]], + actions: list[Union[dict, str]], is_default_rule: bool, ) -> None: """ diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py index 1b2aa79ab1..c8f049536a 100644 --- a/synapse/storage/databases/main/pusher.py +++ b/synapse/storage/databases/main/pusher.py @@ -23,12 +23,9 @@ from typing import ( TYPE_CHECKING, Any, - Dict, Iterable, Iterator, - List, Optional, - Tuple, cast, ) @@ -51,7 +48,7 @@ logger = logging.getLogger(__name__) # The type of a row in the pushers table. 
-PusherRow = Tuple[ +PusherRow = tuple[ int, # id str, # user_name Optional[int], # access_token @@ -192,7 +189,7 @@ async def get_pushers_by_app_id_and_pushkey( async def get_pushers_by_user_id(self, user_id: str) -> Iterator[PusherConfig]: return await self.get_pushers_by({"user_name": user_id}) - async def get_pushers_by(self, keyvalues: Dict[str, Any]) -> Iterator[PusherConfig]: + async def get_pushers_by(self, keyvalues: dict[str, Any]) -> Iterator[PusherConfig]: """Retrieve pushers that match the given criteria. Args: @@ -202,7 +199,7 @@ async def get_pushers_by(self, keyvalues: Dict[str, Any]) -> Iterator[PusherConf The pushers for which the given columns have the given values. """ - def get_pushers_by_txn(txn: LoggingTransaction) -> List[PusherRow]: + def get_pushers_by_txn(txn: LoggingTransaction) -> list[PusherRow]: # We could technically use simple_select_list here, but we need to call # COALESCE on the 'enabled' column. While it is technically possible to give # simple_select_list the whole `COALESCE(...) 
AS ...` as a column name, it @@ -220,7 +217,7 @@ def get_pushers_by_txn(txn: LoggingTransaction) -> List[PusherRow]: txn.execute(sql, list(keyvalues.values())) - return cast(List[PusherRow], txn.fetchall()) + return cast(list[PusherRow], txn.fetchall()) ret = await self.db_pool.runInteraction( desc="get_pushers_by", @@ -230,7 +227,7 @@ def get_pushers_by_txn(txn: LoggingTransaction) -> List[PusherRow]: return self._decode_pushers_rows(ret) async def get_enabled_pushers(self) -> Iterator[PusherConfig]: - def get_enabled_pushers_txn(txn: LoggingTransaction) -> List[PusherRow]: + def get_enabled_pushers_txn(txn: LoggingTransaction) -> list[PusherRow]: txn.execute( """ SELECT id, user_name, access_token, profile_tag, kind, app_id, @@ -240,7 +237,7 @@ def get_enabled_pushers_txn(txn: LoggingTransaction) -> List[PusherRow]: FROM pushers WHERE COALESCE(enabled, TRUE) """ ) - return cast(List[PusherRow], txn.fetchall()) + return cast(list[PusherRow], txn.fetchall()) return self._decode_pushers_rows( await self.db_pool.runInteraction( @@ -250,7 +247,7 @@ def get_enabled_pushers_txn(txn: LoggingTransaction) -> List[PusherRow]: async def get_all_updated_pushers_rows( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: """Get updates for pushers replication stream. 
Args: @@ -277,7 +274,7 @@ async def get_all_updated_pushers_rows( def get_all_updated_pushers_rows_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: sql = """ SELECT id, user_name, app_id, pushkey FROM pushers @@ -286,7 +283,7 @@ def get_all_updated_pushers_rows_txn( """ txn.execute(sql, (last_id, current_id, limit)) updates = cast( - List[Tuple[int, tuple]], + list[tuple[int, tuple]], [ (stream_id, (user_name, app_id, pushkey, False)) for stream_id, user_name, app_id, pushkey in txn @@ -379,9 +376,9 @@ async def update_pusher_failing_since( async def get_throttle_params_by_room( self, pusher_id: int - ) -> Dict[str, ThrottleParams]: + ) -> dict[str, ThrottleParams]: res = cast( - List[Tuple[str, Optional[int], Optional[int]]], + list[tuple[str, Optional[int], Optional[int]]], await self.db_pool.simple_select_list( "pusher_throttle", {"pusher": pusher_id}, @@ -610,7 +607,7 @@ def set_device_id_for_pushers_txn(txn: LoggingTransaction) -> int: (last_pusher_id, batch_size), ) - rows = cast(List[Tuple[int, Optional[str], Optional[str]]], txn.fetchall()) + rows = cast(list[tuple[int, Optional[str], Optional[str]]], txn.fetchall()) if len(rows) == 0: return 0 @@ -764,7 +761,7 @@ async def delete_all_pushers_for_user(self, user_id: str) -> None: # account. 
pushers = list(await self.get_pushers_by_user_id(user_id)) - def delete_pushers_txn(txn: LoggingTransaction, stream_ids: List[int]) -> None: + def delete_pushers_txn(txn: LoggingTransaction, stream_ids: list[int]) -> None: self._invalidate_cache_and_stream( # type: ignore[attr-defined] txn, self.get_if_user_has_pusher, (user_id,) ) diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py index f1dbf68971..63d4e1f68c 100644 --- a/synapse/storage/databases/main/receipts.py +++ b/synapse/storage/databases/main/receipts.py @@ -24,14 +24,10 @@ TYPE_CHECKING, Any, Collection, - Dict, Iterable, - List, Mapping, Optional, Sequence, - Set, - Tuple, cast, ) @@ -92,14 +88,14 @@ def merge_to_content(receipts: Collection["ReceiptInRoom"]) -> JsonMapping: # matching threaded receipts. # Set of (user_id, event_id) - unthreaded_receipts: Set[Tuple[str, str]] = { + unthreaded_receipts: set[tuple[str, str]] = { (receipt.user_id, receipt.event_id) for receipt in receipts if receipt.thread_id is None } # event_id -> receipt_type -> user_id -> receipt data - content: Dict[str, Dict[str, Dict[str, JsonMapping]]] = {} + content: dict[str, dict[str, dict[str, JsonMapping]]] = {} for receipt in receipts: data = receipt.data if receipt.thread_id is not None: @@ -180,7 +176,7 @@ def get_last_unthreaded_receipt_for_user_txn( user_id: str, room_id: str, receipt_types: Collection[str], - ) -> Optional[Tuple[str, int]]: + ) -> Optional[tuple[str, int]]: """ Fetch the event ID and stream_ordering for the latest unthreaded receipt in a room with one of the given receipt types. 
@@ -212,11 +208,11 @@ def get_last_unthreaded_receipt_for_user_txn( args.extend((user_id, room_id)) txn.execute(sql, args) - return cast(Optional[Tuple[str, int]], txn.fetchone()) + return cast(Optional[tuple[str, int]], txn.fetchone()) async def get_receipts_for_user( self, user_id: str, receipt_types: Iterable[str] - ) -> Dict[str, str]: + ) -> dict[str, str]: """ Fetch the event IDs for the latest receipts sent by the given user. @@ -285,7 +281,7 @@ async def _get_receipts_for_user_with_orderings( A map of room ID to the latest receipt information. """ - def f(txn: LoggingTransaction) -> List[Tuple[str, str, int, int]]: + def f(txn: LoggingTransaction) -> list[tuple[str, str, int, int]]: sql = ( "SELECT rl.room_id, rl.event_id," " e.topological_ordering, e.stream_ordering" @@ -297,7 +293,7 @@ def f(txn: LoggingTransaction) -> List[Tuple[str, str, int, int]]: " AND receipt_type = ?" ) txn.execute(sql, (user_id, receipt_type)) - return cast(List[Tuple[str, str, int, int]], txn.fetchall()) + return cast(list[tuple[str, str, int, int]], txn.fetchall()) rows = await self.db_pool.runInteraction( "get_receipts_for_user_with_orderings", f @@ -316,7 +312,7 @@ async def get_linearized_receipts_for_rooms( room_ids: Iterable[str], to_key: MultiWriterStreamToken, from_key: Optional[MultiWriterStreamToken] = None, - ) -> List[JsonMapping]: + ) -> list[JsonMapping]: """Get receipts for multiple rooms for sending to clients. 
Args: @@ -379,7 +375,7 @@ async def _get_linearized_receipts_for_room( ) -> Sequence[JsonMapping]: """See get_linearized_receipts_for_room""" - def f(txn: LoggingTransaction) -> List[Tuple[str, str, str, str]]: + def f(txn: LoggingTransaction) -> list[tuple[str, str, str, str]]: if from_key: sql = """ SELECT stream_id, instance_name, receipt_type, user_id, event_id, data @@ -466,7 +462,7 @@ def f( txn.execute(sql + clause, [to_key.get_max_stream_pos()] + list(args)) - results: Dict[str, List[ReceiptInRoom]] = {} + results: dict[str, list[ReceiptInRoom]] = {} for ( stream_id, instance_name, @@ -515,7 +511,7 @@ def f( async def get_linearized_receipts_for_events( self, - room_and_event_ids: Collection[Tuple[str, str]], + room_and_event_ids: Collection[tuple[str, str]], ) -> Mapping[str, Sequence[ReceiptInRoom]]: """Get all receipts for the given set of events. @@ -531,8 +527,8 @@ async def get_linearized_receipts_for_events( def get_linearized_receipts_for_events_txn( txn: LoggingTransaction, - room_id_event_id_tuples: Collection[Tuple[str, str]], - ) -> List[Tuple[str, str, str, str, Optional[str], str]]: + room_id_event_id_tuples: Collection[tuple[str, str]], + ) -> list[tuple[str, str, str, str, Optional[str], str]]: clause, args = make_tuple_in_list_sql_clause( self.database_engine, ("room_id", "event_id"), room_id_event_id_tuples ) @@ -548,7 +544,7 @@ def get_linearized_receipts_for_events_txn( return txn.fetchall() # room_id -> receipts - room_to_receipts: Dict[str, List[ReceiptInRoom]] = {} + room_to_receipts: dict[str, list[ReceiptInRoom]] = {} for batch in batch_iter(room_and_event_ids, 1000): batch_results = await self.db_pool.runInteraction( "get_linearized_receipts_for_events", @@ -596,7 +592,7 @@ async def get_linearized_receipts_for_all_rooms( A dictionary of roomids to a list of receipts. 
""" - def f(txn: LoggingTransaction) -> List[Tuple[str, str, str, str, str]]: + def f(txn: LoggingTransaction) -> list[tuple[str, str, str, str, str]]: if from_key: sql = """ SELECT stream_id, instance_name, room_id, receipt_type, user_id, event_id, data @@ -659,7 +655,7 @@ async def get_linearized_receipts_for_user_in_rooms( def get_linearized_receipts_for_user_in_rooms_txn( txn: LoggingTransaction, batch_room_ids: StrCollection, - ) -> List[Tuple[str, str, str, str, Optional[str], str]]: + ) -> list[tuple[str, str, str, str, Optional[str], str]]: clause, args = make_in_list_sql_clause( self.database_engine, "room_id", batch_room_ids ) @@ -687,7 +683,7 @@ def get_linearized_receipts_for_user_in_rooms_txn( ] # room_id -> receipts - room_to_receipts: Dict[str, List[ReceiptInRoom]] = {} + room_to_receipts: dict[str, list[ReceiptInRoom]] = {} for batch in batch_iter(room_ids, 1000): batch_results = await self.db_pool.runInteraction( "get_linearized_receipts_for_events", @@ -746,7 +742,7 @@ def f(txn: LoggingTransaction, room_ids: StrCollection) -> StrCollection: return [room_id for (room_id,) in txn] - results: List[str] = [] + results: list[str] = [] for batch in batch_iter(room_ids, 1000): batch_result = await self.db_pool.runInteraction( "get_rooms_with_receipts_between", f, batch @@ -757,7 +753,7 @@ def f(txn: LoggingTransaction, room_ids: StrCollection) -> StrCollection: async def get_users_sent_receipts_between( self, last_id: int, current_id: int - ) -> List[str]: + ) -> list[str]: """Get all users who sent receipts between `last_id` exclusive and `current_id` inclusive. @@ -768,7 +764,7 @@ async def get_users_sent_receipts_between( if last_id == current_id: return [] - def _get_users_sent_receipts_between_txn(txn: LoggingTransaction) -> List[str]: + def _get_users_sent_receipts_between_txn(txn: LoggingTransaction) -> list[str]: sql = """ SELECT DISTINCT user_id FROM receipts_linearized WHERE ? < stream_id AND stream_id <= ? 
@@ -783,8 +779,8 @@ def _get_users_sent_receipts_between_txn(txn: LoggingTransaction) -> List[str]: async def get_all_updated_receipts( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[ - List[Tuple[int, Tuple[str, str, str, str, Optional[str], JsonDict]]], int, bool + ) -> tuple[ + list[tuple[int, tuple[str, str, str, str, Optional[str], JsonDict]]], int, bool ]: """Get updates for receipts replication stream. @@ -812,8 +808,8 @@ async def get_all_updated_receipts( def get_all_updated_receipts_txn( txn: LoggingTransaction, - ) -> Tuple[ - List[Tuple[int, Tuple[str, str, str, str, Optional[str], JsonDict]]], + ) -> tuple[ + list[tuple[int, tuple[str, str, str, str, Optional[str], JsonDict]]], int, bool, ]: @@ -828,7 +824,7 @@ def get_all_updated_receipts_txn( txn.execute(sql, (last_id, current_id, instance_name, limit)) updates = cast( - List[Tuple[int, Tuple[str, str, str, str, Optional[str], JsonDict]]], + list[tuple[int, tuple[str, str, str, str, Optional[str], JsonDict]]], [(r[0], r[1:6] + (db_to_json(r[6]),)) for r in txn], ) @@ -917,7 +913,7 @@ def _insert_linearized_receipt_txn( if stream_ordering is not None: if thread_id is None: thread_clause = "r.thread_id IS NULL" - thread_args: Tuple[str, ...] = () + thread_args: tuple[str, ...] = () else: thread_clause = "r.thread_id = ?" thread_args = (thread_id,) @@ -986,7 +982,7 @@ def _insert_linearized_receipt_txn( return rx_ts def _graph_to_linear( - self, txn: LoggingTransaction, room_id: str, event_ids: List[str] + self, txn: LoggingTransaction, room_id: str, event_ids: list[str] ) -> str: """ Generate a linearized event from a list of events (i.e. 
a list of forward @@ -1026,7 +1022,7 @@ async def insert_receipt( room_id: str, receipt_type: str, user_id: str, - event_ids: List[str], + event_ids: list[str], thread_id: Optional[str], data: dict, ) -> Optional[PersistedPosition]: @@ -1098,7 +1094,7 @@ async def _insert_graph_receipt( room_id: str, receipt_type: str, user_id: str, - event_ids: List[str], + event_ids: list[str], thread_id: Optional[str], data: JsonDict, ) -> None: @@ -1237,7 +1233,7 @@ def _remote_duplicate_receipts_txn(txn: LoggingTransaction) -> None: HAVING COUNT(*) > 1 """ txn.execute(sql) - duplicate_keys = cast(List[Tuple[int, str, str, str]], list(txn)) + duplicate_keys = cast(list[tuple[int, str, str, str]], list(txn)) # Then remove duplicate receipts, keeping the one with the highest # `stream_id`. Since there might be duplicate rows with the same @@ -1255,7 +1251,7 @@ def _remote_duplicate_receipts_txn(txn: LoggingTransaction) -> None: LIMIT 1 """ txn.execute(sql, (room_id, receipt_type, user_id, stream_id)) - row_id = cast(Tuple[str], txn.fetchone())[0] + row_id = cast(tuple[str], txn.fetchone())[0] sql = f""" DELETE FROM receipts_linearized @@ -1306,7 +1302,7 @@ def _remote_duplicate_receipts_txn(txn: LoggingTransaction) -> None: HAVING COUNT(*) > 1 """ txn.execute(sql) - duplicate_keys = cast(List[Tuple[str, str, str]], list(txn)) + duplicate_keys = cast(list[tuple[str, str, str]], list(txn)) # Then remove all duplicate receipts. 
# We could be clever and try to keep the latest receipt out of every set of diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py index 906d1a91f6..7ce9bf43e6 100644 --- a/synapse/storage/databases/main/registration.py +++ b/synapse/storage/databases/main/registration.py @@ -22,7 +22,7 @@ import logging import random import re -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Any, Optional, Union, cast import attr @@ -576,7 +576,7 @@ async def set_renewal_token_for_user( async def get_user_from_renewal_token( self, renewal_token: str - ) -> Tuple[str, int, Optional[int]]: + ) -> tuple[str, int, Optional[int]]: """Get a user ID and renewal status from a renewal token. Args: @@ -592,7 +592,7 @@ async def get_user_from_renewal_token( has not been renewed using the current token yet. """ return cast( - Tuple[str, int, Optional[int]], + tuple[str, int, Optional[int]], await self.db_pool.simple_select_one( table="account_validity", keyvalues={"renewal_token": renewal_token}, @@ -617,7 +617,7 @@ async def get_renewal_token_for_user(self, user_id: str) -> str: desc="get_renewal_token_for_user", ) - async def get_users_expiring_soon(self) -> List[Tuple[str, int]]: + async def get_users_expiring_soon(self) -> list[tuple[str, int]]: """Selects users whose account will expire in the [now, now + renew_at] time window (see configuration for account_validity for information on what renew_at refers to). @@ -628,14 +628,14 @@ async def get_users_expiring_soon(self) -> List[Tuple[str, int]]: def select_users_txn( txn: LoggingTransaction, now_ms: int, renew_at: int - ) -> List[Tuple[str, int]]: + ) -> list[tuple[str, int]]: sql = ( "SELECT user_id, expiration_ts_ms FROM account_validity" " WHERE email_sent = FALSE AND (expiration_ts_ms - ?) <= ?" 
) values = [now_ms, renew_at] txn.execute(sql, values) - return cast(List[Tuple[str, int]], txn.fetchall()) + return cast(list[tuple[str, int]], txn.fetchall()) return await self.db_pool.runInteraction( "get_users_expiring_soon", @@ -858,17 +858,17 @@ def is_support_user_txn(self, txn: LoggingTransaction, user_id: str) -> bool: ) return True if res == UserTypes.SUPPORT else False - async def get_users_by_id_case_insensitive(self, user_id: str) -> Dict[str, str]: + async def get_users_by_id_case_insensitive(self, user_id: str) -> dict[str, str]: """Gets users that match user_id case insensitively. Returns: A mapping of user_id -> password_hash. """ - def f(txn: LoggingTransaction) -> Dict[str, str]: + def f(txn: LoggingTransaction) -> dict[str, str]: sql = "SELECT name, password_hash FROM users WHERE lower(name) = lower(?)" txn.execute(sql, (user_id,)) - result = cast(List[Tuple[str, str]], txn.fetchall()) + result = cast(list[tuple[str, str]], txn.fetchall()) return dict(result) return await self.db_pool.runInteraction("get_users_by_id_case_insensitive", f) @@ -978,7 +978,7 @@ async def remove_user_external_id( async def replace_user_external_id( self, - record_external_ids: List[Tuple[str, str]], + record_external_ids: list[tuple[str, str]], user_id: str, ) -> None: """Replace mappings from external user ids to a mxid in a single transaction. 
@@ -1045,7 +1045,7 @@ async def get_user_by_external_id( desc="get_user_by_external_id", ) - async def get_external_ids_by_user(self, mxid: str) -> List[Tuple[str, str]]: + async def get_external_ids_by_user(self, mxid: str) -> list[tuple[str, str]]: """Look up external ids for the given user Args: @@ -1055,7 +1055,7 @@ async def get_external_ids_by_user(self, mxid: str) -> List[Tuple[str, str]]: Tuples of (auth_provider, external_id) """ return cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="user_external_ids", keyvalues={"user_id": mxid}, @@ -1075,7 +1075,7 @@ def _count_users(txn: LoggingTransaction) -> int: return await self.db_pool.runInteraction("count_users", _count_users) - async def count_daily_user_type(self) -> Dict[str, int]: + async def count_daily_user_type(self) -> dict[str, int]: """ Counts 1) native non guest users 2) native guests users @@ -1083,7 +1083,7 @@ async def count_daily_user_type(self) -> Dict[str, int]: who registered on the homeserver in the past 24 hours """ - def _count_daily_user_type(txn: LoggingTransaction) -> Dict[str, int]: + def _count_daily_user_type(txn: LoggingTransaction) -> dict[str, int]: yesterday = int(self.clock.time()) - (60 * 60 * 24) sql = """ @@ -1116,7 +1116,7 @@ def _count_users(txn: LoggingTransaction) -> int: WHERE appservice_id IS NULL """ ) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction("count_users", _count_users) @@ -1196,9 +1196,9 @@ async def user_add_threepid( {"user_id": user_id, "validated_at": validated_at, "added_at": added_at}, ) - async def user_get_threepids(self, user_id: str) -> List[ThreepidResult]: + async def user_get_threepids(self, user_id: str) -> list[ThreepidResult]: results = cast( - List[Tuple[str, str, int, int]], + list[tuple[str, str, int, int]], await self.db_pool.simple_select_list( "user_threepids", keyvalues={"user_id": 
user_id}, @@ -1253,7 +1253,7 @@ async def add_user_bound_threepid( desc="add_user_bound_threepid", ) - async def user_get_bound_threepids(self, user_id: str) -> List[Tuple[str, str]]: + async def user_get_bound_threepids(self, user_id: str) -> list[tuple[str, str]]: """Get the threepids that a user has bound to an identity server through the homeserver The homeserver remembers where binds to an identity server occurred. Using this method can retrieve those threepids. @@ -1267,7 +1267,7 @@ async def user_get_bound_threepids(self, user_id: str) -> List[Tuple[str, str]]: address: The address of the threepid (e.g "bob@example.com") """ return cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="user_threepid_id_server", keyvalues={"user_id": user_id}, @@ -1302,7 +1302,7 @@ async def remove_user_bound_threepid( async def get_id_servers_user_bound( self, user_id: str, medium: str, address: str - ) -> List[str]: + ) -> list[str]: """Get the list of identity servers that the server proxied bind requests to for given user and threepid @@ -1686,7 +1686,7 @@ async def use_registration_token(self, token: str) -> None: """ def _use_registration_token_txn(txn: LoggingTransaction) -> None: - # Normally, res is Optional[Dict[str, Any]]. + # Normally, res is Optional[dict[str, Any]]. # Override type because the return type is only optional if # allow_none is True, and we don't want mypy throwing errors # about None not being indexable. @@ -1716,7 +1716,7 @@ def _use_registration_token_txn(txn: LoggingTransaction) -> None: async def get_registration_tokens( self, valid: Optional[bool] = None - ) -> List[Tuple[str, Optional[int], int, int, Optional[int]]]: + ) -> list[tuple[str, Optional[int], int, int, Optional[int]]]: """List all registration tokens. Used by the admin API. 
Args: @@ -1735,7 +1735,7 @@ async def get_registration_tokens( def select_registration_tokens_txn( txn: LoggingTransaction, now: int, valid: Optional[bool] - ) -> List[Tuple[str, Optional[int], int, int, Optional[int]]]: + ) -> list[tuple[str, Optional[int], int, int, Optional[int]]]: if valid is None: # Return all tokens regardless of validity txn.execute( @@ -1765,7 +1765,7 @@ def select_registration_tokens_txn( txn.execute(sql, [now]) return cast( - List[Tuple[str, Optional[int], int, int, Optional[int]]], txn.fetchall() + list[tuple[str, Optional[int], int, int, Optional[int]]], txn.fetchall() ) return await self.db_pool.runInteraction( @@ -1775,7 +1775,7 @@ def select_registration_tokens_txn( valid, ) - async def get_one_registration_token(self, token: str) -> Optional[Dict[str, Any]]: + async def get_one_registration_token(self, token: str) -> Optional[dict[str, Any]]: """Get info about the given registration token. Used by the admin API. Args: @@ -1892,8 +1892,8 @@ def _create_registration_token_txn(txn: LoggingTransaction) -> bool: ) async def update_registration_token( - self, token: str, updatevalues: Dict[str, Optional[int]] - ) -> Optional[Dict[str, Any]]: + self, token: str, updatevalues: dict[str, Optional[int]] + ) -> Optional[dict[str, Any]]: """Update a registration token. Used by the admin API. 
Args: @@ -1909,7 +1909,7 @@ async def update_registration_token( def _update_registration_token_txn( txn: LoggingTransaction, - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: try: self.db_pool.simple_update_one_txn( txn, @@ -2457,7 +2457,7 @@ async def user_delete_access_tokens( user_id: str, except_token_id: Optional[int] = None, device_id: Optional[str] = None, - ) -> List[Tuple[str, int, Optional[str]]]: + ) -> list[tuple[str, int, Optional[str]]]: """ Invalidate access and refresh tokens belonging to a user @@ -2471,14 +2471,14 @@ async def user_delete_access_tokens( A tuple of (token, token id, device id) for each of the deleted tokens """ - def f(txn: LoggingTransaction) -> List[Tuple[str, int, Optional[str]]]: + def f(txn: LoggingTransaction) -> list[tuple[str, int, Optional[str]]]: keyvalues = {"user_id": user_id} if device_id is not None: keyvalues["device_id"] = device_id items = keyvalues.items() where_clause = " AND ".join(k + " = ?" for k, _ in items) - values: List[Union[str, int]] = [v for _, v in items] + values: list[Union[str, int]] = [v for _, v in items] # Conveniently, refresh_tokens and access_tokens both use the user_id and device_id fields. Only caveat # is the `except_token_id` param that is tricky to get right, so for now we're just using the same where # clause and values before we handle that. This seems to be only used in the "set password" handler. 
@@ -2517,7 +2517,7 @@ async def user_delete_access_tokens_for_devices( self, user_id: str, device_ids: StrCollection, - ) -> List[Tuple[str, int, Optional[str]]]: + ) -> list[tuple[str, int, Optional[str]]]: """ Invalidate access and refresh tokens belonging to a user @@ -2530,7 +2530,7 @@ async def user_delete_access_tokens_for_devices( def user_delete_access_tokens_for_devices_txn( txn: LoggingTransaction, batch_device_ids: StrCollection - ) -> List[Tuple[str, int, Optional[str]]]: + ) -> list[tuple[str, int, Optional[str]]]: self.db_pool.simple_delete_many_txn( txn, table="refresh_tokens", @@ -2686,7 +2686,7 @@ async def _background_update_set_deactivated_flag( def _background_update_set_deactivated_flag_txn( txn: LoggingTransaction, - ) -> Tuple[bool, int]: + ) -> tuple[bool, int]: txn.execute( """ SELECT diff --git a/synapse/storage/databases/main/relations.py b/synapse/storage/databases/main/relations.py index ea746e0511..529102c245 100644 --- a/synapse/storage/databases/main/relations.py +++ b/synapse/storage/databases/main/relations.py @@ -22,15 +22,10 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, - FrozenSet, Iterable, - List, Mapping, Optional, Sequence, - Set, - Tuple, Union, cast, ) @@ -179,7 +174,7 @@ async def get_relations_for_event( from_token: Optional[StreamToken] = None, to_token: Optional[StreamToken] = None, recurse: bool = False, - ) -> Tuple[Sequence[_RelatedEvent], Optional[StreamToken]]: + ) -> tuple[Sequence[_RelatedEvent], Optional[StreamToken]]: """Get a list of relations for an event, ordered by topological ordering. 
Args: @@ -209,7 +204,7 @@ async def get_relations_for_event( assert limit >= 0 where_clause = ["room_id = ?"] - where_args: List[Union[str, int]] = [room_id] + where_args: list[Union[str, int]] = [room_id] is_redacted = event.internal_metadata.is_redacted() if relation_type is not None: @@ -281,14 +276,14 @@ async def get_relations_for_event( def _get_recent_references_for_event_txn( txn: LoggingTransaction, - ) -> Tuple[List[_RelatedEvent], Optional[StreamToken]]: + ) -> tuple[list[_RelatedEvent], Optional[StreamToken]]: txn.execute(sql, [event.event_id] + where_args + [limit + 1]) events = [] - topo_orderings: List[int] = [] - stream_orderings: List[int] = [] + topo_orderings: list[int] = [] + stream_orderings: list[int] = [] for event_id, relation_type, sender, topo_ordering, stream_ordering in cast( - List[Tuple[str, str, str, int, int]], txn + list[tuple[str, str, str, int, int]], txn ): # Do not include edits for redacted events as they leak event # content. @@ -329,8 +324,8 @@ def _get_recent_references_for_event_txn( async def get_all_relations_for_event_with_types( self, event_id: str, - relation_types: List[str], - ) -> List[str]: + relation_types: list[str], + ) -> list[str]: """Get the event IDs of all events that have a relation to the given event with one of the given relation types. @@ -345,9 +340,9 @@ async def get_all_relations_for_event_with_types( def get_all_relation_ids_for_event_with_types_txn( txn: LoggingTransaction, - ) -> List[str]: + ) -> list[str]: rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn=txn, table="event_relations", @@ -368,7 +363,7 @@ def get_all_relation_ids_for_event_with_types_txn( async def get_all_relations_for_event( self, event_id: str, - ) -> List[str]: + ) -> list[str]: """Get the event IDs of all events that have a relation to the given event. 
Args: @@ -380,9 +375,9 @@ async def get_all_relations_for_event( def get_all_relation_ids_for_event_txn( txn: LoggingTransaction, - ) -> List[str]: + ) -> list[str]: rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_list_txn( txn=txn, table="event_relations", @@ -462,7 +457,7 @@ async def event_is_target_of_relation(self, parent_id: str) -> bool: return result is not None @cached() # type: ignore[synapse-@cached-mutable] - async def get_references_for_event(self, event_id: str) -> List[JsonDict]: + async def get_references_for_event(self, event_id: str) -> list[JsonDict]: raise NotImplementedError() @cachedList(cached_method_name="get_references_for_event", list_name="event_ids") @@ -498,12 +493,12 @@ async def get_references_for_events( def _get_references_for_events_txn( txn: LoggingTransaction, - ) -> Mapping[str, List[_RelatedEvent]]: + ) -> Mapping[str, list[_RelatedEvent]]: txn.execute(sql, args) - result: Dict[str, List[_RelatedEvent]] = {} + result: dict[str, list[_RelatedEvent]] = {} for relates_to_id, event_id, sender in cast( - List[Tuple[str, str, str]], txn + list[tuple[str, str, str]], txn ): result.setdefault(relates_to_id, []).append( _RelatedEvent(event_id, sender) @@ -578,14 +573,14 @@ async def get_applicable_edits( ORDER by edit.origin_server_ts, edit.event_id """ - def _get_applicable_edits_txn(txn: LoggingTransaction) -> Dict[str, str]: + def _get_applicable_edits_txn(txn: LoggingTransaction) -> dict[str, str]: clause, args = make_in_list_sql_clause( txn.database_engine, "relates_to_id", event_ids ) args.append(RelationTypes.REPLACE) txn.execute(sql % (clause,), args) - return dict(cast(Iterable[Tuple[str, str]], txn.fetchall())) + return dict(cast(Iterable[tuple[str, str]], txn.fetchall())) edit_ids = await self.db_pool.runInteraction( "get_applicable_edits", _get_applicable_edits_txn @@ -603,14 +598,14 @@ def _get_applicable_edits_txn(txn: LoggingTransaction) -> Dict[str, str]: } @cached() # type: 
ignore[synapse-@cached-mutable] - def get_thread_summary(self, event_id: str) -> Optional[Tuple[int, EventBase]]: + def get_thread_summary(self, event_id: str) -> Optional[tuple[int, EventBase]]: raise NotImplementedError() # TODO: This returns a mutable object, which is generally bad. @cachedList(cached_method_name="get_thread_summary", list_name="event_ids") # type: ignore[synapse-@cached-mutable] async def get_thread_summaries( self, event_ids: Collection[str] - ) -> Mapping[str, Optional[Tuple[int, EventBase]]]: + ) -> Mapping[str, Optional[tuple[int, EventBase]]]: """Get the number of threaded replies and the latest reply (if any) for the given events. Args: @@ -627,7 +622,7 @@ async def get_thread_summaries( def _get_thread_summaries_txn( txn: LoggingTransaction, - ) -> Tuple[Dict[str, int], Dict[str, str]]: + ) -> tuple[dict[str, int], dict[str, str]]: # Fetch the count of threaded events and the latest event ID. # TODO Should this only allow m.room.message events. if isinstance(self.database_engine, PostgresEngine): @@ -698,7 +693,7 @@ def _get_thread_summaries_txn( args.append(RelationTypes.THREAD) txn.execute(sql % (clause,), args) - counts = dict(cast(List[Tuple[str, int]], txn.fetchall())) + counts = dict(cast(list[tuple[str, int]], txn.fetchall())) return counts, latest_event_ids @@ -726,8 +721,8 @@ def _get_thread_summaries_txn( async def get_threaded_messages_per_user( self, event_ids: Collection[str], - users: FrozenSet[str] = frozenset(), - ) -> Dict[Tuple[str, str], int]: + users: frozenset[str] = frozenset(), + ) -> dict[tuple[str, str], int]: """Get the number of threaded replies for a set of users. 
This is used, in conjunction with get_thread_summaries, to calculate an @@ -759,7 +754,7 @@ async def get_threaded_messages_per_user( def _get_threaded_messages_per_user_txn( txn: LoggingTransaction, - ) -> Dict[Tuple[str, str], int]: + ) -> dict[tuple[str, str], int]: users_sql, users_args = make_in_list_sql_clause( self.database_engine, "child.sender", users ) @@ -799,7 +794,7 @@ async def get_threads_participated( user participated in that event's thread, otherwise false. """ - def _get_threads_participated_txn(txn: LoggingTransaction) -> Set[str]: + def _get_threads_participated_txn(txn: LoggingTransaction) -> set[str]: # Fetch whether the requester has participated or not. sql = """ SELECT DISTINCT relates_to_id @@ -830,10 +825,10 @@ def _get_threads_participated_txn(txn: LoggingTransaction) -> Set[str]: async def events_have_relations( self, - parent_ids: List[str], - relation_senders: Optional[List[str]], - relation_types: Optional[List[str]], - ) -> List[str]: + parent_ids: list[str], + relation_senders: Optional[list[str]], + relation_types: Optional[list[str]], + ) -> list[str]: """Check which events have a relationship from the given senders of the given types. @@ -856,8 +851,8 @@ async def events_have_relations( %s; """ - def _get_if_events_have_relations(txn: LoggingTransaction) -> List[str]: - clauses: List[str] = [] + def _get_if_events_have_relations(txn: LoggingTransaction) -> list[str]: + clauses: list[str] = [] clause, args = make_in_list_sql_clause( txn.database_engine, "relates_to_id", parent_ids ) @@ -936,7 +931,7 @@ async def get_threads( room_id: str, limit: int = 5, from_token: Optional[ThreadsNextBatch] = None, - ) -> Tuple[Sequence[str], Optional[ThreadsNextBatch]]: + ) -> tuple[Sequence[str], Optional[ThreadsNextBatch]]: """Get a list of thread IDs, ordered by topological ordering of their latest reply. 
@@ -976,10 +971,10 @@ async def get_threads( def _get_threads_txn( txn: LoggingTransaction, - ) -> Tuple[List[str], Optional[ThreadsNextBatch]]: + ) -> tuple[list[str], Optional[ThreadsNextBatch]]: txn.execute(sql, (room_id, *pagination_args, limit + 1)) - rows = cast(List[Tuple[str, int, int]], txn.fetchall()) + rows = cast(list[tuple[str, int, int]], txn.fetchall()) thread_ids = [r[0] for r in rows] # If there are more events, generate the next pagination key from the diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py index 9f03c084a5..7a294de558 100644 --- a/synapse/storage/databases/main/room.py +++ b/synapse/storage/databases/main/room.py @@ -27,12 +27,8 @@ AbstractSet, Any, Collection, - Dict, - List, Mapping, Optional, - Set, - Tuple, Union, cast, ) @@ -139,7 +135,7 @@ class RoomSortOrder(Enum): @attr.s(slots=True, frozen=True, auto_attribs=True) class PartialStateResyncInfo: joined_via: Optional[str] - servers_in_room: Set[str] = attr.ib(factory=set) + servers_in_room: set[str] = attr.ib(factory=set) class RoomWorkerStore(CacheInvalidationWorkerStore): @@ -209,7 +205,7 @@ async def store_room( logger.error("store_room with room_id=%s failed: %s", room_id, e) raise StoreError(500, "Problem creating room.") - async def get_room(self, room_id: str) -> Optional[Tuple[bool, bool]]: + async def get_room(self, room_id: str) -> Optional[tuple[bool, bool]]: """Retrieve a room. Args: @@ -222,7 +218,7 @@ async def get_room(self, room_id: str) -> Optional[Tuple[bool, bool]]: or None if the room is unknown. 
""" row = cast( - Optional[Tuple[Optional[Union[int, bool]], Optional[Union[int, bool]]]], + Optional[tuple[Optional[Union[int, bool]], Optional[Union[int, bool]]]], await self.db_pool.simple_select_one( table="rooms", keyvalues={"room_id": room_id}, @@ -287,7 +283,7 @@ def get_room_with_stats_txn( "get_room_with_stats", get_room_with_stats_txn, room_id ) - async def get_public_room_ids(self) -> List[str]: + async def get_public_room_ids(self) -> list[str]: return await self.db_pool.simple_select_onecol( table="rooms", keyvalues={"is_public": True}, @@ -296,8 +292,8 @@ async def get_public_room_ids(self) -> List[str]: ) def _construct_room_type_where_clause( - self, room_types: Union[List[Union[str, None]], None] - ) -> Tuple[Union[str, None], list]: + self, room_types: Union[list[Union[str, None]], None] + ) -> tuple[Union[str, None], list]: if not room_types: return None, [] @@ -387,7 +383,7 @@ def _count_public_rooms_txn(txn: LoggingTransaction) -> int: """ txn.execute(sql, query_args) - return cast(Tuple[int], txn.fetchone())[0] + return cast(tuple[int], txn.fetchone())[0] return await self.db_pool.runInteraction( "count_public_rooms", _count_public_rooms_txn @@ -399,7 +395,7 @@ async def get_room_count(self) -> int: def f(txn: LoggingTransaction) -> int: sql = "SELECT count(*) FROM rooms" txn.execute(sql) - row = cast(Tuple[int], txn.fetchone()) + row = cast(tuple[int], txn.fetchone()) return row[0] return await self.db_pool.runInteraction("get_rooms", f) @@ -409,10 +405,10 @@ async def get_largest_public_rooms( network_tuple: Optional[ThirdPartyInstanceID], search_filter: Optional[dict], limit: Optional[int], - bounds: Optional[Tuple[int, str]], + bounds: Optional[tuple[int, str]], forwards: bool, ignore_non_federatable: bool = False, - ) -> List[LargestRoomStats]: + ) -> list[LargestRoomStats]: """Gets the largest public rooms (where largest is in terms of joined members, as tracked in the statistics table). 
@@ -433,7 +429,7 @@ async def get_largest_public_rooms( """ where_clauses = [] - query_args: List[Union[str, int]] = [] + query_args: list[Union[str, int]] = [] if network_tuple: if network_tuple.appservice_id: @@ -549,7 +545,7 @@ async def get_largest_public_rooms( def _get_largest_public_rooms_txn( txn: LoggingTransaction, - ) -> List[LargestRoomStats]: + ) -> list[LargestRoomStats]: txn.execute(sql, query_args) results = [ @@ -611,7 +607,7 @@ async def get_rooms_paginate( search_term: Optional[str], public_rooms: Optional[bool], empty_rooms: Optional[bool], - ) -> Tuple[List[Dict[str, Any]], int]: + ) -> tuple[list[dict[str, Any]], int]: """Function to retrieve a paginated list of rooms as json. Args: @@ -760,7 +756,7 @@ async def get_rooms_paginate( def _get_rooms_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[List[Dict[str, Any]], int]: + ) -> tuple[list[dict[str, Any]], int]: # Add the search term into the WHERE clause # and execute the data query txn.execute(info_sql, where_args + [limit, start]) @@ -795,7 +791,7 @@ def _get_rooms_paginate_txn( # Add the search term into the WHERE clause if present txn.execute(count_sql, where_args) - room_count = cast(Tuple[int], txn.fetchone()) + room_count = cast(tuple[int], txn.fetchone()) return rooms, room_count[0] return await self.db_pool.runInteraction( @@ -909,7 +905,7 @@ async def get_retention_policy_for_room(self, room_id: str) -> RetentionPolicy: def get_retention_policy_for_room_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[Optional[int], Optional[int]]]: + ) -> Optional[tuple[Optional[int], Optional[int]]]: txn.execute( """ SELECT min_lifetime, max_lifetime FROM room_retention @@ -919,7 +915,7 @@ def get_retention_policy_for_room_txn( (room_id,), ) - return cast(Optional[Tuple[Optional[int], Optional[int]]], txn.fetchone()) + return cast(Optional[tuple[Optional[int], Optional[int]]], txn.fetchone()) ret = await self.db_pool.runInteraction( "get_retention_policy_for_room", @@ -951,7 +947,7 @@ def 
get_retention_policy_for_room_txn( max_lifetime=max_lifetime, ) - async def get_media_mxcs_in_room(self, room_id: str) -> Tuple[List[str], List[str]]: + async def get_media_mxcs_in_room(self, room_id: str) -> tuple[list[str], list[str]]: """Retrieves all the local and remote media MXC URIs in a given room Args: @@ -963,7 +959,7 @@ async def get_media_mxcs_in_room(self, room_id: str) -> Tuple[List[str], List[st def _get_media_mxcs_in_room_txn( txn: LoggingTransaction, - ) -> Tuple[List[str], List[str]]: + ) -> tuple[list[str], list[str]]: local_mxcs, remote_mxcs = self._get_media_mxcs_in_room_txn(txn, room_id) local_media_mxcs = [] remote_media_mxcs = [] @@ -1001,7 +997,7 @@ def _quarantine_media_in_room_txn(txn: LoggingTransaction) -> int: def _get_media_mxcs_in_room_txn( self, txn: LoggingTransaction, room_id: str - ) -> Tuple[List[str], List[Tuple[str, str]]]: + ) -> tuple[list[str], list[tuple[str, str]]]: """Retrieves all the local and remote media MXC URIs in a given room Returns: @@ -1107,7 +1103,7 @@ def _quarantine_media_by_user_txn(txn: LoggingTransaction) -> int: def _get_media_ids_by_user_txn( self, txn: LoggingTransaction, user_id: str, filter_quarantined: bool = True - ) -> List[str]: + ) -> list[str]: """Retrieves local media IDs by a given user Args: @@ -1137,8 +1133,8 @@ def _get_media_ids_by_user_txn( def _quarantine_local_media_txn( self, txn: LoggingTransaction, - hashes: Set[str], - media_ids: Set[str], + hashes: set[str], + media_ids: set[str], quarantined_by: Optional[str], ) -> int: """Quarantine and unquarantine local media items. 
@@ -1192,8 +1188,8 @@ def _quarantine_local_media_txn( def _quarantine_remote_media_txn( self, txn: LoggingTransaction, - hashes: Set[str], - media: Set[Tuple[str, str]], + hashes: set[str], + media: set[tuple[str, str]], quarantined_by: Optional[str], ) -> int: """Quarantine and unquarantine remote items @@ -1240,8 +1236,8 @@ def _quarantine_remote_media_txn( def _quarantine_media_txn( self, txn: LoggingTransaction, - local_mxcs: List[str], - remote_mxcs: List[Tuple[str, str]], + local_mxcs: list[str], + remote_mxcs: list[tuple[str, str]], quarantined_by: Optional[str], ) -> int: """Quarantine and unquarantine local and remote media items @@ -1346,7 +1342,7 @@ async def unblock_room(self, room_id: str) -> None: async def get_rooms_for_retention_period_in_range( self, min_ms: Optional[int], max_ms: Optional[int], include_null: bool = False - ) -> Dict[str, RetentionPolicy]: + ) -> dict[str, RetentionPolicy]: """Retrieves all of the rooms within the given retention range. Optionally includes the rooms which don't have a retention policy. @@ -1368,7 +1364,7 @@ async def get_rooms_for_retention_period_in_range( def get_rooms_for_retention_period_in_range_txn( txn: LoggingTransaction, - ) -> Dict[str, RetentionPolicy]: + ) -> dict[str, RetentionPolicy]: range_conditions = [] args = [] @@ -1464,10 +1460,10 @@ async def get_partial_state_room_resync_info( A dictionary of rooms with partial state, with room IDs as keys and lists of servers in rooms as values. 
""" - room_servers: Dict[str, PartialStateResyncInfo] = {} + room_servers: dict[str, PartialStateResyncInfo] = {} rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="partial_state_rooms", keyvalues={}, @@ -1480,7 +1476,7 @@ async def get_partial_state_room_resync_info( room_servers[room_id] = PartialStateResyncInfo(joined_via=joined_via) rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( "partial_state_rooms_servers", keyvalues=None, @@ -1533,7 +1529,7 @@ async def is_partial_state_room_batched( """ rows = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_many_batch( table="partial_state_rooms", column="room_id", @@ -1571,7 +1567,7 @@ def _get_partial_rooms_for_user_txn( async def get_join_event_id_and_device_lists_stream_id_for_partial_state( self, room_id: str - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Get the event ID of the initial join that started the partial join, and the device list stream ID at the point we started the partial join. @@ -1583,7 +1579,7 @@ async def get_join_event_id_and_device_lists_stream_id_for_partial_state( """ return cast( - Tuple[str, int], + tuple[str, int], await self.db_pool.simple_select_one( table="partial_state_rooms", keyvalues={"room_id": room_id}, @@ -1602,7 +1598,7 @@ def get_un_partial_stated_rooms_id_generator(self) -> MultiWriterIdGenerator: async def get_un_partial_stated_rooms_between( self, last_id: int, current_id: int, room_ids: Collection[str] - ) -> Set[str]: + ) -> set[str]: """Get all rooms that got un partial stated between `last_id` exclusive and `current_id` inclusive. @@ -1615,7 +1611,7 @@ async def get_un_partial_stated_rooms_between( def _get_un_partial_stated_rooms_between_txn( txn: LoggingTransaction, - ) -> Set[str]: + ) -> set[str]: sql = """ SELECT DISTINCT room_id FROM un_partial_stated_room_stream WHERE ? < stream_id AND stream_id <= ? 
AND @@ -1636,7 +1632,7 @@ def _get_un_partial_stated_rooms_between_txn( async def get_un_partial_stated_rooms_from_stream( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, Tuple[str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str]]], int, bool]: """Get updates for un partial stated rooms replication stream. Args: @@ -1663,7 +1659,7 @@ async def get_un_partial_stated_rooms_from_stream( def get_un_partial_stated_rooms_from_stream_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, Tuple[str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str]]], int, bool]: sql = """ SELECT stream_id, room_id FROM un_partial_stated_room_stream @@ -1686,7 +1682,7 @@ def get_un_partial_stated_rooms_from_stream_txn( get_un_partial_stated_rooms_from_stream_txn, ) - async def get_event_report(self, report_id: int) -> Optional[Dict[str, Any]]: + async def get_event_report(self, report_id: int) -> Optional[dict[str, Any]]: """Retrieve an event report Args: @@ -1698,7 +1694,7 @@ async def get_event_report(self, report_id: int) -> Optional[Dict[str, Any]]: def _get_event_report_txn( txn: LoggingTransaction, report_id: int - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: sql = """ SELECT er.id, @@ -1755,7 +1751,7 @@ async def get_event_reports_paginate( user_id: Optional[str] = None, room_id: Optional[str] = None, event_sender_user_id: Optional[str] = None, - ) -> Tuple[List[Dict[str, Any]], int]: + ) -> tuple[list[dict[str, Any]], int]: """Retrieve a paginated list of event reports Args: @@ -1775,9 +1771,9 @@ async def get_event_reports_paginate( def _get_event_reports_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[List[Dict[str, Any]], int]: + ) -> tuple[list[dict[str, Any]], int]: filters = [] - args: List[object] = [] + args: list[object] = [] if user_id: filters.append("er.user_id LIKE ?") @@ -1810,7 +1806,7 @@ def _get_event_reports_paginate_txn( {} """.format(where_clause) txn.execute(sql, args) - 
count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] sql = """ SELECT @@ -2214,7 +2210,7 @@ async def _remove_tombstoned_rooms_from_directory( last_room = progress.get("room_id", "") - def _get_rooms(txn: LoggingTransaction) -> List[str]: + def _get_rooms(txn: LoggingTransaction) -> list[str]: txn.execute( """ SELECT room_id @@ -2460,7 +2456,7 @@ def __init__( self._instance_name = hs.get_instance_name() async def upsert_room_on_join( - self, room_id: str, room_version: RoomVersion, state_events: List[EventBase] + self, room_id: str, room_version: RoomVersion, state_events: list[EventBase] ) -> None: """Ensure that the room is stored in the table diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index 65caf4b1ea..1e22ab4e6d 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -24,15 +24,10 @@ TYPE_CHECKING, AbstractSet, Collection, - Dict, - FrozenSet, Iterable, - List, Mapping, Optional, Sequence, - Set, - Tuple, Union, cast, ) @@ -187,7 +182,7 @@ async def get_users_in_room(self, room_id: str) -> Sequence[str]: desc="get_users_in_room", ) - def get_users_in_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[str]: + def get_users_in_room_txn(self, txn: LoggingTransaction, room_id: str) -> list[str]: """Returns a list of users in the room.""" return self.db_pool.simple_select_onecol_txn( @@ -242,7 +237,7 @@ async def get_subset_users_in_room_with_profiles( def _get_subset_users_in_room_with_profiles( txn: LoggingTransaction, - ) -> Dict[str, ProfileInfo]: + ) -> dict[str, ProfileInfo]: clause, ids = make_in_list_sql_clause( self.database_engine, "c.state_key", user_ids ) @@ -287,7 +282,7 @@ async def get_users_in_room_with_profiles( def _get_users_in_room_with_profiles( txn: LoggingTransaction, - ) -> Dict[str, ProfileInfo]: + ) -> dict[str, ProfileInfo]: sql = """ SELECT state_key, display_name, 
avatar_url FROM room_memberships as m INNER JOIN current_state_events as c @@ -328,14 +323,14 @@ async def get_room_summary(self, room_id: str) -> Mapping[str, MemberSummary]: def _get_room_summary_txn( txn: LoggingTransaction, - ) -> Dict[str, MemberSummary]: + ) -> dict[str, MemberSummary]: # first get counts. # We do this all in one transaction to keep the cache small. # FIXME: get rid of this when we have room_stats counts = self._get_member_counts_txn(txn, room_id) - res: Dict[str, MemberSummary] = {} + res: dict[str, MemberSummary] = {} for membership, count in counts.items(): res.setdefault(membership, MemberSummary([], count)) @@ -392,7 +387,7 @@ async def get_member_counts(self, room_id: str) -> Mapping[str, int]: def _get_member_counts_txn( self, txn: LoggingTransaction, room_id: str - ) -> Dict[str, int]: + ) -> dict[str, int]: """Get a mapping of number of users by membership""" # Note, rejected events will have a null membership field, so @@ -473,7 +468,7 @@ async def get_rooms_for_local_user_where_membership_is( user_id: str, membership_list: Collection[str], excluded_rooms: StrCollection = (), - ) -> List[RoomsForUser]: + ) -> list[RoomsForUser]: """Get all the rooms for this *local* user where the membership for this user matches one in the membership list. @@ -536,8 +531,8 @@ def _get_rooms_for_local_user_where_membership_is_txn( self, txn: LoggingTransaction, user_id: str, - membership_list: List[str], - ) -> List[RoomsForUser]: + membership_list: list[str], + ) -> list[RoomsForUser]: """Get all the rooms for this *local* user where the membership for this user matches one in the membership list. @@ -603,12 +598,12 @@ async def get_local_users_in_room(self, room_id: str) -> Sequence[str]: async def get_local_users_related_to_room( self, room_id: str - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: """ Retrieves a list of the current roommembers who are local to the server and their membership status. 
""" return cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="local_current_membership", keyvalues={"room_id": room_id}, @@ -660,7 +655,7 @@ async def is_server_notice_room(self, room_id: str) -> bool: async def get_local_current_membership_for_user_in_room( self, user_id: str, room_id: str - ) -> Tuple[Optional[str], Optional[str]]: + ) -> tuple[Optional[str], Optional[str]]: """Retrieve the current local membership state and event ID for a user in a room. Args: @@ -677,7 +672,7 @@ async def get_local_current_membership_for_user_in_room( raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.BAD_JSON) results = cast( - Optional[Tuple[str, str]], + Optional[tuple[str, str]], await self.db_pool.simple_select_one( "local_current_membership", {"room_id": room_id, "user_id": user_id}, @@ -693,7 +688,7 @@ async def get_local_current_membership_for_user_in_room( async def get_users_server_still_shares_room_with( self, user_ids: Collection[str] - ) -> Set[str]: + ) -> set[str]: """Given a list of users return the set that the server still share a room with. """ @@ -711,7 +706,7 @@ def get_users_server_still_shares_room_with_txn( self, txn: LoggingTransaction, user_ids: Collection[str], - ) -> Set[str]: + ) -> set[str]: if not user_ids: return set() @@ -734,7 +729,7 @@ def get_users_server_still_shares_room_with_txn( async def get_rooms_user_currently_banned_from( self, user_id: str - ) -> FrozenSet[str]: + ) -> frozenset[str]: """Returns a set of room_ids the user is currently banned from. If a remote user only returns rooms this server is currently @@ -754,7 +749,7 @@ async def get_rooms_user_currently_banned_from( return frozenset(room_ids) @cached(max_entries=500000, iterable=True) - async def get_rooms_for_user(self, user_id: str) -> FrozenSet[str]: + async def get_rooms_for_user(self, user_id: str) -> frozenset[str]: """Returns a set of room_ids the user is currently joined to. 
If a remote user only returns rooms this server is currently @@ -780,7 +775,7 @@ async def get_rooms_for_user(self, user_id: str) -> FrozenSet[str]: ) async def _get_rooms_for_users( self, user_ids: Collection[str] - ) -> Mapping[str, FrozenSet[str]]: + ) -> Mapping[str, frozenset[str]]: """A batched version of `get_rooms_for_user`. Returns: @@ -788,7 +783,7 @@ async def _get_rooms_for_users( """ rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_many_batch( table="current_state_events", column="state_key", @@ -805,7 +800,7 @@ async def _get_rooms_for_users( ), ) - user_rooms: Dict[str, Set[str]] = {user_id: set() for user_id in user_ids} + user_rooms: dict[str, set[str]] = {user_id: set() for user_id in user_ids} for state_key, room_id in rows: user_rooms[state_key].add(room_id) @@ -814,11 +809,11 @@ async def _get_rooms_for_users( async def get_rooms_for_users( self, user_ids: Collection[str] - ) -> Dict[str, FrozenSet[str]]: + ) -> dict[str, frozenset[str]]: """A batched wrapper around `_get_rooms_for_users`, to prevent locking other calls to `get_rooms_for_user` for large user lists. """ - all_user_rooms: Dict[str, FrozenSet[str]] = {} + all_user_rooms: dict[str, frozenset[str]] = {} # 250 users is pretty arbitrary but the data can be quite large if users # are in many rooms. 
@@ -848,7 +843,7 @@ async def _do_users_share_a_room( def do_users_share_a_room_txn( txn: LoggingTransaction, user_ids: Collection[str] - ) -> Dict[str, bool]: + ) -> dict[str, bool]: clause, args = make_in_list_sql_clause( self.database_engine, "state_key", user_ids ) @@ -882,7 +877,7 @@ def do_users_share_a_room_txn( async def do_users_share_a_room( self, user_id: str, other_user_ids: Collection[str] - ) -> Set[str]: + ) -> set[str]: """Return the set of users who share a room with the first users""" user_dict = await self._do_users_share_a_room(user_id, other_user_ids) @@ -911,7 +906,7 @@ async def _do_users_share_a_room_joined_or_invited( def do_users_share_a_room_joined_or_invited_txn( txn: LoggingTransaction, user_ids: Collection[str] - ) -> Dict[str, bool]: + ) -> dict[str, bool]: clause, args = make_in_list_sql_clause( self.database_engine, "state_key", user_ids ) @@ -947,7 +942,7 @@ def do_users_share_a_room_joined_or_invited_txn( async def do_users_share_a_room_joined_or_invited( self, user_id: str, other_user_ids: Collection[str] - ) -> Set[str]: + ) -> set[str]: """Return the set of users who share a room with the first users via being either joined or invited""" user_dict = await self._do_users_share_a_room_joined_or_invited( @@ -956,11 +951,11 @@ async def do_users_share_a_room_joined_or_invited( return {u for u, share_room in user_dict.items() if share_room} - async def get_users_who_share_room_with_user(self, user_id: str) -> Set[str]: + async def get_users_who_share_room_with_user(self, user_id: str) -> set[str]: """Returns the set of users who share a room with `user_id`""" room_ids = await self.get_rooms_for_user(user_id) - user_who_share_room: Set[str] = set() + user_who_share_room: set[str] = set() for room_id in room_ids: user_ids = await self.get_users_in_room(room_id) user_who_share_room.update(user_ids) @@ -969,8 +964,8 @@ async def get_users_who_share_room_with_user(self, user_id: str) -> Set[str]: @cached(cache_context=True, 
iterable=True) async def get_mutual_rooms_between_users( - self, user_ids: FrozenSet[str], cache_context: _CacheContext - ) -> FrozenSet[str]: + self, user_ids: frozenset[str], cache_context: _CacheContext + ) -> frozenset[str]: """ Returns the set of rooms that all users in `user_ids` share. @@ -979,7 +974,7 @@ async def get_mutual_rooms_between_users( overlapping joined rooms for. cache_context """ - shared_room_ids: Optional[FrozenSet[str]] = None + shared_room_ids: Optional[frozenset[str]] = None for user_id in user_ids: room_ids = await self.get_rooms_for_user( user_id, on_invalidate=cache_context.invalidate @@ -993,7 +988,7 @@ async def get_mutual_rooms_between_users( async def get_joined_user_ids_from_state( self, room_id: str, state: StateMap[str] - ) -> Set[str]: + ) -> set[str]: """ For a given set of state IDs, get a set of user IDs in the room. @@ -1050,7 +1045,7 @@ async def get_joined_user_ids_from_state( ) def _get_user_id_from_membership_event_id( self, event_id: str - ) -> Optional[Tuple[str, ProfileInfo]]: + ) -> Optional[tuple[str, ProfileInfo]]: raise NotImplementedError() @cachedList( @@ -1071,7 +1066,7 @@ async def _get_user_ids_from_membership_event_ids( """ rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_many_batch( table="room_memberships", column="event_id", @@ -1148,7 +1143,7 @@ async def get_current_hosts_in_room(self, room_id: str) -> AbstractSet[str]: # For PostgreSQL we can use a regex to pull out the domains from the # joined users in `current_state_events` via regex. 
- def get_current_hosts_in_room_txn(txn: LoggingTransaction) -> Set[str]: + def get_current_hosts_in_room_txn(txn: LoggingTransaction) -> set[str]: sql = """ SELECT DISTINCT substring(state_key FROM '@[^:]*:(.*)$') FROM current_state_events @@ -1165,7 +1160,7 @@ def get_current_hosts_in_room_txn(txn: LoggingTransaction) -> Set[str]: ) @cached(iterable=True, max_entries=10000) - async def get_current_hosts_in_room_ordered(self, room_id: str) -> Tuple[str, ...]: + async def get_current_hosts_in_room_ordered(self, room_id: str) -> tuple[str, ...]: """ Get current hosts in room based on current state. @@ -1201,7 +1196,7 @@ async def get_current_hosts_in_room_ordered(self, room_id: str) -> Tuple[str, .. def get_current_hosts_in_room_ordered_txn( txn: LoggingTransaction, - ) -> Tuple[str, ...]: + ) -> tuple[str, ...]: # Returns a list of servers currently joined in the room sorted by # longest in the room first (aka. with the lowest depth). The # heuristic of sorting by servers who have been in the room the @@ -1245,7 +1240,7 @@ async def _get_approximate_current_memberships_in_room( """ rows = cast( - List[Tuple[str, Optional[str]]], + list[tuple[str, Optional[str]]], await self.db_pool.simple_select_list( "current_state_events", keyvalues={"room_id": room_id}, @@ -1297,7 +1292,7 @@ async def get_forgotten_rooms_for_user(self, user_id: str) -> AbstractSet[str]: The forgotten rooms. """ - def _get_forgotten_rooms_for_user_txn(txn: LoggingTransaction) -> Set[str]: + def _get_forgotten_rooms_for_user_txn(txn: LoggingTransaction) -> set[str]: # This is a slightly convoluted query that first looks up all rooms # that the user has forgotten in the past, then rechecks that list # to see if any have subsequently been updated. 
This is done so that @@ -1348,7 +1343,7 @@ async def is_locally_forgotten_room(self, room_id: str) -> bool: # If any rows still exist it means someone has not forgotten this room yet return not rows[0][0] - async def get_rooms_user_has_been_in(self, user_id: str) -> Set[str]: + async def get_rooms_user_has_been_in(self, user_id: str) -> set[str]: """Get all rooms that the user has ever been in. Args: @@ -1369,7 +1364,7 @@ async def get_rooms_user_has_been_in(self, user_id: str) -> Set[str]: async def get_membership_event_ids_for_user( self, user_id: str, room_id: str - ) -> Set[str]: + ) -> set[str]: """Get all event_ids for the given user and room. Args: @@ -1409,7 +1404,7 @@ async def get_membership_from_event_ids( """ rows = cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], await self.db_pool.simple_select_many_batch( table="room_memberships", column="event_id", @@ -1533,7 +1528,7 @@ async def get_sliding_sync_rooms_for_user_from_membership_snapshots( def _txn( txn: LoggingTransaction, - ) -> Dict[str, RoomsForUserSlidingSync]: + ) -> dict[str, RoomsForUserSlidingSync]: # XXX: If you use any new columns that can change (like from # `sliding_sync_joined_rooms` or `forgotten`), make sure to bust the # `get_sliding_sync_rooms_for_user_from_membership_snapshots` cache in the @@ -1582,7 +1577,7 @@ async def get_sliding_sync_self_leave_rooms_after_to_token( self, user_id: str, to_token: StreamToken, - ) -> Dict[str, RoomsForUserSlidingSync]: + ) -> dict[str, RoomsForUserSlidingSync]: """ Get all the self-leave rooms for a user after the `to_token` (outside the token range) that are potentially relevant[1] and needed to handle a sliding sync @@ -1614,7 +1609,7 @@ async def get_sliding_sync_self_leave_rooms_after_to_token( def _txn( txn: LoggingTransaction, - ) -> Dict[str, RoomsForUserSlidingSync]: + ) -> dict[str, RoomsForUserSlidingSync]: sql = """ SELECT m.room_id, m.sender, m.membership, m.membership_event_id, r.room_version, @@ -1641,7 +1636,7 @@ 
def _txn( txn.execute(sql, (user_id, min_to_token_position)) # Map from room_id to membership info - room_membership_for_user_map: Dict[str, RoomsForUserSlidingSync] = {} + room_membership_for_user_map: dict[str, RoomsForUserSlidingSync] = {} for row in txn: room_for_user = RoomsForUserSlidingSync( room_id=row[0], @@ -1728,7 +1723,7 @@ def get_sliding_sync_room_for_user_txn( async def get_sliding_sync_room_for_user_batch( self, user_id: str, room_ids: StrCollection - ) -> Dict[str, RoomsForUserSlidingSync]: + ) -> dict[str, RoomsForUserSlidingSync]: """Get the sliding sync room entry for the given user and rooms.""" if not room_ids: @@ -1736,7 +1731,7 @@ async def get_sliding_sync_room_for_user_batch( def get_sliding_sync_room_for_user_batch_txn( txn: LoggingTransaction, - ) -> Dict[str, RoomsForUserSlidingSync]: + ) -> dict[str, RoomsForUserSlidingSync]: clause, args = make_in_list_sql_clause( self.database_engine, "m.room_id", room_ids ) @@ -1779,7 +1774,7 @@ def get_sliding_sync_room_for_user_batch_txn( async def get_rooms_for_user_by_date( self, user_id: str, from_ts: int - ) -> FrozenSet[str]: + ) -> frozenset[str]: """ Fetch a list of rooms that the user has joined at or after the given timestamp, including those they subsequently have left/been banned from. @@ -1993,7 +1988,7 @@ async def _background_current_state_membership( def _background_current_state_membership_txn( txn: LoggingTransaction, last_processed_room: str - ) -> Tuple[int, bool]: + ) -> tuple[int, bool]: processed = 0 while processed < batch_size: txn.execute( @@ -2063,7 +2058,7 @@ def __init__( def extract_heroes_from_room_summary( details: Mapping[str, MemberSummary], me: str -) -> List[str]: +) -> list[str]: """Determine the users that represent a room, from the perspective of the `me` user. 
This function expects `MemberSummary.members` to already be sorted by @@ -2105,7 +2100,7 @@ class _JoinedHostsCache: """The cached data used by the `_get_joined_hosts_cache`.""" # Dict of host to the set of their users in the room at the state group. - hosts_to_joined_users: Dict[str, Set[str]] = attr.Factory(dict) + hosts_to_joined_users: dict[str, set[str]] = attr.Factory(dict) # The state group `hosts_to_joined_users` is derived from. Will be an object # if the instance is newly created or if the state is not based on a state diff --git a/synapse/storage/databases/main/search.py b/synapse/storage/databases/main/search.py index 47dfdf64e5..63489f5c27 100644 --- a/synapse/storage/databases/main/search.py +++ b/synapse/storage/databases/main/search.py @@ -28,10 +28,7 @@ Any, Collection, Iterable, - List, Optional, - Set, - Tuple, Union, cast, ) @@ -362,7 +359,7 @@ def create_index(conn: LoggingDatabaseConnection) -> None: pg, ) - def reindex_search_txn(txn: LoggingTransaction) -> Tuple[int, bool]: + def reindex_search_txn(txn: LoggingTransaction) -> tuple[int, bool]: sql = """ UPDATE event_search AS es SET stream_ordering = e.stream_ordering, origin_server_ts = e.origin_server_ts @@ -451,7 +448,7 @@ async def search_msgs( """ clauses = [] - args: List[Any] = [] + args: list[Any] = [] # Make sure we don't explode because the person is in too many rooms. # We filter the results below regardless. @@ -471,7 +468,7 @@ async def search_msgs( count_args = args count_clauses = clauses - sqlite_highlights: List[str] = [] + sqlite_highlights: list[str] = [] if isinstance(self.database_engine, PostgresEngine): search_query = search_term @@ -519,7 +516,7 @@ async def search_msgs( # List of tuples of (rank, room_id, event_id). results = cast( - List[Tuple[Union[int, float], str, str]], + list[tuple[Union[int, float], str, str]], await self.db_pool.execute("search_msgs", sql, *args), ) @@ -544,7 +541,7 @@ async def search_msgs( # List of tuples of (room_id, count). 
count_results = cast( - List[Tuple[str, int]], + list[tuple[str, int]], await self.db_pool.execute("search_rooms_count", count_sql, *count_args), ) @@ -580,7 +577,7 @@ async def search_rooms( Each match as a dictionary. """ clauses = [] - args: List[Any] = [] + args: list[Any] = [] # Make sure we don't explode because the person is in too many rooms. # We filter the results below regardless. @@ -602,7 +599,7 @@ async def search_rooms( count_args = list(args) count_clauses = list(clauses) - sqlite_highlights: List[str] = [] + sqlite_highlights: list[str] = [] if pagination_token: try: @@ -686,7 +683,7 @@ async def search_rooms( # List of tuples of (rank, room_id, event_id, origin_server_ts, stream_ordering). results = cast( - List[Tuple[Union[int, float], str, str, int, int]], + list[tuple[Union[int, float], str, str, int, int]], await self.db_pool.execute("search_rooms", sql, *args), ) @@ -711,7 +708,7 @@ async def search_rooms( # List of tuples of (room_id, count). count_results = cast( - List[Tuple[str, int]], + list[tuple[str, int]], await self.db_pool.execute("search_rooms_count", count_sql, *count_args), ) @@ -732,8 +729,8 @@ async def search_rooms( } async def _find_highlights_in_postgres( - self, search_query: str, events: List[EventBase] - ) -> Set[str]: + self, search_query: str, events: list[EventBase] + ) -> set[str]: """Given a list of events and a search term, return a list of words that match from the content of the event. @@ -748,7 +745,7 @@ async def _find_highlights_in_postgres( A set of strings. """ - def f(txn: LoggingTransaction) -> Set[str]: + def f(txn: LoggingTransaction) -> set[str]: highlight_words = set() for event in events: # As a hack we simply join values of all possible keys. 
This is @@ -811,7 +808,7 @@ def _to_postgres_options(options_dict: JsonDict) -> str: @dataclass class Phrase: - phrase: List[str] + phrase: list[str] class SearchToken(enum.Enum): @@ -821,7 +818,7 @@ class SearchToken(enum.Enum): Token = Union[str, Phrase, SearchToken] -TokenList = List[Token] +TokenList = list[Token] def _is_stop_word(word: str) -> bool: @@ -901,7 +898,7 @@ def _tokenize_query(query: str) -> TokenList: return tokens -def _tokens_to_sqlite_match_query(tokens: TokenList) -> Tuple[str, List[str]]: +def _tokens_to_sqlite_match_query(tokens: TokenList) -> tuple[str, list[str]]: """ Convert the list of tokens to a string suitable for passing to sqlite's MATCH. Assume sqlite was compiled with enhanced query syntax. @@ -934,7 +931,7 @@ def _tokens_to_sqlite_match_query(tokens: TokenList) -> Tuple[str, List[str]]: return "".join(match_query), highlights -def _parse_query_for_sqlite(search_term: str) -> Tuple[str, List[str]]: +def _parse_query_for_sqlite(search_term: str) -> tuple[str, list[str]]: """Takes a plain unicode string from the user and converts it into a form that can be passed to sqllite's matchinfo(). 
diff --git a/synapse/storage/databases/main/signatures.py b/synapse/storage/databases/main/signatures.py index ef86151e31..8072a8c741 100644 --- a/synapse/storage/databases/main/signatures.py +++ b/synapse/storage/databases/main/signatures.py @@ -19,7 +19,7 @@ # # -from typing import Collection, Dict, List, Mapping, Tuple +from typing import Collection, Mapping from unpaddedbase64 import encode_base64 @@ -59,7 +59,7 @@ async def get_event_reference_hashes( allow_rejected=True, ) - hashes: Dict[str, Dict[str, bytes]] = {} + hashes: dict[str, dict[str, bytes]] = {} for event_id in event_ids: event = events.get(event_id) if event is None: @@ -72,7 +72,7 @@ async def get_event_reference_hashes( async def add_event_hashes( self, event_ids: Collection[str] - ) -> List[Tuple[str, Dict[str, str]]]: + ) -> list[tuple[str, dict[str, str]]]: """ Args: diff --git a/synapse/storage/databases/main/sliding_sync.py b/synapse/storage/databases/main/sliding_sync.py index c0c5087b13..62463c0259 100644 --- a/synapse/storage/databases/main/sliding_sync.py +++ b/synapse/storage/databases/main/sliding_sync.py @@ -14,7 +14,7 @@ import logging -from typing import TYPE_CHECKING, Dict, List, Mapping, Optional, Set, cast +from typing import TYPE_CHECKING, Mapping, Optional, cast import attr @@ -222,7 +222,7 @@ def persist_per_connection_state_txn( # with the updates to `required_state` # Dict from required state json -> required state ID - required_state_to_id: Dict[str, int] = {} + required_state_to_id: dict[str, int] = {} if previous_connection_position is not None: rows = self.db_pool.simple_select_list_txn( txn, @@ -233,8 +233,8 @@ def persist_per_connection_state_txn( for required_state_id, required_state in rows: required_state_to_id[required_state] = required_state_id - room_to_state_ids: Dict[str, int] = {} - unique_required_state: Dict[str, List[str]] = {} + room_to_state_ids: dict[str, int] = {} + unique_required_state: dict[str, list[str]] = {} for room_id, room_state in 
per_connection_state.room_configs.items(): serialized_state = json_encoder.encode( # We store the required state as a sorted list of event type / @@ -418,7 +418,7 @@ def _get_and_clear_connection_positions_txn( ), ) - required_state_map: Dict[int, Dict[str, Set[str]]] = {} + required_state_map: dict[int, dict[str, set[str]]] = {} for row in rows: state = required_state_map[row[0]] = {} for event_type, state_key in db_to_json(row[1]): @@ -437,7 +437,7 @@ def _get_and_clear_connection_positions_txn( ), ) - room_configs: Dict[str, RoomSyncConfig] = {} + room_configs: dict[str, RoomSyncConfig] = {} for ( room_id, timeline_limit, @@ -449,9 +449,9 @@ def _get_and_clear_connection_positions_txn( ) # Now look up the per-room stream data. - rooms: Dict[str, HaveSentRoom[str]] = {} - receipts: Dict[str, HaveSentRoom[str]] = {} - account_data: Dict[str, HaveSentRoom[str]] = {} + rooms: dict[str, HaveSentRoom[str]] = {} + receipts: dict[str, HaveSentRoom[str]] = {} + account_data: dict[str, HaveSentRoom[str]] = {} receipt_rows = self.db_pool.simple_select_list_txn( txn, diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py index cfcc731f86..c2c1b62d7e 100644 --- a/synapse/storage/databases/main/state.py +++ b/synapse/storage/databases/main/state.py @@ -25,15 +25,10 @@ TYPE_CHECKING, Any, Collection, - Dict, - FrozenSet, Iterable, - List, Mapping, MutableMapping, Optional, - Set, - Tuple, TypeVar, Union, cast, @@ -199,7 +194,7 @@ def get_room_version_id_txn(self, txn: LoggingTransaction, room_id: str) -> str: @trace async def get_metadata_for_events( self, event_ids: Collection[str] - ) -> Dict[str, EventMetadata]: + ) -> dict[str, EventMetadata]: """Get some metadata (room_id, type, state_key) for the given events. 
This method is a faster alternative than fetching the full events from @@ -212,7 +207,7 @@ async def get_metadata_for_events( def get_metadata_for_events_txn( txn: LoggingTransaction, batch_ids: Collection[str], - ) -> Dict[str, EventMetadata]: + ) -> dict[str, EventMetadata]: clause, args = make_in_list_sql_clause( self.database_engine, "e.event_id", batch_ids ) @@ -236,7 +231,7 @@ def get_metadata_for_events_txn( for event_id, room_id, event_type, state_key, rejection_reason in txn } - result_map: Dict[str, EventMetadata] = {} + result_map: dict[str, EventMetadata] = {} for batch_ids in batch_iter(event_ids, 1000): result_map.update( await self.db_pool.runInteraction( @@ -329,7 +324,7 @@ async def get_room_type(self, room_id: str) -> Union[Optional[str], Sentinel]: @cachedList(cached_method_name="get_room_type", list_name="room_ids") async def bulk_get_room_type( - self, room_ids: Set[str] + self, room_ids: set[str] ) -> Mapping[str, Union[Optional[str], Sentinel]]: """ Bulk fetch room types for the given rooms (via current state). @@ -408,7 +403,7 @@ async def get_room_encryption(self, room_id: str) -> Optional[str]: @cachedList(cached_method_name="get_room_encryption", list_name="room_ids") async def bulk_get_room_encryption( - self, room_ids: Set[str] + self, room_ids: set[str] ) -> Mapping[str, Union[Optional[str], Sentinel]]: """ Bulk fetch room encryption for the given rooms (via current state). @@ -469,7 +464,7 @@ def txn( # If we haven't updated `room_stats_state` with the room yet, query the state # directly. This should happen only rarely so we don't mind if we do this in a # loop. 
- encryption_event_ids: List[str] = [] + encryption_event_ids: list[str] = [] for room_id in room_ids - results.keys(): state_map = await self.get_partial_filtered_current_state_ids( room_id, @@ -541,7 +536,7 @@ def _get_current_state_ids_txn(txn: LoggingTransaction) -> StateMap[str]: async def check_if_events_in_current_state( self, event_ids: StrCollection - ) -> FrozenSet[str]: + ) -> frozenset[str]: """Checks and returns which of the given events is part of the current state.""" rows = await self.db_pool.simple_select_many_batch( table="current_state_events", @@ -632,7 +627,7 @@ async def _get_state_group_for_events( RuntimeError if the state is unknown at any of the given events """ rows = cast( - List[Tuple[str, int]], + list[tuple[str, int]], await self.db_pool.simple_select_many_batch( table="event_to_state_groups", column="event_id", @@ -651,7 +646,7 @@ async def _get_state_group_for_events( async def get_referenced_state_groups( self, state_groups: Iterable[int] - ) -> Set[int]: + ) -> set[int]: """Check if the state groups are referenced by events. Args: @@ -662,7 +657,7 @@ async def get_referenced_state_groups( """ rows = cast( - List[Tuple[int]], + list[tuple[int]], await self.db_pool.simple_select_many_batch( table="event_to_state_groups", column="state_group", @@ -803,7 +798,7 @@ async def _background_remove_left_rooms( def _background_remove_left_rooms_txn( txn: LoggingTransaction, - ) -> Tuple[bool, Set[str]]: + ) -> tuple[bool, set[str]]: # get a batch of room ids to consider sql = """ SELECT DISTINCT room_id FROM current_state_events @@ -884,7 +879,7 @@ def _background_remove_left_rooms_txn( # server didn't share a room with the remote user and therefore may # have missed any device updates. 
rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn, table="current_state_events", @@ -975,7 +970,7 @@ def __init__( @attr.s(auto_attribs=True, slots=True) -class StateMapWrapper(Dict[StateKey, str]): +class StateMapWrapper(dict[StateKey, str]): """A wrapper around a StateMap[str] to ensure that we only query for items that were not filtered out. diff --git a/synapse/storage/databases/main/state_deltas.py b/synapse/storage/databases/main/state_deltas.py index 303b232d7b..3df5c8b6f4 100644 --- a/synapse/storage/databases/main/state_deltas.py +++ b/synapse/storage/databases/main/state_deltas.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional import attr @@ -79,7 +79,7 @@ def __init__( async def get_partial_current_state_deltas( self, prev_stream_id: int, max_stream_id: int - ) -> Tuple[int, List[StateDelta]]: + ) -> tuple[int, list[StateDelta]]: """Fetch a list of room state changes since the given stream id This may be the partial state if we're lazy joining the room. @@ -114,7 +114,7 @@ async def get_partial_current_state_deltas( def get_current_state_deltas_txn( txn: LoggingTransaction, - ) -> Tuple[int, List[StateDelta]]: + ) -> tuple[int, list[StateDelta]]: # First we calculate the max stream id that will give us less than # N results. # We arbitrarily limit to 100 stream_id entries to ensure we don't @@ -193,7 +193,7 @@ def get_current_state_deltas_for_room_txn( *, from_token: Optional[RoomStreamToken], to_token: Optional[RoomStreamToken], - ) -> List[StateDelta]: + ) -> list[StateDelta]: """ Get the state deltas between two tokens. @@ -239,7 +239,7 @@ async def get_current_state_deltas_for_room( *, from_token: Optional[RoomStreamToken], to_token: Optional[RoomStreamToken], - ) -> List[StateDelta]: + ) -> list[StateDelta]: """ Get the state deltas between two tokens. 
@@ -275,7 +275,7 @@ async def get_current_state_deltas_for_rooms( room_ids: StrCollection, from_token: RoomStreamToken, to_token: RoomStreamToken, - ) -> List[StateDelta]: + ) -> list[StateDelta]: """Get the state deltas between two tokens for the set of rooms.""" room_ids = self._curr_state_delta_stream_cache.get_entities_changed( @@ -287,7 +287,7 @@ async def get_current_state_deltas_for_rooms( def get_current_state_deltas_for_rooms_txn( txn: LoggingTransaction, room_ids: StrCollection, - ) -> List[StateDelta]: + ) -> list[StateDelta]: clause, args = make_in_list_sql_clause( self.database_engine, "room_id", room_ids ) diff --git a/synapse/storage/databases/main/stats.py b/synapse/storage/databases/main/stats.py index 74830b7129..19e525a3cd 100644 --- a/synapse/storage/databases/main/stats.py +++ b/synapse/storage/databases/main/stats.py @@ -26,11 +26,8 @@ TYPE_CHECKING, Any, Counter, - Dict, Iterable, - List, Optional, - Tuple, Union, cast, ) @@ -154,7 +151,7 @@ async def _populate_stats_process_users( last_user_id = progress.get("last_user_id", "") - def _get_next_batch(txn: LoggingTransaction) -> List[str]: + def _get_next_batch(txn: LoggingTransaction) -> list[str]: sql = """ SELECT DISTINCT name FROM users WHERE name > ? @@ -200,7 +197,7 @@ async def _populate_stats_process_rooms( last_room_id = progress.get("last_room_id", "") - def _get_next_batch(txn: LoggingTransaction) -> List[str]: + def _get_next_batch(txn: LoggingTransaction) -> list[str]: sql = """ SELECT DISTINCT room_id FROM current_state_events WHERE room_id > ? @@ -245,7 +242,7 @@ async def get_stats_positions(self) -> int: desc="stats_incremental_position", ) - async def update_room_state(self, room_id: str, fields: Dict[str, Any]) -> None: + async def update_room_state(self, room_id: str, fields: dict[str, Any]) -> None: """Update the state of a room. 
fields can contain the following keys with string values: @@ -320,7 +317,7 @@ async def get_earliest_token_for_stats( ) async def bulk_update_stats_delta( - self, ts: int, updates: Dict[str, Dict[str, Counter[str]]], stream_id: int + self, ts: int, updates: dict[str, dict[str, Counter[str]]], stream_id: int ) -> None: """Bulk update stats tables for a given stream_id and updates the stats incremental position. @@ -363,9 +360,9 @@ async def update_stats_delta( ts: int, stats_type: str, stats_id: str, - fields: Dict[str, int], + fields: dict[str, int], complete_with_stream_id: int, - absolute_field_overrides: Optional[Dict[str, int]] = None, + absolute_field_overrides: Optional[dict[str, int]] = None, ) -> None: """ Updates the statistics for a subject, with a delta (difference/relative @@ -401,9 +398,9 @@ def _update_stats_delta_txn( ts: int, stats_type: str, stats_id: str, - fields: Dict[str, int], + fields: dict[str, int], complete_with_stream_id: int, - absolute_field_overrides: Optional[Dict[str, int]] = None, + absolute_field_overrides: Optional[dict[str, int]] = None, ) -> None: if absolute_field_overrides is None: absolute_field_overrides = {} @@ -450,9 +447,9 @@ def _upsert_with_additive_relatives_txn( self, txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], - absolutes: Dict[str, Any], - additive_relatives: Dict[str, int], + keyvalues: dict[str, Any], + absolutes: dict[str, Any], + additive_relatives: dict[str, int], ) -> None: """Used to update values in the stats tables. 
@@ -510,11 +507,11 @@ async def _calculate_and_set_initial_state_for_room(self, room_id: str) -> None: def _fetch_current_state_stats( txn: LoggingTransaction, - ) -> Tuple[List[str], Dict[str, int], int, List[str], int]: + ) -> tuple[list[str], dict[str, int], int, list[str], int]: pos = self.get_room_max_stream_ordering() # type: ignore[attr-defined] rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn, table="current_state_events", @@ -544,7 +541,7 @@ def _fetch_current_state_stats( """, (room_id,), ) - membership_counts = dict(cast(Iterable[Tuple[str, int]], txn)) + membership_counts = dict(cast(Iterable[tuple[str, int]], txn)) txn.execute( """ @@ -554,7 +551,7 @@ def _fetch_current_state_stats( (room_id,), ) - current_state_events_count = cast(Tuple[int], txn.fetchone())[0] + current_state_events_count = cast(tuple[int], txn.fetchone())[0] users_in_room = self.get_users_in_room_txn(txn, room_id) # type: ignore[attr-defined] @@ -588,7 +585,7 @@ def _fetch_current_state_stats( ) return - room_state: Dict[str, Union[None, bool, str]] = { + room_state: dict[str, Union[None, bool, str]] = { "join_rules": None, "history_visibility": None, "encryption": None, @@ -651,7 +648,7 @@ def _fetch_current_state_stats( async def _calculate_and_set_initial_state_for_user(self, user_id: str) -> None: def _calculate_and_set_initial_state_for_user_txn( txn: LoggingTransaction, - ) -> Tuple[int, int]: + ) -> tuple[int, int]: pos = self._get_max_stream_id_in_current_state_deltas_txn(txn) txn.execute( @@ -662,7 +659,7 @@ def _calculate_and_set_initial_state_for_user_txn( """, (user_id,), ) - count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] return count, pos joined_rooms, pos = await self.db_pool.runInteraction( @@ -688,7 +685,7 @@ async def get_users_media_usage_paginate( order_by: Optional[str] = UserSortOrder.USER_ID.value, direction: Direction = Direction.FORWARDS, search_term: Optional[str] = None, 
- ) -> Tuple[List[Tuple[str, Optional[str], int, int]], int]: + ) -> tuple[list[tuple[str, Optional[str], int, int]], int]: """Function to retrieve a paginated list of users and their uploaded local media (size and number). This will return a json list of users and the total number of users matching the filter criteria. @@ -713,7 +710,7 @@ async def get_users_media_usage_paginate( def get_users_media_usage_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[str, Optional[str], int, int]], int]: + ) -> tuple[list[tuple[str, Optional[str], int, int]], int]: filters = [] args: list = [] @@ -766,7 +763,7 @@ def get_users_media_usage_paginate_txn( sql_base=sql_base, ) txn.execute(sql, args) - count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] sql = """ SELECT @@ -785,7 +782,7 @@ def get_users_media_usage_paginate_txn( args += [limit, start] txn.execute(sql, args) - users = cast(List[Tuple[str, Optional[str], int, int]], txn.fetchall()) + users = cast(list[tuple[str, Optional[str], int, int]], txn.fetchall()) return users, count diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py index 66280f2f9a..e8ea1e5480 100644 --- a/synapse/storage/databases/main/stream.py +++ b/synapse/storage/databases/main/stream.py @@ -47,15 +47,11 @@ AbstractSet, Any, Collection, - Dict, Iterable, - List, Literal, Mapping, Optional, Protocol, - Set, - Tuple, cast, overload, ) @@ -109,7 +105,7 @@ async def __call__( to_key: Optional[RoomStreamToken] = None, direction: Direction = Direction.BACKWARDS, limit: int = 0, - ) -> Tuple[List[EventBase], RoomStreamToken, bool]: ... + ) -> tuple[list[EventBase], RoomStreamToken, bool]: ... 
# Used as return values for pagination APIs @@ -122,8 +118,8 @@ class _EventDictReturn: @attr.s(slots=True, frozen=True, auto_attribs=True) class _EventsAround: - events_before: List[EventBase] - events_after: List[EventBase] + events_before: list[EventBase] + events_after: list[EventBase] start: RoomStreamToken end: RoomStreamToken @@ -156,9 +152,9 @@ class CurrentStateDeltaMembership: def generate_pagination_where_clause( direction: Direction, - column_names: Tuple[str, str], - from_token: Optional[Tuple[Optional[int], int]], - to_token: Optional[Tuple[Optional[int], int]], + column_names: tuple[str, str], + from_token: Optional[tuple[Optional[int], int]], + to_token: Optional[tuple[Optional[int], int]], engine: BaseDatabaseEngine, ) -> str: """Creates an SQL expression to bound the columns by the pagination @@ -224,8 +220,8 @@ def generate_pagination_bounds( direction: Direction, from_token: Optional[RoomStreamToken], to_token: Optional[RoomStreamToken], -) -> Tuple[ - str, Optional[Tuple[Optional[int], int]], Optional[Tuple[Optional[int], int]] +) -> tuple[ + str, Optional[tuple[Optional[int], int]], Optional[tuple[Optional[int], int]] ]: """ Generate a start and end point for this page of events. @@ -261,7 +257,7 @@ def generate_pagination_bounds( # by fetching all events between the min stream token and the maximum # stream token (as returned by `RoomStreamToken.get_max_stream_pos`) and # then filtering the results. 
- from_bound: Optional[Tuple[Optional[int], int]] = None + from_bound: Optional[tuple[Optional[int], int]] = None if from_token: if from_token.topological is not None: from_bound = from_token.as_historical_tuple() @@ -276,7 +272,7 @@ def generate_pagination_bounds( from_token.stream, ) - to_bound: Optional[Tuple[Optional[int], int]] = None + to_bound: Optional[tuple[Optional[int], int]] = None if to_token: if to_token.topological is not None: to_bound = to_token.as_historical_tuple() @@ -320,8 +316,8 @@ def generate_next_token( def _make_generic_sql_bound( bound: str, - column_names: Tuple[str, str], - values: Tuple[Optional[int], int], + column_names: tuple[str, str], + values: tuple[Optional[int], int], engine: BaseDatabaseEngine, ) -> str: """Create an SQL expression that bounds the given column names by the @@ -484,7 +480,7 @@ def _filter_results_by_stream( return True -def filter_to_clause(event_filter: Optional[Filter]) -> Tuple[str, List[str]]: +def filter_to_clause(event_filter: Optional[Filter]) -> tuple[str, list[str]]: # NB: This may create SQL clauses that don't optimise well (and we don't # have indices on all possible clauses). E.g. it may create # "room_id == X AND room_id != X", which postgres doesn't optimise. @@ -669,7 +665,7 @@ async def get_room_events_stream_for_rooms( to_key: Optional[RoomStreamToken] = None, direction: Direction = Direction.BACKWARDS, limit: int = 0, - ) -> Dict[str, Tuple[List[EventBase], RoomStreamToken, bool]]: + ) -> dict[str, tuple[list[EventBase], RoomStreamToken, bool]]: """Get new room events in stream ordering since `from_key`. Args: @@ -730,7 +726,7 @@ async def get_room_events_stream_for_rooms( def get_rooms_that_changed( self, room_ids: Collection[str], from_key: RoomStreamToken - ) -> Set[str]: + ) -> set[str]: """Given a list of rooms and a token, return rooms where there may have been changes. """ @@ -765,7 +761,7 @@ def get_rooms_that_have_updates_since_sliding_sync_table_txn( AND event_stream_ordering > ? 
""" - results: Set[str] = set() + results: set[str] = set() for batch in batch_iter(room_ids, 1000): clause, args = make_in_list_sql_clause( self.database_engine, "room_id", batch @@ -791,7 +787,7 @@ async def paginate_room_events_by_stream_ordering( to_key: Optional[RoomStreamToken] = None, direction: Direction = Direction.BACKWARDS, limit: int = 0, - ) -> Tuple[List[EventBase], RoomStreamToken, bool]: + ) -> tuple[list[EventBase], RoomStreamToken, bool]: """ Paginate events by `stream_ordering` in the room from the `from_key` in the given `direction` to the `to_key` or `limit`. @@ -876,7 +872,7 @@ async def paginate_room_events_by_stream_ordering( engine=self.database_engine, ) - def f(txn: LoggingTransaction) -> Tuple[List[_EventDictReturn], bool]: + def f(txn: LoggingTransaction) -> tuple[list[_EventDictReturn], bool]: sql = f""" SELECT event_id, instance_name, stream_ordering FROM events @@ -940,8 +936,8 @@ async def get_current_state_delta_membership_changes_for_user( user_id: str, from_key: RoomStreamToken, to_key: RoomStreamToken, - excluded_room_ids: Optional[List[str]] = None, - ) -> List[CurrentStateDeltaMembership]: + excluded_room_ids: Optional[list[str]] = None, + ) -> list[CurrentStateDeltaMembership]: """ Fetch membership events (and the previous event that was replaced by that one) for a given user. 
@@ -995,13 +991,13 @@ async def get_current_state_delta_membership_changes_for_user( if not has_changed: return [] - def f(txn: LoggingTransaction) -> List[CurrentStateDeltaMembership]: + def f(txn: LoggingTransaction) -> list[CurrentStateDeltaMembership]: # To handle tokens with a non-empty instance_map we fetch more # results than necessary and then filter down min_from_id = from_key.stream max_to_id = to_key.get_max_stream_pos() - args: List[Any] = [min_from_id, max_to_id, EventTypes.Member, user_id] + args: list[Any] = [min_from_id, max_to_id, EventTypes.Member, user_id] # TODO: It would be good to assert that the `from_token`/`to_token` is >= # the first row in `current_state_delta_stream` for the rooms we're @@ -1044,7 +1040,7 @@ def f(txn: LoggingTransaction) -> List[CurrentStateDeltaMembership]: txn.execute(sql, args) - membership_changes: List[CurrentStateDeltaMembership] = [] + membership_changes: list[CurrentStateDeltaMembership] = [] for ( room_id, event_id, @@ -1136,7 +1132,7 @@ async def get_sliding_sync_membership_changes( from_key: RoomStreamToken, to_key: RoomStreamToken, excluded_room_ids: Optional[AbstractSet[str]] = None, - ) -> Dict[str, RoomsForUserStateReset]: + ) -> dict[str, RoomsForUserStateReset]: """ Fetch membership events that result in a meaningful membership change for a given user. 
@@ -1185,7 +1181,7 @@ async def get_sliding_sync_membership_changes( if excluded_room_ids is not None: room_ids_to_exclude = excluded_room_ids - def f(txn: LoggingTransaction) -> Dict[str, RoomsForUserStateReset]: + def f(txn: LoggingTransaction) -> dict[str, RoomsForUserStateReset]: # To handle tokens with a non-empty instance_map we fetch more # results than necessary and then filter down min_from_id = from_key.stream @@ -1248,7 +1244,7 @@ def f(txn: LoggingTransaction) -> Dict[str, RoomsForUserStateReset]: (user_id, EventTypes.Member, user_id, min_from_id, max_to_id), ) - membership_changes: Dict[str, RoomsForUserStateReset] = {} + membership_changes: dict[str, RoomsForUserStateReset] = {} for ( room_id, membership_event_id, @@ -1332,8 +1328,8 @@ async def get_membership_changes_for_user( user_id: str, from_key: RoomStreamToken, to_key: RoomStreamToken, - excluded_rooms: Optional[List[str]] = None, - ) -> List[EventBase]: + excluded_rooms: Optional[list[str]] = None, + ) -> list[EventBase]: """Fetch membership events for a given user. 
All such events whose stream ordering `s` lies in the range @@ -1351,13 +1347,13 @@ async def get_membership_changes_for_user( if not has_changed: return [] - def f(txn: LoggingTransaction) -> List[_EventDictReturn]: + def f(txn: LoggingTransaction) -> list[_EventDictReturn]: # To handle tokens with a non-empty instance_map we fetch more # results than necessary and then filter down min_from_id = from_key.stream max_to_id = to_key.get_max_stream_pos() - args: List[Any] = [user_id, min_from_id, max_to_id] + args: list[Any] = [user_id, min_from_id, max_to_id] ignore_room_clause = "" if excluded_rooms is not None and len(excluded_rooms) > 0: @@ -1403,7 +1399,7 @@ def f(txn: LoggingTransaction) -> List[_EventDictReturn]: async def get_recent_events_for_room( self, room_id: str, limit: int, end_token: RoomStreamToken - ) -> Tuple[List[EventBase], RoomStreamToken]: + ) -> tuple[list[EventBase], RoomStreamToken]: """Get the most recent events in the room in topological ordering. Args: @@ -1428,7 +1424,7 @@ async def get_recent_events_for_room( async def get_recent_event_ids_for_room( self, room_id: str, limit: int, end_token: RoomStreamToken - ) -> Tuple[List[_EventDictReturn], RoomStreamToken]: + ) -> tuple[list[_EventDictReturn], RoomStreamToken]: """Get the most recent events in the room in topological ordering. 
Args: @@ -1459,7 +1455,7 @@ async def get_recent_event_ids_for_room( async def get_room_event_before_stream_ordering( self, room_id: str, stream_ordering: int - ) -> Optional[Tuple[int, int, str]]: + ) -> Optional[tuple[int, int, str]]: """Gets details of the first event in a room at or before a stream ordering Args: @@ -1470,7 +1466,7 @@ async def get_room_event_before_stream_ordering( A tuple of (stream ordering, topological ordering, event_id) """ - def _f(txn: LoggingTransaction) -> Optional[Tuple[int, int, str]]: + def _f(txn: LoggingTransaction) -> Optional[tuple[int, int, str]]: sql = """ SELECT stream_ordering, topological_ordering, event_id FROM events @@ -1483,7 +1479,7 @@ def _f(txn: LoggingTransaction) -> Optional[Tuple[int, int, str]]: LIMIT 1 """ txn.execute(sql, (room_id, stream_ordering)) - return cast(Optional[Tuple[int, int, str]], txn.fetchone()) + return cast(Optional[tuple[int, int, str]], txn.fetchone()) return await self.db_pool.runInteraction( "get_room_event_before_stream_ordering", _f @@ -1519,7 +1515,7 @@ async def get_last_event_pos_in_room( self, room_id: str, event_types: Optional[StrCollection] = None, - ) -> Optional[Tuple[str, PersistedEventPosition]]: + ) -> Optional[tuple[str, PersistedEventPosition]]: """ Returns the ID and event position of the last event in a room. 
@@ -1536,9 +1532,9 @@ async def get_last_event_pos_in_room( def _get_last_event_pos_in_room_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[str, PersistedEventPosition]]: + ) -> Optional[tuple[str, PersistedEventPosition]]: event_type_clause = "" - event_type_args: List[str] = [] + event_type_args: list[str] = [] if event_types is not None and len(event_types) > 0: event_type_clause, event_type_args = make_in_list_sql_clause( txn.database_engine, "type", event_types @@ -1562,7 +1558,7 @@ def _get_last_event_pos_in_room_txn( [room_id] + event_type_args, ) - row = cast(Optional[Tuple[str, int, str]], txn.fetchone()) + row = cast(Optional[tuple[str, int, str]], txn.fetchone()) if row is not None: event_id, stream_ordering, instance_name = row @@ -1585,7 +1581,7 @@ async def get_last_event_pos_in_room_before_stream_ordering( room_id: str, end_token: RoomStreamToken, event_types: Optional[StrCollection] = None, - ) -> Optional[Tuple[str, PersistedEventPosition]]: + ) -> Optional[tuple[str, PersistedEventPosition]]: """ Returns the ID and event position of the last event in a room at or before a stream ordering. @@ -1602,7 +1598,7 @@ async def get_last_event_pos_in_room_before_stream_ordering( def get_last_event_pos_in_room_before_stream_ordering_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[str, PersistedEventPosition]]: + ) -> Optional[tuple[str, PersistedEventPosition]]: # We're looking for the closest event at or before the token. 
We need to # handle the fact that the stream token can be a vector clock (with an # `instance_map`) and events can be persisted on different instances @@ -1616,7 +1612,7 @@ def get_last_event_pos_in_room_before_stream_ordering_txn( max_stream = end_token.get_max_stream_pos() event_type_clause = "" - event_type_args: List[str] = [] + event_type_args: list[str] = [] if event_types is not None and len(event_types) > 0: event_type_clause, event_type_args = make_in_list_sql_clause( txn.database_engine, "type", event_types @@ -1692,7 +1688,7 @@ async def bulk_get_last_event_pos_in_room_before_stream_ordering( self, room_ids: StrCollection, end_token: RoomStreamToken, - ) -> Dict[str, int]: + ) -> dict[str, int]: """Bulk fetch the stream position of the latest events in the given rooms """ @@ -1705,8 +1701,8 @@ async def bulk_get_last_event_pos_in_room_before_stream_ordering( # Check that the stream position for the rooms are from before the # minimum position of the token. If not then we need to fetch more # rows. - results: Dict[str, int] = {} - recheck_rooms: Set[str] = set() + results: dict[str, int] = {} + recheck_rooms: set[str] = set() min_token = end_token.stream for room_id, stream in uncapped_results.items(): if stream is None: @@ -1747,11 +1743,11 @@ async def _bulk_get_max_event_pos( now_token = self.get_room_max_token() max_pos = now_token.get_max_stream_pos() - results: Dict[str, int] = {} + results: dict[str, int] = {} # First, we check for the rooms in the stream change cache to see if we # can just use the latest position from it. 
- missing_room_ids: Set[str] = set() + missing_room_ids: set[str] = set() for room_id in room_ids: stream_pos = self._events_stream_cache.get_max_pos_of_last_change(room_id) if stream_pos is not None: @@ -1770,7 +1766,7 @@ async def _bulk_get_max_event_pos( def bulk_get_max_event_pos_fallback_txn( txn: LoggingTransaction, batched_room_ids: StrCollection - ) -> Dict[str, int]: + ) -> dict[str, int]: clause, args = make_in_list_sql_clause( self.database_engine, "room_id", batched_room_ids ) @@ -1795,7 +1791,7 @@ def bulk_get_max_event_pos_fallback_txn( # the joins and sub-queries. def bulk_get_max_event_pos_from_sliding_sync_tables_txn( txn: LoggingTransaction, batched_room_ids: StrCollection - ) -> Dict[str, int]: + ) -> dict[str, int]: clause, args = make_in_list_sql_clause( self.database_engine, "room_id", batched_room_ids ) @@ -1808,7 +1804,7 @@ def bulk_get_max_event_pos_from_sliding_sync_tables_txn( txn.execute(sql, args) return {row[0]: row[1] for row in txn} - recheck_rooms: Set[str] = set() + recheck_rooms: set[str] = set() for batched in batch_iter(room_ids, 1000): if await self.have_finished_sliding_sync_background_jobs(): batch_results = await self.db_pool.runInteraction( @@ -2077,7 +2073,7 @@ async def get_all_new_event_ids_stream( from_id: int, current_id: int, limit: int, - ) -> Tuple[int, Dict[str, Optional[int]]]: + ) -> tuple[int, dict[str, Optional[int]]]: """Get all new events Returns all event ids with from_id < stream_ordering <= current_id. 
@@ -2098,7 +2094,7 @@ async def get_all_new_event_ids_stream( def get_all_new_event_ids_stream_txn( txn: LoggingTransaction, - ) -> Tuple[int, Dict[str, Optional[int]]]: + ) -> tuple[int, dict[str, Optional[int]]]: sql = ( "SELECT e.stream_ordering, e.event_id, e.received_ts" " FROM events AS e" @@ -2115,7 +2111,7 @@ def get_all_new_event_ids_stream_txn( if len(rows) == limit: upper_bound = rows[-1][0] - event_to_received_ts: Dict[str, Optional[int]] = { + event_to_received_ts: dict[str, Optional[int]] = { row[1]: row[2] for row in rows } return upper_bound, event_to_received_ts @@ -2194,7 +2190,7 @@ def _reset_federation_positions_txn(self, txn: LoggingTransaction) -> None: """ txn.execute(sql) min_positions = dict( - cast(Iterable[Tuple[str, int]], txn) + cast(Iterable[tuple[str, int]], txn) ) # Map from type -> min position # Ensure we do actually have some values here @@ -2229,7 +2225,7 @@ def _paginate_room_events_by_topological_ordering_txn( direction: Direction = Direction.BACKWARDS, limit: int = 0, event_filter: Optional[Filter] = None, - ) -> Tuple[List[_EventDictReturn], RoomStreamToken, bool]: + ) -> tuple[list[_EventDictReturn], RoomStreamToken, bool]: """Returns list of events before or after a given token. 
Args: @@ -2269,7 +2265,7 @@ def _paginate_room_events_by_topological_ordering_txn( # Token selection matches what we do below if there are no rows return [], to_token if to_token else from_token, False - args: List[Any] = [room_id] + args: list[Any] = [room_id] order, from_bound, to_bound = generate_pagination_bounds( direction, from_token, to_token @@ -2403,7 +2399,7 @@ async def paginate_room_events_by_topological_ordering( direction: Direction = Direction.BACKWARDS, limit: int = 0, event_filter: Optional[Filter] = None, - ) -> Tuple[List[EventBase], RoomStreamToken, bool]: + ) -> tuple[list[EventBase], RoomStreamToken, bool]: """ Paginate events by `topological_ordering` (tie-break with `stream_ordering`) in the room from the `from_key` in the given `direction` to the `to_key` or diff --git a/synapse/storage/databases/main/tags.py b/synapse/storage/databases/main/tags.py index 94cf7f4052..0768dd78c0 100644 --- a/synapse/storage/databases/main/tags.py +++ b/synapse/storage/databases/main/tags.py @@ -21,7 +21,7 @@ # import logging -from typing import Any, Dict, Iterable, List, Mapping, Tuple, cast +from typing import Any, Iterable, Mapping, cast from synapse.api.constants import AccountDataTypes from synapse.replication.tcp.streams import AccountDataStream @@ -52,13 +52,13 @@ async def get_tags_for_user( """ rows = cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], await self.db_pool.simple_select_list( "room_tags", {"user_id": user_id}, ["room_id", "tag", "content"] ), ) - tags_by_room: Dict[str, Dict[str, JsonDict]] = {} + tags_by_room: dict[str, dict[str, JsonDict]] = {} for room_id, tag, content in rows: room_tags = tags_by_room.setdefault(room_id, {}) room_tags[tag] = db_to_json(content) @@ -66,7 +66,7 @@ async def get_tags_for_user( async def get_all_updated_tags( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, str, str]], int, bool]: + ) -> tuple[list[tuple[int, str, str]], int, bool]: """Get 
updates for tags replication stream. Args: @@ -93,7 +93,7 @@ async def get_all_updated_tags( def get_all_updated_tags_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str]]: + ) -> list[tuple[int, str, str]]: sql = ( "SELECT stream_id, user_id, room_id" " FROM room_tags_revisions as r" @@ -102,7 +102,7 @@ def get_all_updated_tags_txn( ) txn.execute(sql, (last_id, current_id, limit)) # mypy doesn't understand what the query is selecting. - return cast(List[Tuple[int, str, str]], txn.fetchall()) + return cast(list[tuple[int, str, str]], txn.fetchall()) tag_ids = await self.db_pool.runInteraction( "get_all_updated_tags", get_all_updated_tags_txn @@ -131,7 +131,7 @@ async def get_updated_tags( rooms that changed since the stream_id token. """ - def get_updated_tags_txn(txn: LoggingTransaction) -> List[str]: + def get_updated_tags_txn(txn: LoggingTransaction) -> list[str]: sql = ( "SELECT room_id from room_tags_revisions" " WHERE user_id = ? AND stream_id > ?" @@ -218,7 +218,7 @@ async def get_tags_for_room( A mapping of tags to tag content. 
""" rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="room_tags", keyvalues={"user_id": user_id, "room_id": room_id}, @@ -338,8 +338,8 @@ def process_replication_rows( if stream_name == AccountDataStream.NAME: # Cast is safe because the `AccountDataStream` should only be giving us # `AccountDataStreamRow` - account_data_stream_rows: List[AccountDataStream.AccountDataStreamRow] = ( - cast(List[AccountDataStream.AccountDataStreamRow], rows) + account_data_stream_rows: list[AccountDataStream.AccountDataStreamRow] = ( + cast(list[AccountDataStream.AccountDataStreamRow], rows) ) for row in account_data_stream_rows: diff --git a/synapse/storage/databases/main/task_scheduler.py b/synapse/storage/databases/main/task_scheduler.py index 2d4804fef6..7410507255 100644 --- a/synapse/storage/databases/main/task_scheduler.py +++ b/synapse/storage/databases/main/task_scheduler.py @@ -19,7 +19,7 @@ # # -from typing import TYPE_CHECKING, Any, List, Optional, Tuple, cast +from typing import TYPE_CHECKING, Any, Optional, cast from synapse.storage._base import SQLBaseStore, db_to_json from synapse.storage.database import ( @@ -34,7 +34,7 @@ if TYPE_CHECKING: from synapse.server import HomeServer -ScheduledTaskRow = Tuple[str, str, str, int, str, str, str, str] +ScheduledTaskRow = tuple[str, str, str, int, str, str, str, str] class TaskSchedulerWorkerStore(SQLBaseStore): @@ -63,12 +63,12 @@ def _convert_row_to_task(row: ScheduledTaskRow) -> ScheduledTask: async def get_scheduled_tasks( self, *, - actions: Optional[List[str]] = None, + actions: Optional[list[str]] = None, resource_id: Optional[str] = None, - statuses: Optional[List[TaskStatus]] = None, + statuses: Optional[list[TaskStatus]] = None, max_timestamp: Optional[int] = None, limit: Optional[int] = None, - ) -> List[ScheduledTask]: + ) -> list[ScheduledTask]: """Get a list of scheduled tasks from the DB. 
Args: @@ -82,9 +82,9 @@ async def get_scheduled_tasks( Returns: a list of `ScheduledTask`, ordered by increasing timestamps """ - def get_scheduled_tasks_txn(txn: LoggingTransaction) -> List[ScheduledTaskRow]: - clauses: List[str] = [] - args: List[Any] = [] + def get_scheduled_tasks_txn(txn: LoggingTransaction) -> list[ScheduledTaskRow]: + clauses: list[str] = [] + args: list[Any] = [] if resource_id: clauses.append("resource_id = ?") args.append(resource_id) @@ -115,7 +115,7 @@ def get_scheduled_tasks_txn(txn: LoggingTransaction) -> List[ScheduledTaskRow]: args.append(limit) txn.execute(sql, args) - return cast(List[ScheduledTaskRow], txn.fetchall()) + return cast(list[ScheduledTaskRow], txn.fetchall()) rows = await self.db_pool.runInteraction( "get_scheduled_tasks", get_scheduled_tasks_txn diff --git a/synapse/storage/databases/main/thread_subscriptions.py b/synapse/storage/databases/main/thread_subscriptions.py index 50084887a4..1c02ab1611 100644 --- a/synapse/storage/databases/main/thread_subscriptions.py +++ b/synapse/storage/databases/main/thread_subscriptions.py @@ -14,11 +14,8 @@ from typing import ( TYPE_CHECKING, Any, - FrozenSet, Iterable, - List, Optional, - Tuple, Union, cast, ) @@ -479,7 +476,7 @@ async def get_subscription_for_thread( @cached(max_entries=100) async def get_subscribers_to_thread( self, room_id: str, thread_root_event_id: str - ) -> FrozenSet[str]: + ) -> frozenset[str]: """ Returns: the set of user_ids for local users who are subscribed to the given thread. @@ -510,7 +507,7 @@ def get_thread_subscriptions_stream_id_generator(self) -> MultiWriterIdGenerator async def get_updated_thread_subscriptions( self, *, from_id: int, to_id: int, limit: int - ) -> List[Tuple[int, str, str, str]]: + ) -> list[tuple[int, str, str, str]]: """Get updates to thread subscriptions between two stream IDs. 
Args: @@ -524,7 +521,7 @@ async def get_updated_thread_subscriptions( def get_updated_thread_subscriptions_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str, str]]: + ) -> list[tuple[int, str, str, str]]: sql = """ SELECT stream_id, user_id, room_id, event_id FROM thread_subscriptions @@ -534,7 +531,7 @@ def get_updated_thread_subscriptions_txn( """ txn.execute(sql, (from_id, to_id, limit)) - return cast(List[Tuple[int, str, str, str]], txn.fetchall()) + return cast(list[tuple[int, str, str, str]], txn.fetchall()) return await self.db_pool.runInteraction( "get_updated_thread_subscriptions", @@ -543,7 +540,7 @@ def get_updated_thread_subscriptions_txn( async def get_latest_updated_thread_subscriptions_for_user( self, user_id: str, *, from_id: int, to_id: int, limit: int - ) -> List[Tuple[int, str, str, bool, Optional[bool]]]: + ) -> list[tuple[int, str, str, bool, Optional[bool]]]: """Get the latest updates to thread subscriptions for a specific user. Args: @@ -561,7 +558,7 @@ async def get_latest_updated_thread_subscriptions_for_user( def get_updated_thread_subscriptions_for_user_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str, bool, Optional[bool]]]: + ) -> list[tuple[int, str, str, bool, Optional[bool]]]: sql = """ WITH the_updates AS ( SELECT stream_id, room_id, event_id, subscribed, automatic diff --git a/synapse/storage/databases/main/transactions.py b/synapse/storage/databases/main/transactions.py index 41c9483927..e0422f7459 100644 --- a/synapse/storage/databases/main/transactions.py +++ b/synapse/storage/databases/main/transactions.py @@ -21,7 +21,7 @@ import logging from enum import Enum -from typing import TYPE_CHECKING, Iterable, List, Mapping, Optional, Tuple, cast +from typing import TYPE_CHECKING, Iterable, Mapping, Optional, cast import attr from canonicaljson import encode_canonical_json @@ -97,7 +97,7 @@ def _cleanup_transactions_txn(txn: LoggingTransaction) -> None: async def get_received_txn_response( self, 
transaction_id: str, origin: str - ) -> Optional[Tuple[int, JsonDict]]: + ) -> Optional[tuple[int, JsonDict]]: """For an incoming transaction from a given origin, check if we have already responded to it. If so, return the response code and response body (as a dict). @@ -120,7 +120,7 @@ async def get_received_txn_response( def _get_received_txn_response( self, txn: LoggingTransaction, transaction_id: str, origin: str - ) -> Optional[Tuple[int, JsonDict]]: + ) -> Optional[tuple[int, JsonDict]]: result = self.db_pool.simple_select_one_txn( txn, table="received_transactions", @@ -215,7 +215,7 @@ async def get_destination_retry_timings_batch( self, destinations: StrCollection ) -> Mapping[str, Optional[DestinationRetryTimings]]: rows = cast( - List[Tuple[str, Optional[int], Optional[int], Optional[int]]], + list[tuple[str, Optional[int], Optional[int], Optional[int]]], await self.db_pool.simple_select_many_batch( table="destinations", iterable=destinations, @@ -377,7 +377,7 @@ async def get_catch_up_room_event_ids( self, destination: str, last_successful_stream_ordering: int, - ) -> List[str]: + ) -> list[str]: """ Returns at most 50 event IDs and their corresponding stream_orderings that correspond to the oldest events that have not yet been sent to @@ -403,7 +403,7 @@ def _get_catch_up_room_event_ids_txn( txn: LoggingTransaction, destination: str, last_successful_stream_ordering: int, - ) -> List[str]: + ) -> list[str]: q = """ SELECT event_id FROM destination_rooms JOIN events USING (stream_ordering) @@ -421,7 +421,7 @@ def _get_catch_up_room_event_ids_txn( async def get_catch_up_outstanding_destinations( self, after_destination: Optional[str] - ) -> List[str]: + ) -> list[str]: """ Get a list of destinations we should retry transaction sending to. 
@@ -450,7 +450,7 @@ async def get_catch_up_outstanding_destinations( @staticmethod def _get_catch_up_outstanding_destinations_txn( txn: LoggingTransaction, now_time_ms: int, after_destination: Optional[str] - ) -> List[str]: + ) -> list[str]: # We're looking for destinations which satisfy either of the following # conditions: # @@ -540,8 +540,8 @@ async def get_destinations_paginate( destination: Optional[str] = None, order_by: str = DestinationSortOrder.DESTINATION.value, direction: Direction = Direction.FORWARDS, - ) -> Tuple[ - List[Tuple[str, Optional[int], Optional[int], Optional[int], Optional[int]]], + ) -> tuple[ + list[tuple[str, Optional[int], Optional[int], Optional[int], Optional[int]]], int, ]: """Function to retrieve a paginated list of destinations. @@ -566,9 +566,9 @@ async def get_destinations_paginate( def get_destinations_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[ - List[ - Tuple[str, Optional[int], Optional[int], Optional[int], Optional[int]] + ) -> tuple[ + list[ + tuple[str, Optional[int], Optional[int], Optional[int], Optional[int]] ], int, ]: @@ -579,7 +579,7 @@ def get_destinations_paginate_txn( else: order = "ASC" - args: List[object] = [] + args: list[object] = [] where_statement = "" if destination: args.extend(["%" + destination.lower() + "%"]) @@ -588,7 +588,7 @@ def get_destinations_paginate_txn( sql_base = f"FROM destinations {where_statement} " sql = f"SELECT COUNT(*) as total_destinations {sql_base}" txn.execute(sql, args) - count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] sql = f""" SELECT destination, retry_last_ts, retry_interval, failure_ts, @@ -599,8 +599,8 @@ def get_destinations_paginate_txn( """ txn.execute(sql, args + [limit, start]) destinations = cast( - List[ - Tuple[ + list[ + tuple[ str, Optional[int], Optional[int], Optional[int], Optional[int] ] ], @@ -618,7 +618,7 @@ async def get_destination_rooms_paginate( start: int, limit: int, direction: Direction = 
Direction.FORWARDS, - ) -> Tuple[List[Tuple[str, int]], int]: + ) -> tuple[list[tuple[str, int]], int]: """Function to retrieve a paginated list of destination's rooms. This will return a json list of rooms and the total number of rooms. @@ -636,7 +636,7 @@ async def get_destination_rooms_paginate( def get_destination_rooms_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[str, int]], int]: + ) -> tuple[list[tuple[str, int]], int]: if direction == Direction.BACKWARDS: order = "DESC" else: @@ -648,10 +648,10 @@ def get_destination_rooms_paginate_txn( WHERE destination = ? """ txn.execute(sql, [destination]) - count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] rooms = cast( - List[Tuple[str, int]], + list[tuple[str, int]], self.db_pool.simple_select_list_paginate_txn( txn=txn, table="destination_rooms", diff --git a/synapse/storage/databases/main/ui_auth.py b/synapse/storage/databases/main/ui_auth.py index 569925e39f..69a4431f29 100644 --- a/synapse/storage/databases/main/ui_auth.py +++ b/synapse/storage/databases/main/ui_auth.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict, List, Optional, Tuple, Union, cast +from typing import Any, Optional, Union, cast import attr @@ -170,7 +170,7 @@ async def mark_ui_auth_stage_complete( async def get_completed_ui_auth_stages( self, session_id: str - ) -> Dict[str, Union[str, bool, JsonDict]]: + ) -> dict[str, Union[str, bool, JsonDict]]: """ Retrieve the completed stages of a UI authentication session. 
@@ -182,7 +182,7 @@ async def get_completed_ui_auth_stages( """ results = {} rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="ui_auth_sessions_credentials", keyvalues={"session_id": session_id}, @@ -302,14 +302,14 @@ async def add_user_agent_ip_to_ui_auth_session( async def get_user_agents_ips_to_ui_auth_session( self, session_id: str, - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: """Get the given user agents / IPs used during the ui auth process Returns: List of user_agent/ip pairs """ return cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="ui_auth_sessions_ips", keyvalues={"session_id": session_id}, @@ -353,7 +353,7 @@ def _delete_old_ui_auth_sessions_txn( # If a registration token was used, decrement the pending counter # before deleting the session. rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn, table="ui_auth_sessions_credentials", @@ -365,7 +365,7 @@ def _delete_old_ui_auth_sessions_txn( ) # Get the tokens used and how much pending needs to be decremented by. - token_counts: Dict[str, int] = {} + token_counts: dict[str, int] = {} for r in rows: # If registration was successfully completed, the result of the # registration token stage for that session will be True. @@ -378,7 +378,7 @@ def _delete_old_ui_auth_sessions_txn( # Update the `pending` counters. 
if len(token_counts) > 0: token_rows = cast( - List[Tuple[str, int]], + list[tuple[str, int]], self.db_pool.simple_select_many_txn( txn, table="registration_tokens", diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py index 9deb9ab73c..895d7e6148 100644 --- a/synapse/storage/databases/main/user_directory.py +++ b/synapse/storage/databases/main/user_directory.py @@ -26,11 +26,8 @@ TYPE_CHECKING, Collection, Iterable, - List, Optional, Sequence, - Set, - Tuple, TypedDict, cast, ) @@ -209,7 +206,7 @@ async def _populate_user_directory_process_rooms( def _get_next_batch( txn: LoggingTransaction, - ) -> Optional[Sequence[Tuple[str, int]]]: + ) -> Optional[Sequence[tuple[str, int]]]: # Only fetch 250 rooms, so we don't fetch too many at once, even # if those 250 rooms have less than batch_size state events. sql = """ @@ -218,7 +215,7 @@ def _get_next_batch( LIMIT 250 """ % (TEMP_TABLE + "_rooms",) txn.execute(sql) - rooms_to_work_on = cast(List[Tuple[str, int]], txn.fetchall()) + rooms_to_work_on = cast(list[tuple[str, int]], txn.fetchall()) if not rooms_to_work_on: return None @@ -369,14 +366,14 @@ def _populate_user_directory_process_users_txn( RETURNING user_id """ txn.execute(sql, (batch_size,)) - user_result = cast(List[Tuple[str]], txn.fetchall()) + user_result = cast(list[tuple[str]], txn.fetchall()) else: sql = "SELECT user_id FROM %s ORDER BY user_id LIMIT %s" % ( TEMP_TABLE + "_users", str(batch_size), ) txn.execute(sql) - user_result = cast(List[Tuple[str]], txn.fetchall()) + user_result = cast(list[tuple[str]], txn.fetchall()) if not user_result: return None @@ -408,7 +405,7 @@ def _populate_user_directory_process_users_txn( # Next fetch their profiles. Note that not all users have profiles. 
profile_rows = cast( - List[Tuple[str, Optional[str], Optional[str]]], + list[tuple[str, Optional[str], Optional[str]]], self.db_pool.simple_select_many_txn( txn, table="profiles", @@ -514,7 +511,7 @@ def _filter_local_users_for_dir_txn( ] rows = cast( - List[Tuple[str, Optional[str]]], + list[tuple[str, Optional[str]]], self.db_pool.simple_select_many_txn( txn, table="users", @@ -608,7 +605,7 @@ async def clear_remote_user_profile_in_user_dir_stale(self, user_id: str) -> Non async def get_remote_servers_with_profiles_to_refresh( self, now_ts: int, limit: int - ) -> List[str]: + ) -> list[str]: """ Get a list of up to `limit` server names which have users whose locally-cached profiles we believe to be stale @@ -617,7 +614,7 @@ async def get_remote_servers_with_profiles_to_refresh( def _get_remote_servers_with_refreshable_profiles_txn( txn: LoggingTransaction, - ) -> List[str]: + ) -> list[str]: sql = """ SELECT user_server_name FROM user_directory_stale_remote_users @@ -636,7 +633,7 @@ def _get_remote_servers_with_refreshable_profiles_txn( async def get_remote_users_to_refresh_on_server( self, server_name: str, now_ts: int, limit: int - ) -> List[Tuple[str, int, int]]: + ) -> list[tuple[str, int, int]]: """ Get a list of up to `limit` user IDs from the server `server_name` whose locally-cached profiles we believe to be stale @@ -651,7 +648,7 @@ async def get_remote_users_to_refresh_on_server( def _get_remote_users_to_refresh_on_server_txn( txn: LoggingTransaction, - ) -> List[Tuple[str, int, int]]: + ) -> list[tuple[str, int, int]]: sql = """ SELECT user_id, retry_counter, next_try_at_ts FROM user_directory_stale_remote_users @@ -660,7 +657,7 @@ def _get_remote_users_to_refresh_on_server_txn( LIMIT ? 
""" txn.execute(sql, (server_name, now_ts, limit)) - return cast(List[Tuple[str, int, int]], txn.fetchall()) + return cast(list[tuple[str, int, int]], txn.fetchall()) return await self.db_pool.runInteraction( "get_remote_users_to_refresh_on_server", @@ -771,7 +768,7 @@ def _update_profiles_in_user_dir_txn( raise Exception("Unrecognized database engine") async def add_users_who_share_private_room( - self, room_id: str, user_id_tuples: Iterable[Tuple[str, str]] + self, room_id: str, user_id_tuples: Iterable[tuple[str, str]] ) -> None: """Insert entries into the users_who_share_private_rooms table. The first user should be a local user. @@ -834,7 +831,7 @@ def _delete_all_from_user_dir_txn(txn: LoggingTransaction) -> None: async def _get_user_in_directory( self, user_id: str - ) -> Optional[Tuple[Optional[str], Optional[str]]]: + ) -> Optional[tuple[Optional[str], Optional[str]]]: """ Fetch the user information in the user directory. @@ -843,7 +840,7 @@ async def _get_user_in_directory( avatar URL (both of which may be None). 
""" return cast( - Optional[Tuple[Optional[str], Optional[str]]], + Optional[tuple[Optional[str], Optional[str]]], await self.db_pool.simple_select_one( table="user_directory", keyvalues={"user_id": user_id}, @@ -864,7 +861,7 @@ async def update_user_directory_stream_pos(self, stream_id: Optional[int]) -> No class SearchResult(TypedDict): limited: bool - results: List[UserProfile] + results: list[UserProfile] class UserDirectoryStore(UserDirectoryBackgroundUpdateStore): @@ -911,7 +908,7 @@ def _remove_from_user_dir_txn(txn: LoggingTransaction) -> None: "remove_from_user_dir", _remove_from_user_dir_txn ) - async def get_users_in_dir_due_to_room(self, room_id: str) -> Set[str]: + async def get_users_in_dir_due_to_room(self, room_id: str) -> set[str]: """Get all user_ids that are in the room directory because they're in the given room_id """ @@ -965,7 +962,7 @@ def _remove_user_who_share_room_txn(txn: LoggingTransaction) -> None: "remove_user_who_share_room", _remove_user_who_share_room_txn ) - async def get_user_dir_rooms_user_is_in(self, user_id: str) -> List[str]: + async def get_user_dir_rooms_user_is_in(self, user_id: str) -> list[str]: """ Returns the rooms that a user is in. @@ -1031,7 +1028,7 @@ async def search_user_dir( } """ - join_args: Tuple[str, ...] = (user_id,) + join_args: tuple[str, ...] = (user_id,) if self.hs.config.userdirectory.user_directory_search_all_users: where_clause = "user_id != ?" @@ -1060,7 +1057,7 @@ async def search_user_dir( # We allow manipulating the ranking algorithm by injecting statements # based on config options. additional_ordering_statements = [] - ordering_arguments: Tuple[str, ...] = () + ordering_arguments: tuple[str, ...] 
= () if isinstance(self.database_engine, PostgresEngine): full_query, exact_query, prefix_query = _parse_query_postgres(search_term) @@ -1166,7 +1163,7 @@ async def search_user_dir( raise Exception("Unrecognized database engine") results = cast( - List[Tuple[str, Optional[str], Optional[str]]], + list[tuple[str, Optional[str], Optional[str]]], await self.db_pool.execute("search_user_dir", sql, *args), ) @@ -1232,7 +1229,7 @@ def _parse_query_sqlite(search_term: str) -> str: return " & ".join("(%s* OR %s)" % (result, result) for result in results) -def _parse_query_postgres(search_term: str) -> Tuple[str, str, str]: +def _parse_query_postgres(search_term: str) -> tuple[str, str, str]: """Takes a plain unicode string from the user and converts it into a form that can be passed to the database. We use this so that we can add prefix matching, which isn't something @@ -1263,7 +1260,7 @@ def _parse_query_postgres(search_term: str) -> Tuple[str, str, str]: return both, exact, prefix -def _parse_words(search_term: str) -> List[str]: +def _parse_words(search_term: str) -> list[str]: """Split the provided search string into a list of its words using ICU. Args: @@ -1275,7 +1272,7 @@ def _parse_words(search_term: str) -> List[str]: return _parse_words_with_icu(search_term) -def _parse_words_with_icu(search_term: str) -> List[str]: +def _parse_words_with_icu(search_term: str) -> list[str]: """Break down the provided search string into its individual words using ICU (International Components for Unicode). @@ -1298,7 +1295,7 @@ def _parse_words_with_icu(search_term: str) -> List[str]: # # In particular, user-71 in postgres gets tokenised to "user, -71", and this # will not match a query for "user, 71". 
- new_results: List[str] = [] + new_results: list[str] = [] i = 0 while i < len(results): curr = results[i] diff --git a/synapse/storage/databases/main/user_erasure_store.py b/synapse/storage/databases/main/user_erasure_store.py index cceed484c3..f89f11e149 100644 --- a/synapse/storage/databases/main/user_erasure_store.py +++ b/synapse/storage/databases/main/user_erasure_store.py @@ -18,7 +18,7 @@ # # -from typing import Iterable, List, Mapping, Tuple, cast +from typing import Iterable, Mapping, cast from synapse.storage.database import LoggingTransaction from synapse.storage.databases.main import CacheInvalidationWorkerStore @@ -57,7 +57,7 @@ async def are_users_erased(self, user_ids: Iterable[str]) -> Mapping[str, bool]: for each user, whether the user has requested erasure. """ rows = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_many_batch( table="erased_users", column="user_id", diff --git a/synapse/storage/databases/state/bg_updates.py b/synapse/storage/databases/state/bg_updates.py index ac38b2ab19..a0d8667b07 100644 --- a/synapse/storage/databases/state/bg_updates.py +++ b/synapse/storage/databases/state/bg_updates.py @@ -22,11 +22,8 @@ import logging from typing import ( TYPE_CHECKING, - Dict, - List, Mapping, Optional, - Tuple, Union, ) @@ -106,7 +103,7 @@ def _count_state_group_hops_txn( def _get_state_groups_from_groups_txn( self, txn: LoggingTransaction, - groups: List[int], + groups: list[int], state_filter: Optional[StateFilter] = None, ) -> Mapping[int, StateMap[str]]: """ @@ -123,7 +120,7 @@ def _get_state_groups_from_groups_txn( if state_filter is None: state_filter = StateFilter.all() - results: Dict[int, MutableStateMap[str]] = {group: {} for group in groups} + results: dict[int, MutableStateMap[str]] = {group: {} for group in groups} if isinstance(self.database_engine, PostgresEngine): # Temporarily disable sequential scans in this transaction. 
This is @@ -147,7 +144,7 @@ def _get_state_groups_from_groups_txn( %s """ - overall_select_query_args: List[Union[int, str]] = [] + overall_select_query_args: list[Union[int, str]] = [] # This is an optimization to create a select clause per-condition. This # makes the query planner a lot smarter on what rows should pull out in the @@ -156,7 +153,7 @@ def _get_state_groups_from_groups_txn( use_condition_optimization = ( not state_filter.include_others and not state_filter.is_full() ) - state_filter_condition_combos: List[Tuple[str, Optional[str]]] = [] + state_filter_condition_combos: list[tuple[str, Optional[str]]] = [] # We don't need to caclculate this list if we're not using the condition # optimization if use_condition_optimization: @@ -173,7 +170,7 @@ def _get_state_groups_from_groups_txn( # `filter_events_for_client` which just uses 2 conditions # (`EventTypes.RoomHistoryVisibility` and `EventTypes.Member`). if use_condition_optimization and len(state_filter_condition_combos) < 10: - select_clause_list: List[str] = [] + select_clause_list: list[str] = [] for etype, skey in state_filter_condition_combos: if skey is None: where_clause = "(type = ?)" @@ -216,7 +213,7 @@ def _get_state_groups_from_groups_txn( """ for group in groups: - args: List[Union[int, str]] = [group] + args: list[Union[int, str]] = [group] args.extend(overall_select_query_args) txn.execute(sql % (overall_select_clause,), args) @@ -347,7 +344,7 @@ async def _background_deduplicate_state( ) max_group = rows[0][0] - def reindex_txn(txn: LoggingTransaction) -> Tuple[bool, int]: + def reindex_txn(txn: LoggingTransaction) -> tuple[bool, int]: new_last_state_group = last_state_group for count in range(batch_size): txn.execute( diff --git a/synapse/storage/databases/state/deletion.py b/synapse/storage/databases/state/deletion.py index 9b62c1d814..6975690c51 100644 --- a/synapse/storage/databases/state/deletion.py +++ b/synapse/storage/databases/state/deletion.py @@ -21,8 +21,6 @@ Collection, 
Mapping, Optional, - Set, - Tuple, ) from synapse.events.snapshot import EventPersistencePair @@ -233,7 +231,7 @@ async def persisting_state_group_references( any state groups referenced still exist and that they don't get deleted during this.""" - referenced_state_groups: Set[int] = set() + referenced_state_groups: set[int] = set() for event, ctx in event_and_contexts: if ctx.rejected or event.internal_metadata.is_outlier(): continue @@ -269,7 +267,7 @@ async def persisting_state_group_references( ) def _mark_state_groups_as_persisting_txn( - self, txn: LoggingTransaction, state_groups: Set[int] + self, txn: LoggingTransaction, state_groups: set[int] ) -> None: """Marks the given state groups as being persisted.""" @@ -508,7 +506,7 @@ def get_state_groups_ready_for_potential_deletion_txn( async def get_next_state_group_collection_to_delete( self, - ) -> Optional[Tuple[str, Mapping[int, int]]]: + ) -> Optional[tuple[str, Mapping[int, int]]]: """Get the next set of state groups to try and delete Returns: @@ -522,7 +520,7 @@ async def get_next_state_group_collection_to_delete( def _get_next_state_group_collection_to_delete_txn( self, txn: LoggingTransaction, - ) -> Optional[Tuple[str, Mapping[int, int]]]: + ) -> Optional[tuple[str, Mapping[int, int]]]: """Implementation of `get_next_state_group_collection_to_delete`""" # We want to return chunks of state groups that were marked for deletion diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py index b62f3e6f5b..6f25e7f0bc 100644 --- a/synapse/storage/databases/state/store.py +++ b/synapse/storage/databases/state/store.py @@ -22,13 +22,9 @@ import logging from typing import ( TYPE_CHECKING, - Dict, Iterable, - List, Mapping, Optional, - Set, - Tuple, cast, ) @@ -174,7 +170,7 @@ def _get_state_group_delta_txn(txn: LoggingTransaction) -> _GetStateGroupDelta: return _GetStateGroupDelta(None, None) delta_ids = cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], 
self.db_pool.simple_select_list_txn( txn, table="state_groups_state", @@ -199,8 +195,8 @@ def _get_state_group_delta_txn(txn: LoggingTransaction) -> _GetStateGroupDelta: @tag_args @cancellable async def _get_state_groups_from_groups( - self, groups: List[int], state_filter: StateFilter - ) -> Dict[int, StateMap[str]]: + self, groups: list[int], state_filter: StateFilter + ) -> dict[int, StateMap[str]]: """Returns the state groups for a given set of groups from the database, filtering on types of state events. @@ -211,7 +207,7 @@ async def _get_state_groups_from_groups( Returns: Dict of state group to state map. """ - results: Dict[int, StateMap[str]] = {} + results: dict[int, StateMap[str]] = {} chunks = [groups[i : i + 100] for i in range(0, len(groups), 100)] for chunk in chunks: @@ -232,7 +228,7 @@ def _get_state_for_group_using_cache( cache: DictionaryCache[int, StateKey, str], group: int, state_filter: StateFilter, - ) -> Tuple[MutableStateMap[str], bool]: + ) -> tuple[MutableStateMap[str], bool]: """Checks if group is in cache. See `get_state_for_groups` Args: @@ -284,7 +280,7 @@ def _get_state_for_group_using_cache( @cancellable async def _get_state_for_groups( self, groups: Iterable[int], state_filter: Optional[StateFilter] = None - ) -> Dict[int, MutableStateMap[str]]: + ) -> dict[int, MutableStateMap[str]]: """Gets the state at each of a list of state groups, optionally filtering by type/state_key @@ -355,7 +351,7 @@ def _get_state_for_groups_using_cache( groups: Iterable[int], cache: DictionaryCache[int, StateKey, str], state_filter: StateFilter, - ) -> Tuple[Dict[int, MutableStateMap[str]], Set[int]]: + ) -> tuple[dict[int, MutableStateMap[str]], set[int]]: """Gets the state at each of a list of state groups, optionally filtering by type/state_key, querying from a specific cache. 
@@ -387,7 +383,7 @@ def _get_state_for_groups_using_cache( def _insert_into_cache( self, - group_to_state_dict: Dict[int, StateMap[str]], + group_to_state_dict: dict[int, StateMap[str]], state_filter: StateFilter, cache_seq_num_members: int, cache_seq_num_non_members: int, @@ -452,10 +448,10 @@ def _insert_into_cache( @tag_args async def store_state_deltas_for_batched( self, - events_and_context: List[Tuple[EventBase, UnpersistedEventContextBase]], + events_and_context: list[tuple[EventBase, UnpersistedEventContextBase]], room_id: str, prev_group: int, - ) -> List[Tuple[EventBase, UnpersistedEventContext]]: + ) -> list[tuple[EventBase, UnpersistedEventContext]]: """Generate and store state deltas for a group of events and contexts created to be batch persisted. Note that all the events must be in a linear chain (ie a <- b <- c). @@ -469,9 +465,9 @@ async def store_state_deltas_for_batched( def insert_deltas_group_txn( txn: LoggingTransaction, - events_and_context: List[Tuple[EventBase, UnpersistedEventContext]], + events_and_context: list[tuple[EventBase, UnpersistedEventContext]], prev_group: int, - ) -> List[Tuple[EventBase, UnpersistedEventContext]]: + ) -> list[tuple[EventBase, UnpersistedEventContext]]: """Generate and store state groups for the provided events and contexts. Requires that we have the state as a delta from the last persisted state group. @@ -782,7 +778,7 @@ def _purge_unreferenced_state_groups( ) rows = cast( - List[Tuple[int]], + list[tuple[int]], self.db_pool.simple_select_many_txn( txn, table="state_group_edges", @@ -853,7 +849,7 @@ def _purge_unreferenced_state_groups( @tag_args async def get_previous_state_groups( self, state_groups: Iterable[int] - ) -> Dict[int, int]: + ) -> dict[int, int]: """Fetch the previous groups of the given state groups. 
Args: @@ -864,7 +860,7 @@ async def get_previous_state_groups( """ rows = cast( - List[Tuple[int, int]], + list[tuple[int, int]], await self.db_pool.simple_select_many_batch( table="state_group_edges", column="state_group", @@ -881,7 +877,7 @@ async def get_previous_state_groups( @tag_args async def get_next_state_groups( self, state_groups: Iterable[int] - ) -> Dict[int, int]: + ) -> dict[int, int]: """Fetch the groups that have the given state groups as their previous state groups. @@ -893,7 +889,7 @@ async def get_next_state_groups( """ rows = cast( - List[Tuple[int, int]], + list[tuple[int, int]], await self.db_pool.simple_select_many_batch( table="state_group_edges", column="prev_state_group", diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py index e4cd359201..8a1bbfa0f5 100644 --- a/synapse/storage/engines/postgres.py +++ b/synapse/storage/engines/postgres.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Any, Mapping, NoReturn, Optional, Tuple, cast +from typing import TYPE_CHECKING, Any, Mapping, NoReturn, Optional, cast import psycopg2.extensions @@ -79,11 +79,11 @@ def _disable_bytes_adapter(_: bytes) -> NoReturn: def single_threaded(self) -> bool: return False - def get_db_locale(self, txn: Cursor) -> Tuple[str, str]: + def get_db_locale(self, txn: Cursor) -> tuple[str, str]: txn.execute( "SELECT datcollate, datctype FROM pg_database WHERE datname = current_database()" ) - collation, ctype = cast(Tuple[str, str], txn.fetchone()) + collation, ctype = cast(tuple[str, str], txn.fetchone()) return collation, ctype def check_database( diff --git a/synapse/storage/engines/sqlite.py b/synapse/storage/engines/sqlite.py index 9d1795ebe5..ac3dc25bb5 100644 --- a/synapse/storage/engines/sqlite.py +++ b/synapse/storage/engines/sqlite.py @@ -22,7 +22,7 @@ import sqlite3 import struct import threading -from typing import TYPE_CHECKING, Any, List, Mapping, Optional +from typing import TYPE_CHECKING, Any, 
Mapping, Optional from synapse.storage.engines import BaseDatabaseEngine from synapse.storage.engines._base import AUTO_INCREMENT_PRIMARY_KEYPLACEHOLDER @@ -182,7 +182,7 @@ def executescript(cursor: sqlite3.Cursor, script: str) -> None: # Following functions taken from: https://github.com/coleifer/peewee -def _parse_match_info(buf: bytes) -> List[int]: +def _parse_match_info(buf: bytes) -> list[int]: bufsize = len(buf) return [struct.unpack("@I", buf[i : i + 4])[0] for i in range(0, bufsize, 4)] diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index bf087702ea..d4bd8020e1 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -28,10 +28,8 @@ Counter as CounterType, Generator, Iterable, - List, Optional, TextIO, - Tuple, ) import attr @@ -270,7 +268,7 @@ def _setup_new_database( for database in databases ) - directory_entries: List[_DirectoryListing] = [] + directory_entries: list[_DirectoryListing] = [] for directory in directories: directory_entries.extend( _DirectoryListing(file_name, os.path.join(directory, file_name)) @@ -453,7 +451,7 @@ def _upgrade_existing_database( file_name_counter: CounterType[str] = Counter() # Now find which directories have anything of interest. 
- directory_entries: List[_DirectoryListing] = [] + directory_entries: list[_DirectoryListing] = [] for directory in directories: logger.debug("Looking for schema deltas in %s", directory) try: @@ -593,7 +591,7 @@ def _apply_module_schema_files( cur: Cursor, database_engine: BaseDatabaseEngine, modname: str, - names_and_streams: Iterable[Tuple[str, TextIO]], + names_and_streams: Iterable[tuple[str, TextIO]], ) -> None: """Apply the module schemas for a single module diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 9dc6c395e8..35da5351f8 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -20,7 +20,7 @@ # import logging -from typing import List, Optional, Tuple +from typing import Optional import attr @@ -84,6 +84,6 @@ class ProfileInfo: class MemberSummary: # A truncated list of (user_id, event_id) tuples for users of a given # membership type, suitable for use in calculating heroes for a room. - members: List[Tuple[str, str]] + members: list[tuple[str, str]] # The total number of users of a given membership type. 
count: int diff --git a/synapse/storage/schema/main/delta/30/as_users.py b/synapse/storage/schema/main/delta/30/as_users.py index 060217575b..b7e9a11c2f 100644 --- a/synapse/storage/schema/main/delta/30/as_users.py +++ b/synapse/storage/schema/main/delta/30/as_users.py @@ -19,7 +19,7 @@ # # import logging -from typing import Dict, Iterable, List, Tuple, cast +from typing import Iterable, cast from synapse.config.appservice import load_appservices from synapse.config.homeserver import HomeServerConfig @@ -44,7 +44,7 @@ def run_upgrade( config: HomeServerConfig, ) -> None: cur.execute("SELECT name FROM users") - rows = cast(Iterable[Tuple[str]], cur.fetchall()) + rows = cast(Iterable[tuple[str]], cur.fetchall()) config_files = [] try: @@ -54,7 +54,7 @@ def run_upgrade( appservices = load_appservices(config.server.server_name, config_files) - owned: Dict[str, List[str]] = {} + owned: dict[str, list[str]] = {} for row in rows: user_id = row[0] diff --git a/synapse/storage/types.py b/synapse/storage/types.py index 4329d88c9a..fedf10dfc0 100644 --- a/synapse/storage/types.py +++ b/synapse/storage/types.py @@ -23,13 +23,10 @@ Any, Callable, Iterator, - List, Mapping, Optional, Protocol, Sequence, - Tuple, - Type, Union, ) @@ -47,11 +44,11 @@ def executemany( self, sql: str, parameters: Sequence[SQLQueryParameters] ) -> Any: ... - def fetchone(self) -> Optional[Tuple]: ... + def fetchone(self) -> Optional[tuple]: ... - def fetchmany(self, size: Optional[int] = ...) -> List[Tuple]: ... + def fetchmany(self, size: Optional[int] = ...) -> list[tuple]: ... - def fetchall(self) -> List[Tuple]: ... + def fetchall(self) -> list[tuple]: ... @property def description( @@ -66,7 +63,7 @@ def description( def rowcount(self) -> int: return 0 - def __iter__(self) -> Iterator[Tuple]: ... + def __iter__(self) -> Iterator[tuple]: ... def close(self) -> None: ... @@ -84,7 +81,7 @@ def __enter__(self) -> "Connection": ... 
def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], ) -> Optional[bool]: ... @@ -117,20 +114,20 @@ class DBAPI2Module(Protocol): # explain why this is necessary for safety. TL;DR: we shouldn't be able to write # to `x`, only read from it. See also https://github.com/python/mypy/issues/6002 . @property - def Warning(self) -> Type[Exception]: ... + def Warning(self) -> type[Exception]: ... @property - def Error(self) -> Type[Exception]: ... + def Error(self) -> type[Exception]: ... # Errors are divided into `InterfaceError`s (something went wrong in the database # driver) and `DatabaseError`s (something went wrong in the database). These are # both subclasses of `Error`, but we can't currently express this in type # annotations due to https://github.com/python/mypy/issues/8397 @property - def InterfaceError(self) -> Type[Exception]: ... + def InterfaceError(self) -> type[Exception]: ... @property - def DatabaseError(self) -> Type[Exception]: ... + def DatabaseError(self) -> type[Exception]: ... # Everything below is a subclass of `DatabaseError`. @@ -139,7 +136,7 @@ def DatabaseError(self) -> Type[Exception]: ... # - An invalid date time was provided. # - A string contained a null code point. @property - def DataError(self) -> Type[Exception]: ... + def DataError(self) -> type[Exception]: ... # Roughly: something went wrong in the database, but it's not within the application # programmer's control. Examples: @@ -150,18 +147,18 @@ def DataError(self) -> Type[Exception]: ... # - The database ran out of resources, such as storage, memory, connections, etc. # - The database encountered an error from the operating system. @property - def OperationalError(self) -> Type[Exception]: ... + def OperationalError(self) -> type[Exception]: ... # Roughly: we've given the database data which breaks a rule we asked it to enforce. 
# Examples: # - Stop, criminal scum! You violated the foreign key constraint # - Also check constraints, non-null constraints, etc. @property - def IntegrityError(self) -> Type[Exception]: ... + def IntegrityError(self) -> type[Exception]: ... # Roughly: something went wrong within the database server itself. @property - def InternalError(self) -> Type[Exception]: ... + def InternalError(self) -> type[Exception]: ... # Roughly: the application did something silly that needs to be fixed. Examples: # - We don't have permissions to do something. @@ -169,11 +166,11 @@ def InternalError(self) -> Type[Exception]: ... # - We tried to use a reserved name. # - We referred to a column that doesn't exist. @property - def ProgrammingError(self) -> Type[Exception]: ... + def ProgrammingError(self) -> type[Exception]: ... # Roughly: we've tried to do something that this database doesn't support. @property - def NotSupportedError(self) -> Type[Exception]: ... + def NotSupportedError(self) -> type[Exception]: ... # We originally wrote # def connect(self, *args, **kwargs) -> Connection: ... diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 1b7c5dac7a..5bf5c2b4bf 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -28,15 +28,10 @@ TYPE_CHECKING, AsyncContextManager, ContextManager, - Dict, Generic, Iterable, - List, Optional, Sequence, - Set, - Tuple, - Type, TypeVar, Union, cast, @@ -223,9 +218,9 @@ def __init__( stream_name: str, server_name: str, instance_name: str, - tables: List[Tuple[str, str, str]], + tables: list[tuple[str, str, str]], sequence_name: str, - writers: List[str], + writers: list[str], positive: bool = True, ) -> None: self._db = db @@ -243,7 +238,7 @@ def __init__( # Note: If we are a negative stream then we still store all the IDs as # positive to make life easier for us, and simply negate the IDs when we # return them. 
- self._current_positions: Dict[str, int] = {} + self._current_positions: dict[str, int] = {} # Set of local IDs that we're still processing. The current position # should be less than the minimum of this set (if not empty). @@ -260,7 +255,7 @@ def __init__( # Set of local IDs that we've processed that are larger than the current # position, due to there being smaller unpersisted IDs. - self._finished_ids: Set[int] = set() + self._finished_ids: set[int] = set() # We track the max position where we know everything before has been # persisted. This is done by a) looking at the min across all instances @@ -281,7 +276,7 @@ def __init__( self._persisted_upto_position = ( min(self._current_positions.values()) if self._current_positions else 1 ) - self._known_persisted_positions: List[int] = [] + self._known_persisted_positions: list[int] = [] # The maximum stream ID that we have seen been allocated across any writer. # Since this defaults to 1, this means that ID 1 is assumed to have already @@ -348,7 +343,7 @@ def __init__( def _load_current_ids( self, db_conn: LoggingDatabaseConnection, - tables: List[Tuple[str, str, str]], + tables: list[tuple[str, str, str]], sequence_name: str, ) -> None: cur = db_conn.cursor(txn_name="_load_current_ids") @@ -439,7 +434,7 @@ def _load_current_ids( self._persisted_upto_position = min_stream_id - rows: List[Tuple[str, int]] = [] + rows: list[tuple[str, int]] = [] for table, instance_column, id_column in tables: sql = """ SELECT %(instance)s, %(id)s FROM %(table)s @@ -453,13 +448,13 @@ def _load_current_ids( cur.execute(sql, (min_stream_id * self._return_factor,)) # Cast safety: this corresponds to the types returned by the query above. 
- rows.extend(cast(Iterable[Tuple[str, int]], cur)) + rows.extend(cast(Iterable[tuple[str, int]], cur)) # Sort by stream_id (ascending, lowest -> highest) so that we handle # rows in order for each instance because we don't want to overwrite # the current_position of an instance to a lower stream ID than # we're actually at. - def sort_by_stream_id_key_func(row: Tuple[str, int]) -> int: + def sort_by_stream_id_key_func(row: tuple[str, int]) -> int: (instance, stream_id) = row # If `stream_id` is ever `None`, we will see a `TypeError: '<' # not supported between instances of 'NoneType' and 'X'` error. @@ -492,7 +487,7 @@ def _load_next_id_txn(self, txn: Cursor) -> int: stream_ids = self._load_next_mult_id_txn(txn, 1) return stream_ids[0] - def _load_next_mult_id_txn(self, txn: Cursor, n: int) -> List[int]: + def _load_next_mult_id_txn(self, txn: Cursor, n: int) -> list[int]: # We need to track that we've requested some more stream IDs, and what # the current max allocated stream ID is. This is to prevent a race # where we've been allocated stream IDs but they have not yet been added @@ -529,7 +524,7 @@ def get_next(self) -> AsyncContextManager[int]: AsyncContextManager[int], _MultiWriterCtxManager(self, self._notifier) ) - def get_next_mult(self, n: int) -> AsyncContextManager[List[int]]: + def get_next_mult(self, n: int) -> AsyncContextManager[list[int]]: # If we have a list of instances that are allowed to write to this # stream, make sure we're in it. if self._writers and self._instance_name not in self._writers: @@ -537,7 +532,7 @@ def get_next_mult(self, n: int) -> AsyncContextManager[List[int]]: # Cast safety: see get_next. 
return cast( - AsyncContextManager[List[int]], + AsyncContextManager[list[int]], _MultiWriterCtxManager(self, self._notifier, n), ) @@ -578,7 +573,7 @@ def get_next_txn(self, txn: LoggingTransaction) -> int: return self._return_factor * next_id - def get_next_mult_txn(self, txn: LoggingTransaction, n: int) -> List[int]: + def get_next_mult_txn(self, txn: LoggingTransaction, n: int) -> list[int]: """ Usage: @@ -615,7 +610,7 @@ def get_next_mult_txn(self, txn: LoggingTransaction, n: int) -> List[int]: return [self._return_factor * next_id for next_id in next_ids] - def _mark_ids_as_finished(self, next_ids: List[int]) -> None: + def _mark_ids_as_finished(self, next_ids: list[int]) -> None: """These IDs have finished being processed so we should advance the current position if possible. """ @@ -707,7 +702,7 @@ def get_minimal_local_current_token(self) -> int: self._instance_name, self._persisted_upto_position ) - def get_positions(self) -> Dict[str, int]: + def get_positions(self) -> dict[str, int]: """Get a copy of the current positon map. Note that this won't necessarily include all configured writers if some @@ -849,7 +844,7 @@ async def __aenter__(self) -> T: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> Optional[bool]: @@ -863,9 +858,9 @@ class _MultiWriterCtxManager: id_gen: MultiWriterIdGenerator notifier: "ReplicationNotifier" multiple_ids: Optional[int] = None - stream_ids: List[int] = attr.Factory(list) + stream_ids: list[int] = attr.Factory(list) - async def __aenter__(self) -> Union[int, List[int]]: + async def __aenter__(self) -> Union[int, list[int]]: # It's safe to run this in autocommit mode as fetching values from a # sequence ignores transaction semantics anyway. 
self.stream_ids = await self.id_gen._db.runInteraction( @@ -882,7 +877,7 @@ async def __aenter__(self) -> Union[int, List[int]]: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> bool: diff --git a/synapse/storage/util/partial_state_events_tracker.py b/synapse/storage/util/partial_state_events_tracker.py index f8addf38f6..5078f6367b 100644 --- a/synapse/storage/util/partial_state_events_tracker.py +++ b/synapse/storage/util/partial_state_events_tracker.py @@ -21,7 +21,7 @@ import logging from collections import defaultdict -from typing import Collection, Dict, Set +from typing import Collection from twisted.internet import defer from twisted.internet.defer import Deferred @@ -43,7 +43,7 @@ def __init__(self, store: EventsWorkerStore): self._store = store # a map from event id to a set of Deferreds which are waiting for that event to be # un-partial-stated. - self._observers: Dict[str, Set[Deferred[None]]] = defaultdict(set) + self._observers: dict[str, set[Deferred[None]]] = defaultdict(set) def notify_un_partial_stated(self, event_id: str) -> None: """Notify that we now have full state for a given event @@ -93,7 +93,7 @@ async def await_full_state(self, event_ids: Collection[str]) -> None: ) # create an observer for each lazy-joined event - observers: Dict[str, Deferred[None]] = { + observers: dict[str, Deferred[None]] = { event_id: Deferred() for event_id in partial_state_event_ids } for event_id, observer in observers.items(): @@ -140,7 +140,7 @@ def __init__(self, store: RoomWorkerStore): # a map from room id to a set of Deferreds which are waiting for that room to be # un-partial-stated. 
- self._observers: Dict[str, Set[Deferred[None]]] = defaultdict(set) + self._observers: dict[str, set[Deferred[None]]] = defaultdict(set) def notify_un_partial_stated(self, room_id: str) -> None: """Notify that we now have full current state for a given room diff --git a/synapse/storage/util/sequence.py b/synapse/storage/util/sequence.py index cac3eba1a5..e2256aa109 100644 --- a/synapse/storage/util/sequence.py +++ b/synapse/storage/util/sequence.py @@ -21,7 +21,7 @@ import abc import logging import threading -from typing import TYPE_CHECKING, Callable, List, Optional +from typing import TYPE_CHECKING, Callable, Optional from synapse.storage.engines import ( BaseDatabaseEngine, @@ -61,7 +61,7 @@ def get_next_id_txn(self, txn: Cursor) -> int: ... @abc.abstractmethod - def get_next_mult_txn(self, txn: Cursor, n: int) -> List[int]: + def get_next_mult_txn(self, txn: Cursor, n: int) -> list[int]: """Get the next `n` IDs in the sequence""" ... @@ -105,7 +105,7 @@ def get_next_id_txn(self, txn: Cursor) -> int: assert fetch_res is not None return fetch_res[0] - def get_next_mult_txn(self, txn: Cursor, n: int) -> List[int]: + def get_next_mult_txn(self, txn: Cursor, n: int) -> list[int]: txn.execute( "SELECT nextval(?) 
FROM generate_series(1, ?)", (self._sequence_name, n) ) @@ -241,7 +241,7 @@ def get_next_id_txn(self, txn: Cursor) -> int: self._current_max_id += 1 return self._current_max_id - def get_next_mult_txn(self, txn: Cursor, n: int) -> List[int]: + def get_next_mult_txn(self, txn: Cursor, n: int) -> list[int]: with self._lock: if self._current_max_id is None: assert self._callback is not None diff --git a/synapse/streams/__init__.py b/synapse/streams/__init__.py index 67635d7ebe..faf453b8a1 100644 --- a/synapse/streams/__init__.py +++ b/synapse/streams/__init__.py @@ -19,7 +19,7 @@ # # from abc import ABC, abstractmethod -from typing import Generic, List, Optional, Tuple, TypeVar +from typing import Generic, Optional, TypeVar from synapse.types import StrCollection, UserID @@ -39,5 +39,5 @@ async def get_new_events( room_ids: StrCollection, is_guest: bool, explicit_room_id: Optional[str] = None, - ) -> Tuple[List[R], K]: + ) -> tuple[list[R], K]: raise NotImplementedError() diff --git a/synapse/streams/events.py b/synapse/streams/events.py index 1e4bebe46d..143f659499 100644 --- a/synapse/streams/events.py +++ b/synapse/streams/events.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Sequence, Tuple +from typing import TYPE_CHECKING, Sequence import attr @@ -52,7 +52,7 @@ class _EventSourcesInner: receipt: ReceiptEventSource account_data: AccountDataEventSource - def get_sources(self) -> Sequence[Tuple[StreamKeyType, EventSource]]: + def get_sources(self) -> Sequence[tuple[StreamKeyType, EventSource]]: return [ (StreamKeyType.ROOM, self.room), (StreamKeyType.PRESENCE, self.presence), diff --git a/synapse/synapse_rust/acl.pyi b/synapse/synapse_rust/acl.pyi index 985994d313..934d0de80a 100644 --- a/synapse/synapse_rust/acl.pyi +++ b/synapse/synapse_rust/acl.pyi @@ -13,10 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import List - class ServerAclEvaluator: def __init__( - self, allow_ip_literals: bool, allow: List[str], deny: List[str] + self, allow_ip_literals: bool, allow: list[str], deny: list[str] ) -> None: ... def server_matches_acl_event(self, server_name: str) -> bool: ... diff --git a/synapse/synapse_rust/events.pyi b/synapse/synapse_rust/events.pyi index a82211283b..08c976121a 100644 --- a/synapse/synapse_rust/events.pyi +++ b/synapse/synapse_rust/events.pyi @@ -10,7 +10,7 @@ # See the GNU Affero General Public License for more details: # . -from typing import List, Mapping, Optional, Tuple +from typing import Mapping, Optional from synapse.types import JsonDict @@ -115,7 +115,7 @@ def event_visible_to_server( history_visibility: str, erased_senders: Mapping[str, bool], partial_state_invisible: bool, - memberships: List[Tuple[str, str]], + memberships: list[tuple[str, str]], ) -> bool: """Determine whether the server is allowed to see the unredacted event. diff --git a/synapse/synapse_rust/push.pyi b/synapse/synapse_rust/push.pyi index a3e12ad648..1e135b8c69 100644 --- a/synapse/synapse_rust/push.pyi +++ b/synapse/synapse_rust/push.pyi @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Collection, Dict, Mapping, Optional, Sequence, Tuple, Union +from typing import Any, Collection, Mapping, Optional, Sequence, Union from synapse.types import JsonDict, JsonValue @@ -43,7 +43,7 @@ class FilteredPushRules: def __init__( self, push_rules: PushRules, - enabled_map: Dict[str, bool], + enabled_map: dict[str, bool], msc1767_enabled: bool, msc3381_polls_enabled: bool, msc3664_enabled: bool, @@ -51,7 +51,7 @@ class FilteredPushRules: msc4210_enabled: bool, msc4306_enabled: bool, ): ... - def rules(self) -> Collection[Tuple[PushRule, bool]]: ... + def rules(self) -> Collection[tuple[PushRule, bool]]: ... def get_base_rule_ids() -> Collection[str]: ... 
@@ -65,7 +65,7 @@ class PushRuleEvaluator: notification_power_levels: Mapping[str, int], related_events_flattened: Mapping[str, Mapping[str, JsonValue]], related_event_match_enabled: bool, - room_version_feature_flags: Tuple[str, ...], + room_version_feature_flags: tuple[str, ...], msc3931_enabled: bool, msc4210_enabled: bool, msc4306_enabled: bool, diff --git a/synapse/synapse_rust/segmenter.pyi b/synapse/synapse_rust/segmenter.pyi index 5f36765947..19a0a4d83c 100644 --- a/synapse/synapse_rust/segmenter.pyi +++ b/synapse/synapse_rust/segmenter.pyi @@ -1,3 +1 @@ -from typing import List - -def parse_words(text: str) -> List[str]: ... +def parse_words(text: str) -> list[str]: ... diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index 0386cb77d6..87436459ac 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -29,17 +29,12 @@ AbstractSet, Any, ClassVar, - Dict, - List, Literal, Mapping, Match, MutableMapping, NoReturn, Optional, - Set, - Tuple, - Type, TypedDict, TypeVar, Union, @@ -84,16 +79,16 @@ # Define a state map type from type/state_key to T (usually an event ID or # event) T = TypeVar("T") -StateKey = Tuple[str, str] +StateKey = tuple[str, str] StateMap = Mapping[StateKey, T] MutableStateMap = MutableMapping[StateKey, T] # JSON types. These could be made stronger, but will do for now. # A "simple" (canonical) JSON value. SimpleJsonValue = Optional[Union[str, int, bool]] -JsonValue = Union[List[SimpleJsonValue], Tuple[SimpleJsonValue, ...], SimpleJsonValue] +JsonValue = Union[list[SimpleJsonValue], tuple[SimpleJsonValue, ...], SimpleJsonValue] # A JSON-serialisable dict. -JsonDict = Dict[str, Any] +JsonDict = dict[str, Any] # A JSON-serialisable mapping; roughly speaking an immutable JSONDict. # Useful when you have a TypedDict which isn't going to be mutated and you don't want # to cast to JsonDict everywhere. @@ -106,12 +101,12 @@ # # StrCollection is an unordered collection of strings. 
If ordering is important, # StrSequence can be used instead. -StrCollection = Union[Tuple[str, ...], List[str], AbstractSet[str]] +StrCollection = Union[tuple[str, ...], list[str], AbstractSet[str]] # Sequence[str] that does not include str itself; str being a Sequence[str] # is very misleading and results in bugs. # # Unlike StrCollection, StrSequence is an ordered collection of strings. -StrSequence = Union[Tuple[str, ...], List[str]] +StrSequence = Union[tuple[str, ...], list[str]] # Note that this seems to require inheriting *directly* from Interface in order @@ -165,13 +160,13 @@ class Requester: user: "UserID" access_token_id: Optional[int] is_guest: bool - scope: Set[str] + scope: set[str] shadow_banned: bool device_id: Optional[str] app_service: Optional["ApplicationService"] authenticated_entity: str - def serialize(self) -> Dict[str, Any]: + def serialize(self) -> dict[str, Any]: """Converts self to a type that can be serialized as JSON, and then deserialized by `deserialize` @@ -191,7 +186,7 @@ def serialize(self) -> Dict[str, Any]: @staticmethod def deserialize( - store: "ApplicationServiceWorkerStore", input: Dict[str, Any] + store: "ApplicationServiceWorkerStore", input: dict[str, Any] ) -> "Requester": """Converts a dict that was produced by `serialize` back into a Requester. 
@@ -305,11 +300,11 @@ class DomainSpecificString(metaclass=abc.ABCMeta): def __copy__(self: DS) -> DS: return self - def __deepcopy__(self: DS, memo: Dict[str, object]) -> DS: + def __deepcopy__(self: DS, memo: dict[str, object]) -> DS: return self @classmethod - def from_string(cls: Type[DS], s: str) -> DS: + def from_string(cls: type[DS], s: str) -> DS: """Parse the string given by 's' into a structure object.""" if len(s) < 1 or s[0:1] != cls.SIGIL: raise SynapseError( @@ -337,7 +332,7 @@ def to_string(self) -> str: return "%s%s:%s" % (self.SIGIL, self.localpart, self.domain) @classmethod - def is_valid(cls: Type[DS], s: str) -> bool: + def is_valid(cls: type[DS], s: str) -> bool: """Parses the input string and attempts to ensure it is valid.""" # TODO: this does not reject an empty localpart or an overly-long string. # See https://spec.matrix.org/v1.2/appendices/#identifier-grammar @@ -393,7 +388,7 @@ class RoomID: room_id_with_domain: Optional[RoomIdWithDomain] @classmethod - def is_valid(cls: Type["RoomID"], s: str) -> bool: + def is_valid(cls: type["RoomID"], s: str) -> bool: if ":" in s: return RoomIdWithDomain.is_valid(s) try: @@ -415,7 +410,7 @@ def to_string(self) -> str: __repr__ = to_string @classmethod - def from_string(cls: Type["RoomID"], s: str) -> "RoomID": + def from_string(cls: type["RoomID"], s: str) -> "RoomID": # sigil check if len(s) < 1 or s[0] != cls.SIGIL: raise SynapseError( @@ -829,7 +824,7 @@ def copy_and_advance(self, other: "RoomStreamToken") -> "RoomStreamToken": return super().copy_and_advance(other) - def as_historical_tuple(self) -> Tuple[int, int]: + def as_historical_tuple(self) -> tuple[int, int]: """Returns a tuple of `(topological, stream)` for historical tokens. Raises if not an historical token (i.e. doesn't have a topological part). 
@@ -1412,7 +1407,7 @@ def __iter__(self) -> NoReturn: def __copy__(self) -> "ThirdPartyInstanceID": return self - def __deepcopy__(self, memo: Dict[str, object]) -> "ThirdPartyInstanceID": + def __deepcopy__(self, memo: dict[str, object]) -> "ThirdPartyInstanceID": return self @classmethod @@ -1436,7 +1431,7 @@ class ReadReceipt: room_id: str receipt_type: str user_id: str - event_ids: List[str] + event_ids: list[str] thread_id: Optional[str] data: JsonDict @@ -1459,8 +1454,8 @@ class DeviceListUpdates: # The latter happening only once, thus always giving you the same sets # across multiple DeviceListUpdates instances. # Also see: don't define mutable default arguments. - changed: Set[str] = attr.ib(factory=set) - left: Set[str] = attr.ib(factory=set) + changed: set[str] = attr.ib(factory=set) + left: set[str] = attr.ib(factory=set) def __bool__(self) -> bool: return bool(self.changed or self.left) @@ -1468,7 +1463,7 @@ def __bool__(self) -> bool: def get_verify_key_from_cross_signing_key( key_info: Mapping[str, Any], -) -> Tuple[str, VerifyKey]: +) -> tuple[str, VerifyKey]: """Get the key ID and signedjson verify key from a cross-signing key dict Args: diff --git a/synapse/types/handlers/__init__.py b/synapse/types/handlers/__init__.py index f2fbc1dddf..80651bb685 100644 --- a/synapse/types/handlers/__init__.py +++ b/synapse/types/handlers/__init__.py @@ -19,7 +19,7 @@ # -from typing import List, Optional, TypedDict +from typing import Optional, TypedDict from synapse.api.constants import EventTypes @@ -87,7 +87,7 @@ class ShutdownRoomResponse(TypedDict): new_room_id: A string representing the room ID of the new room. 
""" - kicked_users: List[str] - failed_to_kick_users: List[str] - local_aliases: List[str] + kicked_users: list[str] + failed_to_kick_users: list[str] + local_aliases: list[str] new_room_id: Optional[str] diff --git a/synapse/types/handlers/sliding_sync.py b/synapse/types/handlers/sliding_sync.py index b7bc565464..aef7db8e98 100644 --- a/synapse/types/handlers/sliding_sync.py +++ b/synapse/types/handlers/sliding_sync.py @@ -21,16 +21,12 @@ AbstractSet, Any, Callable, - Dict, Final, Generic, - List, Mapping, MutableMapping, Optional, Sequence, - Set, - Tuple, TypeVar, cast, ) @@ -178,17 +174,17 @@ class StrippedHero: name: Optional[str] avatar: Optional[str] - heroes: Optional[List[StrippedHero]] + heroes: Optional[list[StrippedHero]] is_dm: bool initial: bool unstable_expanded_timeline: bool # Should be empty for invite/knock rooms with `stripped_state` - required_state: List[EventBase] + required_state: list[EventBase] # Should be empty for invite/knock rooms with `stripped_state` - timeline_events: List[EventBase] - bundled_aggregations: Optional[Dict[str, "BundledAggregations"]] + timeline_events: list[EventBase] + bundled_aggregations: Optional[dict[str, "BundledAggregations"]] # Optional because it's only relevant to invite/knock rooms - stripped_state: List[JsonDict] + stripped_state: list[JsonDict] # Only optional because it won't be included for invite/knock rooms with `stripped_state` prev_batch: Optional[StreamToken] # Only optional because it won't be included for invite/knock rooms with `stripped_state` @@ -240,11 +236,11 @@ class Operation: """ op: OperationType - range: Tuple[int, int] - room_ids: List[str] + range: tuple[int, int] + room_ids: list[str] count: int - ops: List[Operation] + ops: list[Operation] @attr.s(slots=True, frozen=True, auto_attribs=True) class Extensions: @@ -415,7 +411,7 @@ def __bool__(self) -> bool: next_pos: SlidingSyncStreamToken lists: Mapping[str, SlidingWindowList] - rooms: Dict[str, RoomResult] + rooms: dict[str, 
RoomResult] extensions: Extensions def __bool__(self) -> bool: @@ -485,7 +481,7 @@ def from_room_config( Args: room_params: `SlidingSyncConfig.SlidingSyncList` or `SlidingSyncConfig.RoomSubscription` """ - required_state_map: Dict[str, Set[str]] = {} + required_state_map: dict[str, set[str]] = {} for ( state_type, state_key, diff --git a/synapse/types/rest/client/__init__.py b/synapse/types/rest/client/__init__.py index 11d7e59b43..4940fabd12 100644 --- a/synapse/types/rest/client/__init__.py +++ b/synapse/types/rest/client/__init__.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Optional, Union from synapse._pydantic_compat import ( Extra, @@ -72,7 +72,7 @@ class ThreepidRequestTokenBody(RequestBodyModel): @validator("id_access_token", always=True) def token_required_for_identity_server( - cls, token: Optional[str], values: Dict[str, object] + cls, token: Optional[str], values: dict[str, object] ) -> Optional[str]: if values.get("id_server") is not None and token is None: raise ValueError("id_access_token is required if an id_server is supplied.") @@ -144,7 +144,7 @@ class CommonRoomParameters(RequestBodyModel): (Max 1000 messages) """ - required_state: List[Tuple[StrictStr, StrictStr]] + required_state: list[tuple[StrictStr, StrictStr]] # mypy workaround via https://github.com/pydantic/pydantic/issues/156#issuecomment-1130883884 if TYPE_CHECKING: timeline_limit: int @@ -242,21 +242,21 @@ class Filters(RequestBodyModel): """ is_dm: Optional[StrictBool] = None - spaces: Optional[List[StrictStr]] = None + spaces: Optional[list[StrictStr]] = None is_encrypted: Optional[StrictBool] = None is_invite: Optional[StrictBool] = None - room_types: Optional[List[Union[StrictStr, None]]] = None - not_room_types: Optional[List[Union[StrictStr, None]]] = None + room_types: Optional[list[Union[StrictStr, None]]] = None + not_room_types: 
Optional[list[Union[StrictStr, None]]] = None room_name_like: Optional[StrictStr] = None - tags: Optional[List[StrictStr]] = None - not_tags: Optional[List[StrictStr]] = None + tags: Optional[list[StrictStr]] = None + not_tags: Optional[list[StrictStr]] = None # mypy workaround via https://github.com/pydantic/pydantic/issues/156#issuecomment-1130883884 if TYPE_CHECKING: - ranges: Optional[List[Tuple[int, int]]] = None + ranges: Optional[list[tuple[int, int]]] = None else: ranges: Optional[ - List[Tuple[conint(ge=0, strict=True), conint(ge=0, strict=True)]] + list[tuple[conint(ge=0, strict=True), conint(ge=0, strict=True)]] ] = None # type: ignore[valid-type] slow_get_all_rooms: Optional[StrictBool] = False filters: Optional[Filters] = None @@ -327,9 +327,9 @@ class AccountDataExtension(RequestBodyModel): enabled: Optional[StrictBool] = False # Process all lists defined in the Sliding Window API. (This is the default.) - lists: Optional[List[StrictStr]] = ["*"] + lists: Optional[list[StrictStr]] = ["*"] # Process all room subscriptions defined in the Room Subscription API. (This is the default.) - rooms: Optional[List[StrictStr]] = ["*"] + rooms: Optional[list[StrictStr]] = ["*"] class ReceiptsExtension(RequestBodyModel): """The Receipts extension (MSC3960) @@ -344,9 +344,9 @@ class ReceiptsExtension(RequestBodyModel): enabled: Optional[StrictBool] = False # Process all lists defined in the Sliding Window API. (This is the default.) - lists: Optional[List[StrictStr]] = ["*"] + lists: Optional[list[StrictStr]] = ["*"] # Process all room subscriptions defined in the Room Subscription API. (This is the default.) - rooms: Optional[List[StrictStr]] = ["*"] + rooms: Optional[list[StrictStr]] = ["*"] class TypingExtension(RequestBodyModel): """The Typing Notification extension (MSC3961) @@ -361,9 +361,9 @@ class TypingExtension(RequestBodyModel): enabled: Optional[StrictBool] = False # Process all lists defined in the Sliding Window API. (This is the default.) 
- lists: Optional[List[StrictStr]] = ["*"] + lists: Optional[list[StrictStr]] = ["*"] # Process all room subscriptions defined in the Room Subscription API. (This is the default.) - rooms: Optional[List[StrictStr]] = ["*"] + rooms: Optional[list[StrictStr]] = ["*"] class ThreadSubscriptionsExtension(RequestBodyModel): """The Thread Subscriptions extension (MSC4308) @@ -389,18 +389,18 @@ class ThreadSubscriptionsExtension(RequestBodyModel): # mypy workaround via https://github.com/pydantic/pydantic/issues/156#issuecomment-1130883884 if TYPE_CHECKING: - lists: Optional[Dict[str, SlidingSyncList]] = None + lists: Optional[dict[str, SlidingSyncList]] = None else: - lists: Optional[Dict[constr(max_length=64, strict=True), SlidingSyncList]] = ( + lists: Optional[dict[constr(max_length=64, strict=True), SlidingSyncList]] = ( None # type: ignore[valid-type] ) - room_subscriptions: Optional[Dict[StrictStr, RoomSubscription]] = None + room_subscriptions: Optional[dict[StrictStr, RoomSubscription]] = None extensions: Optional[Extensions] = None @validator("lists") def lists_length_check( - cls, value: Optional[Dict[str, SlidingSyncList]] - ) -> Optional[Dict[str, SlidingSyncList]]: + cls, value: Optional[dict[str, SlidingSyncList]] + ) -> Optional[dict[str, SlidingSyncList]]: if value is not None: assert len(value) <= 100, f"Max lists: 100 but saw {len(value)}" return value diff --git a/synapse/types/state.py b/synapse/types/state.py index 6420e050a5..1b4de61d3e 100644 --- a/synapse/types/state.py +++ b/synapse/types/state.py @@ -25,13 +25,9 @@ Any, Callable, Collection, - Dict, Iterable, - List, Mapping, Optional, - Set, - Tuple, TypeVar, ) @@ -42,7 +38,7 @@ from synapse.types import MutableStateMap, StateKey, StateMap if TYPE_CHECKING: - from typing import FrozenSet # noqa: used within quoted type hint; flake8 sad + pass # noqa: used within quoted type hint; flake8 sad logger = logging.getLogger(__name__) @@ -64,7 +60,7 @@ class StateFilter: appear in `types`. 
""" - types: "immutabledict[str, Optional[FrozenSet[str]]]" + types: "immutabledict[str, Optional[frozenset[str]]]" include_others: bool = False def __attrs_post_init__(self) -> None: @@ -105,7 +101,7 @@ def none() -> "StateFilter": return _NONE_STATE_FILTER @staticmethod - def from_types(types: Iterable[Tuple[str, Optional[str]]]) -> "StateFilter": + def from_types(types: Iterable[tuple[str, Optional[str]]]) -> "StateFilter": """Creates a filter that only fetches the given types Args: @@ -115,7 +111,7 @@ def from_types(types: Iterable[Tuple[str, Optional[str]]]) -> "StateFilter": Returns: The new state filter. """ - type_dict: Dict[str, Optional[Set[str]]] = {} + type_dict: dict[str, Optional[set[str]]] = {} for typ, s in types: if typ in type_dict: if type_dict[typ] is None: @@ -134,7 +130,7 @@ def from_types(types: Iterable[Tuple[str, Optional[str]]]) -> "StateFilter": ) ) - def to_types(self) -> Iterable[Tuple[str, Optional[str]]]: + def to_types(self) -> Iterable[tuple[str, Optional[str]]]: """The inverse to `from_types`.""" for event_type, state_keys in self.types.items(): if state_keys is None: @@ -167,7 +163,7 @@ def freeze( Returns a (frozen) StateFilter with the same contents as the parameters specified here, which can be made of mutable types. """ - types_with_frozen_values: Dict[str, Optional[FrozenSet[str]]] = {} + types_with_frozen_values: dict[str, Optional[frozenset[str]]] = {} for state_types, state_keys in types.items(): if state_keys is not None: types_with_frozen_values[state_types] = frozenset(state_keys) @@ -240,7 +236,7 @@ def return_expanded(self) -> "StateFilter": # We want to return all non-members return _ALL_NON_MEMBER_STATE_FILTER - def make_sql_filter_clause(self) -> Tuple[str, List[str]]: + def make_sql_filter_clause(self) -> tuple[str, list[str]]: """Converts the filter to an SQL clause. 
For example: @@ -257,7 +253,7 @@ def make_sql_filter_clause(self) -> Tuple[str, List[str]]: """ where_clause = "" - where_args: List[str] = [] + where_args: list[str] = [] if self.is_full(): return where_clause, where_args @@ -353,7 +349,7 @@ def has_wildcards(self) -> bool: state_keys is None for state_keys in self.types.values() ) - def concrete_types(self) -> List[Tuple[str, str]]: + def concrete_types(self) -> list[tuple[str, str]]: """Returns a list of concrete type/state_keys (i.e. not None) that will be fetched. This will be a complete list if `has_wildcards` returns False, but otherwise will be a subset (or even empty). @@ -368,7 +364,7 @@ def concrete_types(self) -> List[Tuple[str, str]]: for s in state_keys ] - def wildcard_types(self) -> List[str]: + def wildcard_types(self) -> list[str]: """Returns a list of event types which require us to fetch all state keys. This will be empty unless `has_wildcards` returns True. @@ -377,7 +373,7 @@ def wildcard_types(self) -> List[str]: """ return [t for t, state_keys in self.types.items() if state_keys is None] - def get_member_split(self) -> Tuple["StateFilter", "StateFilter"]: + def get_member_split(self) -> tuple["StateFilter", "StateFilter"]: """Return the filter split into two: one which assumes it's exclusively matching against member state, and one which assumes it's matching against non member state. @@ -416,7 +412,7 @@ def get_member_split(self) -> Tuple["StateFilter", "StateFilter"]: def _decompose_into_four_parts( self, - ) -> Tuple[Tuple[bool, Set[str]], Tuple[Set[str], Set[StateKey]]]: + ) -> tuple[tuple[bool, set[str]], tuple[set[str], set[StateKey]]]: """ Decomposes this state filter into 4 constituent parts, which can be thought of as this: @@ -432,18 +428,18 @@ def _decompose_into_four_parts( correspondence. 
""" is_all = self.include_others - excluded_types: Set[str] = {t for t in self.types if is_all} - wildcard_types: Set[str] = {t for t, s in self.types.items() if s is None} - concrete_keys: Set[StateKey] = set(self.concrete_types()) + excluded_types: set[str] = {t for t in self.types if is_all} + wildcard_types: set[str] = {t for t, s in self.types.items() if s is None} + concrete_keys: set[StateKey] = set(self.concrete_types()) return (is_all, excluded_types), (wildcard_types, concrete_keys) @staticmethod def _recompose_from_four_parts( all_part: bool, - minus_wildcards: Set[str], - plus_wildcards: Set[str], - plus_state_keys: Set[StateKey], + minus_wildcards: set[str], + plus_wildcards: set[str], + plus_state_keys: set[StateKey], ) -> "StateFilter": """ Recomposes a state filter from 4 parts. @@ -454,7 +450,7 @@ def _recompose_from_four_parts( # {state type -> set of state keys OR None for wildcard} # (The same structure as that of a StateFilter.) - new_types: Dict[str, Optional[Set[str]]] = {} + new_types: dict[str, Optional[set[str]]] = {} # if we start with all, insert the excluded statetypes as empty sets # to prevent them from being included diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index 2ae2e245a9..0d3b7ca740 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -23,12 +23,10 @@ import logging import typing from typing import ( - Dict, Iterator, Mapping, Optional, Sequence, - Set, TypeVar, ) @@ -119,8 +117,8 @@ class MutableOverlayMapping(collections.abc.MutableMapping[K, V]): """ _underlying_map: Mapping[K, V] - _mutable_map: Dict[K, V] = attr.ib(factory=dict) - _deletions: Set[K] = attr.ib(factory=set) + _mutable_map: dict[K, V] = attr.ib(factory=dict) + _deletions: set[K] = attr.ib(factory=set) def __getitem__(self, key: K) -> V: if key in self._deletions: diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index 2a167f209c..c568b377d2 100644 --- a/synapse/util/async_helpers.py +++ 
b/synapse/util/async_helpers.py @@ -35,16 +35,12 @@ Callable, Collection, Coroutine, - Dict, Generator, Generic, Hashable, Iterable, - List, Literal, Optional, - Set, - Tuple, TypeVar, Union, overload, @@ -108,8 +104,8 @@ class ObservableDeferred(Generic[_T], AbstractObservableDeferred[_T]): __slots__ = ["_deferred", "_observers", "_result"] _deferred: "defer.Deferred[_T]" - _observers: Union[List["defer.Deferred[_T]"], Tuple[()]] - _result: Union[None, Tuple[Literal[True], _T], Tuple[Literal[False], Failure]] + _observers: Union[list["defer.Deferred[_T]"], tuple[()]] + _result: Union[None, tuple[Literal[True], _T], tuple[Literal[False], Failure]] def __init__(self, deferred: "defer.Deferred[_T]", consumeErrors: bool = False): object.__setattr__(self, "_deferred", deferred) @@ -268,7 +264,7 @@ async def yieldable_gather_results( iter: Iterable[T], *args: P.args, **kwargs: P.kwargs, -) -> List[R]: +) -> list[R]: """Executes the function with each argument concurrently. Args: @@ -310,7 +306,7 @@ async def yieldable_gather_results_delaying_cancellation( iter: Iterable[T], *args: P.args, **kwargs: P.kwargs, -) -> List[R]: +) -> list[R]: """Executes the function with each argument concurrently. Cancellation is delayed until after all the results have been gathered. @@ -350,49 +346,49 @@ async def yieldable_gather_results_delaying_cancellation( @overload def gather_results( - deferredList: Tuple[()], consumeErrors: bool = ... -) -> "defer.Deferred[Tuple[()]]": ... + deferredList: tuple[()], consumeErrors: bool = ... +) -> "defer.Deferred[tuple[()]]": ... @overload def gather_results( - deferredList: Tuple["defer.Deferred[T1]"], + deferredList: tuple["defer.Deferred[T1]"], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1]]": ... +) -> "defer.Deferred[tuple[T1]]": ... 
@overload def gather_results( - deferredList: Tuple["defer.Deferred[T1]", "defer.Deferred[T2]"], + deferredList: tuple["defer.Deferred[T1]", "defer.Deferred[T2]"], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2]]": ... +) -> "defer.Deferred[tuple[T1, T2]]": ... @overload def gather_results( - deferredList: Tuple[ + deferredList: tuple[ "defer.Deferred[T1]", "defer.Deferred[T2]", "defer.Deferred[T3]" ], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2, T3]]": ... +) -> "defer.Deferred[tuple[T1, T2, T3]]": ... @overload def gather_results( - deferredList: Tuple[ + deferredList: tuple[ "defer.Deferred[T1]", "defer.Deferred[T2]", "defer.Deferred[T3]", "defer.Deferred[T4]", ], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2, T3, T4]]": ... +) -> "defer.Deferred[tuple[T1, T2, T3, T4]]": ... def gather_results( # type: ignore[misc] - deferredList: Tuple["defer.Deferred[T1]", ...], + deferredList: tuple["defer.Deferred[T1]", ...], consumeErrors: bool = False, -) -> "defer.Deferred[Tuple[T1, ...]]": +) -> "defer.Deferred[tuple[T1, ...]]": """Combines a tuple of `Deferred`s into a single `Deferred`. Wraps `defer.gatherResults` to provide type annotations that support heterogenous @@ -406,50 +402,50 @@ def gather_results( # type: ignore[misc] @overload async def gather_optional_coroutines( - *coroutines: Unpack[Tuple[Optional[Coroutine[Any, Any, T1]]]], -) -> Tuple[Optional[T1]]: ... + *coroutines: Unpack[tuple[Optional[Coroutine[Any, Any, T1]]]], +) -> tuple[Optional[T1]]: ... @overload async def gather_optional_coroutines( *coroutines: Unpack[ - Tuple[ + tuple[ Optional[Coroutine[Any, Any, T1]], Optional[Coroutine[Any, Any, T2]], ] ], -) -> Tuple[Optional[T1], Optional[T2]]: ... +) -> tuple[Optional[T1], Optional[T2]]: ... 
@overload async def gather_optional_coroutines( *coroutines: Unpack[ - Tuple[ + tuple[ Optional[Coroutine[Any, Any, T1]], Optional[Coroutine[Any, Any, T2]], Optional[Coroutine[Any, Any, T3]], ] ], -) -> Tuple[Optional[T1], Optional[T2], Optional[T3]]: ... +) -> tuple[Optional[T1], Optional[T2], Optional[T3]]: ... @overload async def gather_optional_coroutines( *coroutines: Unpack[ - Tuple[ + tuple[ Optional[Coroutine[Any, Any, T1]], Optional[Coroutine[Any, Any, T2]], Optional[Coroutine[Any, Any, T3]], Optional[Coroutine[Any, Any, T4]], ] ], -) -> Tuple[Optional[T1], Optional[T2], Optional[T3], Optional[T4]]: ... +) -> tuple[Optional[T1], Optional[T2], Optional[T3], Optional[T4]]: ... @overload async def gather_optional_coroutines( *coroutines: Unpack[ - Tuple[ + tuple[ Optional[Coroutine[Any, Any, T1]], Optional[Coroutine[Any, Any, T2]], Optional[Coroutine[Any, Any, T3]], @@ -457,13 +453,13 @@ async def gather_optional_coroutines( Optional[Coroutine[Any, Any, T5]], ] ], -) -> Tuple[Optional[T1], Optional[T2], Optional[T3], Optional[T4], Optional[T5]]: ... +) -> tuple[Optional[T1], Optional[T2], Optional[T3], Optional[T4], Optional[T5]]: ... @overload async def gather_optional_coroutines( *coroutines: Unpack[ - Tuple[ + tuple[ Optional[Coroutine[Any, Any, T1]], Optional[Coroutine[Any, Any, T2]], Optional[Coroutine[Any, Any, T3]], @@ -472,14 +468,14 @@ async def gather_optional_coroutines( Optional[Coroutine[Any, Any, T6]], ] ], -) -> Tuple[ +) -> tuple[ Optional[T1], Optional[T2], Optional[T3], Optional[T4], Optional[T5], Optional[T6] ]: ... async def gather_optional_coroutines( - *coroutines: Unpack[Tuple[Optional[Coroutine[Any, Any, T1]], ...]], -) -> Tuple[Optional[T1], ...]: + *coroutines: Unpack[tuple[Optional[Coroutine[Any, Any, T1]], ...]], +) -> tuple[Optional[T1], ...]: """Helper function that allows waiting on multiple coroutines at once. The return value is a tuple of the return values of the coroutines in order. 
@@ -563,7 +559,7 @@ def __init__( self._clock = clock # key_to_defer is a map from the key to a _LinearizerEntry. - self.key_to_defer: Dict[Hashable, _LinearizerEntry] = {} + self.key_to_defer: dict[Hashable, _LinearizerEntry] = {} def is_queued(self, key: Hashable) -> bool: """Checks whether there is a process queued up waiting""" @@ -698,10 +694,10 @@ class ReadWriteLock: def __init__(self) -> None: # Latest readers queued - self.key_to_current_readers: Dict[str, Set[defer.Deferred]] = {} + self.key_to_current_readers: dict[str, set[defer.Deferred]] = {} # Latest writer queued - self.key_to_current_writer: Dict[str, defer.Deferred] = {} + self.key_to_current_writer: dict[str, defer.Deferred] = {} def read(self, key: str) -> AsyncContextManager: @asynccontextmanager @@ -968,7 +964,7 @@ class AwakenableSleeper: """ def __init__(self, clock: Clock) -> None: - self._streams: Dict[str, Set[defer.Deferred[None]]] = {} + self._streams: dict[str, set[defer.Deferred[None]]] = {} self._clock = clock def wake(self, name: str) -> None: diff --git a/synapse/util/batching_queue.py b/synapse/util/batching_queue.py index f77301afd8..514abcbec1 100644 --- a/synapse/util/batching_queue.py +++ b/synapse/util/batching_queue.py @@ -24,12 +24,8 @@ TYPE_CHECKING, Awaitable, Callable, - Dict, Generic, Hashable, - List, - Set, - Tuple, TypeVar, ) @@ -102,7 +98,7 @@ def __init__( name: str, hs: "HomeServer", clock: Clock, - process_batch_callback: Callable[[List[V]], Awaitable[R]], + process_batch_callback: Callable[[list[V]], Awaitable[R]], ): self._name = name self.hs = hs @@ -110,11 +106,11 @@ def __init__( self._clock = clock # The set of keys currently being processed. - self._processing_keys: Set[Hashable] = set() + self._processing_keys: set[Hashable] = set() # The currently pending batch of values by key, with a Deferred to call # with the result of the corresponding `_process_batch_callback` call. 
- self._next_values: Dict[Hashable, List[Tuple[V, defer.Deferred]]] = {} + self._next_values: dict[Hashable, list[tuple[V, defer.Deferred]]] = {} # The function to call with batches of values. self._process_batch_callback = process_batch_callback diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py index 08ff842af0..c799fca550 100644 --- a/synapse/util/caches/__init__.py +++ b/synapse/util/caches/__init__.py @@ -24,7 +24,7 @@ import typing from enum import Enum, auto from sys import intern -from typing import Any, Callable, Dict, List, Optional, Sized, TypeVar +from typing import Any, Callable, Optional, Sized, TypeVar import attr from prometheus_client import REGISTRY @@ -162,7 +162,7 @@ def clear_memory_usage(self) -> None: if self.memory_usage is not None: self.memory_usage = 0 - def describe(self) -> List[str]: + def describe(self) -> list[str]: return [] def collect(self) -> None: @@ -283,7 +283,7 @@ def intern_string(string: T) -> T: return string -def intern_dict(dictionary: Dict[str, Any]) -> Dict[str, Any]: +def intern_dict(dictionary: dict[str, Any]) -> dict[str, Any]: """Takes a dictionary and interns well known keys and their values""" return { KNOWN_KEYS.get(key, key): _intern_known_values(key, value) diff --git a/synapse/util/caches/deferred_cache.py b/synapse/util/caches/deferred_cache.py index 016acbac71..380f2a78ca 100644 --- a/synapse/util/caches/deferred_cache.py +++ b/synapse/util/caches/deferred_cache.py @@ -26,13 +26,10 @@ from typing import ( Callable, Collection, - Dict, Generic, MutableMapping, Optional, - Set, Sized, - Tuple, TypeVar, Union, cast, @@ -203,7 +200,7 @@ def get_bulk( self, keys: Collection[KT], callback: Optional[Callable[[], None]] = None, - ) -> Tuple[Dict[KT, VT], Optional["defer.Deferred[Dict[KT, VT]]"], Collection[KT]]: + ) -> tuple[dict[KT, VT], Optional["defer.Deferred[dict[KT, VT]]"], Collection[KT]]: """Bulk lookup of items in the cache. 
Returns: @@ -458,7 +455,7 @@ class CacheEntrySingle(CacheEntry[KT, VT]): def __init__(self, deferred: "defer.Deferred[VT]") -> None: self._deferred = ObservableDeferred(deferred, consumeErrors=True) - self._callbacks: Set[Callable[[], None]] = set() + self._callbacks: set[Callable[[], None]] = set() def deferred(self, key: KT) -> "defer.Deferred[VT]": return self._deferred.observe() @@ -481,9 +478,9 @@ class CacheMultipleEntries(CacheEntry[KT, VT]): __slots__ = ["_deferred", "_callbacks", "_global_callbacks"] def __init__(self) -> None: - self._deferred: Optional[ObservableDeferred[Dict[KT, VT]]] = None - self._callbacks: Dict[KT, Set[Callable[[], None]]] = {} - self._global_callbacks: Set[Callable[[], None]] = set() + self._deferred: Optional[ObservableDeferred[dict[KT, VT]]] = None + self._callbacks: dict[KT, set[Callable[[], None]]] = {} + self._global_callbacks: set[Callable[[], None]] = set() def deferred(self, key: KT) -> "defer.Deferred[VT]": if not self._deferred: @@ -513,7 +510,7 @@ def add_global_invalidation_callback( def complete_bulk( self, cache: DeferredCache[KT, VT], - result: Dict[KT, VT], + result: dict[KT, VT], ) -> None: """Called when there is a result""" for key, value in result.items(): diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 6e3c8eada9..7cc83bad37 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -26,17 +26,13 @@ Awaitable, Callable, Collection, - Dict, Generic, Hashable, Iterable, - List, Mapping, Optional, Protocol, Sequence, - Tuple, - Type, TypeVar, Union, cast, @@ -57,15 +53,15 @@ logger = logging.getLogger(__name__) -CacheKey = Union[Tuple, Any] +CacheKey = Union[tuple, Any] F = TypeVar("F", bound=Callable[..., Any]) class CachedFunction(Generic[F]): - invalidate: Callable[[Tuple[Any, ...]], None] + invalidate: Callable[[tuple[Any, ...]], None] invalidate_all: Callable[[], None] - prefill: Callable[[Tuple[Any, ...], Any], None] + prefill: 
Callable[[tuple[Any, ...], Any], None] cache: Any = None num_args: Any = None @@ -247,7 +243,7 @@ def __init__( self.prune_unread_entries = prune_unread_entries def __get__( - self, obj: Optional[HasServerNameAndClock], owner: Optional[Type] + self, obj: Optional[HasServerNameAndClock], owner: Optional[type] ) -> Callable[..., "defer.Deferred[Any]"]: # We need access to instance-level `obj.server_name` attribute assert obj is not None, ( @@ -332,7 +328,7 @@ class DeferredCacheListDescriptor(_CacheDescriptorBase): def __init__( self, - orig: Callable[..., Awaitable[Dict]], + orig: Callable[..., Awaitable[dict]], cached_method_name: str, list_name: str, num_args: Optional[int] = None, @@ -363,8 +359,8 @@ def __init__( ) def __get__( - self, obj: Optional[Any], objtype: Optional[Type] = None - ) -> Callable[..., "defer.Deferred[Dict[Hashable, Any]]"]: + self, obj: Optional[Any], objtype: Optional[type] = None + ) -> Callable[..., "defer.Deferred[dict[Hashable, Any]]"]: cached_method = getattr(obj, self.cached_method_name) cache: DeferredCache[CacheKey, Any] = cached_method.cache num_args = cached_method.num_args @@ -376,7 +372,7 @@ def __get__( ) @functools.wraps(self.orig) - def wrapped(*args: Any, **kwargs: Any) -> "defer.Deferred[Dict]": + def wrapped(*args: Any, **kwargs: Any) -> "defer.Deferred[dict]": # If we're passed a cache_context then we'll want to call its # invalidate() whenever we are invalidated invalidate_callback = kwargs.pop("on_invalidate", None) @@ -412,10 +408,10 @@ def cache_key_to_arg(key: tuple) -> Hashable: results = {cache_key_to_arg(key): v for key, v in immediate_results.items()} - cached_defers: List["defer.Deferred[Any]"] = [] + cached_defers: list["defer.Deferred[Any]"] = [] if pending_deferred: - def update_results(r: Dict) -> None: + def update_results(r: dict) -> None: for k, v in r.items(): results[cache_key_to_arg(k)] = v @@ -425,7 +421,7 @@ def update_results(r: Dict) -> None: if missing: cache_entry = 
cache.start_bulk_input(missing, invalidate_callback) - def complete_all(res: Dict[Hashable, Any]) -> None: + def complete_all(res: dict[Hashable, Any]) -> None: missing_results = {} for key in missing: arg = cache_key_to_arg(key) @@ -478,7 +474,7 @@ class _CacheContext: Cache = Union[DeferredCache, LruCache] _cache_context_objects: """WeakValueDictionary[ - Tuple["_CacheContext.Cache", CacheKey], "_CacheContext" + tuple["_CacheContext.Cache", CacheKey], "_CacheContext" ]""" = WeakValueDictionary() def __init__(self, cache: "_CacheContext.Cache", cache_key: CacheKey) -> None: diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py index eb5493d322..dd6f413e79 100644 --- a/synapse/util/caches/dictionary_cache.py +++ b/synapse/util/caches/dictionary_cache.py @@ -22,13 +22,10 @@ import logging import threading from typing import ( - Dict, Generic, Iterable, Literal, Optional, - Set, - Tuple, TypeVar, Union, ) @@ -65,8 +62,8 @@ class DictionaryEntry(Generic[DKT, DV]): """ full: bool - known_absent: Set[DKT] - value: Dict[DKT, DV] + known_absent: set[DKT] + value: dict[DKT, DV] def __len__(self) -> int: return len(self.value) @@ -160,8 +157,8 @@ def __init__( # * A key of `(KT, DKT)` has a value of `_PerKeyValue` # * A key of `(KT, _FullCacheKey.KEY)` has a value of `Dict[DKT, DV]` self.cache: LruCache[ - Tuple[KT, Union[DKT, Literal[_FullCacheKey.KEY]]], - Union[_PerKeyValue, Dict[DKT, DV]], + tuple[KT, Union[DKT, Literal[_FullCacheKey.KEY]]], + Union[_PerKeyValue, dict[DKT, DV]], ] = LruCache( max_size=max_entries, clock=clock, @@ -297,7 +294,7 @@ def update( self, sequence: int, key: KT, - value: Dict[DKT, DV], + value: dict[DKT, DV], fetched_keys: Optional[Iterable[DKT]] = None, ) -> None: """Updates the entry in the cache. 
@@ -332,7 +329,7 @@ def update( self._update_subset(key, value, fetched_keys) def _update_subset( - self, key: KT, value: Dict[DKT, DV], fetched_keys: Iterable[DKT] + self, key: KT, value: dict[DKT, DV], fetched_keys: Iterable[DKT] ) -> None: """Add the given dictionary values as explicit keys in the cache. diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index 324acb728a..04549ab65f 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -30,15 +30,10 @@ Any, Callable, Collection, - Dict, Generic, Iterable, - List, Literal, Optional, - Set, - Tuple, - Type, TypeVar, Union, cast, @@ -308,7 +303,7 @@ def __init__( # footprint down. Storing `None` is free as its a singleton, while empty # lists are 56 bytes (and empty sets are 216 bytes, if we did the naive # thing and used sets). - self.callbacks: Optional[List[Callable[[], None]]] = None + self.callbacks: Optional[list[Callable[[], None]]] = None self.add_callbacks(callbacks) @@ -404,7 +399,7 @@ def __init__( clock: Clock, server_name: str, cache_name: str, - cache_type: Type[Union[dict, TreeCache]] = dict, + cache_type: type[Union[dict, TreeCache]] = dict, size_callback: Optional[Callable[[VT], int]] = None, metrics_collection_callback: Optional[Callable[[], None]] = None, apply_cache_factor_from_config: bool = True, @@ -420,7 +415,7 @@ def __init__( clock: Clock, server_name: str, cache_name: Literal[None] = None, - cache_type: Type[Union[dict, TreeCache]] = dict, + cache_type: type[Union[dict, TreeCache]] = dict, size_callback: Optional[Callable[[VT], int]] = None, metrics_collection_callback: Optional[Callable[[], None]] = None, apply_cache_factor_from_config: bool = True, @@ -435,7 +430,7 @@ def __init__( clock: Clock, server_name: str, cache_name: Optional[str] = None, - cache_type: Type[Union[dict, TreeCache]] = dict, + cache_type: type[Union[dict, TreeCache]] = dict, size_callback: Optional[Callable[[VT], int]] = None, metrics_collection_callback: 
Optional[Callable[[], None]] = None, apply_cache_factor_from_config: bool = True, @@ -489,7 +484,7 @@ def __init__( Note: The new key does not have to be unique. """ - cache: Union[Dict[KT, _Node[KT, VT]], TreeCache] = cache_type() + cache: Union[dict[KT, _Node[KT, VT]], TreeCache] = cache_type() self.cache = cache # Used for introspection. self.apply_cache_factor_from_config = apply_cache_factor_from_config @@ -529,7 +524,7 @@ def __init__( lock = threading.Lock() - extra_index: Dict[KT, Set[KT]] = {} + extra_index: dict[KT, set[KT]] = {} def evict() -> None: while cache_len() > self.max_size: @@ -682,21 +677,21 @@ def cache_get_multi( key: tuple, default: Literal[None] = None, update_metrics: bool = True, - ) -> Union[None, Iterable[Tuple[KT, VT]]]: ... + ) -> Union[None, Iterable[tuple[KT, VT]]]: ... @overload def cache_get_multi( key: tuple, default: T, update_metrics: bool = True, - ) -> Union[T, Iterable[Tuple[KT, VT]]]: ... + ) -> Union[T, Iterable[tuple[KT, VT]]]: ... @synchronized def cache_get_multi( key: tuple, default: Optional[T] = None, update_metrics: bool = True, - ) -> Union[None, T, Iterable[Tuple[KT, VT]]]: + ) -> Union[None, T, Iterable[tuple[KT, VT]]]: """Returns a generator yielding all entries under the given key. Can only be used if backed by a tree cache. 
diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index 3d39357236..e82036d7e0 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -24,7 +24,6 @@ Any, Awaitable, Callable, - Dict, Generic, Iterable, Optional, @@ -119,7 +118,7 @@ def __init__( timeout_ms enable_logging """ - self._result_cache: Dict[KV, ResponseCacheEntry] = {} + self._result_cache: dict[KV, ResponseCacheEntry] = {} self.clock = clock self.timeout_sec = timeout_ms / 1000.0 diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index 2cffd352d8..552570fbb9 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -21,7 +21,7 @@ import logging import math -from typing import Collection, Dict, FrozenSet, List, Mapping, Optional, Set, Union +from typing import Collection, Mapping, Optional, Union import attr from sortedcontainers import SortedDict @@ -45,14 +45,14 @@ class AllEntitiesChangedResult: that callers do the correct checks. """ - _entities: Optional[List[EntityType]] + _entities: Optional[list[EntityType]] @property def hit(self) -> bool: return self._entities is not None @property - def entities(self) -> List[EntityType]: + def entities(self) -> list[EntityType]: assert self._entities is not None return self._entities @@ -94,11 +94,11 @@ def __init__( self._max_size = math.floor(max_size) # map from stream id to the set of entities which changed at that stream id. - self._cache: SortedDict[int, Set[EntityType]] = SortedDict() + self._cache: SortedDict[int, set[EntityType]] = SortedDict() # map from entity to the stream ID of the latest change for that entity. # # Must be kept in sync with _cache. - self._entity_to_key: Dict[EntityType, int] = {} + self._entity_to_key: dict[EntityType, int] = {} # the earliest stream_pos for which we can reliably answer # get_all_entities_changed. 
In other words, one less than the earliest @@ -182,7 +182,7 @@ def has_entity_changed(self, entity: EntityType, stream_pos: int) -> bool: def get_entities_changed( self, entities: Collection[EntityType], stream_pos: int, _perf_factor: int = 1 - ) -> Union[Set[EntityType], FrozenSet[EntityType]]: + ) -> Union[set[EntityType], frozenset[EntityType]]: """ Returns the subset of the given entities that have had changes after the given position. @@ -291,7 +291,7 @@ def get_all_entities_changed(self, stream_pos: int) -> AllEntitiesChangedResult: if stream_pos < self._earliest_known_stream_pos: return AllEntitiesChangedResult(None) - changed_entities: List[EntityType] = [] + changed_entities: list[EntityType] = [] for k in self._cache.islice(start=self._cache.bisect_right(stream_pos)): changed_entities.extend(self._cache[k]) diff --git a/synapse/util/caches/ttlcache.py b/synapse/util/caches/ttlcache.py index 18c3a1e51c..2be9463d6a 100644 --- a/synapse/util/caches/ttlcache.py +++ b/synapse/util/caches/ttlcache.py @@ -21,7 +21,7 @@ import logging import time -from typing import Any, Callable, Dict, Generic, Tuple, TypeVar, Union +from typing import Any, Callable, Generic, TypeVar, Union import attr from sortedcontainers import SortedList @@ -56,7 +56,7 @@ def __init__( """ # map from key to _CacheEntry - self._data: Dict[KT, _CacheEntry[KT, VT]] = {} + self._data: dict[KT, _CacheEntry[KT, VT]] = {} # the _CacheEntries, sorted by expiry time self._expiry_list: SortedList[_CacheEntry[KT, VT]] = SortedList() @@ -113,7 +113,7 @@ def get(self, key: KT, default: T = SENTINEL) -> Union[VT, T]: self._metrics.inc_hits() return e.value - def get_with_expiry(self, key: KT) -> Tuple[VT, float, float]: + def get_with_expiry(self, key: KT) -> tuple[VT, float, float]: """Get a value, and its expiry time, from the cache Args: diff --git a/synapse/util/clock.py b/synapse/util/clock.py index 5e65cf32a4..6557582629 100644 --- a/synapse/util/clock.py +++ b/synapse/util/clock.py @@ -17,8 +17,6 
@@ from typing import ( Any, Callable, - Dict, - List, ) from typing_extensions import ParamSpec @@ -62,10 +60,10 @@ def __init__(self, reactor: ISynapseThreadlessReactor, server_name: str) -> None self._delayed_call_id: int = 0 """Unique ID used to track delayed calls""" - self._looping_calls: List[LoopingCall] = [] + self._looping_calls: list[LoopingCall] = [] """List of active looping calls""" - self._call_id_to_delayed_call: Dict[int, IDelayedCall] = {} + self._call_id_to_delayed_call: dict[int, IDelayedCall] = {} """Mapping from unique call ID to delayed call""" self._is_shutdown = False diff --git a/synapse/util/daemonize.py b/synapse/util/daemonize.py index dba815040d..411b47f939 100644 --- a/synapse/util/daemonize.py +++ b/synapse/util/daemonize.py @@ -27,7 +27,7 @@ import signal import sys from types import FrameType, TracebackType -from typing import NoReturn, Optional, Type +from typing import NoReturn, Optional from synapse.logging.context import ( LoggingContext, @@ -119,7 +119,7 @@ def daemonize_process(pid_file: str, logger: logging.Logger, chdir: str = "/") - # also catch any other uncaught exceptions before we get that far.) def excepthook( - type_: Type[BaseException], + type_: type[BaseException], value: BaseException, traceback: Optional[TracebackType], ) -> None: diff --git a/synapse/util/distributor.py b/synapse/util/distributor.py index dec6536e4e..e8df5399cd 100644 --- a/synapse/util/distributor.py +++ b/synapse/util/distributor.py @@ -24,9 +24,7 @@ Any, Awaitable, Callable, - Dict, Generic, - List, Optional, TypeVar, Union, @@ -69,8 +67,8 @@ def __init__(self, hs: "HomeServer") -> None: (this should be `hs.hostname`). 
""" self.hs = hs - self.signals: Dict[str, Signal] = {} - self.pre_registration: Dict[str, List[Callable]] = {} + self.signals: dict[str, Signal] = {} + self.pre_registration: dict[str, list[Callable]] = {} def declare(self, name: str) -> None: if name in self.signals: @@ -122,7 +120,7 @@ class Signal(Generic[P]): def __init__(self, name: str): self.name: str = name - self.observers: List[Callable[P, Any]] = [] + self.observers: list[Callable[P, Any]] = [] def observe(self, observer: Callable[P, Any]) -> None: """Adds a new callable to the observer list which will be invoked by @@ -131,7 +129,7 @@ def observe(self, observer: Callable[P, Any]) -> None: Each observer callable may return a Deferred.""" self.observers.append(observer) - def fire(self, *args: P.args, **kwargs: P.kwargs) -> "defer.Deferred[List[Any]]": + def fire(self, *args: P.args, **kwargs: P.kwargs) -> "defer.Deferred[list[Any]]": """Invokes every callable in the observer list, passing in the args and kwargs. Exceptions thrown by observers are logged but ignored. It is not an error to fire a signal with no observers. diff --git a/synapse/util/events.py b/synapse/util/events.py index 4808268702..e41799b1f7 100644 --- a/synapse/util/events.py +++ b/synapse/util/events.py @@ -13,7 +13,7 @@ # # -from typing import Any, List, Optional +from typing import Any, Optional from synapse._pydantic_compat import Field, StrictStr, ValidationError, validator from synapse.types import JsonDict @@ -52,7 +52,7 @@ class MTopic(ParseModel): See `TopicContentBlock` in the Matrix specification. """ - m_text: Optional[List[MTextRepresentation]] = Field(alias="m.text") + m_text: Optional[list[MTextRepresentation]] = Field(alias="m.text") """ An ordered array of textual representations in different mimetypes. 
""" @@ -63,7 +63,7 @@ class MTopic(ParseModel): @validator("m_text", pre=True) def ignore_invalid_representations( cls, m_text: Any - ) -> Optional[List[MTextRepresentation]]: + ) -> Optional[list[MTextRepresentation]]: if not isinstance(m_text, list): raise ValueError("m.text must be a list") representations = [] diff --git a/synapse/util/gai_resolver.py b/synapse/util/gai_resolver.py index 3c7a966e87..e07003f1af 100644 --- a/synapse/util/gai_resolver.py +++ b/synapse/util/gai_resolver.py @@ -17,12 +17,9 @@ from typing import ( TYPE_CHECKING, Callable, - List, NoReturn, Optional, Sequence, - Tuple, - Type, Union, ) @@ -91,13 +88,13 @@ def cancel(self) -> NoReturn: } -_GETADDRINFO_RESULT = List[ - Tuple[ +_GETADDRINFO_RESULT = list[ + tuple[ AddressFamily, SocketKind, int, str, - Union[Tuple[str, int], Tuple[str, int, int, int], Tuple[int, bytes]], + Union[tuple[str, int], tuple[str, int, int, int], tuple[int, bytes]], ] ] @@ -141,7 +138,7 @@ def resolveHostName( resolutionReceiver: IResolutionReceiver, hostName: str, portNumber: int = 0, - addressTypes: Optional[Sequence[Type[IAddress]]] = None, + addressTypes: Optional[Sequence[type[IAddress]]] = None, transportSemantics: str = "TCP", ) -> IHostResolution: """ diff --git a/synapse/util/httpresourcetree.py b/synapse/util/httpresourcetree.py index 6471b31c94..46fa92a4c5 100644 --- a/synapse/util/httpresourcetree.py +++ b/synapse/util/httpresourcetree.py @@ -20,7 +20,6 @@ # import logging -from typing import Dict from twisted.web.resource import Resource @@ -30,7 +29,7 @@ def create_resource_tree( - desired_tree: Dict[str, Resource], root_resource: Resource + desired_tree: dict[str, Resource], root_resource: Resource ) -> Resource: """Create the resource tree for this homeserver. @@ -48,7 +47,7 @@ def create_resource_tree( # unless you give it a Request object IN ADDITION to the name :/ So # instead, we'll store a copy of this mapping so we can actually add # extra resources to existing nodes. 
See self._resource_id for the key. - resource_mappings: Dict[str, Resource] = {} + resource_mappings: dict[str, Resource] = {} for full_path_str, res in desired_tree.items(): # twisted requires all resources to be bytes full_path = full_path_str.encode("utf-8") diff --git a/synapse/util/iterutils.py b/synapse/util/iterutils.py index 0a6a30aab2..19789a4666 100644 --- a/synapse/util/iterutils.py +++ b/synapse/util/iterutils.py @@ -24,16 +24,12 @@ from typing import ( Callable, Collection, - Dict, Generator, Iterable, Iterator, - List, Mapping, Protocol, - Set, Sized, - Tuple, TypeVar, ) @@ -52,7 +48,7 @@ class _SelfSlice(Sized, Protocol): def __getitem__(self: S, i: slice) -> S: ... -def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T, ...]]: +def batch_iter(iterable: Iterable[T], size: int) -> Iterator[tuple[T, ...]]: """batch an iterable up into tuples with a maximum size Args: @@ -80,7 +76,7 @@ def chunk_seq(iseq: S, maxlen: int) -> Iterator[S]: def partition( iterable: Iterable[T], predicate: Callable[[T], bool] -) -> Tuple[List[T], List[T]]: +) -> tuple[list[T], list[T]]: """ Separate a given iterable into two lists based on the result of a predicate function. @@ -115,7 +111,7 @@ def sorted_topologically( # This is implemented by Kahn's algorithm. 
degree_map = dict.fromkeys(nodes, 0) - reverse_graph: Dict[T, Set[T]] = {} + reverse_graph: dict[T, set[T]] = {} for node, edges in graph.items(): if node not in degree_map: @@ -165,7 +161,7 @@ def sorted_topologically_batched( """ degree_map = dict.fromkeys(nodes, 0) - reverse_graph: Dict[T, Set[T]] = {} + reverse_graph: dict[T, set[T]] = {} for node, edges in graph.items(): if node not in degree_map: diff --git a/synapse/util/json.py b/synapse/util/json.py index e6db55f8e4..b1091704a8 100644 --- a/synapse/util/json.py +++ b/synapse/util/json.py @@ -16,7 +16,6 @@ import json from typing import ( Any, - Dict, ) from immutabledict import immutabledict @@ -27,7 +26,7 @@ def _reject_invalid_json(val: Any) -> None: raise ValueError("Invalid JSON value: '%s'" % val) -def _handle_immutabledict(obj: Any) -> Dict[Any, Any]: +def _handle_immutabledict(obj: Any) -> dict[Any, Any]: """Helper for json_encoder. Makes immutabledicts serializable by returning the underlying dict """ diff --git a/synapse/util/linked_list.py b/synapse/util/linked_list.py index 87f801c0cf..052863fdd6 100644 --- a/synapse/util/linked_list.py +++ b/synapse/util/linked_list.py @@ -22,7 +22,7 @@ """A circular doubly linked list implementation.""" import threading -from typing import Generic, Optional, Type, TypeVar +from typing import Generic, Optional, TypeVar P = TypeVar("P") LN = TypeVar("LN", bound="ListNode") @@ -53,7 +53,7 @@ def __init__(self, cache_entry: Optional[P] = None) -> None: self.next_node: Optional[ListNode[P]] = None @classmethod - def create_root_node(cls: Type["ListNode[P]"]) -> "ListNode[P]": + def create_root_node(cls: type["ListNode[P]"]) -> "ListNode[P]": """Create a new linked list by creating a "root" node, which is a node that has prev_node/next_node pointing to itself and no associated cache entry. 
@@ -65,7 +65,7 @@ def create_root_node(cls: Type["ListNode[P]"]) -> "ListNode[P]": @classmethod def insert_after( - cls: Type[LN], + cls: type[LN], cache_entry: P, node: "ListNode[P]", ) -> LN: diff --git a/synapse/util/manhole.py b/synapse/util/manhole.py index 63ec3e7e1b..dbf444e015 100644 --- a/synapse/util/manhole.py +++ b/synapse/util/manhole.py @@ -21,7 +21,7 @@ import inspect import sys import traceback -from typing import Any, Dict, Optional +from typing import Any, Optional from twisted.conch import manhole_ssh from twisted.conch.insults import insults @@ -71,7 +71,7 @@ -----END RSA PRIVATE KEY-----""" -def manhole(settings: ManholeConfig, globals: Dict[str, Any]) -> ServerFactory: +def manhole(settings: ManholeConfig, globals: dict[str, Any]) -> ServerFactory: """Starts a ssh listener with password authentication using the given username and password. Clients connecting to the ssh listener will find themselves in a colored python shell with diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py index f71380d689..6d1adf1131 100644 --- a/synapse/util/metrics.py +++ b/synapse/util/metrics.py @@ -25,11 +25,9 @@ from typing import ( Awaitable, Callable, - Dict, Generator, Optional, Protocol, - Type, TypeVar, ) @@ -238,7 +236,7 @@ def __enter__(self) -> "Measure": def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: @@ -293,8 +291,8 @@ class DynamicCollectorRegistry(CollectorRegistry): def __init__(self) -> None: super().__init__() - self._server_name_to_pre_update_hooks: Dict[ - str, Dict[str, Callable[[], None]] + self._server_name_to_pre_update_hooks: dict[ + str, dict[str, Callable[[], None]] ] = {} """ Mapping of server name to a mapping of metric name to metric pre-update diff --git a/synapse/util/module_loader.py b/synapse/util/module_loader.py index a33c75d54e..ae50e302ed 100644 --- a/synapse/util/module_loader.py 
+++ b/synapse/util/module_loader.py @@ -21,7 +21,7 @@ import importlib import importlib.util from types import ModuleType -from typing import Any, Tuple, Type +from typing import Any import jsonschema @@ -30,7 +30,7 @@ from synapse.types import StrSequence -def load_module(provider: dict, config_path: StrSequence) -> Tuple[Type, Any]: +def load_module(provider: dict, config_path: StrSequence) -> tuple[type, Any]: """Loads a synapse module with its config Args: diff --git a/synapse/util/patch_inline_callbacks.py b/synapse/util/patch_inline_callbacks.py index c776ad65b3..fca166a5b8 100644 --- a/synapse/util/patch_inline_callbacks.py +++ b/synapse/util/patch_inline_callbacks.py @@ -21,7 +21,7 @@ import functools import sys from types import GeneratorType -from typing import Any, Callable, Generator, List, TypeVar, cast +from typing import Any, Callable, Generator, TypeVar, cast from typing_extensions import ParamSpec @@ -56,7 +56,7 @@ def new_inline_callbacks( @functools.wraps(f) def wrapped(*args: P.args, **kwargs: P.kwargs) -> "Deferred[T]": start_context = current_context() - changes: List[str] = [] + changes: list[str] = [] orig: Callable[P, "Deferred[T]"] = orig_inline_callbacks( _check_yield_points(f, changes) ) @@ -126,7 +126,7 @@ def check_ctx(r: T) -> T: def _check_yield_points( f: Callable[P, Generator["Deferred[object]", object, T]], - changes: List[str], + changes: list[str], ) -> Callable: """Wraps a generator that is about to be passed to defer.inlineCallbacks checking that after every yield the log contexts are correct. 
diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index 756677fe6c..37d2e4505d 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -28,15 +28,10 @@ Any, Callable, ContextManager, - DefaultDict, - Dict, Iterator, - List, Mapping, MutableSet, Optional, - Set, - Tuple, ) from weakref import WeakSet @@ -104,7 +99,7 @@ def _get_counts_from_rate_limiter_instance( count_func: Callable[["FederationRateLimiter"], int], -) -> Mapping[Tuple[str, ...], int]: +) -> Mapping[tuple[str, ...], int]: """Returns a count of something (slept/rejected hosts) by (metrics_name)""" # Cast to a list to prevent it changing while the Prometheus # thread is collecting metrics @@ -114,7 +109,7 @@ def _get_counts_from_rate_limiter_instance( # Map from (metrics_name,) -> int, the number of something like slept hosts # or rejected hosts. The key type is Tuple[str], but we leave the length # unspecified for compatability with LaterGauge's annotations. - counts: Dict[Tuple[str, ...], int] = {} + counts: dict[tuple[str, ...], int] = {} for rate_limiter_instance in rate_limiter_instances: # Only track metrics if they provided a `metrics_name` to # differentiate this instance of the rate limiter. 
@@ -191,7 +186,7 @@ def new_limiter() -> "_PerHostRatelimiter": metrics_name=metrics_name, ) - self.ratelimiters: DefaultDict[str, "_PerHostRatelimiter"] = ( + self.ratelimiters: collections.defaultdict[str, "_PerHostRatelimiter"] = ( collections.defaultdict(new_limiter) ) @@ -244,7 +239,7 @@ def __init__( self.concurrent_requests = config.concurrent # request_id objects for requests which have been slept - self.sleeping_requests: Set[object] = set() + self.sleeping_requests: set[object] = set() # map from request_id object to Deferred for requests which are ready # for processing but have been queued @@ -253,11 +248,11 @@ def __init__( ] = collections.OrderedDict() # request id objects for requests which are in progress - self.current_processing: Set[object] = set() + self.current_processing: set[object] = set() # times at which we have recently (within the last window_size ms) # received requests. - self.request_times: List[int] = [] + self.request_times: list[int] = [] @contextlib.contextmanager def ratelimit(self, host: str) -> "Iterator[defer.Deferred[None]]": diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index 96fe2bd566..ce747c3f19 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -21,7 +21,7 @@ import logging import random from types import TracebackType -from typing import TYPE_CHECKING, Any, Optional, Type +from typing import TYPE_CHECKING, Any, Optional from synapse.api.errors import CodeMessageException from synapse.storage import DataStore @@ -230,7 +230,7 @@ def __enter__(self) -> None: def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py index 32b5bc00c9..6b0d3677da 100644 --- a/synapse/util/stringutils.py +++ b/synapse/util/stringutils.py @@ -23,7 +23,7 @@ import re import secrets import string -from 
typing import Any, Iterable, Optional, Tuple +from typing import Any, Iterable, Optional from netaddr import valid_ipv6 @@ -92,7 +92,7 @@ def assert_valid_client_secret(client_secret: str) -> None: ) -def parse_server_name(server_name: str) -> Tuple[str, Optional[int]]: +def parse_server_name(server_name: str) -> tuple[str, Optional[int]]: """Split a server name into host/port parts. Args: @@ -123,7 +123,7 @@ def parse_server_name(server_name: str) -> Tuple[str, Optional[int]]: VALID_HOST_REGEX = re.compile("\\A[0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*\\Z") -def parse_and_validate_server_name(server_name: str) -> Tuple[str, Optional[int]]: +def parse_and_validate_server_name(server_name: str) -> tuple[str, Optional[int]]: """Split a server name into host/port parts and do some basic validation. Args: @@ -190,7 +190,7 @@ def valid_id_server_location(id_server: str) -> bool: return "#" not in path and "?" not in path -def parse_and_validate_mxc_uri(mxc: str) -> Tuple[str, Optional[int], str]: +def parse_and_validate_mxc_uri(mxc: str) -> tuple[str, Optional[int], str]: """Parse the given string as an MXC URI Checks that the "server name" part is a valid server name diff --git a/synapse/util/task_scheduler.py b/synapse/util/task_scheduler.py index 8dd6f12feb..f033d37579 100644 --- a/synapse/util/task_scheduler.py +++ b/synapse/util/task_scheduler.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Awaitable, Callable, Dict, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Awaitable, Callable, Optional from twisted.python.failure import Failure @@ -110,13 +110,13 @@ def __init__(self, hs: "HomeServer"): self.server_name = hs.hostname self._store = hs.get_datastores().main self._clock = hs.get_clock() - self._running_tasks: Set[str] = set() + self._running_tasks: set[str] = set() # A map between action names and their registered function - self._actions: Dict[ + self._actions: dict[ str, Callable[ [ScheduledTask], - Awaitable[Tuple[TaskStatus, 
Optional[JsonMapping], Optional[str]]], + Awaitable[tuple[TaskStatus, Optional[JsonMapping], Optional[str]]], ], ] = {} self._run_background_tasks = hs.config.worker.run_background_tasks @@ -143,7 +143,7 @@ def register_action( self, function: Callable[ [ScheduledTask], - Awaitable[Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]], + Awaitable[tuple[TaskStatus, Optional[JsonMapping], Optional[str]]], ], action_name: str, ) -> None: @@ -278,12 +278,12 @@ async def get_task(self, id: str) -> Optional[ScheduledTask]: async def get_tasks( self, *, - actions: Optional[List[str]] = None, + actions: Optional[list[str]] = None, resource_id: Optional[str] = None, - statuses: Optional[List[TaskStatus]] = None, + statuses: Optional[list[TaskStatus]] = None, max_timestamp: Optional[int] = None, limit: Optional[int] = None, - ) -> List[ScheduledTask]: + ) -> list[ScheduledTask]: """Get a list of tasks. Returns all the tasks if no args are provided. If an arg is `None`, all tasks matching the other args will be selected. diff --git a/synapse/util/wheel_timer.py b/synapse/util/wheel_timer.py index 95eb1d7185..c63faa96df 100644 --- a/synapse/util/wheel_timer.py +++ b/synapse/util/wheel_timer.py @@ -19,7 +19,7 @@ # # import logging -from typing import Generic, Hashable, List, Set, TypeVar +from typing import Generic, Hashable, TypeVar import attr @@ -31,7 +31,7 @@ @attr.s(slots=True, frozen=True, auto_attribs=True) class _Entry(Generic[T]): end_key: int - elements: Set[T] = attr.Factory(set) + elements: set[T] = attr.Factory(set) class WheelTimer(Generic[T]): @@ -46,7 +46,7 @@ def __init__(self, bucket_size: int = 5000) -> None: accuracy of the timer. """ self.bucket_size: int = bucket_size - self.entries: List[_Entry[T]] = [] + self.entries: list[_Entry[T]] = [] def insert(self, now: int, obj: T, then: int) -> None: """Inserts object into timer. 
@@ -91,7 +91,7 @@ def insert(self, now: int, obj: T, then: int) -> None: self.entries[-1].elements.add(obj) - def fetch(self, now: int) -> List[T]: + def fetch(self, now: int) -> list[T]: """Fetch any objects that have timed out Args: @@ -102,7 +102,7 @@ def fetch(self, now: int) -> List[T]: """ now_key = int(now / self.bucket_size) - ret: List[T] = [] + ret: list[T] = [] while self.entries and self.entries[0].end_key <= now_key: ret.extend(self.entries.pop(0).elements) diff --git a/synapse/visibility.py b/synapse/visibility.py index 662f2636d0..41b6198af0 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -23,14 +23,9 @@ from enum import Enum, auto from typing import ( Collection, - Dict, Final, - FrozenSet, - List, Optional, Sequence, - Set, - Tuple, ) import attr @@ -76,18 +71,18 @@ Membership.BAN, ) -_HISTORY_VIS_KEY: Final[Tuple[str, str]] = (EventTypes.RoomHistoryVisibility, "") +_HISTORY_VIS_KEY: Final[tuple[str, str]] = (EventTypes.RoomHistoryVisibility, "") @trace async def filter_events_for_client( storage: StorageControllers, user_id: str, - events: List[EventBase], + events: list[EventBase], is_peeking: bool = False, - always_include_ids: FrozenSet[str] = frozenset(), + always_include_ids: frozenset[str] = frozenset(), filter_send_to_client: bool = True, -) -> List[EventBase]: +) -> list[EventBase]: """ Check which events a user is allowed to see. If the user can see the event but its sender asked for their data to be erased, prune the content of the event. 
@@ -160,7 +155,7 @@ async def filter_events_for_client( if filter_send_to_client: room_ids = {e.room_id for e in events} - retention_policies: Dict[str, RetentionPolicy] = {} + retention_policies: dict[str, RetentionPolicy] = {} for room_id in room_ids: retention_policies[ @@ -351,7 +346,7 @@ def _check_client_allowed_to_see_event( clock: Clock, filter_send_to_client: bool, is_peeking: bool, - always_include_ids: FrozenSet[str], + always_include_ids: frozenset[str], sender_ignored: bool, retention_policy: RetentionPolicy, state: Optional[StateMap[EventBase]], @@ -652,7 +647,7 @@ async def filter_events_for_server( redact: bool, filter_out_erased_senders: bool, filter_out_remote_partial_state_events: bool, -) -> List[EventBase]: +) -> list[EventBase]: """Filter a list of events based on whether the target server is allowed to see them. @@ -687,7 +682,7 @@ async def filter_events_for_server( # otherwise a room could be fully joined after we retrieve those, which would then bypass # this check but would base the filtering on an outdated view of the membership events. 
- partial_state_invisible_event_ids: Set[str] = set() + partial_state_invisible_event_ids: set[str] = set() if filter_out_remote_partial_state_events: for e in events: sender_domain = get_domain_from_id(e.sender) @@ -733,7 +728,7 @@ async def filter_events_for_server( async def _event_to_history_vis( storage: StorageControllers, events: Collection[EventBase] -) -> Dict[str, str]: +) -> dict[str, str]: """Get the history visibility at each of the given events Returns a map from event id to history_visibility setting @@ -758,7 +753,7 @@ async def _event_to_history_vis( } vis_events = await storage.main.get_events(visibility_ids) - result: Dict[str, str] = {} + result: dict[str, str] = {} for event in events: vis = HistoryVisibility.SHARED state_ids = event_to_state_ids.get(event.event_id) @@ -780,7 +775,7 @@ async def _event_to_history_vis( async def _event_to_memberships( storage: StorageControllers, events: Collection[EventBase], server_name: str -) -> Dict[str, StateMap[Tuple[str, str]]]: +) -> dict[str, StateMap[tuple[str, str]]]: """Get the remote membership list at each of the given events Returns a map from event id to state map, which will contain only membership events diff --git a/synmark/__main__.py b/synmark/__main__.py index 82717c4fc7..5308c96012 100644 --- a/synmark/__main__.py +++ b/synmark/__main__.py @@ -22,7 +22,7 @@ from argparse import REMAINDER, Namespace from contextlib import redirect_stderr from io import StringIO -from typing import Any, Callable, Coroutine, List, TypeVar +from typing import Any, Callable, Coroutine, TypeVar import pyperf @@ -76,7 +76,7 @@ def on_done(res: T) -> T: if __name__ == "__main__": - def add_cmdline_args(cmd: List[str], args: Namespace) -> None: + def add_cmdline_args(cmd: list[str], args: Namespace) -> None: if args.log: cmd.extend(["--log"]) cmd.extend(args.tests) diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index d74878a4e1..7742a06b4c 100644 --- a/tests/api/test_filtering.py +++ 
b/tests/api/test_filtering.py @@ -20,7 +20,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List from unittest.mock import patch import jsonschema @@ -50,7 +49,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: def test_errors_on_invalid_filters(self) -> None: # See USER_FILTER_SCHEMA for the filter schema. - invalid_filters: List[JsonDict] = [ + invalid_filters: list[JsonDict] = [ # `account_data` must be a dictionary {"account_data": "Hello World"}, # `event_format` must be "client" or "federation" @@ -67,7 +66,7 @@ def test_errors_on_invalid_filters(self) -> None: def test_ignores_unknown_filter_fields(self) -> None: # For forward compatibility, we must ignore unknown filter fields. # See USER_FILTER_SCHEMA for the filter schema. - filters: List[JsonDict] = [ + filters: list[JsonDict] = [ {"org.matrix.msc9999.future_option": True}, {"presence": {"org.matrix.msc9999.future_option": True}}, {"room": {"org.matrix.msc9999.future_option": True}}, @@ -78,7 +77,7 @@ def test_ignores_unknown_filter_fields(self) -> None: # Must not raise. def test_valid_filters(self) -> None: - valid_filters: List[JsonDict] = [ + valid_filters: list[JsonDict] = [ { "room": { "timeline": {"limit": 20}, @@ -557,7 +556,7 @@ def test_filter_relations(self) -> None: room_id="!foo:bar", ), ] - jsondicts: List[JsonDict] = [{}] + jsondicts: list[JsonDict] = [{}] # For the following tests we patch the datastore method (intead of injecting # events). This is a bit cheeky, but tests the logic of _check_event_relations. @@ -565,7 +564,7 @@ def test_filter_relations(self) -> None: # Filter for a particular sender. 
definition = {"related_by_senders": ["@foo:bar"]} - async def events_have_relations(*args: object, **kwargs: object) -> List[str]: + async def events_have_relations(*args: object, **kwargs: object) -> list[str]: return ["$with_relation"] with patch.object( diff --git a/tests/app/test_openid_listener.py b/tests/app/test_openid_listener.py index 6ca514d557..6a1a630fe8 100644 --- a/tests/app/test_openid_listener.py +++ b/tests/app/test_openid_listener.py @@ -17,7 +17,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List from unittest.mock import Mock, patch from parameterized import parameterized @@ -58,7 +57,7 @@ def default_config(self) -> JsonDict: (["openid"], "auth_fail"), ] ) - def test_openid_listener(self, names: List[str], expectation: str) -> None: + def test_openid_listener(self, names: list[str], expectation: str) -> None: """ Test different openid listener configurations. @@ -106,7 +105,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: (["openid"], "auth_fail"), ] ) - def test_openid_listener(self, names: List[str], expectation: str) -> None: + def test_openid_listener(self, names: list[str], expectation: str) -> None: """ Test different openid listener configurations. diff --git a/tests/appservice/test_api.py b/tests/appservice/test_api.py index 085dfd2d1d..1943292a8f 100644 --- a/tests/appservice/test_api.py +++ b/tests/appservice/test_api.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, List, Mapping, Optional, Sequence, Union +from typing import Any, Mapping, Optional, Sequence, Union from unittest.mock import Mock from twisted.internet.testing import MemoryReactor @@ -81,7 +81,7 @@ async def get_json( url: str, args: Mapping[Any, Any], headers: Mapping[Union[str, bytes], Sequence[Union[str, bytes]]], - ) -> List[JsonDict]: + ) -> list[JsonDict]: # Ensure the access token is passed as a header. 
if not headers or not headers.get(b"Authorization"): raise RuntimeError("Access token not provided") @@ -157,7 +157,7 @@ async def get_json( headers: Optional[ Mapping[Union[str, bytes], Sequence[Union[str, bytes]]] ] = None, - ) -> List[JsonDict]: + ) -> list[JsonDict]: # Ensure the access token is passed as a both a query param and in the headers. if not args.get(b"access_token"): raise RuntimeError("Access token should be provided in query params.") diff --git a/tests/appservice/test_scheduler.py b/tests/appservice/test_scheduler.py index f4490a1a79..f17957c206 100644 --- a/tests/appservice/test_scheduler.py +++ b/tests/appservice/test_scheduler.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional, Sequence, Tuple +from typing import Optional, Sequence from unittest.mock import AsyncMock, Mock from typing_extensions import TypeAlias @@ -288,11 +288,11 @@ def take_txn( # Corresponds to synapse.appservice.scheduler._TransactionController.send TxnCtrlArgs: TypeAlias = """ defer.Deferred[ - Tuple[ + tuple[ ApplicationService, Sequence[EventBase], - Optional[List[JsonDict]], - Optional[List[JsonDict]], + Optional[list[JsonDict]], + Optional[list[JsonDict]], Optional[TransactionOneTimeKeysCount], Optional[TransactionUnusedFallbackKeys], Optional[DeviceListUpdates], diff --git a/tests/config/utils.py b/tests/config/utils.py index 3cba4ac588..efc63558db 100644 --- a/tests/config/utils.py +++ b/tests/config/utils.py @@ -24,7 +24,6 @@ import unittest from contextlib import redirect_stdout from io import StringIO -from typing import List from synapse.config.homeserver import HomeServerConfig @@ -61,7 +60,7 @@ def generate_config_and_remove_lines_containing(self, needles: list[str]) -> Non with open(self.config_file, "w") as f: f.write("".join(contents)) - def add_lines_to_config(self, lines: List[str]) -> None: + def add_lines_to_config(self, lines: list[str]) -> None: with open(self.config_file, "a") 
as f: for line in lines: f.write(line + "\n") diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 93ae24628a..2eaf77e9dc 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -19,7 +19,7 @@ # # import time -from typing import Any, Dict, List, Optional, cast +from typing import Any, Optional, cast from unittest.mock import Mock import attr @@ -60,7 +60,7 @@ def __init__(self) -> None: self.server_name = "mock_server" self.key = signedjson.key.generate_signing_key("0") - def get_verify_keys(self) -> Dict[str, str]: + def get_verify_keys(self) -> dict[str, str]: vk = signedjson.key.get_verify_key(self.key) return {"%s:%s" % (vk.alg, vk.version): encode_verify_key_base64(vk)} @@ -107,8 +107,8 @@ def test_verify_json_objects_for_server_awaits_previous_requests(self) -> None: first_lookup_deferred: "Deferred[None]" = Deferred() async def first_lookup_fetch( - server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: # self.assertEqual(current_context().request.id, "context_11") self.assertEqual(server_name, "server10") self.assertEqual(key_ids, [get_key_id(key1)]) @@ -152,8 +152,8 @@ async def first_lookup() -> None: # should block rather than start a second call async def second_lookup_fetch( - server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: # self.assertEqual(current_context().request.id, "context_12") return {get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)} @@ -276,8 +276,8 @@ def test_verify_for_local_server_unknown_key(self) -> None: # set up a mock fetcher which will return the key async def get_keys( - server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + 
server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: self.assertEqual(server_name, self.hs.hostname) self.assertEqual(key_ids, [get_key_id(key2)]) @@ -302,8 +302,8 @@ def test_verify_json_dedupes_key_requests(self) -> None: key1 = signedjson.key.generate_signing_key("1") async def get_keys( - server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: # there should only be one request object (with the max validity) self.assertEqual(server_name, "server1") self.assertEqual(key_ids, [get_key_id(key1)]) @@ -344,16 +344,16 @@ def test_verify_json_falls_back_to_other_fetchers(self) -> None: key1 = signedjson.key.generate_signing_key("1") async def get_keys1( - server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: self.assertEqual(server_name, "server1") self.assertEqual(key_ids, [get_key_id(key1)]) self.assertEqual(minimum_valid_until_ts, 1500) return {get_key_id(key1): FetchKeyResult(get_verify_key(key1), 800)} async def get_keys2( - server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: self.assertEqual(server_name, "server1") self.assertEqual(key_ids, [get_key_id(key1)]) self.assertEqual(minimum_valid_until_ts, 1500) @@ -701,7 +701,7 @@ def build_response() -> dict: SERVER_NAME, testkey, VALID_UNTIL_TS ) - def get_key_from_perspectives(response: JsonDict) -> Dict[str, FetchKeyResult]: + def get_key_from_perspectives(response: JsonDict) -> dict[str, FetchKeyResult]: fetcher = PerspectivesKeyFetcher(self.hs) self.expect_outgoing_key_query(SERVER_NAME, "key1", response) return 
self.get_success(fetcher.get_keys(SERVER_NAME, ["key1"], 0)) diff --git a/tests/events/test_auto_accept_invites.py b/tests/events/test_auto_accept_invites.py index fa7ea64105..d3842e72d7 100644 --- a/tests/events/test_auto_accept_invites.py +++ b/tests/events/test_auto_accept_invites.py @@ -21,7 +21,7 @@ import asyncio from asyncio import Future from http import HTTPStatus -from typing import Any, Awaitable, Dict, List, Optional, Tuple, TypeVar, cast +from typing import Any, Awaitable, Optional, TypeVar, cast from unittest.mock import Mock import attr @@ -527,7 +527,7 @@ def sync_join( testcase: HomeserverTestCase, user_id: str, since_token: Optional[StreamToken] = None, -) -> Tuple[List[JoinedSyncResult], StreamToken]: +) -> tuple[list[JoinedSyncResult], StreamToken]: """Perform a sync request for the given user and return the user join updates they've received, as well as the next_batch token. @@ -765,7 +765,7 @@ class MockEvent: sender: str type: str - content: Dict[str, Any] + content: dict[str, Any] room_id: str = "!someroom" state_key: Optional[str] = None @@ -802,7 +802,7 @@ def make_multiple_awaitable(result: TV) -> Awaitable[TV]: def create_module( - config_override: Optional[Dict[str, Any]] = None, worker_name: Optional[str] = None + config_override: Optional[dict[str, Any]] = None, worker_name: Optional[str] = None ) -> InviteAutoAccepter: # Create a mock based on the ModuleApi spec, but override some mocked functions # because some capabilities are needed for running the tests. 
diff --git a/tests/events/test_presence_router.py b/tests/events/test_presence_router.py index 696d9dd6e2..aa8d7454c0 100644 --- a/tests/events/test_presence_router.py +++ b/tests/events/test_presence_router.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict, Iterable, List, Optional, Set, Tuple, Union +from typing import Iterable, Optional, Union from unittest.mock import AsyncMock, Mock import attr @@ -46,7 +46,7 @@ @attr.s class PresenceRouterTestConfig: - users_who_should_receive_all_presence = attr.ib(type=List[str], default=[]) + users_who_should_receive_all_presence = attr.ib(type=list[str], default=[]) class LegacyPresenceRouterTestModule: @@ -56,14 +56,14 @@ def __init__(self, config: PresenceRouterTestConfig, module_api: ModuleApi): async def get_users_for_states( self, state_updates: Iterable[UserPresenceState] - ) -> Dict[str, Set[UserPresenceState]]: + ) -> dict[str, set[UserPresenceState]]: users_to_state = { user_id: set(state_updates) for user_id in self._config.users_who_should_receive_all_presence } return users_to_state - async def get_interested_users(self, user_id: str) -> Union[Set[str], str]: + async def get_interested_users(self, user_id: str) -> Union[set[str], str]: if user_id in self._config.users_who_should_receive_all_presence: return PresenceRouter.ALL_USERS @@ -106,14 +106,14 @@ def __init__(self, config: PresenceRouterTestConfig, api: ModuleApi): async def get_users_for_states( self, state_updates: Iterable[UserPresenceState] - ) -> Dict[str, Set[UserPresenceState]]: + ) -> dict[str, set[UserPresenceState]]: users_to_state = { user_id: set(state_updates) for user_id in self._config.users_who_should_receive_all_presence } return users_to_state - async def get_interested_users(self, user_id: str) -> Union[Set[str], str]: + async def get_interested_users(self, user_id: str) -> Union[set[str], str]: if user_id in self._config.users_who_should_receive_all_presence: return 
PresenceRouter.ALL_USERS @@ -511,7 +511,7 @@ def sync_presence( testcase: HomeserverTestCase, user_id: str, since_token: Optional[StreamToken] = None, -) -> Tuple[List[UserPresenceState], StreamToken]: +) -> tuple[list[UserPresenceState], StreamToken]: """Perform a sync request for the given user and return the user presence updates they've received, as well as the next_batch token. diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py index c6ebefbf38..9d41067844 100644 --- a/tests/events/test_utils.py +++ b/tests/events/test_utils.py @@ -20,7 +20,7 @@ # import unittest as stdlib_unittest -from typing import Any, List, Mapping, Optional +from typing import Any, Mapping, Optional import attr from parameterized import parameterized @@ -648,7 +648,7 @@ class SerializeEventTestCase(stdlib_unittest.TestCase): def serialize( self, ev: EventBase, - fields: Optional[List[str]], + fields: Optional[list[str]], include_admin_metadata: bool = False, ) -> JsonDict: return serialize_event( diff --git a/tests/federation/test_federation_catch_up.py b/tests/federation/test_federation_catch_up.py index 5edb651767..34b552b9ed 100644 --- a/tests/federation/test_federation_catch_up.py +++ b/tests/federation/test_federation_catch_up.py @@ -1,4 +1,4 @@ -from typing import Callable, Collection, List, Optional, Tuple +from typing import Callable, Collection, Optional from unittest import mock from unittest.mock import AsyncMock, Mock @@ -55,8 +55,8 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: ) # whenever send_transaction is called, record the pdu data - self.pdus: List[JsonDict] = [] - self.failed_pdus: List[JsonDict] = [] + self.pdus: list[JsonDict] = [] + self.failed_pdus: list[JsonDict] = [] self.is_online = True self.federation_transport_client.send_transaction.side_effect = ( self.record_transaction @@ -269,7 +269,7 @@ def test_catch_up_from_blank_state(self) -> None: def make_fake_destination_queue( self, destination: str = 
"host2" - ) -> Tuple[PerDestinationQueue, List[EventBase]]: + ) -> tuple[PerDestinationQueue, list[EventBase]]: """ Makes a fake per-destination queue. """ @@ -279,8 +279,8 @@ def make_fake_destination_queue( async def fake_send( destination_tm: str, - pending_pdus: List[EventBase], - _pending_edus: List[Edu], + pending_pdus: list[EventBase], + _pending_edus: list[Edu], ) -> None: assert destination == destination_tm results_list.extend(pending_pdus) diff --git a/tests/federation/test_federation_out_of_band_membership.py b/tests/federation/test_federation_out_of_band_membership.py index fa4e7c63ba..905f9e6580 100644 --- a/tests/federation/test_federation_out_of_band_membership.py +++ b/tests/federation/test_federation_out_of_band_membership.py @@ -23,7 +23,7 @@ import time import urllib.parse from http import HTTPStatus -from typing import Any, Callable, Optional, Set, Tuple, TypeVar, Union +from typing import Any, Callable, Optional, TypeVar, Union from unittest.mock import Mock import attr @@ -147,7 +147,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: def do_sync( self, sync_body: JsonDict, *, since: Optional[str] = None, tok: str - ) -> Tuple[JsonDict, str]: + ) -> tuple[JsonDict, str]: """Do a sliding sync request with given body. Asserts the request was successful. 
@@ -350,7 +350,7 @@ async def get_json( self.federation_http_client.get_json.side_effect = get_json # PDU's that hs1 sent to hs2 - collected_pdus_from_hs1_federation_send: Set[str] = set() + collected_pdus_from_hs1_federation_send: set[str] = set() async def put_json( destination: str, @@ -503,7 +503,7 @@ def test_can_x_from_out_of_band_invite_after_we_are_already_participating_in_the T = TypeVar("T") # PDU's that hs1 sent to hs2 - collected_pdus_from_hs1_federation_send: Set[str] = set() + collected_pdus_from_hs1_federation_send: set[str] = set() async def put_json( destination: str, diff --git a/tests/federation/test_federation_sender.py b/tests/federation/test_federation_sender.py index 27b69a9180..20b67e3a73 100644 --- a/tests/federation/test_federation_sender.py +++ b/tests/federation/test_federation_sender.py @@ -17,7 +17,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Callable, FrozenSet, List, Optional, Set +from typing import Callable, Optional from unittest.mock import AsyncMock, Mock from signedjson import key, sign @@ -435,7 +435,7 @@ def test_presence_batched(self) -> None: # A set of all user presence we see, this should end up matching the # number we sent out above. 
- seen_users: Set[str] = set() + seen_users: set[str] = set() for edu in presence_edus: presence_states = edu["content"]["push"] @@ -483,12 +483,12 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: # stub out `get_rooms_for_user` and `get_current_hosts_in_room` so that the # server thinks the user shares a room with `@user2:host2` - def get_rooms_for_user(user_id: str) -> "defer.Deferred[FrozenSet[str]]": + def get_rooms_for_user(user_id: str) -> "defer.Deferred[frozenset[str]]": return defer.succeed(frozenset({test_room_id})) hs.get_datastores().main.get_rooms_for_user = get_rooms_for_user # type: ignore[assignment] - async def get_current_hosts_in_room(room_id: str) -> Set[str]: + async def get_current_hosts_in_room(room_id: str) -> set[str]: if room_id == test_room_id: return {"host2"} else: @@ -504,7 +504,7 @@ async def get_current_hosts_in_room(room_id: str) -> Set[str]: self.device_handler = device_handler # whenever send_transaction is called, record the edu data - self.edus: List[JsonDict] = [] + self.edus: list[JsonDict] = [] self.federation_transport_client.send_transaction.side_effect = ( self.record_transaction ) diff --git a/tests/federation/transport/server/test__base.py b/tests/federation/transport/server/test__base.py index 0e3b41ec4d..3c553e6e40 100644 --- a/tests/federation/transport/server/test__base.py +++ b/tests/federation/transport/server/test__base.py @@ -20,7 +20,6 @@ # from http import HTTPStatus -from typing import Dict, List, Tuple from twisted.web.resource import Resource @@ -52,14 +51,14 @@ def __init__( @cancellable async def on_GET( - self, origin: str, content: None, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: str, content: None, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} async def on_POST( - self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, 
JsonDict]: + self, origin: str, content: JsonDict, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} diff --git a/tests/federation/transport/test_client.py b/tests/federation/transport/test_client.py index 3d882f99f2..f538b67e41 100644 --- a/tests/federation/transport/test_client.py +++ b/tests/federation/transport/test_client.py @@ -20,7 +20,7 @@ # import json -from typing import List, Optional +from typing import Optional from unittest.mock import Mock import ijson.common @@ -98,7 +98,7 @@ def parse(response: JsonDict) -> bool: def test_servers_in_room(self) -> None: """Check that the servers_in_room field is correctly parsed""" - def parse(response: JsonDict) -> Optional[List[str]]: + def parse(response: JsonDict) -> Optional[list[str]]: parser = SendJoinParser(RoomVersions.V1, False) serialised_response = json.dumps(response).encode() diff --git a/tests/federation/transport/test_knocking.py b/tests/federation/transport/test_knocking.py index a243938255..9e92b06d91 100644 --- a/tests/federation/transport/test_knocking.py +++ b/tests/federation/transport/test_knocking.py @@ -19,7 +19,7 @@ # # from collections import OrderedDict -from typing import Any, Dict, List, Optional +from typing import Any, Optional from twisted.internet.testing import MemoryReactor @@ -161,8 +161,8 @@ def send_example_state_events_to_room( def check_knock_room_state_against_room_state( self, - knock_room_state: List[Dict], - expected_room_state: Dict, + knock_room_state: list[dict], + expected_room_state: dict, ) -> None: """Test a list of stripped room state events received over federation against a dict of expected state events. 
diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index df36185b99..7d6bd35a9a 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -24,9 +24,7 @@ Any, Awaitable, Callable, - Dict, Iterable, - List, Optional, TypeVar, ) @@ -450,7 +448,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: hs.get_application_service_handler().scheduler.txn_ctrl.send = self.send_mock # type: ignore[method-assign] # Mock out application services, and allow defining our own in tests - self._services: List[ApplicationService] = [] + self._services: list[ApplicationService] = [] self.hs.get_datastores().main.get_app_services = Mock( # type: ignore[method-assign] return_value=self._services ) @@ -884,7 +882,7 @@ def test_application_services_receive_bursts_of_to_device(self) -> None: # Count the total number of to-device messages that were sent out per-service. # Ensure that we only sent to-device messages to interested services, and that # each interested service received the full count of to-device messages. - service_id_to_message_count: Dict[str, int] = {} + service_id_to_message_count: dict[str, int] = {} for call in self.send_mock.call_args_list: ( @@ -1023,7 +1021,7 @@ def test_application_services_receive_local_to_device_for_many_users(self) -> No def _register_application_service( self, - namespaces: Optional[Dict[str, Iterable[Dict]]] = None, + namespaces: Optional[dict[str, Iterable[dict]]] = None, ) -> ApplicationService: """ Register a new application service, with the given namespaces of interest. 
@@ -1073,7 +1071,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: hs.get_application_service_api().put_json = self.put_json # type: ignore[method-assign] # Mock out application services, and allow defining our own in tests - self._services: List[ApplicationService] = [] + self._services: list[ApplicationService] = [] self.hs.get_datastores().main.get_app_services = Mock( # type: ignore[method-assign] return_value=self._services ) diff --git a/tests/handlers/test_cas.py b/tests/handlers/test_cas.py index f677f3be2a..02671fc264 100644 --- a/tests/handlers/test_cas.py +++ b/tests/handlers/test_cas.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict +from typing import Any from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -35,7 +35,7 @@ class CasHandlerTestCase(HomeserverTestCase): - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL cas_config = { diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index 45b8f2353a..76b145b92b 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -19,7 +19,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Awaitable, Callable, Dict +from typing import Any, Awaitable, Callable from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -43,7 +43,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.mock_federation = AsyncMock() self.mock_registry = Mock() - self.query_handlers: Dict[str, Callable[[dict], Awaitable[JsonDict]]] = {} + self.query_handlers: dict[str, Callable[[dict], Awaitable[JsonDict]]] = {} def register_query_handler( query_type: str, handler: Callable[[dict], Awaitable[JsonDict]] @@ 
-410,7 +410,7 @@ class TestCreateAliasACL(unittest.HomeserverTestCase): servlets = [directory.register_servlets, room.register_servlets] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() # Add custom alias creation rules to the config. @@ -476,7 +476,7 @@ class TestCreatePublishedRoomACL(unittest.HomeserverTestCase): data = {"room_alias_name": "unofficial_test"} allowed_localpart = "allowed" - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() # Add custom room list publication rules to the config. diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index fca1f2cc44..a4f9d55a13 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -20,7 +20,7 @@ # # import time -from typing import Dict, Iterable +from typing import Iterable from unittest import mock from parameterized import parameterized @@ -291,7 +291,7 @@ def test_claim_one_time_key_bulk(self) -> None: (chris, "chris_dev_2", "alg2"): 1, } # Convert to the format the handler wants. - query: Dict[str, Dict[str, Dict[str, int]]] = {} + query: dict[str, dict[str, dict[str, int]]] = {} for (user_id, device_id, algorithm), count in claims_to_make.items(): query.setdefault(user_id, {}).setdefault(device_id, {})[algorithm] = count claim_res = self.get_success( @@ -1510,7 +1510,7 @@ def test_query_appservice_with_fallback(self) -> None: ) # Setup a response. 
- response: Dict[str, Dict[str, Dict[str, JsonDict]]] = { + response: dict[str, dict[str, dict[str, JsonDict]]] = { local_user: {device_id_1: {**as_otk, **as_fallback_key}} } self.appservice_api.claim_client_keys.return_value = (response, []) diff --git a/tests/handlers/test_message.py b/tests/handlers/test_message.py index 4262e805e7..6450a90444 100644 --- a/tests/handlers/test_message.py +++ b/tests/handlers/test_message.py @@ -19,7 +19,6 @@ # # import logging -from typing import Tuple from twisted.internet.testing import MemoryReactor @@ -64,7 +63,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.requester = create_requester(self.user_id, device_id=device_id) - def _create_and_persist_member_event(self) -> Tuple[EventBase, EventContext]: + def _create_and_persist_member_event(self) -> tuple[EventBase, EventContext]: # Create a member event we can use as an auth_event memberEvent, memberEventContext = self.get_success( create_event( @@ -86,7 +85,7 @@ def _create_and_persist_member_event(self) -> Tuple[EventBase, EventContext]: def _create_duplicate_event( self, txn_id: str - ) -> Tuple[EventBase, UnpersistedEventContextBase]: + ) -> tuple[EventBase, UnpersistedEventContextBase]: """Create a new event with the given transaction ID. All events produced by this method will be considered duplicates. 
""" diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index 4640f35a1e..43004bfc69 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -25,7 +25,7 @@ from http import HTTPStatus from http.server import BaseHTTPRequestHandler, HTTPServer from io import BytesIO -from typing import Any, ClassVar, Coroutine, Dict, Generator, Optional, TypeVar, Union +from typing import Any, ClassVar, Coroutine, Generator, Optional, TypeVar, Union from unittest.mock import ANY, AsyncMock, Mock from urllib.parse import parse_qs @@ -130,7 +130,7 @@ def device_scope(self) -> str: keys.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL config["disable_registration"] = True @@ -834,7 +834,7 @@ def till_deferred_has_result( return deferred - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL config["disable_registration"] = True @@ -1100,9 +1100,9 @@ class DisabledEndpointsTestCase(HomeserverTestCase): admin.register_servlets, ] - config: Dict[str, Any] + config: dict[str, Any] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL config["disable_registration"] = True diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py index 5207382f00..3180969e7b 100644 --- a/tests/handlers/test_oidc.py +++ b/tests/handlers/test_oidc.py @@ -19,7 +19,7 @@ # # import os -from typing import Any, Awaitable, ContextManager, Dict, Optional, Tuple +from typing import Any, Awaitable, ContextManager, Optional from unittest.mock import ANY, AsyncMock, Mock, patch from urllib.parse import parse_qs, urlparse @@ -152,7 +152,7 @@ class 
OidcHandlerTestCase(HomeserverTestCase): if not HAS_OIDC: skip = "requires OIDC" - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL return config @@ -204,7 +204,7 @@ def start_authorization( client_redirect_url: str = "http://client/redirect", scope: str = "openid", with_sid: bool = False, - ) -> Tuple[SynapseRequest, FakeAuthorizationGrant]: + ) -> tuple[SynapseRequest, FakeAuthorizationGrant]: """Start an authorization request, and get the callback request back.""" nonce = random_string(10) state = random_string(10) @@ -222,7 +222,7 @@ def start_authorization( def assertRenderedError( self, error: str, error_description: Optional[str] = None - ) -> Tuple[Any, ...]: + ) -> tuple[Any, ...]: self.render_error.assert_called_once() args = self.render_error.call_args[0] self.assertEqual(args[1], error) diff --git a/tests/handlers/test_password_providers.py b/tests/handlers/test_password_providers.py index aa41875063..faa269bd35 100644 --- a/tests/handlers/test_password_providers.py +++ b/tests/handlers/test_password_providers.py @@ -22,7 +22,7 @@ """Tests for the password_auth_provider interface""" from http import HTTPStatus -from typing import Any, Dict, List, Optional, Type, Union +from typing import Any, Optional, Union from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -75,7 +75,7 @@ def parse_config(config: JsonDict) -> None: def __init__(self, config: None, account_handler: AccountHandler): pass - def get_supported_login_types(self) -> Dict[str, List[str]]: + def get_supported_login_types(self) -> dict[str, list[str]]: return {"test.login_type": ["test_field"]} def check_auth(self, *args: str) -> Mock: @@ -109,7 +109,7 @@ def parse_config(config: JsonDict) -> None: def __init__(self, config: None, account_handler: AccountHandler): pass - def get_supported_login_types(self) -> Dict[str, List[str]]: + def 
get_supported_login_types(self) -> dict[str, list[str]]: return {"m.login.password": ["password"], "test.login_type": ["test_field"]} def check_auth(self, *args: str) -> Mock: @@ -139,7 +139,7 @@ def check_pass(self, *args: str) -> Mock: return mock_password_provider.check_password(*args) -def legacy_providers_config(*providers: Type[Any]) -> dict: +def legacy_providers_config(*providers: type[Any]) -> dict: """Returns a config dict that will enable the given legacy password auth providers""" return { "password_providers": [ @@ -149,7 +149,7 @@ def legacy_providers_config(*providers: Type[Any]) -> dict: } -def providers_config(*providers: Type[Any]) -> dict: +def providers_config(*providers: type[Any]) -> dict: """Returns a config dict that will enable the given modules""" return { "modules": [ diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index 73426c7b04..7a7f803ebd 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Awaitable, Callable, Dict +from typing import Any, Awaitable, Callable from unittest.mock import AsyncMock, Mock from parameterized import parameterized @@ -44,7 +44,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.mock_federation = AsyncMock() self.mock_registry = Mock() - self.query_handlers: Dict[str, Callable[[dict], Awaitable[JsonDict]]] = {} + self.query_handlers: dict[str, Callable[[dict], Awaitable[JsonDict]]] = {} def register_query_handler( query_type: str, handler: Callable[[dict], Awaitable[JsonDict]] @@ -377,7 +377,7 @@ def test_check_avatar_on_remote_server(self, remote_server_name: str) -> None: self.get_success(self.handler.check_avatar_size_and_mime_type(remote_mxc)) ) - def _setup_local_files(self, names_and_props: Dict[str, Dict[str, Any]]) -> None: + def _setup_local_files(self, names_and_props: dict[str, dict[str, Any]]) 
-> None: """Stores metadata about files in the database. Args: diff --git a/tests/handlers/test_receipts.py b/tests/handlers/test_receipts.py index 4febccbfcf..bb9e84d644 100644 --- a/tests/handlers/test_receipts.py +++ b/tests/handlers/test_receipts.py @@ -20,7 +20,6 @@ # from copy import deepcopy -from typing import List from twisted.internet.testing import MemoryReactor @@ -334,7 +333,7 @@ def test_we_do_not_mutate(self) -> None: self.assertEqual(events, original_events) def _test_filters_private( - self, events: List[JsonDict], expected_output: List[JsonDict] + self, events: list[JsonDict], expected_output: list[JsonDict] ) -> None: """Tests that the _filter_out_private returns the expected output""" filtered_events = self.event_source.filter_out_private_receipts( diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 5e2eb8dee7..20c2554e25 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -19,7 +19,7 @@ # # -from typing import Any, Collection, List, Optional, Tuple +from typing import Any, Collection, Optional from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -65,7 +65,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], auth_provider_id: Optional[str], ) -> RegistrationBehaviour: return RegistrationBehaviour.ALLOW @@ -76,7 +76,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], auth_provider_id: Optional[str], ) -> RegistrationBehaviour: return RegistrationBehaviour.DENY @@ -87,7 +87,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: 
Collection[tuple[str, str]], auth_provider_id: Optional[str], ) -> RegistrationBehaviour: return RegistrationBehaviour.SHADOW_BAN @@ -98,7 +98,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], auth_provider_id: Optional[str] = None, ) -> RegistrationBehaviour: # Reject any user coming from CAS and whose username contains profanity @@ -115,7 +115,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], ) -> RegistrationBehaviour: return RegistrationBehaviour.ALLOW @@ -125,7 +125,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], ) -> RegistrationBehaviour: return RegistrationBehaviour.ALLOW @@ -135,7 +135,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], ) -> RegistrationBehaviour: return RegistrationBehaviour.DENY @@ -779,7 +779,7 @@ async def get_or_create_user( localpart: str, displayname: Optional[str], password_hash: Optional[str] = None, - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Creates a new user if the user does not exist, else revokes all previous access tokens and generates a new one. 
@@ -842,7 +842,7 @@ async def update_membership(*args: Any, **kwargs: Any) -> None: async def lookup_room_alias( *args: Any, **kwargs: Any - ) -> Tuple[RoomID, List[str]]: + ) -> tuple[RoomID, list[str]]: return RoomID.from_string(self.room_id), ["remotetest"] self.room_member_handler = Mock(spec=["update_membership", "lookup_room_alias"]) diff --git a/tests/handlers/test_room_list.py b/tests/handlers/test_room_list.py index 45cef09b22..f6e9309f1f 100644 --- a/tests/handlers/test_room_list.py +++ b/tests/handlers/test_room_list.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Optional, Set +from typing import Optional from synapse.rest import admin from synapse.rest.client import directory, login, room @@ -69,7 +69,7 @@ def test_acls_applied_to_room_directory_results(self) -> None: limit=50, from_federation_origin="test2" ) ) - room_ids_in_test2_list: Set[str] = { + room_ids_in_test2_list: set[str] = { entry["room_id"] for entry in room_list["chunk"] } @@ -78,7 +78,7 @@ def test_acls_applied_to_room_directory_results(self) -> None: limit=50, from_federation_origin="test3" ) ) - room_ids_in_test3_list: Set[str] = { + room_ids_in_test3_list: set[str] = { entry["room_id"] for entry in room_list["chunk"] } diff --git a/tests/handlers/test_room_summary.py b/tests/handlers/test_room_summary.py index 00592b9871..3c8c483921 100644 --- a/tests/handlers/test_room_summary.py +++ b/tests/handlers/test_room_summary.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict, Iterable, List, Optional, Set, Tuple +from typing import Any, Iterable, Optional from unittest import mock from twisted.internet.defer import ensureDeferred @@ -60,7 +60,7 @@ def _create_event( return result -def _order(*events: mock.Mock) -> List[mock.Mock]: +def _order(*events: mock.Mock) -> list[mock.Mock]: return sorted(events, key=_child_events_comparison_key) @@ -152,7 +152,7 @@ def _add_child( room_id: str, token: str, 
order: Optional[str] = None, - via: Optional[List[str]] = None, + via: Optional[list[str]] = None, ) -> None: """Add a child room to a space.""" if via is None: @@ -170,7 +170,7 @@ def _add_child( ) def _assert_hierarchy( - self, result: JsonDict, rooms_and_children: Iterable[Tuple[str, Iterable[str]]] + self, result: JsonDict, rooms_and_children: Iterable[tuple[str, Iterable[str]]] ) -> None: """ Assert that the expected room IDs are in the response. @@ -547,7 +547,7 @@ def test_pagination(self) -> None: ) # The result should have the space and all of the links, plus some of the # rooms and a pagination token. - expected: List[Tuple[str, Iterable[str]]] = [(self.space, room_ids)] + expected: list[tuple[str, Iterable[str]]] = [(self.space, room_ids)] expected += [(room_id, ()) for room_id in room_ids[:6]] self._assert_hierarchy(result, expected) self.assertIn("next_batch", result) @@ -646,7 +646,7 @@ def test_max_depth(self) -> None: create_requester(self.user), self.space, max_depth=0 ) ) - expected: List[Tuple[str, Iterable[str]]] = [(spaces[0], [rooms[0], spaces[1]])] + expected: list[tuple[str, Iterable[str]]] = [(spaces[0], [rooms[0], spaces[1]])] self._assert_hierarchy(result, expected) # A single additional layer. @@ -740,7 +740,7 @@ def test_fed_complex(self) -> None: async def summarize_remote_room_hierarchy( _self: Any, room: Any, suggested_only: bool - ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional[_RoomEntry], dict[str, JsonDict], set[str]]: return requested_room_entry, {subroom: child_room}, set() # Add a room to the space which is on another server. 
@@ -793,7 +793,7 @@ def test_fed_root(self) -> None: async def summarize_remote_room_hierarchy( _self: Any, room: Any, suggested_only: bool - ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional[_RoomEntry], dict[str, JsonDict], set[str]]: return requested_room_entry, {fed_subroom: child_room}, set() expected = [ @@ -921,7 +921,7 @@ def test_fed_filtering(self) -> None: async def summarize_remote_room_hierarchy( _self: Any, room: Any, suggested_only: bool - ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional[_RoomEntry], dict[str, JsonDict], set[str]]: return subspace_room_entry, dict(children_rooms), set() # Add a room to the space which is on another server. @@ -985,7 +985,7 @@ def test_fed_invited(self) -> None: async def summarize_remote_room_hierarchy( _self: Any, room: Any, suggested_only: bool - ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional[_RoomEntry], dict[str, JsonDict], set[str]]: return fed_room_entry, {}, set() # Add a room to the space which is on another server. 
@@ -1120,7 +1120,7 @@ def test_fed_remote_room_hosts(self) -> None: async def summarize_remote_room_hierarchy( _self: Any, room: Any, suggested_only: bool - ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional[_RoomEntry], dict[str, JsonDict], set[str]]: return requested_room_entry, {fed_subroom: child_room}, set() expected = [ @@ -1233,7 +1233,7 @@ def test_fed(self) -> None: async def summarize_remote_room_hierarchy( _self: Any, room: Any, suggested_only: bool - ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional[_RoomEntry], dict[str, JsonDict], set[str]]: return requested_room_entry, {}, set() with mock.patch( diff --git a/tests/handlers/test_saml.py b/tests/handlers/test_saml.py index f7cbf91113..28159abbcb 100644 --- a/tests/handlers/test_saml.py +++ b/tests/handlers/test_saml.py @@ -19,7 +19,7 @@ # # -from typing import Any, Dict, Optional, Set, Tuple +from typing import Any, Optional from unittest.mock import AsyncMock, Mock import attr @@ -73,7 +73,7 @@ def parse_config(config: JsonDict) -> None: return None @staticmethod - def get_saml_attributes(config: None) -> Tuple[Set[str], Set[str]]: + def get_saml_attributes(config: None) -> tuple[set[str], set[str]]: return {"uid"}, {"displayName"} def get_remote_user_id( @@ -102,10 +102,10 @@ def saml_response_to_user_attributes( class SamlHandlerTestCase(HomeserverTestCase): - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL - saml_config: Dict[str, Any] = { + saml_config: dict[str, Any] = { "sp_config": {"metadata": {}}, # Disable grandfathering. 
"grandfathered_mxid_source_attribute": None, diff --git a/tests/handlers/test_send_email.py b/tests/handlers/test_send_email.py index 5f7839c82c..d033ed3a1c 100644 --- a/tests/handlers/test_send_email.py +++ b/tests/handlers/test_send_email.py @@ -20,7 +20,7 @@ # -from typing import Callable, List, Tuple, Type, Union +from typing import Callable, Union from unittest.mock import patch from zope.interface import implementer @@ -58,18 +58,18 @@ def TestingESMTPTLSClientFactory( class _DummyMessageDelivery: def __init__(self) -> None: # (recipient, message) tuples - self.messages: List[Tuple[smtp.Address, bytes]] = [] + self.messages: list[tuple[smtp.Address, bytes]] = [] def receivedHeader( self, - helo: Tuple[bytes, bytes], + helo: tuple[bytes, bytes], origin: smtp.Address, - recipients: List[smtp.User], + recipients: list[smtp.User], ) -> None: return None def validateFrom( - self, helo: Tuple[bytes, bytes], origin: smtp.Address + self, helo: tuple[bytes, bytes], origin: smtp.Address ) -> smtp.Address: return origin @@ -89,7 +89,7 @@ class _DummyMessage: def __init__(self, delivery: _DummyMessageDelivery, user: smtp.User): self._delivery = delivery self._user = user - self._buffer: List[bytes] = [] + self._buffer: list[bytes] = [] def lineReceived(self, line: bytes) -> None: self._buffer.append(line) @@ -104,7 +104,7 @@ def connectionLost(self) -> None: class SendEmailHandlerTestCaseIPv4(HomeserverTestCase): - ip_class: Union[Type[IPv4Address], Type[IPv6Address]] = IPv4Address + ip_class: Union[type[IPv4Address], type[IPv6Address]] = IPv4Address def setUp(self) -> None: super().setUp() diff --git a/tests/handlers/test_sliding_sync.py b/tests/handlers/test_sliding_sync.py index 1ffd15cadb..a35910e4dd 100644 --- a/tests/handlers/test_sliding_sync.py +++ b/tests/handlers/test_sliding_sync.py @@ -18,7 +18,7 @@ # # import logging -from typing import AbstractSet, Dict, Mapping, Optional, Set, Tuple +from typing import AbstractSet, Mapping, Optional from unittest.mock 
import patch import attr @@ -3278,7 +3278,7 @@ def _get_sync_room_ids_for_user( user: UserID, to_token: StreamToken, from_token: Optional[StreamToken], - ) -> Tuple[Dict[str, RoomsForUserType], AbstractSet[str], AbstractSet[str]]: + ) -> tuple[dict[str, RoomsForUserType], AbstractSet[str], AbstractSet[str]]: """ Get the rooms the user should be syncing with """ @@ -3615,7 +3615,7 @@ def _get_sync_room_ids_for_user( user: UserID, to_token: StreamToken, from_token: Optional[StreamToken], - ) -> Tuple[Dict[str, RoomsForUserType], AbstractSet[str], AbstractSet[str]]: + ) -> tuple[dict[str, RoomsForUserType], AbstractSet[str], AbstractSet[str]]: """ Get the rooms the user should be syncing with """ @@ -3824,13 +3824,13 @@ def test_default_bump_event_types(self) -> None: @attr.s(slots=True, auto_attribs=True, frozen=True) class RequiredStateChangesTestParameters: - previous_required_state_map: Dict[str, Set[str]] - request_required_state_map: Dict[str, Set[str]] + previous_required_state_map: dict[str, set[str]] + request_required_state_map: dict[str, set[str]] state_deltas: StateMap[str] - expected_with_state_deltas: Tuple[ + expected_with_state_deltas: tuple[ Optional[Mapping[str, AbstractSet[str]]], StateFilter ] - expected_without_state_deltas: Tuple[ + expected_without_state_deltas: tuple[ Optional[Mapping[str, AbstractSet[str]]], StateFilter ] @@ -4785,7 +4785,7 @@ def test_limit_retained_previous_state_keys( self, _test_label: str, event_type: str, - extra_state_keys: Set[str], + extra_state_keys: set[str], ) -> None: """ Test that we limit the number of state_keys that we remember but always include diff --git a/tests/handlers/test_sso.py b/tests/handlers/test_sso.py index b09d0a42f5..5ac088f601 100644 --- a/tests/handlers/test_sso.py +++ b/tests/handlers/test_sso.py @@ -18,7 +18,7 @@ # # from http import HTTPStatus -from typing import BinaryIO, Callable, Dict, List, Optional, Tuple +from typing import BinaryIO, Callable, Optional from unittest.mock import Mock 
from twisted.internet.testing import MemoryReactor @@ -120,7 +120,7 @@ async def mock_get_file( max_size: Optional[int] = None, headers: Optional[RawHeaders] = None, is_allowed_content_type: Optional[Callable[[str], bool]] = None, -) -> Tuple[int, Dict[bytes, List[bytes]], str, int]: +) -> tuple[int, dict[bytes, list[bytes]], str, int]: fake_response = FakeResponse(code=404) if url == "http://my.server/me.png": fake_response = FakeResponse( diff --git a/tests/handlers/test_stats.py b/tests/handlers/test_stats.py index abec5c2e39..94f5e472ca 100644 --- a/tests/handlers/test_stats.py +++ b/tests/handlers/test_stats.py @@ -18,7 +18,7 @@ # # -from typing import Any, Dict, List, Optional, Tuple, cast +from typing import Any, Optional, cast from twisted.internet.testing import MemoryReactor @@ -74,9 +74,9 @@ def _add_background_updates(self) -> None: ) ) - async def get_all_room_state(self) -> List[Optional[str]]: + async def get_all_room_state(self) -> list[Optional[str]]: rows = cast( - List[Tuple[Optional[str]]], + list[tuple[Optional[str]]], await self.store.db_pool.simple_select_list( "room_stats_state", None, retcols=("topic",) ), @@ -85,7 +85,7 @@ async def get_all_room_state(self) -> List[Optional[str]]: def _get_current_stats( self, stats_type: str, stat_id: str - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: table, id_col = stats.TYPE_TO_TABLE[stats_type] cols = list(stats.ABSOLUTE_STATS_FIELDS[stats_type]) diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index c61788fe90..140dd4a0ba 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -18,7 +18,7 @@ # # from http import HTTPStatus -from typing import Collection, ContextManager, List, Optional +from typing import Collection, ContextManager, Optional from unittest.mock import AsyncMock, Mock, patch from parameterized import parameterized, parameterized_class @@ -872,7 +872,7 @@ def test_archived_rooms_do_not_include_state_after_leave( # ... 
And the state should be empty self.assertEqual(sync_room_result.state, {}) - def _patch_get_latest_events(self, latest_events: List[str]) -> ContextManager: + def _patch_get_latest_events(self, latest_events: list[str]) -> ContextManager: """Monkey-patch `get_prev_events_for_room` Returns a context manager which will replace the implementation of @@ -902,7 +902,7 @@ async def _check_event_auth( async def _check_sigs_and_hash_for_pulled_events_and_fetch( dest: str, pdus: Collection[EventBase], room_version: RoomVersion - ) -> List[EventBase]: + ) -> list[EventBase]: return list(pdus) self.client._check_sigs_and_hash_for_pulled_events_and_fetch = ( # type: ignore[method-assign] diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index 90c185bc3d..70557a4a5f 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -21,7 +21,6 @@ import json -from typing import Dict, List, Set from unittest.mock import ANY, AsyncMock, Mock, call from netaddr import IPSet @@ -110,7 +109,7 @@ def make_homeserver( return hs - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: d = super().create_resource_dict() d["/_matrix/federation"] = TransportLayerServer(self.hs) return d @@ -143,7 +142,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: return_value=None ) - self.room_members: List[UserID] = [] + self.room_members: list[UserID] = [] async def check_user_in_room(room_id: str, requester: Requester) -> None: if requester.user.to_string() not in [ @@ -163,7 +162,7 @@ async def check_host_in_room(room_id: str, server_name: str) -> bool: side_effect=check_host_in_room ) - async def get_current_hosts_in_room(room_id: str) -> Set[str]: + async def get_current_hosts_in_room(room_id: str) -> set[str]: return {member.domain for member in self.room_members} hs.get_storage_controllers().state.get_current_hosts_in_room = Mock( # type: ignore[method-assign] 
@@ -174,7 +173,7 @@ async def get_current_hosts_in_room(room_id: str) -> Set[str]: side_effect=get_current_hosts_in_room ) - async def get_users_in_room(room_id: str) -> Set[str]: + async def get_users_in_room(room_id: str) -> set[str]: return {str(u) for u in self.room_members} self.datastore.get_users_in_room = Mock(side_effect=get_users_in_room) diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py index 1ba0be51a2..f50fa1f4a0 100644 --- a/tests/handlers/test_user_directory.py +++ b/tests/handlers/test_user_directory.py @@ -17,7 +17,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Tuple +from typing import Any from unittest.mock import AsyncMock, Mock, patch from urllib.parse import quote @@ -313,7 +313,7 @@ def check_user_dir_for_private_room() -> None: def _create_rooms_and_inject_memberships( self, creator: str, token: str, joiner: str - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Create a public and private room as a normal user. Then get the `joiner` into those rooms. 
""" diff --git a/tests/http/__init__.py b/tests/http/__init__.py index 3c20e5e442..b19a484004 100644 --- a/tests/http/__init__.py +++ b/tests/http/__init__.py @@ -19,7 +19,6 @@ # import os.path import subprocess -from typing import List from incremental import Version from zope.interface import implementer @@ -85,7 +84,7 @@ def get_test_key_file() -> str: """ -def create_test_cert_file(sanlist: List[bytes]) -> str: +def create_test_cert_file(sanlist: list[bytes]) -> str: """build an x509 certificate file Args: @@ -151,7 +150,7 @@ class TestServerTLSConnectionFactory: """An SSL connection creator which returns connections which present a certificate signed by our test CA.""" - def __init__(self, sanlist: List[bytes]): + def __init__(self, sanlist: list[bytes]): """ Args: sanlist: a list of subjectAltName values for the cert @@ -166,7 +165,7 @@ def serverConnectionForTLS(self, tlsProtocol: TLSMemoryBIOProtocol) -> Connectio def wrap_server_factory_for_tls( - factory: IProtocolFactory, clock: IReactorTime, sanlist: List[bytes] + factory: IProtocolFactory, clock: IReactorTime, sanlist: list[bytes] ) -> TLSMemoryBIOFactory: """Wrap an existing Protocol Factory with a test TLSMemoryBIOFactory diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index c66ca489a4..949564fcc7 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -20,7 +20,7 @@ import base64 import logging import os -from typing import Generator, List, Optional, cast +from typing import Generator, Optional, cast from unittest.mock import AsyncMock, call, patch import treq @@ -110,7 +110,7 @@ def _make_connection( client_factory: IProtocolFactory, ssl: bool = True, expected_sni: Optional[bytes] = None, - tls_sanlist: Optional[List[bytes]] = None, + tls_sanlist: Optional[list[bytes]] = None, ) -> HTTPChannel: """Builds a test server, and completes the outgoing client 
connection Args: diff --git a/tests/http/federation/test_srv_resolver.py b/tests/http/federation/test_srv_resolver.py index a359b0a141..54f3168a01 100644 --- a/tests/http/federation/test_srv_resolver.py +++ b/tests/http/federation/test_srv_resolver.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict, Generator, List, Tuple, cast +from typing import Generator, cast from unittest.mock import Mock from twisted.internet import defer @@ -44,20 +44,20 @@ def test_resolve(self) -> None: type=dns.SRV, payload=dns.Record_SRV(target=host_name) ) - result_deferred: "Deferred[Tuple[List[dns.RRHeader], None, None]]" = Deferred() + result_deferred: "Deferred[tuple[list[dns.RRHeader], None, None]]" = Deferred() dns_client_mock.lookupService.return_value = result_deferred - cache: Dict[bytes, List[Server]] = {} + cache: dict[bytes, list[Server]] = {} resolver = SrvResolver(dns_client=dns_client_mock, cache=cache) @defer.inlineCallbacks - def do_lookup() -> Generator["Deferred[object]", object, List[Server]]: + def do_lookup() -> Generator["Deferred[object]", object, list[Server]]: with LoggingContext( name="one", server_name="test_server", ) as ctx: resolve_d = resolver.resolve_service(service_name) - result: List[Server] + result: list[Server] result = yield defer.ensureDeferred(resolve_d) # type: ignore[assignment] # should have restored our context @@ -95,7 +95,7 @@ def test_from_cache_expired_and_dns_fail( cache = {service_name: [cast(Server, entry)]} resolver = SrvResolver(dns_client=dns_client_mock, cache=cache) - servers: List[Server] + servers: list[Server] servers = yield defer.ensureDeferred(resolver.resolve_service(service_name)) # type: ignore[assignment] dns_client_mock.lookupService.assert_called_once_with(service_name) @@ -122,7 +122,7 @@ def test_from_cache(self) -> Generator["Deferred[object]", object, None]: dns_client=dns_client_mock, cache=cache, get_time=clock.time ) - servers: List[Server] + 
servers: list[Server] servers = yield defer.ensureDeferred(resolver.resolve_service(service_name)) # type: ignore[assignment] self.assertFalse(dns_client_mock.lookupService.called) @@ -138,7 +138,7 @@ def test_empty_cache(self) -> Generator["Deferred[object]", object, None]: service_name = b"test_service.example.com" - cache: Dict[bytes, List[Server]] = {} + cache: dict[bytes, list[Server]] = {} resolver = SrvResolver(dns_client=dns_client_mock, cache=cache) with self.assertRaises(error.DNSServerError): @@ -152,10 +152,10 @@ def test_name_error(self) -> Generator["Deferred[object]", object, None]: service_name = b"test_service.example.com" - cache: Dict[bytes, List[Server]] = {} + cache: dict[bytes, list[Server]] = {} resolver = SrvResolver(dns_client=dns_client_mock, cache=cache) - servers: List[Server] + servers: list[Server] servers = yield defer.ensureDeferred(resolver.resolve_service(service_name)) # type: ignore[assignment] self.assertEqual(len(servers), 0) @@ -167,10 +167,10 @@ def test_disabled_service(self) -> None: """ service_name = b"test_service.example.com" - lookup_deferred: "Deferred[Tuple[List[dns.RRHeader], None, None]]" = Deferred() + lookup_deferred: "Deferred[tuple[list[dns.RRHeader], None, None]]" = Deferred() dns_client_mock = Mock() dns_client_mock.lookupService.return_value = lookup_deferred - cache: Dict[bytes, List[Server]] = {} + cache: dict[bytes, list[Server]] = {} resolver = SrvResolver(dns_client=dns_client_mock, cache=cache) # Old versions of Twisted don't have an ensureDeferred in failureResultOf. 
@@ -193,10 +193,10 @@ def test_non_srv_answer(self) -> None: """ service_name = b"test_service.example.com" - lookup_deferred: "Deferred[Tuple[List[dns.RRHeader], None, None]]" = Deferred() + lookup_deferred: "Deferred[tuple[list[dns.RRHeader], None, None]]" = Deferred() dns_client_mock = Mock() dns_client_mock.lookupService.return_value = lookup_deferred - cache: Dict[bytes, List[Server]] = {} + cache: dict[bytes, list[Server]] = {} resolver = SrvResolver(dns_client=dns_client_mock, cache=cache) # Old versions of Twisted don't have an ensureDeferred in successResultOf. diff --git a/tests/http/server/_base.py b/tests/http/server/_base.py index 8eec4329fe..cc9b5fd6e1 100644 --- a/tests/http/server/_base.py +++ b/tests/http/server/_base.py @@ -26,12 +26,8 @@ Any, Callable, ContextManager, - Dict, Generator, - List, Optional, - Set, - Tuple, TypeVar, Union, ) @@ -208,7 +204,7 @@ def make_request_with_cancellation_test( # The set of previously seen `await`s. # Each element is a stringified stack trace. - seen_awaits: Set[Tuple[str, ...]] = set() + seen_awaits: set[tuple[str, ...]] = set() _log_for_request( 0, f"Running make_request_with_cancellation_test for {test_name}..." @@ -337,7 +333,7 @@ class Deferred__await__Patch: deferred_patch.unblock_awaits() """ - def __init__(self, seen_awaits: Set[Tuple[str, ...]], request_number: int): + def __init__(self, seen_awaits: set[tuple[str, ...]], request_number: int): """ Args: seen_awaits: The set of stack traces of `await`s that have been previously @@ -365,10 +361,10 @@ def __init__(self, seen_awaits: Set[Tuple[str, ...]], request_number: int): # unresolved `Deferred` and return it out of `Deferred.__await__` / # `coroutine.send()`. We have to resolve it later, in case the `await`ing # coroutine is part of some shared processing, such as `@cached`. - self._to_unblock: Dict[Deferred, Union[object, Failure]] = {} + self._to_unblock: dict[Deferred, Union[object, Failure]] = {} # The last stack we logged. 
- self._previous_stack: List[inspect.FrameInfo] = [] + self._previous_stack: list[inspect.FrameInfo] = [] def patch(self) -> ContextManager[Mock]: """Returns a context manager which patches `Deferred.__await__`.""" @@ -507,8 +503,8 @@ def _log_for_request(request_number: int, message: str) -> None: def _log_await_stack( - stack: List[inspect.FrameInfo], - previous_stack: List[inspect.FrameInfo], + stack: list[inspect.FrameInfo], + previous_stack: list[inspect.FrameInfo], request_number: int, note: str, ) -> None: @@ -566,7 +562,7 @@ def _format_stack_frame(frame_info: inspect.FrameInfo) -> str: ) -def _get_stack(skip_frames: int) -> List[inspect.FrameInfo]: +def _get_stack(skip_frames: int) -> list[inspect.FrameInfo]: """Captures the stack for a request. Skips any twisted frames and stops at `JsonResource.wrapped_async_request_handler`. @@ -622,6 +618,6 @@ def _get_stack_frame_method_name(frame_info: inspect.FrameInfo) -> str: return method_name -def _hash_stack(stack: List[inspect.FrameInfo]) -> Tuple[str, ...]: +def _hash_stack(stack: list[inspect.FrameInfo]) -> tuple[str, ...]: """Turns a stack into a hashable value that can be put into a set.""" return tuple(_format_stack_frame(frame) for frame in stack) diff --git a/tests/http/test_client.py b/tests/http/test_client.py index a02f6fc728..d9eaa78a39 100644 --- a/tests/http/test_client.py +++ b/tests/http/test_client.py @@ -20,7 +20,7 @@ # from io import BytesIO -from typing import Tuple, Union +from typing import Union from unittest.mock import Mock from netaddr import IPSet @@ -59,7 +59,7 @@ class ReadMultipartResponseTests(TestCase): def _build_multipart_response( self, response_length: Union[int, str], max_length: int - ) -> Tuple[ + ) -> tuple[ BytesIO, "Deferred[MultipartResponse]", _MultipartParserProtocol, @@ -209,7 +209,7 @@ def test_content_length(self) -> None: class ReadBodyWithMaxSizeTests(TestCase): def _build_response( self, length: Union[int, str] = UNKNOWN_LENGTH - ) -> Tuple[ + ) -> tuple[ 
BytesIO, "Deferred[int]", _DiscardBodyWithMaxSizeProtocol, diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py index 6d87541888..6accb03b9f 100644 --- a/tests/http/test_matrixfederationclient.py +++ b/tests/http/test_matrixfederationclient.py @@ -18,7 +18,7 @@ # # import io -from typing import Any, Dict, Generator +from typing import Any, Generator from unittest.mock import ANY, Mock, create_autospec from netaddr import IPSet @@ -745,7 +745,7 @@ def test_configurable_retry_and_delay_values(self) -> None: class FederationClientProxyTests(BaseMultiWorkerStreamTestCase): - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: conf = super().default_config() conf["instance_map"] = { "main": {"host": "testserv", "port": 8765}, diff --git a/tests/http/test_proxy.py b/tests/http/test_proxy.py index 7110dcf9f9..59a9b073bc 100644 --- a/tests/http/test_proxy.py +++ b/tests/http/test_proxy.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Set from parameterized import parameterized @@ -64,7 +63,7 @@ class ProxyTests(TestCase): def test_parse_connection_header_value( self, connection_header_value: bytes, - expected_extra_headers_to_remove: Set[str], + expected_extra_headers_to_remove: set[str], ) -> None: """ Tests that the connection header value is parsed correctly diff --git a/tests/http/test_proxyagent.py b/tests/http/test_proxyagent.py index 5bc5d18d81..a9b4f3d956 100644 --- a/tests/http/test_proxyagent.py +++ b/tests/http/test_proxyagent.py @@ -21,7 +21,7 @@ import base64 import logging import os -from typing import List, Optional +from typing import Optional from unittest.mock import patch import treq @@ -252,7 +252,7 @@ def _make_connection( server_factory: IProtocolFactory, ssl: bool = False, expected_sni: Optional[bytes] = None, - tls_sanlist: Optional[List[bytes]] = None, + tls_sanlist: Optional[list[bytes]] = None, ) -> 
IProtocol: """Builds a test server, and completes the outgoing client connection diff --git a/tests/http/test_servlet.py b/tests/http/test_servlet.py index db39ecf244..087191b220 100644 --- a/tests/http/test_servlet.py +++ b/tests/http/test_servlet.py @@ -21,7 +21,7 @@ import json from http import HTTPStatus from io import BytesIO -from typing import Tuple, Union +from typing import Union from unittest.mock import Mock from synapse.api.errors import Codes, SynapseError @@ -108,11 +108,11 @@ def __init__(self, hs: HomeServer): self.clock = hs.get_clock() @cancellable - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} diff --git a/tests/logging/test_remote_handler.py b/tests/logging/test_remote_handler.py index e0fd12ccf7..534a1fc4ee 100644 --- a/tests/logging/test_remote_handler.py +++ b/tests/logging/test_remote_handler.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Tuple from twisted.internet.protocol import Protocol from twisted.internet.testing import AccumulatingProtocol, MemoryReactorClock @@ -33,7 +32,7 @@ def connect_logging_client( reactor: MemoryReactorClock, client_id: int -) -> Tuple[Protocol, AccumulatingProtocol]: +) -> tuple[Protocol, AccumulatingProtocol]: # This is essentially tests.server.connect_client, but disabling autoflush on # the client transport. 
This is necessary to avoid an infinite loop due to # sending of data via the logging transport causing additional logs to be diff --git a/tests/media/test_media_storage.py b/tests/media/test_media_storage.py index 28c4ce676a..d584ea951c 100644 --- a/tests/media/test_media_storage.py +++ b/tests/media/test_media_storage.py @@ -23,7 +23,7 @@ import tempfile from binascii import unhexlify from io import BytesIO -from typing import Any, BinaryIO, ClassVar, Dict, List, Literal, Optional, Tuple, Union +from typing import Any, BinaryIO, ClassVar, Literal, Optional, Union from unittest.mock import MagicMock, Mock, patch from urllib import parse @@ -297,9 +297,9 @@ class MediaRepoTests(unittest.HomeserverTestCase): user_id = "@test:user" def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - self.fetches: List[ - Tuple[ - "Deferred[Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]]]", + self.fetches: list[ + tuple[ + "Deferred[tuple[bytes, tuple[int, dict[bytes, list[bytes]]]]]", str, str, Optional[QueryParams], @@ -317,12 +317,12 @@ def get_file( retry_on_dns_fail: bool = True, ignore_backoff: bool = False, follow_redirects: bool = False, - ) -> "Deferred[Tuple[int, Dict[bytes, List[bytes]]]]": + ) -> "Deferred[tuple[int, dict[bytes, list[bytes]]]]": """A mock for MatrixFederationHttpClient.get_file.""" def write_to( - r: Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]], - ) -> Tuple[int, Dict[bytes, List[bytes]]]: + r: tuple[bytes, tuple[int, dict[bytes, list[bytes]]]], + ) -> tuple[int, dict[bytes, list[bytes]]]: data, response = r output_stream.write(data) return response @@ -332,7 +332,7 @@ def write_err(f: Failure) -> Failure: output_stream.write(f.value.response) return f - d: Deferred[Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]]] = Deferred() + d: Deferred[tuple[bytes, tuple[int, dict[bytes, list[bytes]]]]] = Deferred() self.fetches.append((d, destination, path, args)) # Note that this callback changes the value held by d. 
d_after_callback = d.addCallbacks(write_to, write_err) @@ -370,7 +370,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.media_id = "example.com/12345" - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources @@ -860,12 +860,12 @@ class TestSpamCheckerLegacy: Uses the legacy Spam-Checker API. """ - def __init__(self, config: Dict[str, Any], api: ModuleApi) -> None: + def __init__(self, config: dict[str, Any], api: ModuleApi) -> None: self.config = config self.api = api @staticmethod - def parse_config(config: Dict[str, Any]) -> Dict[str, Any]: + def parse_config(config: dict[str, Any]) -> dict[str, Any]: return config async def check_event_for_spam(self, event: EventBase) -> Union[bool, str]: @@ -911,12 +911,12 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: load_legacy_spam_checkers(hs) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = default_config("test") config.update( @@ -965,14 +965,14 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: check_media_file_for_spam=self.check_media_file_for_spam ) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources async def check_media_file_for_spam( self, file_wrapper: ReadableFileWrapper, file_info: FileInfo - ) -> Union[Codes, Literal["NOT_SPAM"], 
Tuple[Codes, JsonDict]]: + ) -> Union[Codes, Literal["NOT_SPAM"], tuple[Codes, JsonDict]]: buf = BytesIO() await file_wrapper.write_chunks_to(buf.write) @@ -1028,7 +1028,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.client = hs.get_federation_http_client() self.store = hs.get_datastores().main - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: # We need to manually set the resource tree to include media, the # default only does `/_matrix/client` APIs. return {"/_matrix/media": self.hs.get_media_repository_resource()} @@ -1280,7 +1280,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.store = hs.get_datastores().main self.client = hs.get_federation_http_client() - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources @@ -1377,7 +1377,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: is_user_allowed_to_upload_media_of_size=self.is_user_allowed_to_upload_media_of_size, ) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources diff --git a/tests/metrics/test_metrics.py b/tests/metrics/test_metrics.py index b3f42c76f1..084eba3a5a 100644 --- a/tests/metrics/test_metrics.py +++ b/tests/metrics/test_metrics.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict, NoReturn, Protocol, Tuple +from typing import NoReturn, Protocol from prometheus_client.core import Sample @@ -35,7 +35,7 @@ from tests import unittest -def get_sample_labels_value(sample: Sample) -> 
Tuple[Dict[str, str], float]: +def get_sample_labels_value(sample: Sample) -> tuple[dict[str, str], float]: """Extract the labels and values of a sample. prometheus_client 0.5 changed the sample type to a named tuple with more @@ -54,7 +54,7 @@ def get_sample_labels_value(sample: Sample) -> Tuple[Dict[str, str], float]: # Otherwise fall back to treating it as a plain 3 tuple. else: # In older versions of prometheus_client Sample was a 3-tuple. - labels: Dict[str, str] + labels: dict[str, str] value: float _, labels, value = sample # type: ignore[misc] return labels, value @@ -127,7 +127,7 @@ def handle2(metrics: MetricEntry) -> None: def get_metrics_from_gauge( self, gauge: InFlightGauge - ) -> Dict[str, Dict[Tuple[str, ...], float]]: + ) -> dict[str, dict[tuple[str, ...], float]]: results = {} for r in gauge.collect(): @@ -384,7 +384,7 @@ def raise_exception() -> NoReturn: self.assertEqual(hs2_metric_value, "2.0") -def get_latest_metrics() -> Dict[str, str]: +def get_latest_metrics() -> dict[str, str]: """ Collect the latest metrics from the registry and parse them into an easy to use map. The key includes the metric name and labels. 
diff --git a/tests/module_api/test_api.py b/tests/module_api/test_api.py index 86f987f292..b768a913d7 100644 --- a/tests/module_api/test_api.py +++ b/tests/module_api/test_api.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict, Optional +from typing import Any, Optional from unittest.mock import AsyncMock, Mock from twisted.internet import defer @@ -839,7 +839,7 @@ class ModuleApiWorkerTestCase(BaseModuleApiTestCase, BaseMultiWorkerStreamTestCa presence.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: conf = super().default_config() conf["stream_writers"] = {"presence": ["presence_writer"]} conf["instance_map"] = { diff --git a/tests/push/test_email.py b/tests/push/test_email.py index 26819e2d3c..d3822b8643 100644 --- a/tests/push/test_email.py +++ b/tests/push/test_email.py @@ -21,7 +21,7 @@ import importlib.resources as importlib_resources import os from http import HTTPStatus -from typing import Any, Dict, List, Sequence, Tuple +from typing import Any, Sequence import attr from parameterized import parameterized @@ -83,8 +83,8 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: hs = self.setup_test_homeserver(config=config) - # List[Tuple[Deferred, args, kwargs]] - self.email_attempts: List[Tuple[Deferred, Sequence, Dict]] = [] + # list[tuple[Deferred, args, kwargs]] + self.email_attempts: list[tuple[Deferred, Sequence, dict]] = [] def sendmail(*args: Any, **kwargs: Any) -> Deferred: # This mocks out synapse.reactor.send_email._sendmail. @@ -510,7 +510,7 @@ def test_remove_unlinked_pushers_background_job(self) -> None: ) self.assertEqual(len(pushers), 0) - def _check_for_mail(self) -> Tuple[Sequence, Dict]: + def _check_for_mail(self) -> tuple[Sequence, dict]: """ Assert that synapse sent off exactly one email notification. 
diff --git a/tests/push/test_http.py b/tests/push/test_http.py index 4c8aae5782..ca2ced01ed 100644 --- a/tests/push/test_http.py +++ b/tests/push/test_http.py @@ -17,7 +17,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict, List, Tuple +from typing import Any from unittest.mock import Mock from parameterized import parameterized @@ -51,7 +51,7 @@ class HTTPPusherTests(HomeserverTestCase): hijack_auth = False def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - self.push_attempts: List[Tuple[Deferred, str, dict]] = [] + self.push_attempts: list[tuple[Deferred, str, dict]] = [] m = Mock() @@ -747,7 +747,7 @@ def _send_read_request( def _make_user_with_pusher( self, username: str, enabled: bool = True - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Registers a user and creates a pusher for them. Args: @@ -925,7 +925,7 @@ def test_update_different_device_access_token_device_id(self) -> None: ret = self.get_success( self.hs.get_datastores().main.get_pushers_by({"user_name": user_id}) ) - pushers: List[PusherConfig] = list(ret) + pushers: list[PusherConfig] = list(ret) # Check that we still have one pusher, and that the device ID associated with # it didn't change. 
@@ -1118,7 +1118,7 @@ def test_msc4076_badge_count( device_id = user_tuple.device_id # Set the push data dict based on test input parameters - push_data: Dict[str, Any] = { + push_data: dict[str, Any] = { "url": "http://example.com/_matrix/push/v1/notify", } if disable_badge_count: diff --git a/tests/push/test_presentable_names.py b/tests/push/test_presentable_names.py index bd42fc0580..4982a80cce 100644 --- a/tests/push/test_presentable_names.py +++ b/tests/push/test_presentable_names.py @@ -19,7 +19,7 @@ # # -from typing import Iterable, List, Optional, Tuple, cast +from typing import Iterable, Optional, cast from synapse.api.constants import EventTypes, Membership from synapse.api.room_versions import RoomVersions @@ -36,7 +36,7 @@ class MockDataStore: (I.e. the state key is used as the event ID.) """ - def __init__(self, events: Iterable[Tuple[StateKey, dict]]): + def __init__(self, events: Iterable[tuple[StateKey, dict]]): """ Args: events: A state map to event contents. @@ -63,7 +63,7 @@ async def get_event( assert allow_none, "Mock not configured for allow_none = False" # Decode the state key from the event ID. 
- state_key = cast(Tuple[str, str], tuple(event_id.split("|", 1))) + state_key = cast(tuple[str, str], tuple(event_id.split("|", 1))) return self._events.get(state_key) async def get_events(self, event_ids: Iterable[StateKey]) -> StateMap[EventBase]: @@ -77,7 +77,7 @@ class PresentableNamesTestCase(unittest.HomeserverTestCase): def _calculate_room_name( self, - events: Iterable[Tuple[Tuple[str, str], dict]], + events: Iterable[tuple[tuple[str, str], dict]], user_id: str = "", fallback_to_members: bool = True, fallback_to_single_member: bool = True, @@ -97,7 +97,7 @@ def _calculate_room_name( def test_name(self) -> None: """A room name event should be used.""" - events: List[Tuple[Tuple[str, str], dict]] = [ + events: list[tuple[tuple[str, str], dict]] = [ ((EventTypes.Name, ""), {"name": "test-name"}), ] self.assertEqual("test-name", self._calculate_room_name(events)) @@ -111,7 +111,7 @@ def test_name(self) -> None: def test_canonical_alias(self) -> None: """An canonical alias should be used.""" - events: List[Tuple[Tuple[str, str], dict]] = [ + events: list[tuple[tuple[str, str], dict]] = [ ((EventTypes.CanonicalAlias, ""), {"alias": "#test-name:test"}), ] self.assertEqual("#test-name:test", self._calculate_room_name(events)) @@ -125,7 +125,7 @@ def test_canonical_alias(self) -> None: def test_invite(self) -> None: """An invite has special behaviour.""" - events: List[Tuple[Tuple[str, str], dict]] = [ + events: list[tuple[tuple[str, str], dict]] = [ ((EventTypes.Member, self.USER_ID), {"membership": Membership.INVITE}), ((EventTypes.Member, self.OTHER_USER_ID), {"displayname": "Other User"}), ] @@ -151,7 +151,7 @@ def test_invite(self) -> None: def test_no_members(self) -> None: """Behaviour of an empty room.""" - events: List[Tuple[Tuple[str, str], dict]] = [] + events: list[tuple[tuple[str, str], dict]] = [] self.assertEqual("Empty Room", self._calculate_room_name(events)) # Note that events with invalid (or missing) membership are ignored. 
diff --git a/tests/push/test_push_rule_evaluator.py b/tests/push/test_push_rule_evaluator.py index 718c9614e5..b1f7ba6973 100644 --- a/tests/push/test_push_rule_evaluator.py +++ b/tests/push/test_push_rule_evaluator.py @@ -19,7 +19,7 @@ # # -from typing import Any, Dict, List, Optional, Union, cast +from typing import Any, Optional, Union, cast from twisted.internet.testing import MemoryReactor @@ -60,7 +60,7 @@ def test_nested(self) -> None: def test_non_string(self) -> None: """String, booleans, ints, nulls and list of those should be kept while other items are dropped.""" - input: Dict[str, Any] = { + input: dict[str, Any] = { "woo": "woo", "foo": True, "bar": 1, @@ -165,13 +165,13 @@ def _get_evaluator( ) room_member_count = 0 sender_power_level = 0 - power_levels: Dict[str, Union[int, Dict[str, int]]] = {} + power_levels: dict[str, Union[int, dict[str, int]]] = {} return PushRuleEvaluator( _flatten_dict(event), False, room_member_count, sender_power_level, - cast(Dict[str, int], power_levels.get("notifications", {})), + cast(dict[str, int], power_levels.get("notifications", {})), {} if related_events is None else related_events, related_event_match_enabled=True, room_version_feature_flags=event.room_version.msc3931_push_features, @@ -588,7 +588,7 @@ def test_tweaks_for_actions(self) -> None: This tests the behaviour of tweaks_for_actions. 
""" - actions: List[Union[Dict[str, str], str]] = [ + actions: list[Union[dict[str, str], str]] = [ {"set_tweak": "sound", "value": "default"}, {"set_tweak": "highlight"}, "notify", diff --git a/tests/replication/_base.py b/tests/replication/_base.py index 8a6394e9ef..84bdc84ce9 100644 --- a/tests/replication/_base.py +++ b/tests/replication/_base.py @@ -19,7 +19,7 @@ # import logging from collections import defaultdict -from typing import Any, Dict, List, Optional, Set, Tuple +from typing import Any, Optional from twisted.internet.address import IPv4Address from twisted.internet.protocol import Protocol, connectionDone @@ -108,7 +108,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self._client_transport: Optional[FakeTransport] = None self._server_transport: Optional[FakeTransport] = None - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: d = super().create_resource_dict() d["/_synapse/replication"] = ReplicationRestResource(self.hs) return d @@ -183,7 +183,7 @@ def handle_http_replication_attempt(self) -> SynapseRequest: # hook into the channel's request factory so that we can keep a record # of the requests - requests: List[SynapseRequest] = [] + requests: list[SynapseRequest] = [] real_request_factory = channel.requestFactory def request_factory(*args: Any, **kwargs: Any) -> SynapseRequest: @@ -256,7 +256,7 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase): # Redis replication only takes place on Postgres skip = "Requires Postgres" - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: """ Overrides the default config to enable Redis. 
Even if the test only uses make_worker_hs, the main process needs Redis @@ -491,7 +491,7 @@ def __init__(self, hs: HomeServer): super().__init__(hs) # list of received (stream_name, token, row) tuples - self.received_rdata_rows: List[Tuple[str, int, Any]] = [] + self.received_rdata_rows: list[tuple[str, int, Any]] = [] async def on_rdata( self, stream_name: str, instance_name: str, token: int, rows: list @@ -505,7 +505,7 @@ class FakeRedisPubSubServer: """A fake Redis server for pub/sub.""" def __init__(self) -> None: - self._subscribers_by_channel: Dict[bytes, Set["FakeRedisPubSubProtocol"]] = ( + self._subscribers_by_channel: dict[bytes, set["FakeRedisPubSubProtocol"]] = ( defaultdict(set) ) diff --git a/tests/replication/http/test__base.py b/tests/replication/http/test__base.py index 31d3163c01..b757c6428a 100644 --- a/tests/replication/http/test__base.py +++ b/tests/replication/http/test__base.py @@ -20,7 +20,6 @@ # from http import HTTPStatus -from typing import Tuple from twisted.web.server import Request @@ -52,7 +51,7 @@ async def _serialize_payload(**kwargs: ReplicationEndpoint) -> JsonDict: @cancellable async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} @@ -73,7 +72,7 @@ async def _serialize_payload(**kwargs: ReplicationEndpoint) -> JsonDict: async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} diff --git a/tests/replication/storage/test_events.py b/tests/replication/storage/test_events.py index fce3269005..1398689c2d 100644 --- a/tests/replication/storage/test_events.py +++ b/tests/replication/storage/test_events.py @@ -19,7 +19,7 @@ # # import logging -from typing import Any, Iterable, List, Optional, Tuple +from typing 
import Any, Iterable, Optional from canonicaljson import encode_canonical_json from parameterized import parameterized @@ -244,13 +244,13 @@ def build_event( key: Optional[str] = None, internal: Optional[dict] = None, depth: Optional[int] = None, - prev_events: Optional[List[Tuple[str, dict]]] = None, - auth_events: Optional[List[str]] = None, - prev_state: Optional[List[str]] = None, + prev_events: Optional[list[tuple[str, dict]]] = None, + auth_events: Optional[list[str]] = None, + prev_state: Optional[list[str]] = None, redacts: Optional[str] = None, push_actions: Iterable = frozenset(), **content: object, - ) -> Tuple[EventBase, EventContext]: + ) -> tuple[EventBase, EventContext]: prev_events = prev_events or [] auth_events = auth_events or [] prev_state = prev_state or [] diff --git a/tests/replication/tcp/streams/test_events.py b/tests/replication/tcp/streams/test_events.py index 452032205f..9607c03224 100644 --- a/tests/replication/tcp/streams/test_events.py +++ b/tests/replication/tcp/streams/test_events.py @@ -18,7 +18,7 @@ # # -from typing import Any, List, Optional +from typing import Any, Optional from parameterized import parameterized @@ -299,7 +299,7 @@ def test_update_function_huge_state_change( self.assertEqual(row.data.event_id, pl_event.event_id) # the state rows are unsorted - state_rows: List[EventsStreamCurrentStateRow] = [] + state_rows: list[EventsStreamCurrentStateRow] = [] for stream_name, _, row in received_event_rows: self.assertEqual("events", stream_name) self.assertIsInstance(row, EventsStreamRow) @@ -355,7 +355,7 @@ def test_update_function_state_row_limit(self) -> None: self.hs.get_datastores().main.get_latest_event_ids_in_room(self.room_id) ) - events: List[EventBase] = [] + events: list[EventBase] = [] for user in user_ids: events.extend( self._inject_state_event(sender=user) for _ in range(STATES_PER_USER) @@ -426,7 +426,7 @@ def test_update_function_state_row_limit(self) -> None: self.assertEqual(row.data.event_id, 
pl_events[i].event_id) # the state rows are unsorted - state_rows: List[EventsStreamCurrentStateRow] = [] + state_rows: list[EventsStreamCurrentStateRow] = [] for _ in range(STATES_PER_USER + 1): stream_name, token, row = received_event_rows.pop(0) self.assertEqual("events", stream_name) diff --git a/tests/replication/test_multi_media_repo.py b/tests/replication/test_multi_media_repo.py index f712ad1fe3..193c6c0198 100644 --- a/tests/replication/test_multi_media_repo.py +++ b/tests/replication/test_multi_media_repo.py @@ -20,7 +20,7 @@ # import logging import os -from typing import Any, Optional, Tuple +from typing import Any, Optional from twisted.internet.protocol import Factory from twisted.internet.testing import MemoryReactor @@ -78,7 +78,7 @@ def make_worker_hs( def _get_media_req( self, hs: HomeServer, target: str, media_id: str - ) -> Tuple[FakeChannel, Request]: + ) -> tuple[FakeChannel, Request]: """Request some remote media from the given HS by calling the download API. @@ -293,7 +293,7 @@ def make_worker_hs( def _get_media_req( self, hs: HomeServer, target: str, media_id: str - ) -> Tuple[FakeChannel, Request]: + ) -> tuple[FakeChannel, Request]: """Request some remote media from the given HS by calling the download API. 
diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py index 2a17389feb..f3740a8e35 100644 --- a/tests/rest/admin/test_admin.py +++ b/tests/rest/admin/test_admin.py @@ -20,7 +20,7 @@ # import urllib.parse -from typing import Dict, cast +from typing import cast from parameterized import parameterized @@ -65,7 +65,7 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase): room.register_servlets, ] - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources diff --git a/tests/rest/admin/test_event_reports.py b/tests/rest/admin/test_event_reports.py index 28be7fcd97..19d945bb42 100644 --- a/tests/rest/admin/test_event_reports.py +++ b/tests/rest/admin/test_event_reports.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List from twisted.internet.testing import MemoryReactor @@ -441,7 +440,7 @@ def _create_event_and_report_without_parameters( ) self.assertEqual(200, channel.code, msg=channel.json_body) - def _check_fields(self, content: List[JsonDict]) -> None: + def _check_fields(self, content: list[JsonDict]) -> None: """Checks that all attributes are present in an event report""" for c in content: self.assertIn("id", c) diff --git a/tests/rest/admin/test_federation.py b/tests/rest/admin/test_federation.py index d0b57d1faa..5586bb47e1 100644 --- a/tests/rest/admin/test_federation.py +++ b/tests/rest/admin/test_federation.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional +from typing import Optional from parameterized import parameterized @@ -272,7 +272,7 @@ def test_order_by(self) -> None: """Testing order list with parameter `order_by`""" def _order_test( - expected_destination_list: List[str], + expected_destination_list: 
list[str], order_by: Optional[str], dir: Optional[str] = None, ) -> None: @@ -521,7 +521,7 @@ def _create_destinations(self, number_destinations: int) -> None: dest = f"sub{i}.example.com" self._create_destination(dest, 50, 50, 50, 100) - def _check_fields(self, content: List[JsonDict]) -> None: + def _check_fields(self, content: list[JsonDict]) -> None: """Checks that the expected destination attributes are present in content Args: @@ -820,7 +820,7 @@ def _create_destination_rooms( self, number_rooms: int, destination: Optional[str] = None, - ) -> List[str]: + ) -> list[str]: """ Create the given number of rooms. The given `destination` homeserver will be recorded as a participant. @@ -853,7 +853,7 @@ def _create_destination_rooms( return room_ids - def _check_fields(self, content: List[JsonDict]) -> None: + def _check_fields(self, content: list[JsonDict]) -> None: """Checks that the expected room attributes are present in content Args: diff --git a/tests/rest/admin/test_jwks.py b/tests/rest/admin/test_jwks.py index 55b822c4d0..ee5588951b 100644 --- a/tests/rest/admin/test_jwks.py +++ b/tests/rest/admin/test_jwks.py @@ -19,7 +19,6 @@ # # -from typing import Dict from twisted.web.resource import Resource @@ -33,7 +32,7 @@ class JWKSTestCase(HomeserverTestCase): """Test /_synapse/jwks JWKS data.""" - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d diff --git a/tests/rest/admin/test_media.py b/tests/rest/admin/test_media.py index 86c09634cc..8cc54cc80c 100644 --- a/tests/rest/admin/test_media.py +++ b/tests/rest/admin/test_media.py @@ -20,7 +20,6 @@ # # import os -from typing import Dict from parameterized import parameterized @@ -51,7 +50,7 @@ class _AdminMediaTests(unittest.HomeserverTestCase): media.register_servlets, ] - def create_resource_dict(self) -> Dict[str, Resource]: + def 
create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py index 30b2de26e4..6bd21630db 100644 --- a/tests/rest/admin/test_room.py +++ b/tests/rest/admin/test_room.py @@ -22,7 +22,7 @@ import time import urllib.parse from http import HTTPStatus -from typing import List, Optional +from typing import Optional from unittest.mock import AsyncMock, Mock from parameterized import parameterized @@ -1609,7 +1609,7 @@ def test_room_list_sort_order(self) -> None: def _order_test( order_type: str, - expected_room_list: List[str], + expected_room_list: list[str], reverse: bool = False, ) -> None: """Request the list of rooms in a certain order. Assert that order is what diff --git a/tests/rest/admin/test_scheduled_tasks.py b/tests/rest/admin/test_scheduled_tasks.py index 16b80e214b..264c62e2de 100644 --- a/tests/rest/admin/test_scheduled_tasks.py +++ b/tests/rest/admin/test_scheduled_tasks.py @@ -13,7 +13,7 @@ # # # -from typing import Mapping, Optional, Tuple +from typing import Mapping, Optional from twisted.internet.testing import MemoryReactor @@ -42,17 +42,17 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: # create and schedule a few tasks async def _test_task( task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: return TaskStatus.ACTIVE, None, None async def _finished_test_task( task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: return TaskStatus.COMPLETE, None, None async def _failed_test_task( task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: return 
TaskStatus.FAILED, None, "Everything failed" self._task_scheduler.register_action(_test_task, "test_task") diff --git a/tests/rest/admin/test_server_notice.py b/tests/rest/admin/test_server_notice.py index ebb6867d7c..5053fea9c9 100644 --- a/tests/rest/admin/test_server_notice.py +++ b/tests/rest/admin/test_server_notice.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Sequence +from typing import Sequence from twisted.internet.testing import MemoryReactor @@ -729,7 +729,7 @@ def _check_invite_and_join_status( return invited_rooms - def _sync_and_get_messages(self, room_id: str, token: str) -> List[JsonDict]: + def _sync_and_get_messages(self, room_id: str, token: str) -> list[JsonDict]: """ Do a sync and get messages of a room. diff --git a/tests/rest/admin/test_statistics.py b/tests/rest/admin/test_statistics.py index 4026c47a23..a18952983e 100644 --- a/tests/rest/admin/test_statistics.py +++ b/tests/rest/admin/test_statistics.py @@ -19,7 +19,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict, List, Optional +from typing import Optional from twisted.internet.testing import MemoryReactor from twisted.web.resource import Resource @@ -50,7 +50,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.url = "/_synapse/admin/v1/statistics/users/media" - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources @@ -485,7 +485,7 @@ def _create_media(self, user_token: str, number_media: int) -> None: # Upload some media into the room self.helper.upload_media(SMALL_PNG, tok=user_token, expect_code=200) - def _check_fields(self, content: List[JsonDict]) -> None: + def _check_fields(self, content: list[JsonDict]) -> None: """Checks that all 
attributes are present in content Args: content: List that is checked for content @@ -497,7 +497,7 @@ def _check_fields(self, content: List[JsonDict]) -> None: self.assertIn("media_length", c) def _order_test( - self, order_type: str, expected_user_list: List[str], dir: Optional[str] = None + self, order_type: str, expected_user_list: list[str], dir: Optional[str] = None ) -> None: """Request the list of users in a certain order. Assert that order is what we expect diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py index ca41cd6c31..040b21d471 100644 --- a/tests/rest/admin/test_user.py +++ b/tests/rest/admin/test_user.py @@ -27,7 +27,7 @@ import urllib.parse from binascii import unhexlify from http import HTTPStatus -from typing import Dict, List, Optional +from typing import Optional from unittest.mock import AsyncMock, Mock, patch from parameterized import parameterized, parameterized_class @@ -1185,7 +1185,7 @@ def test_filter_not_user_types(self) -> None: ) def test_user_type( - expected_user_ids: List[str], not_user_types: Optional[List[str]] = None + expected_user_ids: list[str], not_user_types: Optional[list[str]] = None ) -> None: """Runs a test for the not_user_types param Args: @@ -1262,7 +1262,7 @@ def test_filter_not_user_types_with_extra(self) -> None: ) def test_user_type( - expected_user_ids: List[str], not_user_types: Optional[List[str]] = None + expected_user_ids: list[str], not_user_types: Optional[list[str]] = None ) -> None: """Runs a test for the not_user_types param Args: @@ -1373,7 +1373,7 @@ def test_filter_locked(self) -> None: def _order_test( self, - expected_user_list: List[str], + expected_user_list: list[str], order_by: Optional[str], dir: Optional[str] = None, ) -> None: @@ -1403,7 +1403,7 @@ def _order_test( self.assertEqual(expected_user_list, returned_order) self._check_fields(channel.json_body["users"]) - def _check_fields(self, content: List[JsonDict]) -> None: + def _check_fields(self, content: 
list[JsonDict]) -> None: """Checks that the expected user attributes are present in content Args: content: List that is checked for content @@ -3690,7 +3690,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.other_user ) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources @@ -4138,7 +4138,7 @@ def test_order_by(self) -> None: [media2] + sorted([media1, media3]), "safe_from_quarantine", "b" ) - def _create_media_for_user(self, user_token: str, number_media: int) -> List[str]: + def _create_media_for_user(self, user_token: str, number_media: int) -> list[str]: """ Create a number of media for a specific user Args: @@ -4195,7 +4195,7 @@ def _create_media_and_access( return media_id - def _check_fields(self, content: List[JsonDict]) -> None: + def _check_fields(self, content: list[JsonDict]) -> None: """Checks that the expected user attributes are present in content Args: content: List that is checked for content @@ -4212,7 +4212,7 @@ def _check_fields(self, content: List[JsonDict]) -> None: def _order_test( self, - expected_media_list: List[str], + expected_media_list: list[str], order_by: Optional[str], dir: Optional[str] = None, ) -> None: diff --git a/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py b/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py index 4e151b9aae..de76334f64 100644 --- a/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py +++ b/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py @@ -13,7 +13,7 @@ # import logging from http import HTTPStatus -from typing import List, Optional, Tuple, cast +from typing import Optional, cast from twisted.test.proto_helpers import MemoryReactor @@ -358,7 +358,7 @@ def 
test_limit_and_companion_backpagination(self) -> None: using the companion /thread_subscriptions endpoint. """ - thread_root_ids: List[str] = [] + thread_root_ids: list[str] = [] def make_subscription() -> None: thread_root_resp = self.helper.send( @@ -455,7 +455,7 @@ def make_subscription() -> None: def _do_backpaginate( self, *, from_tok: str, to_tok: str, limit: int, access_token: str - ) -> Tuple[JsonDict, Optional[str]]: + ) -> tuple[JsonDict, Optional[str]]: channel = self.make_request( "GET", "/_matrix/client/unstable/io.element.msc4308/thread_subscriptions" diff --git a/tests/rest/client/sliding_sync/test_extension_to_device.py b/tests/rest/client/sliding_sync/test_extension_to_device.py index a77b0a2e9f..0b0a65babf 100644 --- a/tests/rest/client/sliding_sync/test_extension_to_device.py +++ b/tests/rest/client/sliding_sync/test_extension_to_device.py @@ -12,7 +12,6 @@ # . # import logging -from typing import List from parameterized import parameterized_class @@ -59,7 +58,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: super().prepare(reactor, clock, hs) def _assert_to_device_response( - self, response_body: JsonDict, expected_messages: List[JsonDict] + self, response_body: JsonDict, expected_messages: list[JsonDict] ) -> str: """Assert the sliding sync response was successful and has the expected to-device messages. diff --git a/tests/rest/client/sliding_sync/test_rooms_timeline.py b/tests/rest/client/sliding_sync/test_rooms_timeline.py index 44a6068c11..04a9cd5382 100644 --- a/tests/rest/client/sliding_sync/test_rooms_timeline.py +++ b/tests/rest/client/sliding_sync/test_rooms_timeline.py @@ -12,7 +12,7 @@ # . 
# import logging -from typing import List, Optional +from typing import Optional from parameterized import parameterized_class @@ -75,14 +75,14 @@ def _assertListEqual( if actual_items == expected_items: return - expected_lines: List[str] = [] + expected_lines: list[str] = [] for expected_item in expected_items: is_expected_in_actual = expected_item in actual_items expected_lines.append( "{} {}".format(" " if is_expected_in_actual else "?", expected_item) ) - actual_lines: List[str] = [] + actual_lines: list[str] = [] for actual_item in actual_items: is_actual_in_expected = actual_item in expected_items actual_lines.append( @@ -101,8 +101,8 @@ def _assertTimelineEqual( self, *, room_id: str, - actual_event_ids: List[str], - expected_event_ids: List[str], + actual_event_ids: list[str], + expected_event_ids: list[str], message: Optional[str] = None, ) -> None: """ diff --git a/tests/rest/client/sliding_sync/test_sliding_sync.py b/tests/rest/client/sliding_sync/test_sliding_sync.py index 8da5863b3a..9f4c6bad05 100644 --- a/tests/rest/client/sliding_sync/test_sliding_sync.py +++ b/tests/rest/client/sliding_sync/test_sliding_sync.py @@ -12,7 +12,7 @@ # . # import logging -from typing import Any, Dict, Iterable, List, Literal, Optional, Tuple +from typing import Any, Iterable, Literal, Optional from unittest.mock import AsyncMock from parameterized import parameterized, parameterized_class @@ -82,7 +82,7 @@ def default_config(self) -> JsonDict: def do_sync( self, sync_body: JsonDict, *, since: Optional[str] = None, tok: str - ) -> Tuple[JsonDict, str]: + ) -> tuple[JsonDict, str]: """Do a sliding sync request with given body. Asserts the request was successful. @@ -170,7 +170,7 @@ def _add_new_dm_to_global_account_data( # Scrutinize the account data since it has no concrete type. We're just copying # everything into a known type. It should be a mapping from user ID to a list of # room IDs. Ignore anything else. 
- new_dm_map: Dict[str, List[str]] = {} + new_dm_map: dict[str, list[str]] = {} if isinstance(existing_dm_map, dict): for user_id, room_ids in existing_dm_map.items(): if isinstance(user_id, str) and isinstance(room_ids, list): @@ -239,7 +239,7 @@ def _create_dm_room( def _create_remote_invite_room_for_user( self, invitee_user_id: str, - unsigned_invite_room_state: Optional[List[StrippedStateEvent]], + unsigned_invite_room_state: Optional[list[StrippedStateEvent]], invite_room_id: Optional[str] = None, ) -> str: """ diff --git a/tests/rest/client/test_account.py b/tests/rest/client/test_account.py index 773f49dfc9..c4c62c7800 100644 --- a/tests/rest/client/test_account.py +++ b/tests/rest/client/test_account.py @@ -23,7 +23,7 @@ import re from email.parser import Parser from http import HTTPStatus -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union from unittest.mock import Mock from twisted.internet.interfaces import IReactorTCP @@ -87,7 +87,7 @@ async def sendmail( ) -> None: self.email_attempts.append(msg_bytes) - self.email_attempts: List[bytes] = [] + self.email_attempts: list[bytes] = [] hs.get_send_email_handler()._sendmail = sendmail return hs @@ -721,7 +721,7 @@ class WhoamiTestCase(unittest.HomeserverTestCase): register.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["allow_guest_access"] = True return config @@ -827,7 +827,7 @@ async def sendmail( ) -> None: self.email_attempts.append(msg_bytes) - self.email_attempts: List[bytes] = [] + self.email_attempts: list[bytes] = [] self.hs.get_send_email_handler()._sendmail = sendmail return self.hs @@ -1501,10 +1501,10 @@ async def is_expired(user_id: str) -> bool: def _test_status( self, - users: Optional[List[str]], + users: Optional[list[str]], expected_status_code: int = HTTPStatus.OK, - expected_statuses: Optional[Dict[str, Dict[str, bool]]] = None, - 
expected_failures: Optional[List[str]] = None, + expected_statuses: Optional[dict[str, dict[str, bool]]] = None, + expected_failures: Optional[list[str]] = None, expected_errcode: Optional[str] = None, ) -> None: """Send a request to the account status endpoint and check that the response diff --git a/tests/rest/client/test_auth.py b/tests/rest/client/test_auth.py index f5b7f95721..5955d4b7a2 100644 --- a/tests/rest/client/test_auth.py +++ b/tests/rest/client/test_auth.py @@ -20,7 +20,7 @@ # import re from http import HTTPStatus -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Optional, Union from twisted.internet.defer import succeed from twisted.internet.testing import MemoryReactor @@ -47,7 +47,7 @@ class DummyRecaptchaChecker(UserInteractiveAuthChecker): def __init__(self, hs: HomeServer) -> None: super().__init__(hs) - self.recaptcha_attempts: List[Tuple[dict, str]] = [] + self.recaptcha_attempts: list[tuple[dict, str]] = [] def is_enabled(self) -> bool: return True @@ -178,7 +178,7 @@ class UIAuthTests(unittest.HomeserverTestCase): register.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() # public_baseurl uses an http:// scheme because FakeChannel.isSecure() returns @@ -195,7 +195,7 @@ def default_config(self) -> Dict[str, Any]: return config - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resource_dict = super().create_resource_dict() resource_dict.update(build_synapse_client_resource_tree(self.hs)) return resource_dict @@ -1091,7 +1091,7 @@ def test_many_token_refresh(self) -> None: was very slow if a lot of refreshes had been performed for the session. """ - def _refresh(refresh_token: str) -> Tuple[str, str]: + def _refresh(refresh_token: str) -> tuple[str, str]: """ Performs one refresh, returning the next refresh token and access token. 
""" @@ -1172,7 +1172,7 @@ def _txn(txn: LoggingTransaction) -> int: def oidc_config( id: str, with_localpart_template: bool, **kwargs: Any -) -> Dict[str, Any]: +) -> dict[str, Any]: """Sample OIDC provider config used in backchannel logout tests. Args: @@ -1185,7 +1185,7 @@ def oidc_config( A dict suitable for the `oidc_config` or the `oidc_providers[]` parts of the HS config """ - config: Dict[str, Any] = { + config: dict[str, Any] = { "idp_id": id, "idp_name": id, "issuer": TEST_OIDC_ISSUER, @@ -1213,7 +1213,7 @@ class OidcBackchannelLogoutTests(unittest.HomeserverTestCase): login.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() # public_baseurl uses an http:// scheme because FakeChannel.isSecure() returns @@ -1223,7 +1223,7 @@ def default_config(self) -> Dict[str, Any]: return config - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resource_dict = super().create_resource_dict() resource_dict.update(build_synapse_client_resource_tree(self.hs)) return resource_dict @@ -1363,7 +1363,7 @@ def test_logout_during_mapping(self) -> None: # We should have a user_mapping_session cookie cookie_headers = channel.headers.getRawHeaders("Set-Cookie") assert cookie_headers - cookies: Dict[str, str] = {} + cookies: dict[str, str] = {} for h in cookie_headers: key, value = h.split(";")[0].split("=", maxsplit=1) cookies[key] = value diff --git a/tests/rest/client/test_delayed_events.py b/tests/rest/client/test_delayed_events.py index 221a4902f2..c67ffc7668 100644 --- a/tests/rest/client/test_delayed_events.py +++ b/tests/rest/client/test_delayed_events.py @@ -15,7 +15,6 @@ """Tests REST events for /delayed_events paths.""" from http import HTTPStatus -from typing import List from parameterized import parameterized @@ -574,7 +573,7 @@ def test_delayed_state_is_cancelled_by_new_state_from_other_user( ) 
self.assertEqual(setter_expected, content.get(setter_key), content) - def _get_delayed_events(self) -> List[JsonDict]: + def _get_delayed_events(self) -> list[JsonDict]: channel = self.make_request( "GET", PATH_PREFIX, diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py index c54e409a6c..1ebd59b42a 100644 --- a/tests/rest/client/test_login.py +++ b/tests/rest/client/test_login.py @@ -25,11 +25,8 @@ BinaryIO, Callable, Collection, - Dict, - List, Literal, Optional, - Tuple, Union, ) from unittest.mock import Mock @@ -146,11 +143,11 @@ async def check_login_for_spam( user_id: str, device_id: Optional[str], initial_display_name: Optional[str], - request_info: Collection[Tuple[Optional[str], str]], + request_info: Collection[tuple[Optional[str], str]], auth_provider_id: Optional[str] = None, ) -> Union[ Literal["NOT_SPAM"], - Tuple["synapse.module_api.errors.Codes", JsonDict], + tuple["synapse.module_api.errors.Codes", JsonDict], ]: return "NOT_SPAM" @@ -170,11 +167,11 @@ async def check_login_for_spam( user_id: str, device_id: Optional[str], initial_display_name: Optional[str], - request_info: Collection[Tuple[Optional[str], str]], + request_info: Collection[tuple[Optional[str], str]], auth_provider_id: Optional[str] = None, ) -> Union[ Literal["NOT_SPAM"], - Tuple["synapse.module_api.errors.Codes", JsonDict], + tuple["synapse.module_api.errors.Codes", JsonDict], ]: # Return an odd set of values to ensure that they get correctly passed # to the client. 
@@ -633,7 +630,7 @@ class MultiSSOTestCase(unittest.HomeserverTestCase): login.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = PUBLIC_BASEURL @@ -678,7 +675,7 @@ def default_config(self) -> Dict[str, Any]: def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.login_sso_redirect_url_builder = LoginSSORedirectURIBuilder(hs.config) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d @@ -730,7 +727,7 @@ def test_multi_sso_redirect(self) -> None: p.close() # there should be a link for each href - returned_idps: List[str] = [] + returned_idps: list[str] = [] for link in p.links: path, query = link.split("?", 1) self.assertEqual(path, "pick_idp") @@ -891,7 +888,7 @@ def test_login_via_oidc(self) -> None: # ... and should have set a cookie including the redirect url cookie_headers = channel.headers.getRawHeaders("Set-Cookie") assert cookie_headers - cookies: Dict[str, str] = {} + cookies: dict[str, str] = {} for h in cookie_headers: key, value = h.split(";")[0].split("=", maxsplit=1) cookies[key] = value @@ -1179,7 +1176,7 @@ class JWTTestCase(unittest.HomeserverTestCase): "algorithm": jwt_algorithm, } - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() # If jwt_config has been defined (eg via @override_config), don't replace it. 
@@ -1188,7 +1185,7 @@ def default_config(self) -> Dict[str, Any]: return config - def jwt_encode(self, payload: Dict[str, Any], secret: str = jwt_secret) -> str: + def jwt_encode(self, payload: dict[str, Any], secret: str = jwt_secret) -> str: header = {"alg": self.jwt_algorithm} result: bytes = jwt.encode(header, payload, secret) return result.decode("ascii") @@ -1426,7 +1423,7 @@ class JWTPubKeyTestCase(unittest.HomeserverTestCase): ] ) - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["jwt_config"] = { "enabled": True, @@ -1435,7 +1432,7 @@ def default_config(self) -> Dict[str, Any]: } return config - def jwt_encode(self, payload: Dict[str, Any], secret: str = jwt_privatekey) -> str: + def jwt_encode(self, payload: dict[str, Any], secret: str = jwt_privatekey) -> str: header = {"alg": "RS256"} if secret.startswith("-----BEGIN RSA PRIVATE KEY-----"): secret = JsonWebKey.import_key(secret, {"kty": "RSA"}) @@ -1630,7 +1627,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: ) return hs - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = PUBLIC_BASEURL @@ -1649,7 +1646,7 @@ def default_config(self) -> Dict[str, Any]: config["sso"] = {"client_whitelist": ["https://x"]} return config - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d @@ -1660,7 +1657,7 @@ def proceed_to_username_picker_page( displayname: str, email: str, picture: str, - ) -> Tuple[str, str]: + ) -> tuple[str, str]: # do the start of the login flow channel, _ = self.helper.auth_via_oidc( fake_oidc_server, @@ -1681,7 +1678,7 @@ def proceed_to_username_picker_page( self.assertEqual(picker_url, 
"/_synapse/client/pick_username/account_details") # ... with a username_mapping_session cookie - cookies: Dict[str, str] = {} + cookies: dict[str, str] = {} channel.extract_cookies(cookies) self.assertIn("username_mapping_session", cookies) session_id = cookies["username_mapping_session"] @@ -1894,5 +1891,5 @@ async def mock_get_file( max_size: Optional[int] = None, headers: Optional[RawHeaders] = None, is_allowed_content_type: Optional[Callable[[str], bool]] = None, -) -> Tuple[int, Dict[bytes, List[bytes]], str, int]: +) -> tuple[int, dict[bytes, list[bytes]], str, int]: return 0, {b"Content-Type": [b"image/png"]}, "", 200 diff --git a/tests/rest/client/test_media.py b/tests/rest/client/test_media.py index 91bf94b672..79f70db8a3 100644 --- a/tests/rest/client/test_media.py +++ b/tests/rest/client/test_media.py @@ -24,7 +24,7 @@ import os import re import shutil -from typing import Any, BinaryIO, ClassVar, Dict, List, Optional, Sequence, Tuple, Type +from typing import Any, BinaryIO, ClassVar, Optional, Sequence from unittest.mock import MagicMock, Mock, patch from urllib import parse from urllib.parse import quote, urlencode @@ -265,7 +265,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: assert self.media_repo.url_previewer is not None self.url_previewer = self.media_repo.url_previewer - self.lookups: Dict[str, Any] = {} + self.lookups: dict[str, Any] = {} class Resolver: def resolveHostName( @@ -273,7 +273,7 @@ def resolveHostName( resolutionReceiver: IResolutionReceiver, hostName: str, portNumber: int = 0, - addressTypes: Optional[Sequence[Type[IAddress]]] = None, + addressTypes: Optional[Sequence[type[IAddress]]] = None, transportSemantics: str = "TCP", ) -> IResolutionReceiver: resolution = HostResolution(hostName) @@ -1357,7 +1357,7 @@ def test_oembed_autodiscovery_blocked(self) -> None: self.assertEqual(body["og:title"], "Test") self.assertNotIn("og:image", body) - def _download_image(self) -> Tuple[str, str]: + def 
_download_image(self) -> tuple[str, str]: """Downloads an image into the URL cache. Returns: A (host, media_id) tuple representing the MXC URI of the image. @@ -1994,8 +1994,8 @@ class DownloadAndThumbnailTestCase(unittest.HomeserverTestCase): ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - self.fetches: List[ - Tuple[ + self.fetches: list[ + tuple[ "Deferred[Any]", str, str, @@ -2014,12 +2014,12 @@ def federation_get_file( retry_on_dns_fail: bool = True, ignore_backoff: bool = False, follow_redirects: bool = False, - ) -> "Deferred[Tuple[int, Dict[bytes, List[bytes]], bytes]]": + ) -> "Deferred[tuple[int, dict[bytes, list[bytes]], bytes]]": """A mock for MatrixFederationHttpClient.federation_get_file.""" def write_to( - r: Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]], bytes]], - ) -> Tuple[int, Dict[bytes, List[bytes]], bytes]: + r: tuple[bytes, tuple[int, dict[bytes, list[bytes]], bytes]], + ) -> tuple[int, dict[bytes, list[bytes]], bytes]: data, response = r output_stream.write(data) return response @@ -2029,7 +2029,7 @@ def write_err(f: Failure) -> Failure: output_stream.write(f.value.response) return f - d: Deferred[Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]], bytes]]] = ( + d: Deferred[tuple[bytes, tuple[int, dict[bytes, list[bytes]], bytes]]] = ( Deferred() ) self.fetches.append((d, destination, path, args)) @@ -2048,12 +2048,12 @@ def get_file( retry_on_dns_fail: bool = True, ignore_backoff: bool = False, follow_redirects: bool = False, - ) -> "Deferred[Tuple[int, Dict[bytes, List[bytes]]]]": + ) -> "Deferred[tuple[int, dict[bytes, list[bytes]]]]": """A mock for MatrixFederationHttpClient.get_file.""" def write_to( - r: Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]], - ) -> Tuple[int, Dict[bytes, List[bytes]]]: + r: tuple[bytes, tuple[int, dict[bytes, list[bytes]]]], + ) -> tuple[int, dict[bytes, list[bytes]]]: data, response = r output_stream.write(data) return response @@ -2063,7 +2063,7 @@ def write_err(f: 
Failure) -> Failure: output_stream.write(f.value.response) return f - d: Deferred[Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]]] = Deferred() + d: Deferred[tuple[bytes, tuple[int, dict[bytes, list[bytes]]]]] = Deferred() self.fetches.append((d, destination, path, args)) # Note that this callback changes the value held by d. d_after_callback = d.addCallbacks(write_to, write_err) @@ -2538,7 +2538,7 @@ def test_same_quality(self, method: str, desired_size: int) -> None: @parameterized_class(configs) class AuthenticatedMediaTestCase(unittest.HomeserverTestCase): - extra_config: Dict[str, Any] + extra_config: dict[str, Any] servlets = [ media.register_servlets, login.register_servlets, @@ -2576,7 +2576,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.user = self.register_user("user", "pass") self.tok = self.login("user", "pass") - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources @@ -2895,7 +2895,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.user = self.register_user("user", "pass") self.tok = self.login("user", "pass") - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources @@ -3012,7 +3012,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: async def _get_media_upload_limits_for_user( self, user_id: str, - ) -> Optional[List[MediaUploadLimit]]: + ) -> Optional[list[MediaUploadLimit]]: # user1 has custom limits if user_id == self.user1: # n.b. 
we return these in increasing duration order and Synapse will need to sort them correctly @@ -3060,7 +3060,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: on_media_upload_limit_exceeded=self._on_media_upload_limit_exceeded, ) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources diff --git a/tests/rest/client/test_notifications.py b/tests/rest/client/test_notifications.py index e00152389b..7e2a63955c 100644 --- a/tests/rest/client/test_notifications.py +++ b/tests/rest/client/test_notifications.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional, Tuple +from typing import Optional from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -156,7 +156,7 @@ def test_pagination_of_notifications(self) -> None: def _request_notifications( self, from_token: Optional[str], limit: int, expected_count: int - ) -> Tuple[List[str], str]: + ) -> tuple[list[str], str]: """ Make a request to /notifications to get the latest events to be notified about. 
diff --git a/tests/rest/client/test_profile.py b/tests/rest/client/test_profile.py index 18b3d3a089..aa9b72c65e 100644 --- a/tests/rest/client/test_profile.py +++ b/tests/rest/client/test_profile.py @@ -24,7 +24,7 @@ import logging import urllib.parse from http import HTTPStatus -from typing import Any, Dict, Optional +from typing import Any, Optional from canonicaljson import encode_canonical_json @@ -778,7 +778,7 @@ def test_set_custom_field_other(self) -> None: self.assertEqual(channel.code, 403, channel.result) self.assertEqual(channel.json_body["errcode"], Codes.FORBIDDEN) - def _setup_local_files(self, names_and_props: Dict[str, Dict[str, Any]]) -> None: + def _setup_local_files(self, names_and_props: dict[str, dict[str, Any]]) -> None: """Stores metadata about files in the database. Args: diff --git a/tests/rest/client/test_redactions.py b/tests/rest/client/test_redactions.py index e3ca108d03..88be8748ee 100644 --- a/tests/rest/client/test_redactions.py +++ b/tests/rest/client/test_redactions.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional +from typing import Optional from parameterized import parameterized @@ -85,7 +85,7 @@ def _redact_event( room_id: str, event_id: str, expect_code: int = 200, - with_relations: Optional[List[str]] = None, + with_relations: Optional[list[str]] = None, content: Optional[JsonDict] = None, ) -> JsonDict: """Helper function to send a redaction event. 
@@ -104,7 +104,7 @@ def _redact_event( self.assertEqual(channel.code, expect_code) return channel.json_body - def _sync_room_timeline(self, access_token: str, room_id: str) -> List[JsonDict]: + def _sync_room_timeline(self, access_token: str, room_id: str) -> list[JsonDict]: channel = self.make_request("GET", "sync", access_token=access_token) self.assertEqual(channel.code, 200) room_sync = channel.json_body["rooms"]["join"][room_id] diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py index c7c81aa81c..2c0396a3de 100644 --- a/tests/rest/client/test_register.py +++ b/tests/rest/client/test_register.py @@ -22,7 +22,7 @@ import datetime import importlib.resources as importlib_resources import os -from typing import Any, Dict, List, Tuple +from typing import Any from unittest.mock import AsyncMock from twisted.internet.testing import MemoryReactor @@ -54,7 +54,7 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): ] url = b"/_matrix/client/r0/register" - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["allow_guest_access"] = True return config @@ -1032,7 +1032,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: async def sendmail(*args: Any, **kwargs: Any) -> None: self.email_attempts.append((args, kwargs)) - self.email_attempts: List[Tuple[Any, Any]] = [] + self.email_attempts: list[tuple[Any, Any]] = [] self.hs.get_send_email_handler()._sendmail = sendmail self.store = self.hs.get_datastores().main @@ -1146,7 +1146,7 @@ def test_deactivated_user(self) -> None: self.assertEqual(len(self.email_attempts), 0) - def create_user(self) -> Tuple[str, str]: + def create_user(self) -> tuple[str, str]: user_id = self.register_user("kermit", "monkey") tok = self.login("kermit", "monkey") # We need to manually add an email address otherwise the handler will do @@ -1250,7 +1250,7 @@ class 
RegistrationTokenValidityRestServletTestCase(unittest.HomeserverTestCase): servlets = [register.register_servlets] url = "/_matrix/client/v1/register/m.login.registration_token/validity" - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["registration_requires_token"] = True return config diff --git a/tests/rest/client/test_relations.py b/tests/rest/client/test_relations.py index 21fb86367a..3912a3c772 100644 --- a/tests/rest/client/test_relations.py +++ b/tests/rest/client/test_relations.py @@ -20,7 +20,7 @@ # import urllib.parse -from typing import Any, Callable, Dict, List, Optional, Tuple +from typing import Any, Callable, Optional from unittest.mock import AsyncMock, patch from twisted.internet.testing import MemoryReactor @@ -48,7 +48,7 @@ class BaseRelationsTestCase(unittest.HomeserverTestCase): ] hijack_auth = False - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: # We need to enable msc1849 support for aggregations config = super().default_config() @@ -69,7 +69,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: res = self.helper.send(self.room, body="Hi!", tok=self.user_token) self.parent_id = res["event_id"] - def _create_user(self, localpart: str) -> Tuple[str, str]: + def _create_user(self, localpart: str) -> tuple[str, str]: user_id = self.register_user(localpart, "abc123") access_token = self.login(localpart, "abc123") @@ -123,7 +123,7 @@ def _send_relation( self.assertEqual(expected_response_code, channel.code, channel.json_body) return channel - def _get_related_events(self) -> List[str]: + def _get_related_events(self) -> list[str]: """ Requests /relations on the parent ID and returns a list of event IDs. 
""" @@ -149,7 +149,7 @@ def _get_bundled_aggregations(self) -> JsonDict: self.assertEqual(200, channel.code, channel.json_body) return channel.json_body["unsigned"].get("m.relations", {}) - def _find_event_in_chunk(self, events: List[JsonDict]) -> JsonDict: + def _find_event_in_chunk(self, events: list[JsonDict]) -> JsonDict: """ Find the parent event in a chunk of events and assert that it has the proper bundled aggregations. """ @@ -846,7 +846,7 @@ def test_repeated_paginate_relations(self) -> None: expected_event_ids.append(channel.json_body["event_id"]) prev_token: Optional[str] = "" - found_event_ids: List[str] = [] + found_event_ids: list[str] = [] for _ in range(20): from_token = "" if prev_token: @@ -1484,9 +1484,9 @@ class RelationIgnoredUserTestCase(BaseRelationsTestCase): def _test_ignored_user( self, relation_type: str, - allowed_event_ids: List[str], - ignored_event_ids: List[str], - ) -> Tuple[JsonDict, JsonDict]: + allowed_event_ids: list[str], + ignored_event_ids: list[str], + ) -> tuple[JsonDict, JsonDict]: """ Fetch the relations and ensure they're all there, then ignore user2, and repeat. @@ -1600,7 +1600,7 @@ def _redact(self, event_id: str) -> None: ) self.assertEqual(200, channel.code, channel.json_body) - def _get_threads(self) -> List[Tuple[str, str]]: + def _get_threads(self) -> list[tuple[str, str]]: """Request the threads in the room and returns a list of thread ID and latest event ID.""" # Request the threads in the room. 
channel = self.make_request( @@ -1793,7 +1793,7 @@ def test_redact_parent_thread(self) -> None: class ThreadsTestCase(BaseRelationsTestCase): - def _get_threads(self, body: JsonDict) -> List[Tuple[str, str]]: + def _get_threads(self, body: JsonDict) -> list[tuple[str, str]]: return [ ( ev["event_id"], diff --git a/tests/rest/client/test_rendezvous.py b/tests/rest/client/test_rendezvous.py index 160f852705..dc4f833fa2 100644 --- a/tests/rest/client/test_rendezvous.py +++ b/tests/rest/client/test_rendezvous.py @@ -19,7 +19,6 @@ # # -from typing import Dict from urllib.parse import urlparse from twisted.internet.testing import MemoryReactor @@ -46,7 +45,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.hs = self.setup_test_homeserver() return self.hs - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: return { **super().create_resource_dict(), "/_synapse/client/rendezvous": MSC4108RendezvousSessionResource(self.hs), diff --git a/tests/rest/client/test_retention.py b/tests/rest/client/test_retention.py index 7a816a66e0..758d62e63b 100644 --- a/tests/rest/client/test_retention.py +++ b/tests/rest/client/test_retention.py @@ -17,7 +17,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict +from typing import Any from unittest.mock import Mock from twisted.internet.testing import MemoryReactor @@ -265,7 +265,7 @@ class RetentionNoDefaultPolicyTestCase(unittest.HomeserverTestCase): room.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() retention_config = { diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index feae5f77cd..4142aed363 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -25,7 +25,7 @@ import json from http import HTTPStatus -from typing import Any, Dict, 
Iterable, List, Literal, Optional, Tuple, Union +from typing import Any, Iterable, Literal, Optional, Union from unittest.mock import AsyncMock, Mock, call, patch from urllib import parse as urlparse @@ -989,7 +989,7 @@ async def user_may_join_room_tuple( mxid: str, room_id: str, is_invite: bool, - ) -> Tuple[Codes, dict]: + ) -> tuple[Codes, dict]: return Codes.INCOMPATIBLE_ROOM_VERSION, {} join_mock.side_effect = user_may_join_room_tuple @@ -1002,7 +1002,7 @@ async def user_may_join_room_tuple( self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) self.assertEqual(join_mock.call_count, 0) - def _create_basic_room(self) -> Tuple[int, object]: + def _create_basic_room(self) -> tuple[int, object]: """ Tries to create a basic room and returns the response code. """ @@ -1351,7 +1351,7 @@ def test_spam_checker_may_join_room(self) -> None: """ # Register a dummy callback. Make it allow all room joins for now. - return_value: Union[Literal["NOT_SPAM"], Tuple[Codes, dict], Codes] = ( + return_value: Union[Literal["NOT_SPAM"], tuple[Codes, dict], Codes] = ( synapse.module_api.NOT_SPAM ) @@ -1359,7 +1359,7 @@ async def user_may_join_room( userid: str, room_id: str, is_invited: bool, - ) -> Union[Literal["NOT_SPAM"], Tuple[Codes, dict], Codes]: + ) -> Union[Literal["NOT_SPAM"], tuple[Codes, dict], Codes]: return return_value # `spec` argument is needed for this function mock to have `__qualname__`, which @@ -1848,12 +1848,12 @@ def test_rooms_messages_sent(self) -> None: def test_spam_checker_check_event_for_spam( self, name: str, - value: Union[str, bool, Codes, Tuple[Codes, JsonDict]], + value: Union[str, bool, Codes, tuple[Codes, JsonDict]], expected_code: int, expected_fields: dict, ) -> None: class SpamCheck: - mock_return_value: Union[str, bool, Codes, Tuple[Codes, JsonDict], bool] = ( + mock_return_value: Union[str, bool, Codes, tuple[Codes, JsonDict], bool] = ( "NOT_SPAM" ) mock_content: Optional[JsonDict] = None @@ -1861,7 +1861,7 @@ class SpamCheck: 
async def check_event_for_spam( self, event: synapse.events.EventBase, - ) -> Union[str, Codes, Tuple[Codes, JsonDict], bool]: + ) -> Union[str, Codes, tuple[Codes, JsonDict], bool]: self.mock_content = event.content return self.mock_return_value @@ -1915,7 +1915,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.admin_user_id = self.register_user("admin", "pass") self.admin_access_token = self.login("admin", "pass") - def power_levels(self, room_id: str) -> Dict[str, Any]: + def power_levels(self, room_id: str) -> dict[str, Any]: return self.helper.get_state( room_id, "m.room.power_levels", self.admin_access_token ) @@ -2076,7 +2076,7 @@ def test_any_room_override_defeats_config_override(self) -> None: # Given the server has config allowing normal users to post my event type # And I am a normal member of a room # But the room was created with special permissions - extra_content: Dict[str, Any] = { + extra_content: dict[str, Any] = { "power_level_content_override": {"events": {}}, } room_id = self.helper.create_room_as( @@ -2707,9 +2707,9 @@ def default_config(self) -> JsonDict: def make_public_rooms_request( self, - room_types: Optional[List[Union[str, None]]], + room_types: Optional[list[Union[str, None]]], instance_id: Optional[str] = None, - ) -> Tuple[List[Dict[str, Any]], int]: + ) -> tuple[list[dict[str, Any]], int]: body: JsonDict = {"filter": {PublicRoomsFilterFields.ROOM_TYPES: room_types}} if instance_id: body["third_party_instance_id"] = "test|test" @@ -3470,7 +3470,7 @@ def _send_labelled_messages_in_room(self) -> str: class RelationsTestCase(PaginationTestCase): - def _filter_messages(self, filter: JsonDict) -> List[str]: + def _filter_messages(self, filter: JsonDict) -> list[str]: """Make a request to /messages with a filter, returns the chunk of events.""" from_token = self.get_success( self.from_token.to_string(self.hs.get_datastores().main) @@ -4529,8 +4529,8 @@ def prepare(self, reactor: MemoryReactor, 
clock: Clock, hs: HomeServer) -> None: def _check_redactions( self, - original_events: List[EventBase], - pulled_events: List[JsonDict], + original_events: list[EventBase], + pulled_events: list[JsonDict], expect_redaction: bool, reason: Optional[str] = None, ) -> None: diff --git a/tests/rest/client/test_sync.py b/tests/rest/client/test_sync.py index e949bb69e6..fcbf3fd53c 100644 --- a/tests/rest/client/test_sync.py +++ b/tests/rest/client/test_sync.py @@ -20,7 +20,6 @@ # import json import logging -from typing import List from parameterized import parameterized @@ -131,7 +130,7 @@ def test_sync_filter_labels_not_labels(self) -> None: self.assertEqual(len(events), 1, [event["content"] for event in events]) self.assertEqual(events[0]["content"]["body"], "with wrong label", events[0]) - def _test_sync_filter_labels(self, sync_filter: str) -> List[JsonDict]: + def _test_sync_filter_labels(self, sync_filter: str) -> list[JsonDict]: user_id = self.register_user("kermit", "test") tok = self.login("kermit", "test") diff --git a/tests/rest/client/test_third_party_rules.py b/tests/rest/client/test_third_party_rules.py index 4161faa11f..78fa8f4e1c 100644 --- a/tests/rest/client/test_third_party_rules.py +++ b/tests/rest/client/test_third_party_rules.py @@ -19,7 +19,7 @@ # # import threading -from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Optional, Union from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -48,7 +48,7 @@ class LegacyThirdPartyRulesTestModule: - def __init__(self, config: Dict, module_api: "ModuleApi") -> None: + def __init__(self, config: dict, module_api: "ModuleApi") -> None: # keep a record of the "current" rules module, so that the test can patch # it if desired. 
thread_local.rules_module = self @@ -65,12 +65,12 @@ async def check_event_allowed( return True @staticmethod - def parse_config(config: Dict[str, Any]) -> Dict[str, Any]: + def parse_config(config: dict[str, Any]) -> dict[str, Any]: return config class LegacyDenyNewRooms(LegacyThirdPartyRulesTestModule): - def __init__(self, config: Dict, module_api: "ModuleApi") -> None: + def __init__(self, config: dict, module_api: "ModuleApi") -> None: super().__init__(config, module_api) async def on_create_room( @@ -80,7 +80,7 @@ async def on_create_room( class LegacyChangeEvents(LegacyThirdPartyRulesTestModule): - def __init__(self, config: Dict, module_api: "ModuleApi") -> None: + def __init__(self, config: dict, module_api: "ModuleApi") -> None: super().__init__(config, module_api) async def check_event_allowed( @@ -150,7 +150,7 @@ def test_third_party_rules(self) -> None: # types async def check( ev: EventBase, state: StateMap[EventBase] - ) -> Tuple[bool, Optional[JsonDict]]: + ) -> tuple[bool, Optional[JsonDict]]: return ev.type != "foo.bar.forbidden", None callback = Mock(spec=[], side_effect=check) @@ -207,7 +207,7 @@ def error_dict(self, config: Optional[HomeServerConfig]) -> JsonDict: # add a callback that will raise our hacky exception async def check( ev: EventBase, state: StateMap[EventBase] - ) -> Tuple[bool, Optional[JsonDict]]: + ) -> tuple[bool, Optional[JsonDict]]: raise NastyHackException(429, "message") self.hs.get_module_api_callbacks().third_party_event_rules._check_event_allowed_callbacks = [ @@ -235,7 +235,7 @@ def test_cannot_modify_event(self) -> None: # first patch the event checker so that it will try to modify the event async def check( ev: EventBase, state: StateMap[EventBase] - ) -> Tuple[bool, Optional[JsonDict]]: + ) -> tuple[bool, Optional[JsonDict]]: ev.content = {"x": "y"} return True, None @@ -260,7 +260,7 @@ def test_modify_event(self) -> None: # first patch the event checker so that it will modify the event async def check( ev: 
EventBase, state: StateMap[EventBase] - ) -> Tuple[bool, Optional[JsonDict]]: + ) -> tuple[bool, Optional[JsonDict]]: d = ev.get_dict() d["content"] = {"x": "y"} return True, d @@ -295,7 +295,7 @@ def test_message_edit(self) -> None: # first patch the event checker so that it will modify the event async def check( ev: EventBase, state: StateMap[EventBase] - ) -> Tuple[bool, Optional[JsonDict]]: + ) -> tuple[bool, Optional[JsonDict]]: d = ev.get_dict() d["content"] = { "msgtype": "m.text", @@ -443,7 +443,7 @@ def test_sent_event_end_up_in_room_state(self) -> None: # Define a callback that sends a custom event on power levels update. async def test_fn( event: EventBase, state_events: StateMap[EventBase] - ) -> Tuple[bool, Optional[JsonDict]]: + ) -> tuple[bool, Optional[JsonDict]]: if event.is_state() and event.type == EventTypes.PowerLevels: await api.create_and_send_event_into_room( { diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py index bb83988d76..64d22d485a 100644 --- a/tests/rest/client/test_transactions.py +++ b/tests/rest/client/test_transactions.py @@ -20,7 +20,7 @@ # from http import HTTPStatus -from typing import Any, Generator, Tuple, cast +from typing import Any, Generator, cast from unittest.mock import AsyncMock, Mock, call from twisted.internet import defer, reactor as _reactor @@ -92,7 +92,7 @@ def test_logcontexts_with_async_result( self, ) -> Generator["defer.Deferred[Any]", object, None]: @defer.inlineCallbacks - def cb() -> Generator["defer.Deferred[object]", object, Tuple[int, JsonDict]]: + def cb() -> Generator["defer.Deferred[object]", object, tuple[int, JsonDict]]: # Ignore `multiple-internal-clocks` linter error here since we are creating a `Clock` # for testing purposes. 
yield defer.ensureDeferred( @@ -124,7 +124,7 @@ def test_does_not_cache_exceptions( """ called = [False] - def cb() -> "defer.Deferred[Tuple[int, JsonDict]]": + def cb() -> "defer.Deferred[tuple[int, JsonDict]]": if called[0]: # return a valid result the second time return defer.succeed(self.mock_http_response) @@ -156,7 +156,7 @@ def test_does_not_cache_failures( """ called = [False] - def cb() -> "defer.Deferred[Tuple[int, JsonDict]]": + def cb() -> "defer.Deferred[tuple[int, JsonDict]]": if called[0]: # return a valid result the second time return defer.succeed(self.mock_http_response) diff --git a/tests/rest/client/utils.py b/tests/rest/client/utils.py index bb214759d9..d5c824b291 100644 --- a/tests/rest/client/utils.py +++ b/tests/rest/client/utils.py @@ -30,14 +30,12 @@ Any, AnyStr, Callable, - Dict, Iterable, Literal, Mapping, MutableMapping, Optional, Sequence, - Tuple, overload, ) from urllib.parse import urlencode @@ -87,8 +85,8 @@ def create_room_as( room_version: Optional[str] = ..., tok: Optional[str] = ..., expect_code: Literal[200] = ..., - extra_content: Optional[Dict] = ..., - custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = ..., + extra_content: Optional[dict] = ..., + custom_headers: Optional[Iterable[tuple[AnyStr, AnyStr]]] = ..., ) -> str: ... @overload @@ -99,8 +97,8 @@ def create_room_as( room_version: Optional[str] = ..., tok: Optional[str] = ..., expect_code: int = ..., - extra_content: Optional[Dict] = ..., - custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = ..., + extra_content: Optional[dict] = ..., + custom_headers: Optional[Iterable[tuple[AnyStr, AnyStr]]] = ..., ) -> Optional[str]: ... 
def create_room_as( @@ -110,8 +108,8 @@ def create_room_as( room_version: Optional[str] = None, tok: Optional[str] = None, expect_code: int = HTTPStatus.OK, - extra_content: Optional[Dict] = None, - custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = None, + extra_content: Optional[dict] = None, + custom_headers: Optional[Iterable[tuple[AnyStr, AnyStr]]] = None, ) -> Optional[str]: """ Create a room. @@ -310,7 +308,7 @@ def change_membership( self.auth_user_id = src path = f"/_matrix/client/r0/rooms/{room}/state/m.room.member/{targ}" - url_params: Dict[str, str] = {} + url_params: dict[str, str] = {} if tok: url_params["access_token"] = tok @@ -378,7 +376,7 @@ def send( txn_id: Optional[str] = None, tok: Optional[str] = None, expect_code: int = HTTPStatus.OK, - custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = None, + custom_headers: Optional[Iterable[tuple[AnyStr, AnyStr]]] = None, type: str = "m.room.message", ) -> JsonDict: if body is None: @@ -430,7 +428,7 @@ def send_event( txn_id: Optional[str] = None, tok: Optional[str] = None, expect_code: int = HTTPStatus.OK, - custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = None, + custom_headers: Optional[Iterable[tuple[AnyStr, AnyStr]]] = None, ) -> JsonDict: if txn_id is None: txn_id = "m%s" % (str(time.time())) @@ -497,7 +495,7 @@ def _read_write_state( self, room_id: str, event_type: str, - body: Optional[Dict[str, Any]], + body: Optional[dict[str, Any]], tok: Optional[str], expect_code: int = HTTPStatus.OK, state_key: str = "", @@ -575,7 +573,7 @@ def send_state( self, room_id: str, event_type: str, - body: Dict[str, Any], + body: dict[str, Any], tok: Optional[str] = None, expect_code: int = HTTPStatus.OK, state_key: str = "", @@ -684,7 +682,7 @@ def login_via_oidc( with_sid: bool = False, idp_id: Optional[str] = None, expected_status: int = 200, - ) -> Tuple[JsonDict, FakeAuthorizationGrant]: + ) -> tuple[JsonDict, FakeAuthorizationGrant]: """Log in (as a new user) via OIDC Returns the 
result of the final token login and the fake authorization grant. @@ -757,7 +755,7 @@ def auth_via_oidc( ui_auth_session_id: Optional[str] = None, with_sid: bool = False, idp_id: Optional[str] = None, - ) -> Tuple[FakeChannel, FakeAuthorizationGrant]: + ) -> tuple[FakeChannel, FakeAuthorizationGrant]: """Perform an OIDC authentication flow via a mock OIDC provider. This can be used for either login or user-interactive auth. @@ -790,7 +788,7 @@ def auth_via_oidc( went. """ - cookies: Dict[str, str] = {} + cookies: dict[str, str] = {} with fake_server.patch_homeserver(hs=self.hs): # if we're doing a ui auth, hit the ui auth redirect endpoint @@ -824,7 +822,7 @@ def complete_oidc_auth( cookies: Mapping[str, str], user_info_dict: JsonDict, with_sid: bool = False, - ) -> Tuple[FakeChannel, FakeAuthorizationGrant]: + ) -> tuple[FakeChannel, FakeAuthorizationGrant]: """Mock out an OIDC authentication flow Assumes that an OIDC auth has been initiated by one of initiate_sso_login or diff --git a/tests/rest/key/v2/test_remote_key_resource.py b/tests/rest/key/v2/test_remote_key_resource.py index 8d2489f718..c412a19f9b 100644 --- a/tests/rest/key/v2/test_remote_key_resource.py +++ b/tests/rest/key/v2/test_remote_key_resource.py @@ -19,7 +19,7 @@ # # from io import BytesIO, StringIO -from typing import Any, Dict, Optional, Union +from typing import Any, Optional, Union from unittest.mock import Mock import signedjson.key @@ -156,7 +156,7 @@ class EndToEndPerspectivesTests(BaseRemoteKeyResourceTestCase): endpoint, to check that the two implementations are compatible. 
""" - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() # replace the signing key with our own diff --git a/tests/rest/media/test_domain_blocking.py b/tests/rest/media/test_domain_blocking.py index 9eb0222102..0bdbaa676d 100644 --- a/tests/rest/media/test_domain_blocking.py +++ b/tests/rest/media/test_domain_blocking.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict from twisted.internet.testing import MemoryReactor from twisted.web.resource import Resource @@ -65,7 +64,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: ) ) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: # We need to manually set the resource tree to include media, the # default only does `/_matrix/client` APIs. return {"/_matrix/media": self.hs.get_media_repository_resource()} diff --git a/tests/rest/media/test_url_preview.py b/tests/rest/media/test_url_preview.py index 7c8d2fc998..5af2e79f45 100644 --- a/tests/rest/media/test_url_preview.py +++ b/tests/rest/media/test_url_preview.py @@ -22,7 +22,7 @@ import json import os import re -from typing import Any, Dict, Optional, Sequence, Tuple, Type +from typing import Any, Optional, Sequence from urllib.parse import quote, urlencode from twisted.internet._resolver import HostResolution @@ -127,7 +127,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: assert self.media_repo.url_previewer is not None self.url_previewer = self.media_repo.url_previewer - self.lookups: Dict[str, Any] = {} + self.lookups: dict[str, Any] = {} class Resolver: def resolveHostName( @@ -135,7 +135,7 @@ def resolveHostName( resolutionReceiver: IResolutionReceiver, hostName: str, portNumber: int = 0, - addressTypes: Optional[Sequence[Type[IAddress]]] = None, + addressTypes: Optional[Sequence[type[IAddress]]] = 
None, transportSemantics: str = "TCP", ) -> IResolutionReceiver: resolution = HostResolution(hostName) @@ -150,7 +150,7 @@ def resolveHostName( self.reactor.nameResolver = Resolver() # type: ignore[assignment] - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: """Create a resource tree for the test server A resource tree is a mapping from path to twisted.web.resource. @@ -1227,7 +1227,7 @@ def test_oembed_autodiscovery_blocked(self) -> None: self.assertEqual(body["og:title"], "Test") self.assertNotIn("og:image", body) - def _download_image(self) -> Tuple[str, str]: + def _download_image(self) -> tuple[str, str]: """Downloads an image into the URL cache. Returns: A (host, media_id) tuple representing the MXC URI of the image. diff --git a/tests/rest/synapse/client/test_federation_whitelist.py b/tests/rest/synapse/client/test_federation_whitelist.py index f0067a8f2b..c4a990e32c 100644 --- a/tests/rest/synapse/client/test_federation_whitelist.py +++ b/tests/rest/synapse/client/test_federation_whitelist.py @@ -11,7 +11,6 @@ # See the GNU Affero General Public License for more details: # . 
-from typing import Dict from twisted.web.resource import Resource @@ -28,7 +27,7 @@ class FederationWhitelistTests(unittest.HomeserverTestCase): login.register_servlets, ] - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: base = super().create_resource_dict() base.update(build_synapse_client_resource_tree(self.hs)) return base diff --git a/tests/scripts/test_new_matrix_user.py b/tests/scripts/test_new_matrix_user.py index cae096e72b..2e71e2a797 100644 --- a/tests/scripts/test_new_matrix_user.py +++ b/tests/scripts/test_new_matrix_user.py @@ -18,7 +18,7 @@ # # -from typing import List, Optional +from typing import Optional from unittest.mock import Mock, patch from synapse._scripts.register_new_matrix_user import request_registration @@ -60,8 +60,8 @@ def post( requests.post = post # The fake stdout will be written here - out: List[str] = [] - err_code: List[int] = [] + out: list[str] = [] + err_code: list[int] = [] with patch("synapse._scripts.register_new_matrix_user.requests", requests): request_registration( @@ -96,8 +96,8 @@ def get(url: str, verify: Optional[bool] = None) -> Mock: requests.get = get # The fake stdout will be written here - out: List[str] = [] - err_code: List[int] = [] + out: list[str] = [] + err_code: list[int] = [] with patch("synapse._scripts.register_new_matrix_user.requests", requests): request_registration( @@ -151,8 +151,8 @@ def post( requests.post = post # The fake stdout will be written here - out: List[str] = [] - err_code: List[int] = [] + out: list[str] = [] + err_code: list[int] = [] with patch("synapse._scripts.register_new_matrix_user.requests", requests): request_registration( diff --git a/tests/server.py b/tests/server.py index 208556abaf..ff5c606180 100644 --- a/tests/server.py +++ b/tests/server.py @@ -35,15 +35,10 @@ Any, Awaitable, Callable, - Deque, - Dict, Iterable, - List, MutableMapping, Optional, Sequence, - Tuple, - Type, TypeVar, Union, cast, @@ 
-124,7 +119,7 @@ P = ParamSpec("P") # the type of thing that can be passed into `make_request` in the headers list -CustomHeaderType = Tuple[Union[str, bytes], Union[str, bytes]] +CustomHeaderType = tuple[Union[str, bytes], Union[str, bytes]] # A pre-prepared SQLite DB that is used as a template when creating new SQLite # DB each test run. This dramatically speeds up test set up when using SQLite. @@ -172,7 +167,7 @@ def json_body(self) -> JsonDict: return body @property - def json_list(self) -> List[JsonDict]: + def json_list(self) -> list[JsonDict]: body = json.loads(self.text_body) assert isinstance(body, list) return body @@ -211,7 +206,7 @@ def writeHeaders( version: bytes, code: bytes, reason: bytes, - headers: Union[Headers, List[Tuple[bytes, bytes]]], + headers: Union[Headers, list[tuple[bytes, bytes]]], ) -> None: self.result["version"] = version self.result["code"] = code @@ -367,7 +362,7 @@ def make_request( path: Union[bytes, str], content: Union[bytes, str, JsonDict] = b"", access_token: Optional[str] = None, - request: Type[Request] = SynapseRequest, + request: type[Request] = SynapseRequest, shorthand: bool = True, federation_auth_origin: Optional[bytes] = None, content_type: Optional[bytes] = None, @@ -492,10 +487,10 @@ class ThreadedMemoryReactorClock(MemoryReactorClock): def __init__(self) -> None: self.threadpool = ThreadPool(self) - self._tcp_callbacks: Dict[Tuple[str, int], Callable] = {} - self._udp: List[udp.Port] = [] - self.lookups: Dict[str, str] = {} - self._thread_callbacks: Deque[Callable[..., R]] = deque() + self._tcp_callbacks: dict[tuple[str, int], Callable] = {} + self._udp: list[udp.Port] = [] + self.lookups: dict[str, str] = {} + self._thread_callbacks: deque[Callable[..., R]] = deque() lookups = self.lookups @@ -622,7 +617,7 @@ def connectTCP( port: int, factory: ClientFactory, timeout: float = 30, - bindAddress: Optional[Tuple[str, int]] = None, + bindAddress: Optional[tuple[str, int]] = None, ) -> IConnector: """Fake 
L{IReactorTCP.connectTCP}.""" @@ -814,7 +809,7 @@ def _(res: Any) -> None: return d -def get_clock() -> Tuple[ThreadedMemoryReactorClock, Clock]: +def get_clock() -> tuple[ThreadedMemoryReactorClock, Clock]: # Ignore the linter error since this is an expected usage of creating a `Clock` for # testing purposes. reactor = ThreadedMemoryReactorClock() @@ -1041,7 +1036,7 @@ def setTcpKeepAlive(self, enabled: bool) -> None: def connect_client( reactor: ThreadedMemoryReactorClock, client_id: int -) -> Tuple[IProtocol, AccumulatingProtocol]: +) -> tuple[IProtocol, AccumulatingProtocol]: """ Connect a client to a fake TCP transport. @@ -1068,7 +1063,7 @@ def setup_test_homeserver( server_name: str = "test", config: Optional[HomeServerConfig] = None, reactor: Optional[ISynapseReactor] = None, - homeserver_to_use: Type[HomeServer] = TestHomeServer, + homeserver_to_use: type[HomeServer] = TestHomeServer, db_txn_limit: Optional[int] = None, **extra_homeserver_attributes: Any, ) -> HomeServer: diff --git a/tests/server_notices/test_resource_limits_server_notices.py b/tests/server_notices/test_resource_limits_server_notices.py index dd38528a7d..a0c5582496 100644 --- a/tests/server_notices/test_resource_limits_server_notices.py +++ b/tests/server_notices/test_resource_limits_server_notices.py @@ -17,7 +17,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Tuple from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -363,7 +362,7 @@ def test_invite_with_notice(self) -> None: self.assertTrue(notice_in_room, "No server notice in room") - def _trigger_notice_and_join(self) -> Tuple[str, str, str]: + def _trigger_notice_and_join(self) -> tuple[str, str, str]: """Creates enough active users to hit the MAU limit and trigger a system notice about it, then joins the system notices room with one of the users created. 
diff --git a/tests/state/test_v2.py b/tests/state/test_v2.py index b4f2b98cc4..2cf411e30b 100644 --- a/tests/state/test_v2.py +++ b/tests/state/test_v2.py @@ -21,13 +21,9 @@ import itertools from typing import ( Collection, - Dict, Iterable, - List, Mapping, Optional, - Set, - Tuple, TypeVar, ) @@ -94,7 +90,7 @@ def __init__( self.content = content self.room_id = ROOM_ID - def to_event(self, auth_events: List[str], prev_events: List[str]) -> EventBase: + def to_event(self, auth_events: list[str], prev_events: list[str]) -> EventBase: """Given the auth_events and prev_events, convert to a Frozen Event Args: @@ -461,9 +457,9 @@ def test_mainline_sort(self) -> None: def do_check( self, - events: List[FakeEvent], - edges: List[List[str]], - expected_state_ids: List[str], + events: list[FakeEvent], + edges: list[list[str]], + expected_state_ids: list[str], ) -> None: """Take a list of events and edges and calculate the state of the graph at END, and asserts it matches `expected_state_ids` @@ -476,9 +472,9 @@ def do_check( the keys that haven't changed since START). """ # We want to sort the events into topological order for processing. 
- graph: Dict[str, Set[str]] = {} + graph: dict[str, set[str]] = {} - fake_event_map: Dict[str, FakeEvent] = {} + fake_event_map: dict[str, FakeEvent] = {} for ev in itertools.chain(INITIAL_EVENTS, events): graph[ev.node_id] = set() @@ -491,8 +487,8 @@ def do_check( for a, b in pairwise(edge_list): graph[a].add(b) - event_map: Dict[str, EventBase] = {} - state_at_event: Dict[str, StateMap[str]] = {} + event_map: dict[str, EventBase] = {} + state_at_event: dict[str, StateMap[str]] = {} # We copy the map as the sort consumes the graph graph_copy = {k: set(v) for k, v in graph.items()} @@ -568,7 +564,7 @@ def do_check( class LexicographicalTestCase(unittest.TestCase): def test_simple(self) -> None: - graph: Dict[str, Set[str]] = { + graph: dict[str, set[str]] = { "l": {"o"}, "m": {"n", "o"}, "n": {"o"}, @@ -1020,7 +1016,7 @@ def test_get_power_level_for_sender(self) -> None: T = TypeVar("T") -def pairwise(iterable: Iterable[T]) -> Iterable[Tuple[T, T]]: +def pairwise(iterable: Iterable[T]) -> Iterable[tuple[T, T]]: "s -> (s0,s1), (s1,s2), (s2, s3), ..." a, b = itertools.tee(iterable) next(b, None) @@ -1029,11 +1025,11 @@ def pairwise(iterable: Iterable[T]) -> Iterable[Tuple[T, T]]: @attr.s class TestStateResolutionStore: - event_map: Dict[str, EventBase] = attr.ib() + event_map: dict[str, EventBase] = attr.ib() def get_events( self, event_ids: Collection[str], allow_rejected: bool = False - ) -> "defer.Deferred[Dict[str, EventBase]]": + ) -> "defer.Deferred[dict[str, EventBase]]": """Get events from the database Args: @@ -1048,7 +1044,7 @@ def get_events( {eid: self.event_map[eid] for eid in event_ids if eid in self.event_map} ) - def _get_auth_chain(self, event_ids: Iterable[str]) -> List[str]: + def _get_auth_chain(self, event_ids: Iterable[str]) -> list[str]: """Gets the full auth chain for a set of events (including rejected events). 
@@ -1085,9 +1081,9 @@ def _get_auth_chain(self, event_ids: Iterable[str]) -> List[str]: def get_auth_chain_difference( self, room_id: str, - auth_sets: List[Set[str]], - conflicted_state: Optional[Set[str]], - additional_backwards_reachable_conflicted_events: Optional[Set[str]], + auth_sets: list[set[str]], + conflicted_state: Optional[set[str]], + additional_backwards_reachable_conflicted_events: Optional[set[str]], ) -> "defer.Deferred[StateDifference]": chains = [frozenset(self._get_auth_chain(a)) for a in auth_sets] diff --git a/tests/state/test_v21.py b/tests/state/test_v21.py index ff1715d4f7..7bef3decf0 100644 --- a/tests/state/test_v21.py +++ b/tests/state/test_v21.py @@ -18,7 +18,7 @@ # # import itertools -from typing import Dict, List, Optional, Sequence, Set +from typing import Optional, Sequence from twisted.internet import defer from twisted.test.proto_helpers import MemoryReactor @@ -357,11 +357,11 @@ async def _get_auth_difference_and_conflicted_subgraph( self, room_id: str, state_maps: Sequence[StateMap[str]], - event_map: Optional[Dict[str, EventBase]], + event_map: Optional[dict[str, EventBase]], state_res_store: StateResolutionStoreInterface, - ) -> Set[str]: + ) -> set[str]: _, conflicted_state = _seperate(state_maps) - conflicted_set: Optional[Set[str]] = set( + conflicted_set: Optional[set[str]] = set( itertools.chain.from_iterable(conflicted_state.values()) ) if event_map is None: @@ -377,7 +377,7 @@ async def _get_auth_difference_and_conflicted_subgraph( def get_resolution_and_verify_expected( self, state_maps: Sequence[StateMap[str]], - events: List[EventBase], + events: list[EventBase], expected: StateMap[str], ) -> None: room_id = events[0].room_id @@ -475,9 +475,9 @@ def create_event( event_type: str, state_key: Optional[str], sender: str, - content: Dict, - auth_events: List[str], - prev_events: Optional[List[str]] = None, + content: dict, + auth_events: list[str], + prev_events: Optional[list[str]] = None, room_id: Optional[str] = 
None, ) -> EventBase: """Short-hand for event_from_pdu_json for fields we typically care about. diff --git a/tests/storage/databases/main/test_end_to_end_keys.py b/tests/storage/databases/main/test_end_to_end_keys.py index d0dd8f866b..35e1e15d66 100644 --- a/tests/storage/databases/main/test_end_to_end_keys.py +++ b/tests/storage/databases/main/test_end_to_end_keys.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional, Tuple +from typing import Optional from twisted.internet.testing import MemoryReactor @@ -99,7 +99,7 @@ def test_master_replacement_only_applies_to_latest_master_key( def check_timestamp_column( txn: LoggingTransaction, - ) -> List[Tuple[JsonDict, Optional[int]]]: + ) -> list[tuple[JsonDict, Optional[int]]]: """Fetch all rows for Alice's keys.""" txn.execute( """ diff --git a/tests/storage/databases/main/test_events_worker.py b/tests/storage/databases/main/test_events_worker.py index a7c6bdd9b4..c786271c09 100644 --- a/tests/storage/databases/main/test_events_worker.py +++ b/tests/storage/databases/main/test_events_worker.py @@ -20,7 +20,7 @@ # import json from contextlib import contextmanager -from typing import Generator, List, Set, Tuple +from typing import Generator from unittest import mock from twisted.enterprise.adbapi import ConnectionPool @@ -60,7 +60,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.token = self.login(self.user, "pass") self.room_id = self.helper.create_room_as(self.user, tok=self.token) - self.event_ids: List[str] = [] + self.event_ids: list[str] = [] for i in range(3): event = self.get_success( inject_event( @@ -316,7 +316,7 @@ def test_get_lots_of_messages(self) -> None: room_id = self.helper.create_room_as(user_id, tok=user_tok) - event_ids: Set[str] = set() + event_ids: set[str] = set() for i in range(num_events): event = self.get_success( inject_event( @@ -371,7 +371,7 @@ def _populate_events(self) -> None: ) 
) - self.event_ids: List[str] = [] + self.event_ids: list[str] = [] for idx in range(1, 21): # Stream ordering starts at 1. event_json = { "type": f"test {idx}", @@ -504,7 +504,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: def blocking_get_event_calls( self, ) -> Generator[ - Tuple["Deferred[None]", "Deferred[None]", "Deferred[None]"], None, None + tuple["Deferred[None]", "Deferred[None]", "Deferred[None]"], None, None ]: """Starts two concurrent `get_event` calls for the same event. diff --git a/tests/storage/databases/main/test_receipts.py b/tests/storage/databases/main/test_receipts.py index d084f5c2ba..2d63b52aca 100644 --- a/tests/storage/databases/main/test_receipts.py +++ b/tests/storage/databases/main/test_receipts.py @@ -19,7 +19,7 @@ # # -from typing import Any, Dict, Optional, Sequence, Tuple +from typing import Any, Optional, Sequence from twisted.internet.testing import MemoryReactor @@ -51,8 +51,8 @@ def _test_background_receipts_unique_index( update_name: str, index_name: str, table: str, - receipts: Dict[Tuple[str, str, str], Sequence[Dict[str, Any]]], - expected_unique_receipts: Dict[Tuple[str, str, str], Optional[Dict[str, Any]]], + receipts: dict[tuple[str, str, str], Sequence[dict[str, Any]]], + expected_unique_receipts: dict[tuple[str, str, str], Optional[dict[str, Any]]], ) -> None: """Test that the background update to uniqueify non-thread receipts in the given receipts table works properly. 
diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py index 5e773a5545..5602531880 100644 --- a/tests/storage/test__base.py +++ b/tests/storage/test__base.py @@ -20,7 +20,7 @@ # import secrets -from typing import Generator, List, Tuple, cast +from typing import Generator, cast from twisted.internet.testing import MemoryReactor @@ -52,9 +52,9 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: ) ) - def _dump_table_to_tuple(self) -> Generator[Tuple[int, str, str], None, None]: + def _dump_table_to_tuple(self) -> Generator[tuple[int, str, str], None, None]: yield from cast( - List[Tuple[int, str, str]], + list[tuple[int, str, str]], self.get_success( self.storage.db_pool.simple_select_list( self.table_name, None, ["id, username, value"] diff --git a/tests/storage/test_account_data.py b/tests/storage/test_account_data.py index 13c4be988e..d9307154da 100644 --- a/tests/storage/test_account_data.py +++ b/tests/storage/test_account_data.py @@ -19,7 +19,7 @@ # # -from typing import Iterable, Optional, Set +from typing import Iterable, Optional from twisted.internet.testing import MemoryReactor @@ -52,7 +52,7 @@ def _update_ignore_list( ) def assert_ignorers( - self, ignored_user_id: str, expected_ignorer_user_ids: Set[str] + self, ignored_user_id: str, expected_ignorer_user_ids: set[str] ) -> None: self.assertEqual( self.get_success(self.store.ignored_by(ignored_user_id)), @@ -60,7 +60,7 @@ def assert_ignorers( ) def assert_ignored( - self, ignorer_user_id: str, expected_ignored_user_ids: Set[str] + self, ignorer_user_id: str, expected_ignored_user_ids: set[str] ) -> None: self.assertEqual( self.get_success(self.store.ignored_users(ignorer_user_id)), diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index b4df92c7a1..4b9d069d6a 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -21,7 +21,7 @@ import json import os import tempfile -from typing import List, 
cast +from typing import cast from unittest.mock import AsyncMock, Mock import yaml @@ -48,7 +48,7 @@ class ApplicationServiceStoreTestCase(unittest.HomeserverTestCase): def setUp(self) -> None: super().setUp() - self.as_yaml_files: List[str] = [] + self.as_yaml_files: list[str] = [] self.hs.config.appservice.app_service_config_files = self.as_yaml_files self.hs.config.caches.event_cache_size = 1 @@ -123,7 +123,7 @@ def test_retrieval_of_all_services(self) -> None: class ApplicationServiceTransactionStoreTestCase(unittest.HomeserverTestCase): def setUp(self) -> None: super().setUp() - self.as_yaml_files: List[str] = [] + self.as_yaml_files: list[str] = [] self.hs.config.appservice.app_service_config_files = self.as_yaml_files self.hs.config.caches.event_cache_size = 1 @@ -180,7 +180,7 @@ def _set_state(self, id: str, state: ApplicationServiceState) -> defer.Deferred: ) def _insert_txn( - self, as_id: str, txn_id: int, events: List[Mock] + self, as_id: str, txn_id: int, events: list[Mock] ) -> "defer.Deferred[None]": return self.db_pool.runOperation( self.engine.convert_param_style( @@ -277,7 +277,7 @@ def test_create_appservice_txn_first( self, ) -> None: service = Mock(id=self.as_list[0]["id"]) - events = cast(List[EventBase], [Mock(event_id="e1"), Mock(event_id="e2")]) + events = cast(list[EventBase], [Mock(event_id="e1"), Mock(event_id="e2")]) txn = self.get_success( defer.ensureDeferred( self.store.create_appservice_txn( diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py index cf63b50c2f..3505423691 100644 --- a/tests/storage/test_background_update.py +++ b/tests/storage/test_background_update.py @@ -19,7 +19,7 @@ # # import logging -from typing import List, Tuple, cast +from typing import cast from unittest.mock import AsyncMock, Mock import yaml @@ -535,7 +535,7 @@ def delta(txn: LoggingTransaction) -> None: # Check the correct values are in the new table. 
rows = cast( - List[Tuple[int, int]], + list[tuple[int, int]], self.get_success( self.store.db_pool.simple_select_list( table="test_constraint", @@ -652,7 +652,7 @@ def delta(txn: LoggingTransaction) -> None: # Check the correct values are in the new table. rows = cast( - List[Tuple[int, int]], + list[tuple[int, int]], self.get_success( self.store.db_pool.simple_select_list( table="test_constraint", diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index 1cd97a9dd7..2c1ba9d6c2 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -19,7 +19,7 @@ # # -from typing import Any, Dict, List, Optional, Tuple, cast +from typing import Any, Optional, cast from unittest.mock import AsyncMock from parameterized import parameterized @@ -104,7 +104,7 @@ def test_insert_new_client_ip_none_device_id(self) -> None: self.pump(0) result = cast( - List[Tuple[str, str, str, Optional[str], int]], + list[tuple[str, str, str, Optional[str], int]], self.get_success( self.store.db_pool.simple_select_list( table="user_ips", @@ -135,7 +135,7 @@ def test_insert_new_client_ip_none_device_id(self) -> None: self.pump(0) result = cast( - List[Tuple[str, str, str, Optional[str], int]], + list[tuple[str, str, str, Optional[str], int]], self.get_success( self.store.db_pool.simple_select_list( table="user_ips", @@ -184,7 +184,7 @@ def test_get_last_client_ip_by_device(self, after_persisting: bool) -> None: else: # Check that the new IP and user agent has not been stored yet db_result = cast( - List[Tuple[str, Optional[str], Optional[str], str, Optional[int]]], + list[tuple[str, Optional[str], Optional[str], str, Optional[int]]], self.get_success( self.store.db_pool.simple_select_list( table="devices", @@ -266,7 +266,7 @@ def test_get_last_client_ip_by_device_combined_data(self) -> None: # Check that the new IP and user agent has not been stored yet db_result = cast( - List[Tuple[str, Optional[str], Optional[str], str, Optional[int]]], + 
list[tuple[str, Optional[str], Optional[str], str, Optional[int]]], self.get_success( self.store.db_pool.simple_select_list( table="devices", @@ -381,7 +381,7 @@ def test_get_user_ip_and_agents_combined_data(self) -> None: # Check that the new IP and user agent has not been stored yet db_result = cast( - List[Tuple[str, str, str, int]], + list[tuple[str, str, str, int]], self.get_success( self.store.db_pool.simple_select_list( table="user_ips", @@ -589,7 +589,7 @@ def test_old_user_ips_pruned(self) -> None: # We should see that in the DB result = cast( - List[Tuple[str, str, str, Optional[str], int]], + list[tuple[str, str, str, Optional[str], int]], self.get_success( self.store.db_pool.simple_select_list( table="user_ips", @@ -616,7 +616,7 @@ def test_old_user_ips_pruned(self) -> None: # We should get no results. result = cast( - List[Tuple[str, str, str, Optional[str], int]], + list[tuple[str, str, str, Optional[str], int]], self.get_success( self.store.db_pool.simple_select_list( table="user_ips", @@ -695,7 +695,7 @@ def test_invalid_user_agents_are_ignored(self) -> None: # We should see that in the DB result = cast( - List[Tuple[str, str, str, Optional[str], int]], + list[tuple[str, str, str, Optional[str], int]], self.get_success( self.store.db_pool.simple_select_list( table="user_ips", @@ -745,9 +745,9 @@ def test_request_from_getPeer(self) -> None: def _runtest( self, - headers: Dict[bytes, bytes], + headers: dict[bytes, bytes], expected_ip: str, - make_request_args: Dict[str, Any], + make_request_args: dict[str, Any], ) -> None: device_id = "bleb" diff --git a/tests/storage/test_database.py b/tests/storage/test_database.py index fd6963bb82..ffcff3363f 100644 --- a/tests/storage/test_database.py +++ b/tests/storage/test_database.py @@ -19,7 +19,7 @@ # # -from typing import Callable, Tuple +from typing import Callable from unittest.mock import Mock, call from twisted.internet import defer @@ -149,7 +149,7 @@ def prepare(self, reactor: MemoryReactor, clock: 
Clock, hs: HomeServer) -> None: def _run_interaction( self, func: Callable[[LoggingTransaction], object] - ) -> Tuple[Mock, Mock]: + ) -> tuple[Mock, Mock]: """Run the given function in a database transaction, with callbacks registered. Args: diff --git a/tests/storage/test_devices.py b/tests/storage/test_devices.py index bd6fcd8eeb..1d1979e19f 100644 --- a/tests/storage/test_devices.py +++ b/tests/storage/test_devices.py @@ -19,7 +19,7 @@ # # -from typing import Collection, List, Tuple +from typing import Collection from twisted.internet.testing import MemoryReactor @@ -44,7 +44,7 @@ def default_config(self) -> JsonDict: config["federation_sender_instances"] = ["master"] return config - def add_device_change(self, user_id: str, device_ids: List[str], host: str) -> None: + def add_device_change(self, user_id: str, device_ids: list[str], host: str) -> None: """Add a device list change for the given device to `device_lists_outbound_pokes` table. """ @@ -306,7 +306,7 @@ def test_get_device_updates_by_remote_cross_signing_key_updates( def _check_devices_in_updates( self, expected_device_ids: Collection[str], - device_updates: List[Tuple[str, JsonDict]], + device_updates: list[tuple[str, JsonDict]], ) -> None: """Check that an specific device ids exist in a list of device update EDUs""" self.assertEqual(len(device_updates), len(expected_device_ids)) diff --git a/tests/storage/test_event_chain.py b/tests/storage/test_event_chain.py index fe9bb7bcca..175a5ffc78 100644 --- a/tests/storage/test_event_chain.py +++ b/tests/storage/test_event_chain.py @@ -19,7 +19,7 @@ # # -from typing import Dict, List, Set, Tuple, cast +from typing import cast from parameterized import parameterized @@ -420,7 +420,7 @@ def test_out_of_order_events(self) -> None: def persist( self, - events: List[EventBase], + events: list[EventBase], ) -> None: """Persist the given events and check that the links generated match those given. 
@@ -464,11 +464,11 @@ def _persist(txn: LoggingTransaction) -> None: ) def fetch_chains( - self, events: List[EventBase] - ) -> Tuple[Dict[str, Tuple[int, int]], _LinkMap]: + self, events: list[EventBase] + ) -> tuple[dict[str, tuple[int, int]], _LinkMap]: # Fetch the map from event ID -> (chain ID, sequence number) rows = cast( - List[Tuple[str, int, int]], + list[tuple[str, int, int]], self.get_success( self.store.db_pool.simple_select_many_batch( table="event_auth_chains", @@ -487,7 +487,7 @@ def fetch_chains( # Fetch all the links and pass them to the _LinkMap. auth_chain_rows = cast( - List[Tuple[int, int, int, int]], + list[tuple[int, int, int, int]], self.get_success( self.store.db_pool.simple_select_many_batch( table="event_auth_chain_links", @@ -575,7 +575,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.token = self.login("foo", "pass") self.requester = create_requester(self.user_id) - def _generate_room(self) -> Tuple[str, List[Set[str]]]: + def _generate_room(self) -> tuple[str, list[set[str]]]: """Insert a room without a chain cover index.""" room_id = self.helper.create_room_as(self.user_id, tok=self.token) diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py index ee9cf3687f..d8c6a1cd04 100644 --- a/tests/storage/test_event_federation.py +++ b/tests/storage/test_event_federation.py @@ -21,14 +21,9 @@ import datetime from typing import ( Collection, - Dict, - FrozenSet, Iterable, - List, Mapping, NamedTuple, - Set, - Tuple, TypeVar, Union, cast, @@ -74,7 +69,7 @@ # | | # K J -AUTH_GRAPH: Dict[str, List[str]] = { +AUTH_GRAPH: dict[str, list[str]] = { "a": ["e"], "b": ["e"], "c": ["g", "i"], @@ -108,7 +103,7 @@ def get_all_topologically_sorted_orders( nodes: Iterable[T], graph: Mapping[T, Collection[T]], -) -> List[List[T]]: +) -> list[list[T]]: """Given a set of nodes and a graph, return all possible topological orderings. 
""" @@ -117,7 +112,7 @@ def get_all_topologically_sorted_orders( # we have a choice over which node to consider next. degree_map = dict.fromkeys(nodes, 0) - reverse_graph: Dict[T, Set[T]] = {} + reverse_graph: dict[T, set[T]] = {} for node, edges in graph.items(): if node not in degree_map: @@ -138,10 +133,10 @@ def get_all_topologically_sorted_orders( def _get_all_topologically_sorted_orders_inner( - reverse_graph: Dict[T, Set[T]], - zero_degree: List[T], - degree_map: Dict[T, int], -) -> List[List[T]]: + reverse_graph: dict[T, set[T]], + zero_degree: list[T], + degree_map: dict[T, int], +) -> list[list[T]]: new_paths = [] # Rather than only choosing *one* item from the list of nodes with zero @@ -175,7 +170,7 @@ def _get_all_topologically_sorted_orders_inner( def get_all_topologically_consistent_subsets( nodes: Iterable[T], graph: Mapping[T, Collection[T]], -) -> Set[FrozenSet[T]]: +) -> set[frozenset[T]]: """Get all subsets of the graph where if node N is in the subgraph, then all nodes that can reach that node (i.e. for all X there exists a path X -> N) are in the subgraph. 
@@ -195,7 +190,7 @@ def get_all_topologically_consistent_subsets( @attr.s(auto_attribs=True, frozen=True, slots=True) class _BackfillSetupInfo: room_id: str - depth_map: Dict[str, int] + depth_map: dict[str, int] class EventFederationWorkerStoreTestCase(tests.unittest.HomeserverTestCase): @@ -573,7 +568,7 @@ def test_auth_difference_partial_cover(self) -> None: # | | # K J - auth_graph: Dict[str, List[str]] = { + auth_graph: dict[str, list[str]] = { "a": ["e"], "b": ["e"], "c": ["g", "i"], @@ -756,11 +751,11 @@ class TestNode(NamedTuple): seq_num: int class TestLink(NamedTuple): - origin_chain_and_seq: Tuple[int, int] - target_chain_and_seq: Tuple[int, int] + origin_chain_and_seq: tuple[int, int] + target_chain_and_seq: tuple[int, int] # Map to chain IDs / seq nums - nodes: List[TestNode] = [ + nodes: list[TestNode] = [ TestNode("A1", 1, 1), TestNode("A2", 1, 2), TestNode("A3", 1, 3), @@ -779,7 +774,7 @@ class TestLink(NamedTuple): TestNode("G1", 7, 1), TestNode("G2", 7, 2), ] - links: List[TestLink] = [ + links: list[TestLink] = [ TestLink((2, 1), (1, 2)), # B1 -> A2 TestLink((3, 1), (2, 2)), # C1 -> B2 TestLink((4, 1), (3, 1)), # D1 -> C1 @@ -818,9 +813,9 @@ class TestLink(NamedTuple): # Define the test cases class TestCase(NamedTuple): name: str - conflicted: Set[str] - additional_backwards_reachable: Set[str] - want_conflicted_subgraph: Set[str] + conflicted: set[str] + additional_backwards_reachable: set[str] + want_conflicted_subgraph: set[str] # Reminder: # A1 <- A2 <- A3 @@ -936,7 +931,7 @@ def test_prune_inbound_federation_queue(self, room_version: RoomVersion) -> None room_id = "some_room_id" - def prev_event_format(prev_event_id: str) -> Union[Tuple[str, dict], str]: + def prev_event_format(prev_event_id: str) -> Union[tuple[str, dict], str]: """Account for differences in prev_events format across room versions""" if room_version.event_format == EventFormatVersions.ROOM_V1_V2: return prev_event_id, {} @@ -1034,7 +1029,7 @@ def 
_setup_room_for_backfill_tests(self) -> _BackfillSetupInfo: # | # 5 (newest) - event_graph: Dict[str, List[str]] = { + event_graph: dict[str, list[str]] = { "1": [], "2": ["1"], "3": ["2", "A"], @@ -1050,7 +1045,7 @@ def _setup_room_for_backfill_tests(self) -> _BackfillSetupInfo: "b6": ["3"], } - depth_map: Dict[str, int] = { + depth_map: dict[str, int] = { "1": 1, "2": 2, "b1": 3, @@ -1070,7 +1065,7 @@ def _setup_room_for_backfill_tests(self) -> _BackfillSetupInfo: # The rest are events in the room but not backfilled tet. our_server_events = {"5", "4", "B", "3", "A"} - complete_event_dict_map: Dict[str, JsonDict] = {} + complete_event_dict_map: dict[str, JsonDict] = {} stream_ordering = 0 for event_id, prev_event_ids in event_graph.items(): depth = depth_map[event_id] @@ -1425,14 +1420,14 @@ def test_get_event_ids_to_not_pull_from_backoff_retry_after_backoff_duration( class FakeEvent: event_id: str room_id: str - auth_events: List[str] + auth_events: list[str] type = "foo" state_key = "foo" internal_metadata = EventInternalMetadata({}) - def auth_event_ids(self) -> List[str]: + def auth_event_ids(self) -> list[str]: return self.auth_events def is_state(self) -> bool: diff --git a/tests/storage/test_event_push_actions.py b/tests/storage/test_event_push_actions.py index 30ba1ad94a..ef6c0f2465 100644 --- a/tests/storage/test_event_push_actions.py +++ b/tests/storage/test_event_push_actions.py @@ -19,7 +19,7 @@ # # -from typing import Optional, Tuple +from typing import Optional from twisted.internet.testing import MemoryReactor @@ -47,7 +47,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: assert persist_events_store is not None self.persist_events_store = persist_events_store - def _create_users_and_room(self) -> Tuple[str, str, str, str, str]: + def _create_users_and_room(self) -> tuple[str, str, str, str, str]: """ Creates two users and a shared room. 
diff --git a/tests/storage/test_events.py b/tests/storage/test_events.py index 25a380e325..5c7f814078 100644 --- a/tests/storage/test_events.py +++ b/tests/storage/test_events.py @@ -20,7 +20,7 @@ # import logging -from typing import Dict, List, Optional +from typing import Optional from twisted.internet.testing import MemoryReactor @@ -54,7 +54,7 @@ def prepare( def test_get_senders_for_event_ids(self) -> None: """Tests the `get_senders_for_event_ids` storage function.""" - users_and_tokens: Dict[str, str] = {} + users_and_tokens: dict[str, str] = {} for localpart_suffix in range(10): localpart = f"user_{localpart_suffix}" user_id = self.register_user(localpart, "rabbit") @@ -70,7 +70,7 @@ def test_get_senders_for_event_ids(self) -> None: room_id = self.helper.create_room_as( room_creator_user_id, tok=room_creator_token ) - event_ids_to_senders: Dict[str, str] = {} + event_ids_to_senders: dict[str, str] = {} for user_id, token in users_and_tokens.items(): if user_id == room_creator_user_id: continue @@ -180,7 +180,7 @@ def persist_event( ) self.get_success(self._persistence.persist_event(event, context)) - def assert_extremities(self, expected_extremities: List[str]) -> None: + def assert_extremities(self, expected_extremities: list[str]) -> None: """Assert the current extremities for the room""" extremities = self.get_success( self.store.get_prev_events_for_room(self.room_id) diff --git a/tests/storage/test_events_bg_updates.py b/tests/storage/test_events_bg_updates.py index a1375aa4ac..d1a794c5a1 100644 --- a/tests/storage/test_events_bg_updates.py +++ b/tests/storage/test_events_bg_updates.py @@ -13,7 +13,6 @@ # # -from typing import Dict from twisted.internet.testing import MemoryReactor @@ -48,7 +47,7 @@ def prepare( ) ) - def create_room(self, room_version: RoomVersion) -> Dict[str, int]: + def create_room(self, room_version: RoomVersion) -> dict[str, int]: """Create a room with a known room version and insert events. 
Returns the set of event IDs that exceed MAX_DEPTH and @@ -67,7 +66,7 @@ def create_room(self, room_version: RoomVersion) -> Dict[str, int]: ) # Insert events with some depths exceeding MAX_DEPTH - event_id_to_depth: Dict[str, int] = {} + event_id_to_depth: dict[str, int] = {} for depth in range(MAX_DEPTH - 5, MAX_DEPTH + 5): event_id = f"$event{depth}:example.com" event_id_to_depth[event_id] = depth diff --git a/tests/storage/test_id_generators.py b/tests/storage/test_id_generators.py index 4c1311a00e..4846e8cac3 100644 --- a/tests/storage/test_id_generators.py +++ b/tests/storage/test_id_generators.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict, List, Optional +from typing import Optional from twisted.internet.testing import MemoryReactor @@ -43,12 +43,12 @@ class MultiWriterIdGeneratorBase(HomeserverTestCase): positive: bool = True - tables: List[str] = ["foobar"] + tables: list[str] = ["foobar"] def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.store = hs.get_datastores().main self.db_pool: DatabasePool = self.store.db_pool - self.instances: Dict[str, MultiWriterIdGenerator] = {} + self.instances: dict[str, MultiWriterIdGenerator] = {} self.get_success(self.db_pool.runInteraction("_setup_db", self._setup_db)) @@ -76,7 +76,7 @@ def _setup_db(self, txn: LoggingTransaction) -> None: def _create_id_generator( self, instance_name: str = "master", - writers: Optional[List[str]] = None, + writers: Optional[list[str]] = None, ) -> MultiWriterIdGenerator: def _create(conn: LoggingDatabaseConnection) -> MultiWriterIdGenerator: return MultiWriterIdGenerator( diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 9a3b44219d..9ea2fa5311 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -17,7 +17,7 @@ # [This file includes modifications made by New Vector Limited] # 
# -from typing import Any, Dict, List +from typing import Any from unittest.mock import AsyncMock from twisted.internet.testing import MemoryReactor @@ -32,7 +32,7 @@ FORTY_DAYS = 40 * 24 * 60 * 60 -def gen_3pids(count: int) -> List[Dict[str, Any]]: +def gen_3pids(count: int) -> list[dict[str, Any]]: """Generate `count` threepids as a list.""" return [ {"medium": "email", "address": "user%i@matrix.org" % i} for i in range(count) @@ -40,7 +40,7 @@ def gen_3pids(count: int) -> List[Dict[str, Any]]: class MonthlyActiveUsersTestCase(unittest.HomeserverTestCase): - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = default_config("test") config.update({"limit_usage_by_mau": True, "max_mau_value": 50}) diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py index 7565376a59..2c188b8046 100644 --- a/tests/storage/test_redaction.py +++ b/tests/storage/test_redaction.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional, cast +from typing import Optional, cast from canonicaljson import json @@ -247,8 +247,8 @@ def __init__(self, base_builder: EventBuilder, event_id: str): async def build( self, - prev_event_ids: List[str], - auth_event_ids: Optional[List[str]], + prev_event_ids: list[str], + auth_event_ids: Optional[list[str]], depth: Optional[int] = None, ) -> EventBase: built_event = await self._base_builder.build( diff --git a/tests/storage/test_rollback_worker.py b/tests/storage/test_rollback_worker.py index f61eb2e319..125c4499b0 100644 --- a/tests/storage/test_rollback_worker.py +++ b/tests/storage/test_rollback_worker.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List from unittest import mock from twisted.internet.testing import MemoryReactor @@ -34,7 +33,7 @@ from tests.unittest import HomeserverTestCase -def fake_listdir(filepath: str) -> List[str]: +def 
fake_listdir(filepath: str) -> list[str]: """ A fake implementation of os.listdir which we can use to mock out the filesystem. diff --git a/tests/storage/test_room_search.py b/tests/storage/test_room_search.py index e530e59fa6..2c0ef19e9e 100644 --- a/tests/storage/test_room_search.py +++ b/tests/storage/test_room_search.py @@ -19,7 +19,6 @@ # # -from typing import List, Tuple from unittest.case import SkipTest from twisted.internet.testing import MemoryReactor @@ -317,7 +316,7 @@ def test_tokenize_query(self) -> None: ) def _check_test_cases( - self, store: DataStore, cases: List[Tuple[str, bool]] + self, store: DataStore, cases: list[tuple[str, bool]] ) -> None: # Run all the test cases versus search_msgs for query, expect_to_contain in cases: diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py index b8933d957b..c5487d81e6 100644 --- a/tests/storage/test_roommember.py +++ b/tests/storage/test_roommember.py @@ -20,7 +20,7 @@ # # import logging -from typing import List, Optional, Tuple, cast +from typing import Optional, cast from twisted.internet.testing import MemoryReactor @@ -133,7 +133,7 @@ def test__null_byte_in_display_name_properly_handled(self) -> None: room = self.helper.create_room_as(self.u_alice, tok=self.t_alice) res = cast( - List[Tuple[Optional[str], str]], + list[tuple[Optional[str], str]], self.get_success( self.store.db_pool.simple_select_list( "room_memberships", @@ -165,7 +165,7 @@ def test__null_byte_in_display_name_properly_handled(self) -> None: ) res2 = cast( - List[Tuple[Optional[str], str]], + list[tuple[Optional[str], str]], self.get_success( self.store.db_pool.simple_select_list( "room_memberships", @@ -408,7 +408,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: def _assert_member_summary( self, actual_member_summary: MemberSummary, - expected_member_list: List[str], + expected_member_list: list[str], *, expected_member_count: Optional[int] = None, ) -> None: diff --git 
a/tests/storage/test_sliding_sync_tables.py b/tests/storage/test_sliding_sync_tables.py index f0df166bab..5cfc1a9c29 100644 --- a/tests/storage/test_sliding_sync_tables.py +++ b/tests/storage/test_sliding_sync_tables.py @@ -18,7 +18,7 @@ # # import logging -from typing import Dict, List, Optional, Tuple, cast +from typing import Optional, cast import attr from parameterized import parameterized @@ -112,7 +112,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.state_handler = self.hs.get_state_handler() - def _get_sliding_sync_joined_rooms(self) -> Dict[str, _SlidingSyncJoinedRoomResult]: + def _get_sliding_sync_joined_rooms(self) -> dict[str, _SlidingSyncJoinedRoomResult]: """ Return the rows from the `sliding_sync_joined_rooms` table. @@ -120,7 +120,7 @@ def _get_sliding_sync_joined_rooms(self) -> Dict[str, _SlidingSyncJoinedRoomResu Mapping from room_id to _SlidingSyncJoinedRoomResult. """ rows = cast( - List[Tuple[str, int, int, str, str, bool, str]], + list[tuple[str, int, int, str, str, bool, str]], self.get_success( self.store.db_pool.simple_select_list( "sliding_sync_joined_rooms", @@ -153,7 +153,7 @@ def _get_sliding_sync_joined_rooms(self) -> Dict[str, _SlidingSyncJoinedRoomResu def _get_sliding_sync_membership_snapshots( self, - ) -> Dict[Tuple[str, str], _SlidingSyncMembershipSnapshotResult]: + ) -> dict[tuple[str, str], _SlidingSyncMembershipSnapshotResult]: """ Return the rows from the `sliding_sync_membership_snapshots` table. @@ -161,7 +161,7 @@ def _get_sliding_sync_membership_snapshots( Mapping from the (room_id, user_id) to _SlidingSyncMembershipSnapshotResult. 
""" rows = cast( - List[Tuple[str, str, str, str, str, int, int, bool, str, str, bool, str]], + list[tuple[str, str, str, str, str, int, int, bool, str, str, bool, str]], self.get_success( self.store.db_pool.simple_select_list( "sliding_sync_membership_snapshots", @@ -207,8 +207,8 @@ def _get_sliding_sync_membership_snapshots( def _create_remote_invite_room_for_user( self, invitee_user_id: str, - unsigned_invite_room_state: Optional[List[StrippedStateEvent]], - ) -> Tuple[str, EventBase]: + unsigned_invite_room_state: Optional[list[StrippedStateEvent]], + ) -> tuple[str, EventBase]: """ Create a fake invite for a remote room and persist it. @@ -2246,7 +2246,7 @@ def test_non_join_server_left_room(self) -> None: ] ) def test_non_join_remote_invite_no_stripped_state( - self, _description: str, stripped_state: Optional[List[StrippedStateEvent]] + self, _description: str, stripped_state: Optional[list[StrippedStateEvent]] ) -> None: """ Test remote invite with no stripped state provided shows up in diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index bf6da71549..8e821c6d18 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -20,7 +20,7 @@ # import logging -from typing import List, Tuple, cast +from typing import cast from immutabledict import immutabledict @@ -593,7 +593,7 @@ def test_batched_state_group_storing(self) -> None: # check that only state events are in state_groups, and all state events are in state_groups res = cast( - List[Tuple[str]], + list[tuple[str]], self.get_success( self.store.db_pool.simple_select_list( table="state_groups", @@ -618,7 +618,7 @@ def test_batched_state_group_storing(self) -> None: for event, context in processed_events_and_context: if event.is_state(): state = cast( - List[Tuple[str, str]], + list[tuple[str, str]], self.get_success( self.store.db_pool.simple_select_list( table="state_groups_state", @@ -631,7 +631,7 @@ def test_batched_state_group_storing(self) -> None: 
self.assertEqual(event.state_key, state[0][1]) groups = cast( - List[Tuple[str]], + list[tuple[str]], self.get_success( self.store.db_pool.simple_select_list( table="state_group_edges", diff --git a/tests/storage/test_stream.py b/tests/storage/test_stream.py index 0777c254c0..d51fa1f8ba 100644 --- a/tests/storage/test_stream.py +++ b/tests/storage/test_stream.py @@ -20,7 +20,6 @@ # import logging -from typing import List, Tuple from unittest.mock import AsyncMock, patch from immutabledict import immutabledict @@ -150,7 +149,7 @@ def prepare( ) self.event_id_none = res["event_id"] - def _filter_messages(self, filter: JsonDict) -> List[str]: + def _filter_messages(self, filter: JsonDict) -> list[str]: """Make a request to /messages with a filter, returns the chunk of events.""" events, next_key, _ = self.get_success( @@ -324,7 +323,7 @@ def _update_persisted_instance_name_for_event( def _send_event_on_instance( self, instance_name: str, room_id: str, access_token: str - ) -> Tuple[JsonDict, PersistedEventPosition]: + ) -> tuple[JsonDict, PersistedEventPosition]: """ Send an event in a room and mimic that it was persisted by a specific instance/worker. diff --git a/tests/storage/test_user_directory.py b/tests/storage/test_user_directory.py index 26e045135e..83d3357c65 100644 --- a/tests/storage/test_user_directory.py +++ b/tests/storage/test_user_directory.py @@ -19,7 +19,7 @@ # # import re -from typing import Any, Dict, List, Optional, Set, Tuple, cast +from typing import Any, Optional, cast from unittest import mock from unittest.mock import Mock, patch @@ -56,21 +56,21 @@ class GetUserDirectoryTables: def __init__(self, store: DataStore): self.store = store - async def get_users_in_public_rooms(self) -> Set[Tuple[str, str]]: + async def get_users_in_public_rooms(self) -> set[tuple[str, str]]: """Fetch the entire `users_in_public_rooms` table. Returns a list of tuples (user_id, room_id) where room_id is public and contains the user with the given id. 
""" r = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.store.db_pool.simple_select_list( "users_in_public_rooms", None, ("user_id", "room_id") ), ) return set(r) - async def get_users_who_share_private_rooms(self) -> Set[Tuple[str, str, str]]: + async def get_users_who_share_private_rooms(self) -> set[tuple[str, str, str]]: """Fetch the entire `users_who_share_private_rooms` table. Returns a set of tuples (user_id, other_user_id, room_id) corresponding @@ -78,7 +78,7 @@ async def get_users_who_share_private_rooms(self) -> Set[Tuple[str, str, str]]: """ rows = cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], await self.store.db_pool.simple_select_list( "users_who_share_private_rooms", None, @@ -87,13 +87,13 @@ async def get_users_who_share_private_rooms(self) -> Set[Tuple[str, str, str]]: ) return set(rows) - async def get_users_in_user_directory(self) -> Set[str]: + async def get_users_in_user_directory(self) -> set[str]: """Fetch the set of users in the `user_directory` table. This is useful when checking we've correctly excluded users from the directory. """ result = cast( - List[Tuple[str]], + list[tuple[str]], await self.store.db_pool.simple_select_list( "user_directory", None, @@ -102,7 +102,7 @@ async def get_users_in_user_directory(self) -> Set[str]: ) return {row[0] for row in result} - async def get_profiles_in_user_directory(self) -> Dict[str, ProfileInfo]: + async def get_profiles_in_user_directory(self) -> dict[str, ProfileInfo]: """Fetch users and their profiles from the `user_directory` table. This is useful when we want to inspect display names and avatars. @@ -110,7 +110,7 @@ async def get_profiles_in_user_directory(self) -> Dict[str, ProfileInfo]: thing missing is an unused room_id column. 
""" rows = cast( - List[Tuple[str, Optional[str], Optional[str]]], + list[tuple[str, Optional[str], Optional[str]]], await self.store.db_pool.simple_select_list( "user_directory", None, @@ -124,7 +124,7 @@ async def get_profiles_in_user_directory(self) -> Dict[str, ProfileInfo]: async def get_tables( self, - ) -> Tuple[Set[str], Set[Tuple[str, str]], Set[Tuple[str, str, str]]]: + ) -> tuple[set[str], set[tuple[str, str]], set[tuple[str, str, str]]]: """Multiple tests want to inspect these tables, so expose them together.""" return ( await self.get_users_in_user_directory(), @@ -277,7 +277,7 @@ def test_initial(self) -> None: def _create_rooms_and_inject_memberships( self, creator: str, token: str, joiner: str - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Create a public and private room as a normal user. Then get the `joiner` into those rooms. """ diff --git a/tests/storage/util/test_partial_state_events_tracker.py b/tests/storage/util/test_partial_state_events_tracker.py index 1e5663f137..026bc58180 100644 --- a/tests/storage/util/test_partial_state_events_tracker.py +++ b/tests/storage/util/test_partial_state_events_tracker.py @@ -19,7 +19,7 @@ # # -from typing import Collection, Dict +from typing import Collection from unittest import mock from twisted.internet.defer import CancelledError, ensureDeferred @@ -35,9 +35,9 @@ class PartialStateEventsTrackerTestCase(TestCase): def setUp(self) -> None: # the results to be returned by the mocked get_partial_state_events - self._events_dict: Dict[str, bool] = {} + self._events_dict: dict[str, bool] = {} - async def get_partial_state_events(events: Collection[str]) -> Dict[str, bool]: + async def get_partial_state_events(events: Collection[str]) -> dict[str, bool]: return {e: self._events_dict[e] for e in events} self.mock_store = mock.Mock(spec_set=["get_partial_state_events"]) @@ -73,7 +73,7 @@ def test_un_partial_state_race(self) -> None: # registration of the listener, it should not block. 
self._events_dict = {"event1": True, "event2": False} - async def get_partial_state_events(events: Collection[str]) -> Dict[str, bool]: + async def get_partial_state_events(events: Collection[str]) -> dict[str, bool]: res = {e: self._events_dict[e] for e in events} # change the result for next time self._events_dict = {"event1": False, "event2": False} @@ -91,13 +91,13 @@ def test_un_partial_state_during_get_partial_state_events(self) -> None: self._events_dict = {"event1": True, "event2": False} - async def get_partial_state_events1(events: Collection[str]) -> Dict[str, bool]: + async def get_partial_state_events1(events: Collection[str]) -> dict[str, bool]: self.mock_store.get_partial_state_events.side_effect = ( get_partial_state_events2 ) return {e: self._events_dict[e] for e in events} - async def get_partial_state_events2(events: Collection[str]) -> Dict[str, bool]: + async def get_partial_state_events2(events: Collection[str]) -> dict[str, bool]: self.tracker.notify_un_partial_stated("event1") self._events_dict["event1"] = False return {e: self._events_dict[e] for e in events} diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py index f12402f5f2..7737101967 100644 --- a/tests/test_event_auth.py +++ b/tests/test_event_auth.py @@ -20,7 +20,7 @@ # import unittest -from typing import Any, Collection, Dict, Iterable, List, Optional +from typing import Any, Collection, Iterable, Optional from parameterized import parameterized @@ -39,7 +39,7 @@ class _StubEventSourceStore: """A stub implementation of the EventSourceStore""" def __init__(self) -> None: - self._store: Dict[str, EventBase] = {} + self._store: dict[str, EventBase] = {} def add_event(self, event: EventBase) -> None: self._store[event.event_id] = event @@ -54,7 +54,7 @@ async def get_events( redact_behaviour: EventRedactBehaviour, get_prev_content: bool = False, allow_rejected: bool = False, - ) -> Dict[str, EventBase]: + ) -> dict[str, EventBase]: assert allow_rejected assert not 
get_prev_content assert redact_behaviour == EventRedactBehaviour.as_is @@ -745,7 +745,7 @@ def test_room_v10_rejects_other_non_integer_power_levels(self) -> None: test_room_v10_rejects_string_power_levels above handles the string case. """ - def create_event(pl_event_content: Dict[str, Any]) -> EventBase: + def create_event(pl_event_content: dict[str, Any]) -> EventBase: return make_event_from_dict( { "room_id": TEST_ROOM_ID, @@ -759,7 +759,7 @@ def create_event(pl_event_content: Dict[str, Any]) -> EventBase: room_version=RoomVersions.V10, ) - contents: Iterable[Dict[str, Any]] = [ + contents: Iterable[dict[str, Any]] = [ {"notifications": {"room": None}}, {"users": {"@alice:wonderland": []}}, {"users_default": {}}, @@ -861,7 +861,7 @@ def _alias_event(room_version: RoomVersion, sender: str, **kwargs: Any) -> Event def _build_auth_dict_for_room_version( room_version: RoomVersion, auth_events: Iterable[EventBase] -) -> List: +) -> list: if room_version.event_format == EventFormatVersions.ROOM_V1_V2: return [(e.event_id, "not_used") for e in auth_events] else: diff --git a/tests/test_mau.py b/tests/test_mau.py index fa98242bf7..e535e7dc2e 100644 --- a/tests/test_mau.py +++ b/tests/test_mau.py @@ -20,7 +20,7 @@ """Tests REST events for /rooms paths.""" -from typing import List, Optional +from typing import Optional from twisted.internet.testing import MemoryReactor @@ -249,7 +249,7 @@ def test_tracked_but_not_limited(self) -> None: } ) def test_as_trial_days(self) -> None: - user_tokens: List[str] = [] + user_tokens: list[str] = [] def advance_time_and_sync() -> None: self.reactor.advance(24 * 60 * 61) diff --git a/tests/test_server.py b/tests/test_server.py index 1854a3c4d4..e7d3febe3f 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -20,7 +20,7 @@ import re from http import HTTPStatus -from typing import Awaitable, Callable, Dict, NoReturn, Optional, Tuple +from typing import Awaitable, Callable, NoReturn, Optional from twisted.internet.defer import 
Deferred from twisted.web.resource import Resource @@ -70,7 +70,7 @@ def test_handler_for_request(self) -> None: def _callback( request: SynapseRequest, **kwargs: object - ) -> Tuple[int, Dict[str, object]]: + ) -> tuple[int, dict[str, object]]: got_kwargs.update(kwargs) return 200, kwargs @@ -192,7 +192,7 @@ def test_head_request(self) -> None: def _callback( request: SynapseRequest, **kwargs: object - ) -> Tuple[int, Dict[str, object]]: + ) -> tuple[int, dict[str, object]]: return 200, {"result": True} res = JsonResource(self.homeserver) @@ -405,11 +405,11 @@ def __init__(self, clock: Clock): self.clock = clock @cancellable - async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def _async_render_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} - async def _async_render_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def _async_render_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} @@ -422,11 +422,11 @@ def __init__(self, clock: Clock): self.clock = clock @cancellable - async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, bytes]: + async def _async_render_GET(self, request: SynapseRequest) -> tuple[int, bytes]: await self.clock.sleep(1.0) return HTTPStatus.OK, b"ok" - async def _async_render_POST(self, request: SynapseRequest) -> Tuple[int, bytes]: + async def _async_render_POST(self, request: SynapseRequest) -> tuple[int, bytes]: await self.clock.sleep(1.0) return HTTPStatus.OK, b"ok" diff --git a/tests/test_state.py b/tests/test_state.py index ab7b52e90c..6e5a6d845d 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -21,14 +21,10 @@ from typing import ( Any, Collection, - Dict, Generator, Iterable, Iterator, - List, Optional, - Set, - Tuple, ) from unittest.mock import AsyncMock, Mock @@ -57,7 +53,7 @@ def 
create_event( state_key: Optional[str] = None, depth: int = 2, event_id: Optional[str] = None, - prev_events: Optional[List[Tuple[str, dict]]] = None, + prev_events: Optional[list[tuple[str, dict]]] = None, **kwargs: Any, ) -> EventBase: global _next_event_id @@ -91,16 +87,16 @@ def create_event( class _DummyStore: def __init__(self) -> None: - self._event_to_state_group: Dict[str, int] = {} - self._group_to_state: Dict[int, MutableStateMap[str]] = {} + self._event_to_state_group: dict[str, int] = {} + self._group_to_state: dict[int, MutableStateMap[str]] = {} - self._event_id_to_event: Dict[str, EventBase] = {} + self._event_id_to_event: dict[str, EventBase] = {} self._next_group = 1 async def get_state_groups_ids( self, room_id: str, event_ids: Collection[str] - ) -> Dict[int, MutableStateMap[str]]: + ) -> dict[int, MutableStateMap[str]]: groups = {} for event_id in event_ids: group = self._event_to_state_group.get(event_id) @@ -137,7 +133,7 @@ async def store_state_group( async def get_events( self, event_ids: Collection[str], **kwargs: Any - ) -> Dict[str, EventBase]: + ) -> dict[str, EventBase]: return { e_id: self._event_id_to_event[e_id] for e_id in event_ids @@ -146,12 +142,12 @@ async def get_events( async def get_partial_state_events( self, event_ids: Collection[str] - ) -> Dict[str, bool]: + ) -> dict[str, bool]: return dict.fromkeys(event_ids, False) async def get_state_group_delta( self, name: str - ) -> Tuple[Optional[int], Optional[StateMap[str]]]: + ) -> tuple[Optional[int], Optional[StateMap[str]]]: return None, None def register_events(self, events: Iterable[EventBase]) -> None: @@ -170,7 +166,7 @@ async def get_room_version_id(self, room_id: str) -> str: async def get_state_group_for_events( self, event_ids: Collection[str], await_full_state: bool = True - ) -> Dict[str, int]: + ) -> dict[str, int]: res = {} for event in event_ids: res[event] = self._event_to_state_group[event] @@ -178,7 +174,7 @@ async def get_state_group_for_events( async def 
get_state_for_groups( self, groups: Collection[int] - ) -> Dict[int, MutableStateMap[str]]: + ) -> dict[int, MutableStateMap[str]]: res = {} for group in groups: state = self._group_to_state[group] @@ -193,15 +189,15 @@ def __init__(self, **kwargs: Any) -> None: class Graph: - def __init__(self, nodes: Dict[str, DictObj], edges: Dict[str, List[str]]): - events: Dict[str, EventBase] = {} - clobbered: Set[str] = set() + def __init__(self, nodes: dict[str, DictObj], edges: dict[str, list[str]]): + events: dict[str, EventBase] = {} + clobbered: set[str] = set() for event_id, fields in nodes.items(): refs = edges.get(event_id) if refs: clobbered.difference_update(refs) - prev_events: List[Tuple[str, dict]] = [(r, {}) for r in refs] + prev_events: list[tuple[str, dict]] = [(r, {}) for r in refs] else: prev_events = [] @@ -281,7 +277,7 @@ def test_branch_no_conflict(self) -> Generator[defer.Deferred, Any, None]: self.dummy_store.register_events(graph.walk()) - context_store: Dict[str, EventContext] = {} + context_store: dict[str, EventContext] = {} for event in graph.walk(): context = yield defer.ensureDeferred( @@ -328,7 +324,7 @@ def test_branch_basic_conflict( self.dummy_store.register_events(graph.walk()) - context_store: Dict[str, EventContext] = {} + context_store: dict[str, EventContext] = {} for event in graph.walk(): context = yield defer.ensureDeferred( @@ -389,7 +385,7 @@ def test_branch_have_banned_conflict( self.dummy_store.register_events(graph.walk()) - context_store: Dict[str, EventContext] = {} + context_store: dict[str, EventContext] = {} for event in graph.walk(): context = yield defer.ensureDeferred( @@ -467,7 +463,7 @@ def test_branch_have_perms_conflict( self.dummy_store.register_events(graph.walk()) - context_store: Dict[str, EventContext] = {} + context_store: dict[str, EventContext] = {} for event in graph.walk(): context = yield defer.ensureDeferred( @@ -490,7 +486,7 @@ def test_branch_have_perms_conflict( 
self.assertEqual(ctx_d.state_group_before_event, ctx_d.state_group) def _add_depths( - self, nodes: Dict[str, DictObj], edges: Dict[str, List[str]] + self, nodes: dict[str, DictObj], edges: dict[str, list[str]] ) -> None: def _get_depth(ev: str) -> int: node = nodes[ev] diff --git a/tests/test_types.py b/tests/test_types.py index 0c08bc8ecc..1802f0fae3 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -19,7 +19,6 @@ # # -from typing import Type from unittest import skipUnless from immutabledict import immutabledict @@ -152,7 +151,7 @@ def test_non_ascii(self) -> None: class MultiWriterTokenTestCase(unittest.HomeserverTestCase): """Tests for the different types of multi writer tokens.""" - token_type: Type[AbstractMultiWriterStreamToken] + token_type: type[AbstractMultiWriterStreamToken] def test_basic_token(self) -> None: """Test that a simple stream token can be serialized and unserialized""" diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py index 3e6fd03600..0df5a4e6c3 100644 --- a/tests/test_utils/__init__.py +++ b/tests/test_utils/__init__.py @@ -28,7 +28,7 @@ import sys import warnings from binascii import unhexlify -from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, TypeVar +from typing import TYPE_CHECKING, Awaitable, Callable, TypeVar import attr import zope.interface @@ -102,7 +102,7 @@ class FakeResponse: # type: ignore[misc] attribute, and didn't support deliverBody until recently. 
""" - version: Tuple[bytes, int, int] = (b"HTTP", 1, 1) + version: tuple[bytes, int, int] = (b"HTTP", 1, 1) # HTTP response code code: int = 200 diff --git a/tests/test_utils/event_injection.py b/tests/test_utils/event_injection.py index c1eaf9a575..9cdb456b1b 100644 --- a/tests/test_utils/event_injection.py +++ b/tests/test_utils/event_injection.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, List, Optional, Tuple +from typing import Any, Optional import synapse.server from synapse.api.constants import EventTypes @@ -62,7 +62,7 @@ async def inject_member_event( async def inject_event( hs: synapse.server.HomeServer, room_version: Optional[str] = None, - prev_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, **kwargs: Any, ) -> EventBase: """Inject a generic event into a room @@ -87,9 +87,9 @@ async def inject_event( async def create_event( hs: synapse.server.HomeServer, room_version: Optional[str] = None, - prev_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, **kwargs: Any, -) -> Tuple[EventBase, EventContext]: +) -> tuple[EventBase, EventContext]: if room_version is None: room_version = await hs.get_datastores().main.get_room_version_id( kwargs["room_id"] diff --git a/tests/test_utils/html_parsers.py b/tests/test_utils/html_parsers.py index a0f39cb130..aff1626295 100644 --- a/tests/test_utils/html_parsers.py +++ b/tests/test_utils/html_parsers.py @@ -20,7 +20,7 @@ # from html.parser import HTMLParser -from typing import Dict, Iterable, List, NoReturn, Optional, Tuple +from typing import Iterable, NoReturn, Optional class TestHtmlParser(HTMLParser): @@ -30,16 +30,16 @@ def __init__(self) -> None: super().__init__() # a list of links found in the doc - self.links: List[str] = [] + self.links: list[str] = [] # the values of any hidden s: map from name to value - self.hiddens: Dict[str, Optional[str]] = {} + self.hiddens: dict[str, 
Optional[str]] = {} # the values of any radio buttons: map from name to list of values - self.radios: Dict[str, List[Optional[str]]] = {} + self.radios: dict[str, list[Optional[str]]] = {} def handle_starttag( - self, tag: str, attrs: Iterable[Tuple[str, Optional[str]]] + self, tag: str, attrs: Iterable[tuple[str, Optional[str]]] ) -> None: attr_dict = dict(attrs) if tag == "a": diff --git a/tests/test_utils/oidc.py b/tests/test_utils/oidc.py index f2de8bded5..c2d6af029a 100644 --- a/tests/test_utils/oidc.py +++ b/tests/test_utils/oidc.py @@ -23,7 +23,7 @@ import base64 import json from hashlib import sha256 -from typing import Any, ContextManager, Dict, List, Optional, Tuple +from typing import Any, ContextManager, Optional from unittest.mock import Mock, patch from urllib.parse import parse_qs @@ -75,16 +75,16 @@ def __init__(self, clock: Clock, issuer: str): self.post_token_handler = Mock(side_effect=self._post_token_handler) # A code -> grant mapping - self._authorization_grants: Dict[str, FakeAuthorizationGrant] = {} + self._authorization_grants: dict[str, FakeAuthorizationGrant] = {} # An access token -> grant mapping - self._sessions: Dict[str, FakeAuthorizationGrant] = {} + self._sessions: dict[str, FakeAuthorizationGrant] = {} # We generate here an ECDSA key with the P-256 curve (ES256 algorithm) used for # signing JWTs. ECDSA keys are really quick to generate compared to RSA. 
self._key = ECKey.generate_key(crv="P-256", is_private=True) self._jwks = KeySet([ECKey.import_key(self._key.as_pem(is_private=False))]) - self._id_token_overrides: Dict[str, Any] = {} + self._id_token_overrides: dict[str, Any] = {} def reset_mocks(self) -> None: self.request.reset_mock() @@ -222,7 +222,7 @@ def start_authorization( userinfo: dict, nonce: Optional[str] = None, with_sid: bool = False, - ) -> Tuple[str, FakeAuthorizationGrant]: + ) -> tuple[str, FakeAuthorizationGrant]: """Start an authorization request, and get back the code to use on the authorization endpoint.""" code = random_string(10) sid = None @@ -242,7 +242,7 @@ def start_authorization( return code, grant - def exchange_code(self, code: str) -> Optional[Dict[str, Any]]: + def exchange_code(self, code: str) -> Optional[dict[str, Any]]: grant = self._authorization_grants.pop(code, None) if grant is None: return None @@ -269,7 +269,7 @@ def buggy_endpoint( metadata: bool = False, token: bool = False, userinfo: bool = False, - ) -> ContextManager[Dict[str, Mock]]: + ) -> ContextManager[dict[str, Mock]]: """A context which makes a set of endpoints return a 500 error. 
Args: @@ -356,7 +356,7 @@ def _get_userinfo_handler(self, access_token: Optional[str]) -> IResponse: return FakeResponse.json(payload=user_info) - def _post_token_handler(self, params: Dict[str, List[str]]) -> IResponse: + def _post_token_handler(self, params: dict[str, list[str]]) -> IResponse: """Handles requests to the token endpoint.""" code = params.get("code", []) diff --git a/tests/unittest.py b/tests/unittest.py index 9ab052e7c0..1007f40456 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -33,16 +33,12 @@ Awaitable, Callable, ClassVar, - Dict, Generic, Iterable, - List, Mapping, NoReturn, Optional, Protocol, - Tuple, - Type, TypeVar, Union, ) @@ -169,7 +165,7 @@ def _parse_config_dict(config: str) -> HomeServerConfig: return config_obj -def make_homeserver_config_obj(config: Dict[str, Any]) -> HomeServerConfig: +def make_homeserver_config_obj(config: dict[str, Any]) -> HomeServerConfig: """Creates a :class:`HomeServerConfig` instance with the given configuration dict. This is equivalent to:: @@ -250,7 +246,7 @@ def tearDown(orig: Callable[[], R]) -> R: return ret - def assertObjectHasAttributes(self, attrs: Dict[str, object], obj: object) -> None: + def assertObjectHasAttributes(self, attrs: dict[str, object], obj: object) -> None: """Asserts that the given object has each of the attributes given, and that the value of each matches according to assertEqual.""" for key in attrs.keys(): @@ -299,14 +295,14 @@ def assertIncludes( elif not exact and actual_items >= expected_items: return - expected_lines: List[str] = [] + expected_lines: list[str] = [] for expected_item in expected_items: is_expected_in_actual = expected_item in actual_items expected_lines.append( "{} {}".format(" " if is_expected_in_actual else "?", expected_item) ) - actual_lines: List[str] = [] + actual_lines: list[str] = [] for actual_item in actual_items: is_actual_in_expected = actual_item in expected_items actual_lines.append( @@ -379,7 +375,7 @@ class 
HomeserverTestCase(TestCase): hijack_auth: ClassVar[bool] = True needs_threadpool: ClassVar[bool] = False - servlets: ClassVar[List[RegisterServletsFunc]] = [] + servlets: ClassVar[list[RegisterServletsFunc]] = [] def __init__(self, methodName: str): super().__init__(methodName) @@ -527,7 +523,7 @@ def create_test_resource(self) -> Resource: create_resource_tree(self.create_resource_dict(), root_resource) return root_resource - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: """Create a resource tree for the test server A resource tree is a mapping from path to twisted.web.resource. @@ -578,7 +574,7 @@ def make_request( path: Union[bytes, str], content: Union[bytes, str, JsonDict] = b"", access_token: Optional[str] = None, - request: Type[Request] = SynapseRequest, + request: type[Request] = SynapseRequest, shorthand: bool = True, federation_auth_origin: Optional[bytes] = None, content_type: Optional[bytes] = None, @@ -709,7 +705,7 @@ def get_success(self, d: Awaitable[TV], by: float = 0.0) -> TV: return self.successResultOf(deferred) def get_failure( - self, d: Awaitable[Any], exc: Type[_ExcType], by: float = 0.0 + self, d: Awaitable[Any], exc: type[_ExcType], by: float = 0.0 ) -> _TypedFailure[_ExcType]: """ Run a Deferred and get a Failure from it. The failure must be of the type `exc`. @@ -799,7 +795,7 @@ def register_appservice_user( username: str, appservice_token: str, inhibit_login: bool = False, - ) -> Tuple[str, Optional[str]]: + ) -> tuple[str, Optional[str]]: """Register an appservice user as an application service. Requires the client-facing registration API be registered. 
@@ -831,7 +827,7 @@ def login( username: str, password: str, device_id: Optional[str] = None, - additional_request_fields: Optional[Dict[str, str]] = None, + additional_request_fields: Optional[dict[str, str]] = None, custom_headers: Optional[Iterable[CustomHeaderType]] = None, ) -> str: """ @@ -871,7 +867,7 @@ def create_and_send_event( room_id: str, user: UserID, soft_failed: bool = False, - prev_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, ) -> str: """ Create and send an event. @@ -963,7 +959,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: ) ) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: d = super().create_resource_dict() d["/_matrix/federation"] = TransportLayerServer(self.hs) return d diff --git a/tests/util/caches/test_deferred_cache.py b/tests/util/caches/test_deferred_cache.py index f0deb1554e..fc01a2f5e9 100644 --- a/tests/util/caches/test_deferred_cache.py +++ b/tests/util/caches/test_deferred_cache.py @@ -20,7 +20,6 @@ # from functools import partial -from typing import List, Tuple from twisted.internet import defer @@ -169,7 +168,7 @@ def test_get_immediate(self) -> None: self.assertEqual(v, 2) def test_invalidate(self) -> None: - cache: DeferredCache[Tuple[str], int] = DeferredCache( + cache: DeferredCache[tuple[str], int] = DeferredCache( name="test", clock=self.clock, server_name="test_server" ) cache.prefill(("foo",), 123) @@ -266,7 +265,7 @@ def test_eviction_lru(self) -> None: cache.get(3) def test_eviction_iterable(self) -> None: - cache: DeferredCache[int, List[str]] = DeferredCache( + cache: DeferredCache[int, list[str]] = DeferredCache( name="test", clock=self.clock, server_name="test_server", diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index 0e3b6ae36b..e27f84fa6d 100644 --- a/tests/util/caches/test_descriptors.py +++ 
b/tests/util/caches/test_descriptors.py @@ -23,12 +23,9 @@ Any, Generator, Iterable, - List, Mapping, NoReturn, Optional, - Set, - Tuple, cast, ) from unittest import mock @@ -257,7 +254,7 @@ def fn(self, arg1: int) -> Deferred: return self.result obj = Cls() - callbacks: Set[str] = set() + callbacks: set[str] = set() # set off an asynchronous request origin_d: Deferred = Deferred() @@ -435,7 +432,7 @@ def __init__(self) -> None: _, self.clock = get_clock() # nb must be called this for @cached @descriptors.cached(iterable=True) - def fn(self, arg1: int, arg2: int) -> Tuple[str, ...]: + def fn(self, arg1: int, arg2: int) -> tuple[str, ...]: return self.mock(arg1, arg2) obj = Cls() @@ -925,7 +922,7 @@ def fn(self, arg1: int) -> None: pass @descriptors.cachedList(cached_method_name="fn", list_name="args1") - def list_fn(self, args1: List[int]) -> "Deferred[Mapping[int, str]]": + def list_fn(self, args1: list[int]) -> "Deferred[Mapping[int, str]]": return self.mock(args1) obj = Cls() @@ -970,7 +967,7 @@ def fn(self, arg1: int, arg2: int) -> None: pass @descriptors.cachedList(cached_method_name="fn", list_name="args1") - async def list_fn(self, args1: List[int], arg2: int) -> Mapping[int, str]: + async def list_fn(self, args1: list[int], arg2: int) -> Mapping[int, str]: # we want this to behave like an asynchronous function await run_on_reactor() return self.mock(args1, arg2) @@ -1012,7 +1009,7 @@ def fn(self, arg1: int) -> None: pass @cachedList(cached_method_name="fn", list_name="args") - async def list_fn(self, args: List[int]) -> Mapping[int, str]: + async def list_fn(self, args: list[int]) -> Mapping[int, str]: await complete_lookup return {arg: str(arg) for arg in args} @@ -1049,7 +1046,7 @@ def fn(self, arg1: int) -> None: pass @cachedList(cached_method_name="fn", list_name="args") - async def list_fn(self, args: List[int]) -> Mapping[int, str]: + async def list_fn(self, args: list[int]) -> Mapping[int, str]: await make_deferred_yieldable(complete_lookup) 
self.inner_context_was_finished = current_context().finished return {arg: str(arg) for arg in args} @@ -1097,7 +1094,7 @@ def fn(self, room_id: str, event_id: str) -> None: # of arguments as the underlying cached function, just with one of # the arguments being an iterable @descriptors.cachedList(cached_method_name="fn", list_name="keys") - def list_fn(self, keys: Iterable[Tuple[str, str]]) -> None: + def list_fn(self, keys: Iterable[tuple[str, str]]) -> None: pass # Corrected syntax ✅ diff --git a/tests/util/test_async_helpers.py b/tests/util/test_async_helpers.py index fd8d576aea..a02a2f0cef 100644 --- a/tests/util/test_async_helpers.py +++ b/tests/util/test_async_helpers.py @@ -19,7 +19,7 @@ # import logging import traceback -from typing import Any, Coroutine, List, NoReturn, Optional, Tuple, TypeVar +from typing import Any, Coroutine, NoReturn, Optional, TypeVar from parameterized import parameterized_class @@ -71,7 +71,7 @@ def check_called_first(res: int) -> int: observer1.addBoth(check_called_first) # store the results - results: List[Optional[int]] = [None, None] + results: list[Optional[int]] = [None, None] def check_val(res: int, idx: int) -> int: results[idx] = res @@ -102,7 +102,7 @@ def check_called_first(res: int) -> int: observer1.addBoth(check_called_first) # store the results - results: List[Optional[Failure]] = [None, None] + results: list[Optional[Failure]] = [None, None] def check_failure(res: Failure, idx: int) -> None: results[idx] = res @@ -644,7 +644,7 @@ def test_multiple_sleepers_wake(self) -> None: class GatherCoroutineTests(TestCase): """Tests for `gather_optional_coroutines`""" - def make_coroutine(self) -> Tuple[Coroutine[Any, Any, T], "defer.Deferred[T]"]: + def make_coroutine(self) -> tuple[Coroutine[Any, Any, T], "defer.Deferred[T]"]: """Returns a coroutine and a deferred that it is waiting on to resolve""" d: "defer.Deferred[T]" = defer.Deferred() diff --git a/tests/util/test_batching_queue.py b/tests/util/test_batching_queue.py 
index 60bfdf38aa..30b07dc6ad 100644 --- a/tests/util/test_batching_queue.py +++ b/tests/util/test_batching_queue.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Tuple from prometheus_client import Gauge @@ -47,7 +46,7 @@ def setUp(self) -> None: except KeyError: pass - self._pending_calls: List[Tuple[List[str], defer.Deferred]] = [] + self._pending_calls: list[tuple[list[str], defer.Deferred]] = [] self.queue: BatchingQueue[str, str] = BatchingQueue( name="test_queue", hs=self.hs, @@ -55,7 +54,7 @@ def setUp(self) -> None: process_batch_callback=self._process_queue, ) - async def _process_queue(self, values: List[str]) -> str: + async def _process_queue(self, values: list[str]) -> str: d: "defer.Deferred[str]" = defer.Deferred() self._pending_calls.append((values, d)) return await make_deferred_yieldable(d) diff --git a/tests/util/test_expiring_cache.py b/tests/util/test_expiring_cache.py index 35c0f02e3f..8964359a6e 100644 --- a/tests/util/test_expiring_cache.py +++ b/tests/util/test_expiring_cache.py @@ -19,7 +19,6 @@ # # -from typing import List from synapse.util.caches.expiringcache import ExpiringCache @@ -65,7 +64,7 @@ def test_eviction(self) -> None: def test_iterable_eviction(self) -> None: reactor, clock = get_clock() - cache: ExpiringCache[str, List[int]] = ExpiringCache( + cache: ExpiringCache[str, list[int]] = ExpiringCache( cache_name="test", server_name="testserver", hs=self.hs, diff --git a/tests/util/test_itertools.py b/tests/util/test_itertools.py index 7a593cc683..016389d49b 100644 --- a/tests/util/test_itertools.py +++ b/tests/util/test_itertools.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict, Iterable, List, Sequence +from typing import Iterable, Sequence from synapse.util.iterutils import ( chunk_seq, @@ -67,13 +67,13 @@ class SortTopologically(TestCase): def test_empty(self) -> None: "Test that an empty graph 
works correctly" - graph: Dict[int, List[int]] = {} + graph: dict[int, list[int]] = {} self.assertEqual(list(sorted_topologically([], graph)), []) def test_handle_empty_graph(self) -> None: "Test that a graph where a node doesn't have an entry is treated as empty" - graph: Dict[int, List[int]] = {} + graph: dict[int, list[int]] = {} # For disconnected nodes the output is simply sorted. self.assertEqual(list(sorted_topologically([1, 2], graph)), [1, 2]) @@ -81,7 +81,7 @@ def test_handle_empty_graph(self) -> None: def test_disconnected(self) -> None: "Test that a graph with no edges work" - graph: Dict[int, List[int]] = {1: [], 2: []} + graph: dict[int, list[int]] = {1: [], 2: []} # For disconnected nodes the output is simply sorted. self.assertEqual(list(sorted_topologically([1, 2], graph)), [1, 2]) @@ -89,19 +89,19 @@ def test_disconnected(self) -> None: def test_linear(self) -> None: "Test that a simple `4 -> 3 -> 2 -> 1` graph works" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3]} self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4]) def test_subset(self) -> None: "Test that only sorting a subset of the graph works" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3]} self.assertEqual(list(sorted_topologically([4, 3], graph)), [3, 4]) def test_fork(self) -> None: "Test that a forked graph works" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [1], 4: [2, 3]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [1], 4: [2, 3]} # Valid orderings are `[1, 3, 2, 4]` or `[1, 2, 3, 4]`, but we should # always get the same one. 
@@ -109,13 +109,13 @@ def test_fork(self) -> None: def test_duplicates(self) -> None: "Test that a graph with duplicate edges work" - graph: Dict[int, List[int]] = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]} + graph: dict[int, list[int]] = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]} self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4]) def test_multiple_paths(self) -> None: "Test that a graph with multiple paths between two nodes work" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]} self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4]) @@ -126,13 +126,13 @@ class SortTopologicallyBatched(TestCase): def test_empty(self) -> None: "Test that an empty graph works correctly" - graph: Dict[int, List[int]] = {} + graph: dict[int, list[int]] = {} self.assertEqual(list(sorted_topologically_batched([], graph)), []) def test_handle_empty_graph(self) -> None: "Test that a graph where a node doesn't have an entry is treated as empty" - graph: Dict[int, List[int]] = {} + graph: dict[int, list[int]] = {} # For disconnected nodes the output is simply sorted. self.assertEqual(list(sorted_topologically_batched([1, 2], graph)), [[1, 2]]) @@ -140,7 +140,7 @@ def test_handle_empty_graph(self) -> None: def test_disconnected(self) -> None: "Test that a graph with no edges work" - graph: Dict[int, List[int]] = {1: [], 2: []} + graph: dict[int, list[int]] = {1: [], 2: []} # For disconnected nodes the output is simply sorted. 
self.assertEqual(list(sorted_topologically_batched([1, 2], graph)), [[1, 2]]) @@ -148,7 +148,7 @@ def test_disconnected(self) -> None: def test_linear(self) -> None: "Test that a simple `4 -> 3 -> 2 -> 1` graph works" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3]} self.assertEqual( list(sorted_topologically_batched([4, 3, 2, 1], graph)), @@ -157,13 +157,13 @@ def test_linear(self) -> None: def test_subset(self) -> None: "Test that only sorting a subset of the graph works" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3]} self.assertEqual(list(sorted_topologically_batched([4, 3], graph)), [[3], [4]]) def test_fork(self) -> None: "Test that a forked graph works" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [1], 4: [2, 3]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [1], 4: [2, 3]} # Valid orderings are `[1, 3, 2, 4]` or `[1, 2, 3, 4]`, but we should # always get the same one. 
@@ -173,7 +173,7 @@ def test_fork(self) -> None: def test_duplicates(self) -> None: "Test that a graph with duplicate edges work" - graph: Dict[int, List[int]] = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]} + graph: dict[int, list[int]] = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]} self.assertEqual( list(sorted_topologically_batched([4, 3, 2, 1], graph)), @@ -182,7 +182,7 @@ def test_duplicates(self) -> None: def test_multiple_paths(self) -> None: "Test that a graph with multiple paths between two nodes work" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]} self.assertEqual( list(sorted_topologically_batched([4, 3, 2, 1], graph)), diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py index 722ce79dcc..b2a631d747 100644 --- a/tests/util/test_linearizer.py +++ b/tests/util/test_linearizer.py @@ -19,7 +19,7 @@ # # -from typing import Hashable, Protocol, Tuple +from typing import Hashable, Protocol from twisted.internet import defer from twisted.internet.defer import CancelledError, Deferred @@ -43,7 +43,7 @@ def setUp(self) -> None: def _start_task( self, linearizer: Linearizer, key: Hashable - ) -> Tuple["Deferred[None]", "Deferred[None]", UnblockFunction]: + ) -> tuple["Deferred[None]", "Deferred[None]", UnblockFunction]: """Starts a task which acquires the linearizer lock, blocks, then completes. Args: diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py index 56e9996b00..dcbfcfa2e4 100644 --- a/tests/util/test_lrucache.py +++ b/tests/util/test_lrucache.py @@ -20,7 +20,6 @@ # -from typing import List, Tuple from unittest.mock import Mock, patch from synapse.metrics.jemalloc import JemallocStats @@ -84,7 +83,7 @@ def test_pop(self) -> None: def test_del_multi(self) -> None: # The type here isn't quite correct as they don't handle TreeCache well. 
- cache: LruCache[Tuple[str, str], str] = LruCache( + cache: LruCache[tuple[str, str], str] = LruCache( max_size=4, clock=self.clock, cache_type=TreeCache, @@ -211,7 +210,7 @@ def test_del_multi(self) -> None: m3 = Mock() m4 = Mock() # The type here isn't quite correct as they don't handle TreeCache well. - cache: LruCache[Tuple[str, str], str] = LruCache( + cache: LruCache[tuple[str, str], str] = LruCache( max_size=4, clock=self.clock, cache_type=TreeCache, @@ -295,7 +294,7 @@ def test_eviction(self) -> None: class LruCacheSizedTestCase(unittest.HomeserverTestCase): def test_evict(self) -> None: - cache: LruCache[str, List[int]] = LruCache( + cache: LruCache[str, list[int]] = LruCache( max_size=5, clock=self.clock, size_callback=len, server_name="test_server" ) cache["key1"] = [0] @@ -320,7 +319,7 @@ def test_evict(self) -> None: def test_zero_size_drop_from_cache(self) -> None: """Test that `drop_from_cache` works correctly with 0-sized entries.""" - cache: LruCache[str, List[int]] = LruCache( + cache: LruCache[str, list[int]] = LruCache( max_size=5, clock=self.clock, size_callback=lambda x: 0, diff --git a/tests/util/test_mutable_overlay_mapping.py b/tests/util/test_mutable_overlay_mapping.py index a7335fca73..ed738919e4 100644 --- a/tests/util/test_mutable_overlay_mapping.py +++ b/tests/util/test_mutable_overlay_mapping.py @@ -13,7 +13,6 @@ # import unittest -from typing import Dict from synapse.util import MutableOverlayMapping @@ -24,7 +23,7 @@ class TestMutableOverlayMapping(unittest.TestCase): def test_init(self) -> None: """Test initialization with different input types.""" # Test with empty dict - empty_dict: Dict[str, int] = {} + empty_dict: dict[str, int] = {} mapping = MutableOverlayMapping(empty_dict) self.assertEqual(len(mapping), 0) diff --git a/tests/util/test_rwlock.py b/tests/util/test_rwlock.py index 12f821d684..36771188ae 100644 --- a/tests/util/test_rwlock.py +++ b/tests/util/test_rwlock.py @@ -19,7 +19,7 @@ # # -from typing import 
AsyncContextManager, Callable, Sequence, Tuple +from typing import AsyncContextManager, Callable, Sequence from twisted.internet import defer from twisted.internet.defer import CancelledError, Deferred @@ -35,7 +35,7 @@ def _start_reader_or_writer( read_or_write: Callable[[str], AsyncContextManager], key: str, return_value: str, - ) -> Tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]: + ) -> tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]: """Starts a reader or writer which acquires the lock, blocks, then completes. Args: @@ -67,7 +67,7 @@ async def reader_or_writer() -> str: def _start_blocking_reader( self, rwlock: ReadWriteLock, key: str, return_value: str - ) -> Tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]: + ) -> tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]: """Starts a reader which acquires the lock, blocks, then releases the lock. See the docstring for `_start_reader_or_writer` for details about the arguments @@ -77,7 +77,7 @@ def _start_blocking_reader( def _start_blocking_writer( self, rwlock: ReadWriteLock, key: str, return_value: str - ) -> Tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]: + ) -> tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]: """Starts a writer which acquires the lock, blocks, then releases the lock. See the docstring for `_start_reader_or_writer` for details about the arguments @@ -87,7 +87,7 @@ def _start_blocking_writer( def _start_nonblocking_reader( self, rwlock: ReadWriteLock, key: str, return_value: str - ) -> Tuple["Deferred[str]", "Deferred[None]"]: + ) -> tuple["Deferred[str]", "Deferred[None]"]: """Starts a reader which acquires the lock, then releases it immediately. See the docstring for `_start_reader_or_writer` for details about the arguments. 
@@ -106,7 +106,7 @@ def _start_nonblocking_reader( def _start_nonblocking_writer( self, rwlock: ReadWriteLock, key: str, return_value: str - ) -> Tuple["Deferred[str]", "Deferred[None]"]: + ) -> tuple["Deferred[str]", "Deferred[None]"]: """Starts a writer which acquires the lock, then releases it immediately. See the docstring for `_start_reader_or_writer` for details about the arguments. diff --git a/tests/util/test_task_scheduler.py b/tests/util/test_task_scheduler.py index e97f0ed611..43c3ce52ea 100644 --- a/tests/util/test_task_scheduler.py +++ b/tests/util/test_task_scheduler.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional, Tuple +from typing import Optional from twisted.internet.task import deferLater from twisted.internet.testing import MemoryReactor @@ -42,7 +42,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: async def _test_task( self, task: ScheduledTask - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: # This test task will copy the parameters to the result result = None if task.params: @@ -85,7 +85,7 @@ def test_schedule_task(self) -> None: async def _sleeping_task( self, task: ScheduledTask - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: # Sleep for a second await deferLater(self.reactor, 1, lambda: None) return TaskStatus.COMPLETE, None, None @@ -103,7 +103,7 @@ def test_schedule_lot_of_tasks(self) -> None: ) ) - def get_tasks_of_status(status: TaskStatus) -> List[ScheduledTask]: + def get_tasks_of_status(status: TaskStatus) -> list[ScheduledTask]: tasks = ( self.get_success(self.task_scheduler.get_task(task_id)) for task_id in task_ids @@ -151,7 +151,7 @@ def get_tasks_of_status(status: TaskStatus) -> List[ScheduledTask]: async def _raising_task( self, task: ScheduledTask - ) -> 
Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: raise Exception("raising") def test_schedule_raising_task(self) -> None: @@ -165,7 +165,7 @@ def test_schedule_raising_task(self) -> None: async def _resumable_task( self, task: ScheduledTask - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: if task.result and "in_progress" in task.result: return TaskStatus.COMPLETE, {"success": True}, None else: @@ -201,7 +201,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: async def _test_task( self, task: ScheduledTask - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: return (TaskStatus.COMPLETE, None, None) @override_config({"run_background_tasks_on": "worker1"}) diff --git a/tests/utils.py b/tests/utils.py index 051388ee2e..b3d59a0ebe 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -24,10 +24,8 @@ import signal from types import FrameType, TracebackType from typing import ( - Dict, Literal, Optional, - Type, TypeVar, Union, overload, @@ -134,7 +132,7 @@ def _cleanup() -> None: @overload def default_config( server_name: str, parse: Literal[False] = ... -) -> Dict[str, object]: ... +) -> dict[str, object]: ... @overload @@ -143,7 +141,7 @@ def default_config(server_name: str, parse: Literal[True]) -> HomeServerConfig: def default_config( server_name: str, parse: bool = False -) -> Union[Dict[str, object], HomeServerConfig]: +) -> Union[dict[str, object], HomeServerConfig]: """ Create a reasonable test config. @@ -283,7 +281,7 @@ async def create_room(hs: HomeServer, room_id: str, creator_id: str) -> None: T = TypeVar("T") -def checked_cast(type: Type[T], x: object) -> T: +def checked_cast(type: type[T], x: object) -> T: """A version of typing.cast that is checked at runtime. 
We have our own function for this for two reasons: @@ -337,7 +335,7 @@ def __enter__(self) -> None: def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: From b835eb253ca02f3080012e886ee628c075142102 Mon Sep 17 00:00:00 2001 From: Bryce Servis Date: Thu, 23 Oct 2025 10:10:10 -0500 Subject: [PATCH 07/72] Make optional networking and security settings for Redis more apparent in workers.md (#19073) I couldn't really find any documentation regarding how to setup TLS communication between Synapse and Redis, so I looked through the source code and found it. I figured I should go ahead and document it here. --- changelog.d/19073.doc | 1 + docs/workers.md | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 changelog.d/19073.doc diff --git a/changelog.d/19073.doc b/changelog.d/19073.doc new file mode 100644 index 0000000000..6bbaaba99e --- /dev/null +++ b/changelog.d/19073.doc @@ -0,0 +1 @@ +Point out additional Redis configuration options available in the worker docs. Contributed by @servisbryce. 
diff --git a/docs/workers.md b/docs/workers.md index 18bb0b76f6..f766b40251 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -120,6 +120,9 @@ worker_replication_secret: "" redis: enabled: true + # For additional Redis configuration options (TLS, authentication, etc.), + # see the Synapse configuration documentation: + # https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#redis instance_map: main: From 5556b491c1a6775a6592316067d12e35a743d45c Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 24 Oct 2025 11:19:44 +0200 Subject: [PATCH 08/72] Spruce up generated announcement text in the release script (#19089) --- changelog.d/19089.misc | 1 + scripts-dev/release.py | 21 +++++++++++++++++---- 2 files changed, 18 insertions(+), 4 deletions(-) create mode 100644 changelog.d/19089.misc diff --git a/changelog.d/19089.misc b/changelog.d/19089.misc new file mode 100644 index 0000000000..81c8775fd0 --- /dev/null +++ b/changelog.d/19089.misc @@ -0,0 +1 @@ +Update the release script's generated announcement text to include a title and extra text for RC's. \ No newline at end of file diff --git a/scripts-dev/release.py b/scripts-dev/release.py index 16f1fc5f2a..c20237eab8 100755 --- a/scripts-dev/release.py +++ b/scripts-dev/release.py @@ -715,18 +715,31 @@ def _announce() -> None: current_version = get_package_version() tag_name = f"v{current_version}" + is_rc = "rc" in tag_name + + release_text = f""" +### Synapse {current_version} {"🧪" if is_rc else "🚀"} - click.echo( - f""" Hi everyone. Synapse {current_version} has just been released. +""" + + if "rc" in tag_name: + release_text += ( + "\nThis is a release candidate. Please help us test it out " + "before the final release by deploying it to non-production environments, " + "and reporting any issues you find to " + "[the issue tracker](https://github.com/element-hq/synapse/issues). 
Thanks!\n" + ) + release_text += f""" [notes](https://github.com/element-hq/synapse/releases/tag/{tag_name}) | \ [docker](https://hub.docker.com/r/matrixdotorg/synapse/tags?name={tag_name}) | \ [debs](https://packages.matrix.org/debian/) | \ [pypi](https://pypi.org/project/matrix-synapse/{current_version}/)""" - ) - if "rc" in tag_name: + click.echo(release_text) + + if is_rc: click.echo( """ Announce the RC in From 72d0de9f3037b1ebf3d88596d79a4fa9fd10db05 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 24 Oct 2025 11:39:06 +0200 Subject: [PATCH 09/72] Don't exit the release script if there are uncommitted changes (#19088) --- changelog.d/19088.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/19088.misc diff --git a/changelog.d/19088.misc b/changelog.d/19088.misc new file mode 100644 index 0000000000..3224b3697d --- /dev/null +++ b/changelog.d/19088.misc @@ -0,0 +1 @@ +Don't immediately exit the release script if the checkout is dirty. Instead, allow the user to clear the dirty changes and retry. \ No newline at end of file From 45a042ae888ee30d93186245de5973a4f7352649 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 24 Oct 2025 11:39:29 +0200 Subject: [PATCH 10/72] Remove cibuildwheel `pp38*` skip selector (#19085) --- changelog.d/19085.misc | 1 + pyproject.toml | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/19085.misc diff --git a/changelog.d/19085.misc b/changelog.d/19085.misc new file mode 100644 index 0000000000..d48fad9d5d --- /dev/null +++ b/changelog.d/19085.misc @@ -0,0 +1 @@ +Remove `pp38*` skip selector from cibuildwheel to silence warning. 
\ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index b0cb355c52..2b43f182ac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -387,10 +387,10 @@ build-backend = "poetry.core.masonry.api" # Skip unsupported platforms (by us or by Rust). # See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets. # We skip: -# - CPython and PyPy 3.8: EOLed +# - CPython 3.8: EOLed # - musllinux i686: excluded to reduce number of wheels we build. # c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677 -skip = "cp38* pp38* *-musllinux_i686" +skip = "cp38* *-musllinux_i686" # Enable non-default builds. # "pypy" used to be included by default up until cibuildwheel 3. enable = "pypy" From a092d2053ad073512c7ff407c2d8bf495c46777a Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 24 Oct 2025 12:19:04 +0200 Subject: [PATCH 11/72] Fix deprecation warning in release script (#19080) --- changelog.d/19080.misc | 1 + scripts-dev/release.py | 16 +++++++++++----- 2 files changed, 12 insertions(+), 5 deletions(-) create mode 100644 changelog.d/19080.misc diff --git a/changelog.d/19080.misc b/changelog.d/19080.misc new file mode 100644 index 0000000000..c738be3fe9 --- /dev/null +++ b/changelog.d/19080.misc @@ -0,0 +1 @@ +Update deprecated code in the release script to prevent a warning message from being printed. \ No newline at end of file diff --git a/scripts-dev/release.py b/scripts-dev/release.py index c20237eab8..111c184ccb 100755 --- a/scripts-dev/release.py +++ b/scripts-dev/release.py @@ -38,6 +38,7 @@ import click import git import github +import github.Auth from click.exceptions import ClickException from git import GitCommandError, Repo from github import BadCredentialsException, Github @@ -429,7 +430,7 @@ def _publish(gh_token: str) -> None: if gh_token: # Test that the GH Token is valid before continuing. 
- gh = Github(gh_token) + gh = Github(auth=github.Auth.Token(token=gh_token)) gh.get_user() # Make sure we're in a git repo. @@ -442,7 +443,7 @@ def _publish(gh_token: str) -> None: return # Publish the draft release - gh = Github(gh_token) + gh = Github(auth=github.Auth.Token(token=gh_token)) gh_repo = gh.get_repo("element-hq/synapse") for release in gh_repo.get_releases(): if release.title == tag_name: @@ -487,8 +488,13 @@ def _upload(gh_token: Optional[str]) -> None: click.echo(f"Tag {tag_name} ({tag.commit}) is not currently checked out!") click.get_current_context().abort() + if gh_token: + gh = Github(auth=github.Auth.Token(token=gh_token)) + else: + # Use github anonymously. + gh = Github() + # Query all the assets corresponding to this release. - gh = Github(gh_token) gh_repo = gh.get_repo("element-hq/synapse") gh_release = gh_repo.get_release(tag_name) @@ -764,7 +770,7 @@ def _announce() -> None: def full(gh_token: str) -> None: if gh_token: # Test that the GH Token is valid before continuing. - gh = Github(gh_token) + gh = Github(auth=github.Auth.Token(token=gh_token)) gh.get_user() click.echo("1. If this is a security release, read the security wiki page.") @@ -850,7 +856,7 @@ def check_valid_gh_token(gh_token: Optional[str]) -> None: return try: - gh = Github(gh_token) + gh = Github(auth=github.Auth.Token(token=gh_token)) # We need to lookup name to trigger a request. 
_name = gh.get_user().name From 123eff1bc0eff6efae68f2c2408d343e9ed8d3d4 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 24 Oct 2025 12:19:40 +0200 Subject: [PATCH 12/72] Update poetry dev dependencies name (#19081) --- changelog.d/19081.misc | 1 + mypy.ini | 2 +- poetry.lock | 2 +- pyproject.toml | 2 +- 4 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/19081.misc diff --git a/changelog.d/19081.misc b/changelog.d/19081.misc new file mode 100644 index 0000000000..8518840fb6 --- /dev/null +++ b/changelog.d/19081.misc @@ -0,0 +1 @@ +Update the deprecated poetry development dependencies group name in `pyproject.toml`. \ No newline at end of file diff --git a/mypy.ini b/mypy.ini index ae903f858a..eefe405fe5 100644 --- a/mypy.ini +++ b/mypy.ini @@ -69,7 +69,7 @@ warn_unused_ignores = False ;; https://github.com/python/typeshed/tree/master/stubs ;; and for each package `foo` there's a corresponding `types-foo` package on PyPI, ;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s -;; `[tool.poetry.dev-dependencies]` list. +;; `[tool.poetry.group.dev.dependencies]` list. 
# https://github.com/lepture/authlib/issues/460 [mypy-authlib.*] diff --git a/poetry.lock b/poetry.lock index 1a26e23fad..efbd856b6d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3293,4 +3293,4 @@ url-preview = ["lxml"] [metadata] lock-version = "2.1" python-versions = "^3.9.0" -content-hash = "0058b93ca13a3f2a0cfc28485ddd8202c42d0015dbaf3b9692e43f37fe2a0be6" +content-hash = "5d71c862b924bc2af936cb6fef264a023213153543f738af31357deaf6de19b8" diff --git a/pyproject.toml b/pyproject.toml index 2b43f182ac..27265357d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -325,7 +325,7 @@ all = [ # - systemd: this is a system-based requirement ] -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] # We pin development dependencies in poetry.lock so that our tests don't start # failing on new releases. Keeping lower bounds loose here means that dependabot # can bump versions without having to update the content-hash in the lockfile. From a2fa61d1b5d90be7252c255fd89598e0e3f1c777 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 Oct 2025 13:17:35 +0100 Subject: [PATCH 13/72] Bump msgpack from 1.1.1 to 1.1.2 (#19050) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 125 +++++++++++++++++++++++++++------------------------- 1 file changed, 64 insertions(+), 61 deletions(-) diff --git a/poetry.lock b/poetry.lock index efbd856b6d..2589390c06 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1342,71 +1342,74 @@ files = [ [[package]] name = "msgpack" -version = "1.1.1" +version = "1.1.2" description = "MessagePack serializer" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "msgpack-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:353b6fc0c36fde68b661a12949d7d49f8f51ff5fa019c1e47c87c4ff34b080ed"}, - {file = "msgpack-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:79c408fcf76a958491b4e3b103d1c417044544b68e96d06432a189b43d1215c8"}, - {file = "msgpack-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78426096939c2c7482bf31ef15ca219a9e24460289c00dd0b94411040bb73ad2"}, - {file = "msgpack-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b17ba27727a36cb73aabacaa44b13090feb88a01d012c0f4be70c00f75048b4"}, - {file = "msgpack-1.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a17ac1ea6ec3c7687d70201cfda3b1e8061466f28f686c24f627cae4ea8efd0"}, - {file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:88d1e966c9235c1d4e2afac21ca83933ba59537e2e2727a999bf3f515ca2af26"}, - {file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f6d58656842e1b2ddbe07f43f56b10a60f2ba5826164910968f5933e5178af75"}, - {file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96decdfc4adcbc087f5ea7ebdcfd3dee9a13358cae6e81d54be962efc38f6338"}, - {file = "msgpack-1.1.1-cp310-cp310-win32.whl", hash = "sha256:6640fd979ca9a212e4bcdf6eb74051ade2c690b862b679bfcb60ae46e6dc4bfd"}, - {file = "msgpack-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:8b65b53204fe1bd037c40c4148d00ef918eb2108d24c9aaa20bc31f9810ce0a8"}, - {file = "msgpack-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:71ef05c1726884e44f8b1d1773604ab5d4d17729d8491403a705e649116c9558"}, - {file = "msgpack-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:36043272c6aede309d29d56851f8841ba907a1a3d04435e43e8a19928e243c1d"}, - {file = "msgpack-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a32747b1b39c3ac27d0670122b57e6e57f28eefb725e0b625618d1b59bf9d1e0"}, - {file = "msgpack-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a8b10fdb84a43e50d38057b06901ec9da52baac6983d3f709d8507f3889d43f"}, - {file = 
"msgpack-1.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0c325c3f485dc54ec298d8b024e134acf07c10d494ffa24373bea729acf704"}, - {file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:88daaf7d146e48ec71212ce21109b66e06a98e5e44dca47d853cbfe171d6c8d2"}, - {file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8b55ea20dc59b181d3f47103f113e6f28a5e1c89fd5b67b9140edb442ab67f2"}, - {file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a28e8072ae9779f20427af07f53bbb8b4aa81151054e882aee333b158da8752"}, - {file = "msgpack-1.1.1-cp311-cp311-win32.whl", hash = "sha256:7da8831f9a0fdb526621ba09a281fadc58ea12701bc709e7b8cbc362feabc295"}, - {file = "msgpack-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fd1b58e1431008a57247d6e7cc4faa41c3607e8e7d4aaf81f7c29ea013cb458"}, - {file = "msgpack-1.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae497b11f4c21558d95de9f64fff7053544f4d1a17731c866143ed6bb4591238"}, - {file = "msgpack-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:33be9ab121df9b6b461ff91baac6f2731f83d9b27ed948c5b9d1978ae28bf157"}, - {file = "msgpack-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f64ae8fe7ffba251fecb8408540c34ee9df1c26674c50c4544d72dbf792e5ce"}, - {file = "msgpack-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a494554874691720ba5891c9b0b39474ba43ffb1aaf32a5dac874effb1619e1a"}, - {file = "msgpack-1.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb643284ab0ed26f6957d969fe0dd8bb17beb567beb8998140b5e38a90974f6c"}, - {file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d275a9e3c81b1093c060c3837e580c37f47c51eca031f7b5fb76f7b8470f5f9b"}, - {file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:4fd6b577e4541676e0cc9ddc1709d25014d3ad9a66caa19962c4f5de30fc09ef"}, - {file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb29aaa613c0a1c40d1af111abf025f1732cab333f96f285d6a93b934738a68a"}, - {file = "msgpack-1.1.1-cp312-cp312-win32.whl", hash = "sha256:870b9a626280c86cff9c576ec0d9cbcc54a1e5ebda9cd26dab12baf41fee218c"}, - {file = "msgpack-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:5692095123007180dca3e788bb4c399cc26626da51629a31d40207cb262e67f4"}, - {file = "msgpack-1.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3765afa6bd4832fc11c3749be4ba4b69a0e8d7b728f78e68120a157a4c5d41f0"}, - {file = "msgpack-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8ddb2bcfd1a8b9e431c8d6f4f7db0773084e107730ecf3472f1dfe9ad583f3d9"}, - {file = "msgpack-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:196a736f0526a03653d829d7d4c5500a97eea3648aebfd4b6743875f28aa2af8"}, - {file = "msgpack-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d592d06e3cc2f537ceeeb23d38799c6ad83255289bb84c2e5792e5a8dea268a"}, - {file = "msgpack-1.1.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4df2311b0ce24f06ba253fda361f938dfecd7b961576f9be3f3fbd60e87130ac"}, - {file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e4141c5a32b5e37905b5940aacbc59739f036930367d7acce7a64e4dec1f5e0b"}, - {file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b1ce7f41670c5a69e1389420436f41385b1aa2504c3b0c30620764b15dded2e7"}, - {file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4147151acabb9caed4e474c3344181e91ff7a388b888f1e19ea04f7e73dc7ad5"}, - {file = "msgpack-1.1.1-cp313-cp313-win32.whl", hash = "sha256:500e85823a27d6d9bba1d057c871b4210c1dd6fb01fbb764e37e4e8847376323"}, - {file = "msgpack-1.1.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:6d489fba546295983abd142812bda76b57e33d0b9f5d5b71c09a583285506f69"}, - {file = "msgpack-1.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bba1be28247e68994355e028dcd668316db30c1f758d3241a7b903ac78dcd285"}, - {file = "msgpack-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8f93dcddb243159c9e4109c9750ba5b335ab8d48d9522c5308cd05d7e3ce600"}, - {file = "msgpack-1.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fbbc0b906a24038c9958a1ba7ae0918ad35b06cb449d398b76a7d08470b0ed9"}, - {file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:61e35a55a546a1690d9d09effaa436c25ae6130573b6ee9829c37ef0f18d5e78"}, - {file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:1abfc6e949b352dadf4bce0eb78023212ec5ac42f6abfd469ce91d783c149c2a"}, - {file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:996f2609ddf0142daba4cefd767d6db26958aac8439ee41db9cc0db9f4c4c3a6"}, - {file = "msgpack-1.1.1-cp38-cp38-win32.whl", hash = "sha256:4d3237b224b930d58e9d83c81c0dba7aacc20fcc2f89c1e5423aa0529a4cd142"}, - {file = "msgpack-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:da8f41e602574ece93dbbda1fab24650d6bf2a24089f9e9dbb4f5730ec1e58ad"}, - {file = "msgpack-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5be6b6bc52fad84d010cb45433720327ce886009d862f46b26d4d154001994b"}, - {file = "msgpack-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a89cd8c087ea67e64844287ea52888239cbd2940884eafd2dcd25754fb72232"}, - {file = "msgpack-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d75f3807a9900a7d575d8d6674a3a47e9f227e8716256f35bc6f03fc597ffbf"}, - {file = "msgpack-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d182dac0221eb8faef2e6f44701812b467c02674a322c739355c39e94730cdbf"}, - {file = 
"msgpack-1.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b13fe0fb4aac1aa5320cd693b297fe6fdef0e7bea5518cbc2dd5299f873ae90"}, - {file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:435807eeb1bc791ceb3247d13c79868deb22184e1fc4224808750f0d7d1affc1"}, - {file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4835d17af722609a45e16037bb1d4d78b7bdf19d6c0128116d178956618c4e88"}, - {file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a8ef6e342c137888ebbfb233e02b8fbd689bb5b5fcc59b34711ac47ebd504478"}, - {file = "msgpack-1.1.1-cp39-cp39-win32.whl", hash = "sha256:61abccf9de335d9efd149e2fff97ed5974f2481b3353772e8e2dd3402ba2bd57"}, - {file = "msgpack-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:40eae974c873b2992fd36424a5d9407f93e97656d999f43fca9d29f820899084"}, - {file = "msgpack-1.1.1.tar.gz", hash = "sha256:77b79ce34a2bdab2594f490c8e80dd62a02d650b91a75159a63ec413b8d104cd"}, + {file = "msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0051fffef5a37ca2cd16978ae4f0aef92f164df86823871b5162812bebecd8e2"}, + {file = "msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a605409040f2da88676e9c9e5853b3449ba8011973616189ea5ee55ddbc5bc87"}, + {file = "msgpack-1.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b696e83c9f1532b4af884045ba7f3aa741a63b2bc22617293a2c6a7c645f251"}, + {file = "msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:365c0bbe981a27d8932da71af63ef86acc59ed5c01ad929e09a0b88c6294e28a"}, + {file = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:41d1a5d875680166d3ac5c38573896453bbbea7092936d2e107214daf43b1d4f"}, + {file = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354e81bcdebaab427c3df4281187edc765d5d76bfb3a7c125af9da7a27e8458f"}, + {file = 
"msgpack-1.1.2-cp310-cp310-win32.whl", hash = "sha256:e64c8d2f5e5d5fda7b842f55dec6133260ea8f53c4257d64494c534f306bf7a9"}, + {file = "msgpack-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:db6192777d943bdaaafb6ba66d44bf65aa0e9c5616fa1d2da9bb08828c6b39aa"}, + {file = "msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c"}, + {file = "msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0"}, + {file = "msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296"}, + {file = "msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef"}, + {file = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c"}, + {file = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e"}, + {file = "msgpack-1.1.2-cp311-cp311-win32.whl", hash = "sha256:602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e"}, + {file = "msgpack-1.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68"}, + {file = "msgpack-1.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406"}, + {file = "msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa"}, + {file = "msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb"}, + {file = 
"msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f"}, + {file = "msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42"}, + {file = "msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9"}, + {file = "msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620"}, + {file = "msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029"}, + {file = "msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b"}, + {file = "msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69"}, + {file = "msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf"}, + {file = "msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7"}, + {file = "msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999"}, + {file = "msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e"}, + {file = "msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162"}, + {file = "msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794"}, + {file = "msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c"}, + {file = "msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9"}, + {file = "msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84"}, + {file = "msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00"}, + {file = "msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939"}, + {file = "msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e"}, + {file = "msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931"}, + {file = "msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014"}, + {file = "msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2"}, + {file = "msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717"}, + {file = "msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b"}, + {file = "msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af"}, + {file = "msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a"}, + {file = "msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b"}, + {file = "msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245"}, + {file = "msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90"}, + {file = "msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20"}, + {file = "msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27"}, + {file = "msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b"}, + {file = "msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff"}, + {file = "msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46"}, + {file = "msgpack-1.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ea5405c46e690122a76531ab97a079e184c0daf491e588592d6a23d3e32af99e"}, + {file = "msgpack-1.1.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fba231af7a933400238cb357ecccf8ab5d51535ea95d94fc35b7806218ff844"}, + {file = "msgpack-1.1.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a8f6e7d30253714751aa0b0c84ae28948e852ee7fb0524082e6716769124bc23"}, + {file = "msgpack-1.1.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:94fd7dc7d8cb0a54432f296f2246bc39474e017204ca6f4ff345941d4ed285a7"}, + {file = 
"msgpack-1.1.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:350ad5353a467d9e3b126d8d1b90fe05ad081e2e1cef5753f8c345217c37e7b8"}, + {file = "msgpack-1.1.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6bde749afe671dc44893f8d08e83bf475a1a14570d67c4bb5cec5573463c8833"}, + {file = "msgpack-1.1.2-cp39-cp39-win32.whl", hash = "sha256:ad09b984828d6b7bb52d1d1d0c9be68ad781fa004ca39216c8a1e63c0f34ba3c"}, + {file = "msgpack-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:67016ae8c8965124fdede9d3769528ad8284f14d635337ffa6a713a580f6c030"}, + {file = "msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e"}, ] [[package]] From 1419b35a405a4207eeb3a6e3eaaade3bc136603f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 Oct 2025 13:18:25 +0100 Subject: [PATCH 14/72] Bump ijson from 3.4.0 to 3.4.0.post0 (#19051) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 182 +++++++++++++++++++++++++++------------------------- 1 file changed, 96 insertions(+), 86 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2589390c06..780d6528dd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -688,97 +688,107 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 [[package]] name = "ijson" -version = "3.4.0" +version = "3.4.0.post0" description = "Iterative JSON parser with standard Python iterator interfaces" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "ijson-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e27e50f6dcdee648f704abc5d31b976cd2f90b4642ed447cf03296d138433d09"}, - {file = "ijson-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a753be681ac930740a4af9c93cfb4edc49a167faed48061ea650dc5b0f406f1"}, - {file = "ijson-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:a07c47aed534e0ec198e6a2d4360b259d32ac654af59c015afc517ad7973b7fb"}, - {file = "ijson-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c55f48181e11c597cd7146fb31edc8058391201ead69f8f40d2ecbb0b3e4fc6"}, - {file = "ijson-3.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd5669f96f79d8a2dd5ae81cbd06770a4d42c435fd4a75c74ef28d9913b697d"}, - {file = "ijson-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e3ddd46d16b8542c63b1b8af7006c758d4e21cc1b86122c15f8530fae773461"}, - {file = "ijson-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1504cec7fe04be2bb0cc33b50c9dd3f83f98c0540ad4991d4017373b7853cfe6"}, - {file = "ijson-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2f2ff456adeb216603e25d7915f10584c1b958b6eafa60038d76d08fc8a5fb06"}, - {file = "ijson-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0ab00d75d61613a125fbbb524551658b1ad6919a52271ca16563ca5bc2737bb1"}, - {file = "ijson-3.4.0-cp310-cp310-win32.whl", hash = "sha256:ada421fd59fe2bfa4cfa64ba39aeba3f0753696cdcd4d50396a85f38b1d12b01"}, - {file = "ijson-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:8c75e82cec05d00ed3a4af5f4edf08f59d536ed1a86ac7e84044870872d82a33"}, - {file = "ijson-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e369bf5a173ca51846c243002ad8025d32032532523b06510881ecc8723ee54"}, - {file = "ijson-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:26e7da0a3cd2a56a1fde1b34231867693f21c528b683856f6691e95f9f39caec"}, - {file = "ijson-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c28c7f604729be22aa453e604e9617b665fa0c24cd25f9f47a970e8130c571a"}, - {file = "ijson-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed8bcb84d3468940f97869da323ba09ae3e6b950df11dea9b62e2b231ca1e3"}, - {file = "ijson-3.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:296bc824f4088f2af814aaf973b0435bc887ce3d9f517b1577cc4e7d1afb1cb7"}, - {file = "ijson-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8145f8f40617b6a8aa24e28559d0adc8b889e56a203725226a8a60fa3501073f"}, - {file = "ijson-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b674a97bd503ea21bc85103e06b6493b1b2a12da3372950f53e1c664566a33a4"}, - {file = "ijson-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8bc731cf1c3282b021d3407a601a5a327613da9ad3c4cecb1123232623ae1826"}, - {file = "ijson-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:42ace5e940e0cf58c9de72f688d6829ddd815096d07927ee7e77df2648006365"}, - {file = "ijson-3.4.0-cp311-cp311-win32.whl", hash = "sha256:5be39a0df4cd3f02b304382ea8885391900ac62e95888af47525a287c50005e9"}, - {file = "ijson-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:0b1be1781792291e70d2e177acf564ec672a7907ba74f313583bdf39fe81f9b7"}, - {file = "ijson-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:956b148f88259a80a9027ffbe2d91705fae0c004fbfba3e5a24028fbe72311a9"}, - {file = "ijson-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:06b89960f5c721106394c7fba5760b3f67c515b8eb7d80f612388f5eca2f4621"}, - {file = "ijson-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a0bb591cf250dd7e9dfab69d634745a7f3272d31cfe879f9156e0a081fd97ee"}, - {file = "ijson-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72e92de999977f4c6b660ffcf2b8d59604ccd531edcbfde05b642baf283e0de8"}, - {file = "ijson-3.4.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e9602157a5b869d44b6896e64f502c712a312fcde044c2e586fccb85d3e316e"}, - {file = "ijson-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e83660edb931a425b7ff662eb49db1f10d30ca6d4d350e5630edbed098bc01"}, - {file = "ijson-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:49bf8eac1c7b7913073865a859c215488461f7591b4fa6a33c14b51cb73659d0"}, - {file = "ijson-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:160b09273cb42019f1811469508b0a057d19f26434d44752bde6f281da6d3f32"}, - {file = "ijson-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2019ff4e6f354aa00c76c8591bd450899111c61f2354ad55cc127e2ce2492c44"}, - {file = "ijson-3.4.0-cp312-cp312-win32.whl", hash = "sha256:931c007bf6bb8330705429989b2deed6838c22b63358a330bf362b6e458ba0bf"}, - {file = "ijson-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:71523f2b64cb856a820223e94d23e88369f193017ecc789bb4de198cc9d349eb"}, - {file = "ijson-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e8d96f88d75196a61c9d9443de2b72c2d4a7ba9456ff117b57ae3bba23a54256"}, - {file = "ijson-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c45906ce2c1d3b62f15645476fc3a6ca279549127f01662a39ca5ed334a00cf9"}, - {file = "ijson-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4ab4bc2119b35c4363ea49f29563612237cae9413d2fbe54b223be098b97bc9e"}, - {file = "ijson-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97b0a9b5a15e61dfb1f14921ea4e0dba39f3a650df6d8f444ddbc2b19b479ff1"}, - {file = "ijson-3.4.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3047bb994dabedf11de11076ed1147a307924b6e5e2df6784fb2599c4ad8c60"}, - {file = "ijson-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68c83161b052e9f5dc8191acbc862bb1e63f8a35344cb5cd0db1afd3afd487a6"}, - {file = "ijson-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1eebd9b6c20eb1dffde0ae1f0fbb4aeacec2eb7b89adb5c7c0449fc9fd742760"}, - {file = "ijson-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:13fb6d5c35192c541421f3ee81239d91fc15a8d8f26c869250f941f4b346a86c"}, - {file = "ijson-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:28b7196ff7b37c4897c547a28fa4876919696739fc91c1f347651c9736877c69"}, - 
{file = "ijson-3.4.0-cp313-cp313-win32.whl", hash = "sha256:3c2691d2da42629522140f77b99587d6f5010440d58d36616f33bc7bdc830cc3"}, - {file = "ijson-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:c4554718c275a044c47eb3874f78f2c939f300215d9031e785a6711cc51b83fc"}, - {file = "ijson-3.4.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:915a65e3f3c0eee2ea937bc62aaedb6c14cc1e8f0bb9f3f4fb5a9e2bbfa4b480"}, - {file = "ijson-3.4.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:afbe9748707684b6c5adc295c4fdcf27765b300aec4d484e14a13dca4e5c0afa"}, - {file = "ijson-3.4.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d823f8f321b4d8d5fa020d0a84f089fec5d52b7c0762430476d9f8bf95bbc1a9"}, - {file = "ijson-3.4.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8a0a2c54f3becf76881188beefd98b484b1d3bd005769a740d5b433b089fa23"}, - {file = "ijson-3.4.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ced19a83ab09afa16257a0b15bc1aa888dbc555cb754be09d375c7f8d41051f2"}, - {file = "ijson-3.4.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8100f9885eff1f38d35cef80ef759a1bbf5fc946349afa681bd7d0e681b7f1a0"}, - {file = "ijson-3.4.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d7bcc3f7f21b0f703031ecd15209b1284ea51b2a329d66074b5261de3916c1eb"}, - {file = "ijson-3.4.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2dcb190227b09dd171bdcbfe4720fddd574933c66314818dfb3960c8a6246a77"}, - {file = "ijson-3.4.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:eda4cfb1d49c6073a901735aaa62e39cb7ab47f3ad7bb184862562f776f1fa8a"}, - {file = "ijson-3.4.0-cp313-cp313t-win32.whl", hash = "sha256:0772638efa1f3b72b51736833404f1cbd2f5beeb9c1a3d392e7d385b9160cba7"}, - {file = "ijson-3.4.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3d8a0d67f36e4fb97c61a724456ef0791504b16ce6f74917a31c2e92309bbeb9"}, - {file = "ijson-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:8a990401dc7350c1739f42187823e68d2ef6964b55040c6e9f3a29461f9929e2"}, - {file = "ijson-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:80f50e0f5da4cd6b65e2d8ff38cb61b26559608a05dd3a3f9cfa6f19848e6f22"}, - {file = "ijson-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2d9ca52f5650d820a2e7aa672dea1c560f609e165337e5b3ed7cf56d696bf309"}, - {file = "ijson-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:940c8c5fd20fb89b56dde9194a4f1c7b779149f1ab26af6d8dc1da51a95d26dd"}, - {file = "ijson-3.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41dbb525666017ad856ac9b4f0f4b87d3e56b7dfde680d5f6d123556b22e2172"}, - {file = "ijson-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9f84f5e2eea5c2d271c97221c382db005534294d1175ddd046a12369617c41c"}, - {file = "ijson-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0cd126c11835839bba8ac0baaba568f67d701fc4f717791cf37b10b74a2ebd7"}, - {file = "ijson-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f9a9d3bbc6d91c24a2524a189d2aca703cb5f7e8eb34ad0aff3c91702404a983"}, - {file = "ijson-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:56679ee133470d0f1f598a8ad109d760fcfebeef4819531e29335aefb7e4cb1a"}, - {file = "ijson-3.4.0-cp39-cp39-win32.whl", hash = "sha256:583c15ded42ba80104fa1d0fa0dfdd89bb47922f3bb893a931bb843aeb55a3f3"}, - {file = "ijson-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:4563e603e56f4451572d96b47311dffef5b933d825f3417881d4d3630c6edac2"}, - {file = "ijson-3.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:54e989c35dba9cf163d532c14bcf0c260897d5f465643f0cd1fba9c908bed7ef"}, - {file = "ijson-3.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:494eeb8e87afef22fbb969a4cb81ac2c535f30406f334fb6136e9117b0bb5380"}, - {file = "ijson-3.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:81603de95de1688958af65cd2294881a4790edae7de540b70c65c8253c5dc44a"}, - {file = "ijson-3.4.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8524be12c1773e1be466034cc49c1ecbe3d5b47bb86217bd2a57f73f970a6c19"}, - {file = "ijson-3.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17994696ec895d05e0cfa21b11c68c920c82634b4a3d8b8a1455d6fe9fdee8f7"}, - {file = "ijson-3.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0b67727aaee55d43b2e82b6a866c3cbcb2b66a5e9894212190cbd8773d0d9857"}, - {file = "ijson-3.4.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdc8c5ca0eec789ed99db29c68012dda05027af0860bb360afd28d825238d69d"}, - {file = "ijson-3.4.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8e6b44b6ec45d5b1a0ee9d97e0e65ab7f62258727004cbbe202bf5f198bc21f7"}, - {file = "ijson-3.4.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b51e239e4cb537929796e840d349fc731fdc0d58b1a0683ce5465ad725321e0f"}, - {file = "ijson-3.4.0-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed05d43ec02be8ddb1ab59579761f6656b25d241a77fd74f4f0f7ec09074318a"}, - {file = "ijson-3.4.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfeca1aaa59d93fd0a3718cbe5f7ef0effff85cf837e0bceb71831a47f39cc14"}, - {file = "ijson-3.4.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:7ca72ca12e9a1dd4252c97d952be34282907f263f7e28fcdff3a01b83981e837"}, - {file = "ijson-3.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f79b2cd52bd220fff83b3ee4ef89b54fd897f57cc8564a6d8ab7ac669de3930"}, - {file = "ijson-3.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d16eed737610ad5ad8989b5864fbe09c64133129734e840c29085bb0d497fb03"}, - {file = "ijson-3.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b3aac1d7a27e1e3bdec5bd0689afe55c34aa499baa06a80852eda31f1ffa6dc"}, - {file 
= "ijson-3.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:784ae654aa9851851e87f323e9429b20b58a5399f83e6a7e348e080f2892081f"}, - {file = "ijson-3.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d05bd8fa6a8adefb32bbf7b993d2a2f4507db08453dd1a444c281413a6d9685"}, - {file = "ijson-3.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b5a05fd935cc28786b88c16976313086cd96414c6a3eb0a3822c47ab48b1793e"}, - {file = "ijson-3.4.0.tar.gz", hash = "sha256:5f74dcbad9d592c428d3ca3957f7115a42689ee7ee941458860900236ae9bb13"}, + {file = "ijson-3.4.0.post0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8f904a405b58a04b6ef0425f1babbc5c65feb66b0a4cc7f214d4ad7de106f77d"}, + {file = "ijson-3.4.0.post0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a07dcc1a8a1ddd76131a7c7528cbd12951c2e34eb3c3d63697b905069a2d65b1"}, + {file = "ijson-3.4.0.post0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ab3be841b8c430c1883b8c0775eb551f21b5500c102c7ee828afa35ddd701bdd"}, + {file = "ijson-3.4.0.post0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:43059ae0d657b11c5ddb11d149bc400c44f9e514fb8663057e9b2ea4d8d44c1f"}, + {file = "ijson-3.4.0.post0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0d3e82963096579d1385c06b2559570d7191e225664b7fa049617da838e1a4a4"}, + {file = "ijson-3.4.0.post0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:461ce4e87a21a261b60c0a68a2ad17c7dd214f0b90a0bec7e559a66b6ae3bd7e"}, + {file = "ijson-3.4.0.post0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:890cf6610c9554efcb9765a93e368efeb5bb6135f59ce0828d92eaefff07fde5"}, + {file = "ijson-3.4.0.post0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6793c29a5728e7751a7df01be58ba7da9b9690c12bf79d32094c70a908fa02b9"}, + {file = "ijson-3.4.0.post0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:a56b6674d7feec0401c91f86c376f4e3d8ff8129128a8ad21ca43ec0b1242f79"}, + {file = "ijson-3.4.0.post0-cp310-cp310-win32.whl", hash = "sha256:01767fcbd75a5fa5a626069787b41f04681216b798510d5f63bcf66884386368"}, + {file = "ijson-3.4.0.post0-cp310-cp310-win_amd64.whl", hash = "sha256:09127c06e5dec753feb9e4b8c5f6a23603d1cd672d098159a17e53a73b898eec"}, + {file = "ijson-3.4.0.post0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b473112e72c0c506da425da3278367b6680f340ecc093084693a1e819d28435"}, + {file = "ijson-3.4.0.post0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:043f9b7cf9cc744263a78175e769947733710d2412d25180df44b1086b23ebd5"}, + {file = "ijson-3.4.0.post0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b55e49045f4c8031f3673f56662fd828dc9e8d65bd3b03a9420dda0d370e64ba"}, + {file = "ijson-3.4.0.post0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:11f13b73194ea2a5a8b4a2863f25b0b4624311f10db3a75747b510c4958179b0"}, + {file = "ijson-3.4.0.post0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:659acb2843433e080c271ecedf7d19c71adde1ee5274fc7faa2fec0a793f9f1c"}, + {file = "ijson-3.4.0.post0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:deda4cfcaafa72ca3fa845350045b1d0fef9364ec9f413241bb46988afbe6ee6"}, + {file = "ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47352563e8c594360bacee2e0753e97025f0861234722d02faace62b1b6d2b2a"}, + {file = "ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5a48b9486242d1295abe7fd0fbb6308867da5ca3f69b55c77922a93c2b6847aa"}, + {file = "ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9c0886234d1fae15cf4581a430bdba03d79251c1ab3b07e30aa31b13ef28d01c"}, + {file = "ijson-3.4.0.post0-cp311-cp311-win32.whl", hash = "sha256:fecae19b5187d92900c73debb3a979b0b3290a53f85df1f8f3c5ba7d1e9fb9cb"}, + {file = 
"ijson-3.4.0.post0-cp311-cp311-win_amd64.whl", hash = "sha256:b39dbf87071f23a23c8077eea2ae7cfeeca9ff9ffec722dfc8b5f352e4dd729c"}, + {file = "ijson-3.4.0.post0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b607a500fca26101be47d2baf7cddb457b819ab60a75ce51ed1092a40da8b2f9"}, + {file = "ijson-3.4.0.post0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4827d9874a6a81625412c59f7ca979a84d01f7f6bfb3c6d4dc4c46d0382b14e0"}, + {file = "ijson-3.4.0.post0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4d4afec780881edb2a0d2dd40b1cdbe246e630022d5192f266172a0307986a7"}, + {file = "ijson-3.4.0.post0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432fb60ffb952926f9438e0539011e2dfcd108f8426ee826ccc6173308c3ff2c"}, + {file = "ijson-3.4.0.post0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:54a0e3e05d9a0c95ecba73d9579f146cf6d5c5874116c849dba2d39a5f30380e"}, + {file = "ijson-3.4.0.post0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05807edc0bcbd222dc6ea32a2b897f0c81dc7f12c8580148bc82f6d7f5e7ec7b"}, + {file = "ijson-3.4.0.post0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a5269af16f715855d9864937f9dd5c348ca1ac49cee6a2c7a1b7091c159e874f"}, + {file = "ijson-3.4.0.post0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b200df83c901f5bfa416d069ac71077aa1608f854a4c50df1b84ced560e9c9ec"}, + {file = "ijson-3.4.0.post0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6458bd8e679cdff459a0a5e555b107c3bbacb1f382da3fe0f40e392871eb518d"}, + {file = "ijson-3.4.0.post0-cp312-cp312-win32.whl", hash = "sha256:55f7f656b5986326c978cbb3a9eea9e33f3ef6ecc4535b38f1d452c731da39ab"}, + {file = "ijson-3.4.0.post0-cp312-cp312-win_amd64.whl", hash = "sha256:e15833dcf6f6d188fdc624a31cd0520c3ba21b6855dc304bc7c1a8aeca02d4ac"}, + {file = "ijson-3.4.0.post0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:114ed248166ac06377e87a245a158d6b98019d2bdd3bb93995718e0bd996154f"}, + {file = "ijson-3.4.0.post0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ffb21203736b08fe27cb30df6a4f802fafb9ef7646c5ff7ef79569b63ea76c57"}, + {file = "ijson-3.4.0.post0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:07f20ecd748602ac7f18c617637e53bd73ded7f3b22260bba3abe401a7fc284e"}, + {file = "ijson-3.4.0.post0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:27aa193d47ffc6bc4e45453896ad98fb089a367e8283b973f1fe5c0198b60b4e"}, + {file = "ijson-3.4.0.post0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ccddb2894eb7af162ba43b9475ac5825d15d568832f82eb8783036e5d2aebd42"}, + {file = "ijson-3.4.0.post0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61ab0b8c5bf707201dc67e02c116f4b6545c4afd7feb2264b989d242d9c4348a"}, + {file = "ijson-3.4.0.post0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:254cfb8c124af68327a0e7a49b50bbdacafd87c4690a3d62c96eb01020a685ef"}, + {file = "ijson-3.4.0.post0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:04ac9ca54db20f82aeda6379b5f4f6112fdb150d09ebce04affeab98a17b4ed3"}, + {file = "ijson-3.4.0.post0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a603d7474bf35e7b3a8e49c8dabfc4751841931301adff3f3318171c4e407f32"}, + {file = "ijson-3.4.0.post0-cp313-cp313-win32.whl", hash = "sha256:ec5bb1520cb212ebead7dba048bb9b70552c3440584f83b01b0abc96862e2a09"}, + {file = "ijson-3.4.0.post0-cp313-cp313-win_amd64.whl", hash = "sha256:3505dff18bdeb8b171eb28af6df34857e2be80dc01e2e3b624e77215ad58897f"}, + {file = "ijson-3.4.0.post0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:45a0b1c833ed2620eaf8da958f06ac8351c59e5e470e078400d23814670ed708"}, + {file = "ijson-3.4.0.post0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7809ec8c8f40228edaaa089f33e811dff4c5b8509702652870d3f286c9682e27"}, + {file = 
"ijson-3.4.0.post0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cf4a34c2cfe852aee75c89c05b0a4531c49dc0be27eeed221afd6fbf9c3e149c"}, + {file = "ijson-3.4.0.post0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a39d5d36067604b26b78de70b8951c90e9272450642661fe531a8f7a6936a7fa"}, + {file = "ijson-3.4.0.post0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83fc738d81c9ea686b452996110b8a6678296c481e0546857db24785bff8da92"}, + {file = "ijson-3.4.0.post0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b2a81aee91633868f5b40280e2523f7c5392e920a5082f47c5e991e516b483f6"}, + {file = "ijson-3.4.0.post0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:56169e298c5a2e7196aaa55da78ddc2415876a74fe6304f81b1eb0d3273346f7"}, + {file = "ijson-3.4.0.post0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eeb9540f0b1a575cbb5968166706946458f98c16e7accc6f2fe71efa29864241"}, + {file = "ijson-3.4.0.post0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ba3478ff0bb49d7ba88783f491a99b6e3fa929c930ab062d2bb7837e6a38fe88"}, + {file = "ijson-3.4.0.post0-cp313-cp313t-win32.whl", hash = "sha256:b005ce84e82f28b00bf777a464833465dfe3efa43a0a26c77b5ac40723e1a728"}, + {file = "ijson-3.4.0.post0-cp313-cp313t-win_amd64.whl", hash = "sha256:fe9c84c9b1c8798afa407be1cea1603401d99bfc7c34497e19f4f5e5ddc9b441"}, + {file = "ijson-3.4.0.post0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da6a21b88cbf5ecbc53371283988d22c9643aa71ae2873bbeaefd2dea3b6160b"}, + {file = "ijson-3.4.0.post0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cf24a48a1c3ca9d44a04feb59ccefeb9aa52bb49b9cb70ad30518c25cce74bb7"}, + {file = "ijson-3.4.0.post0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d14427d366f95f21adcb97d0ed1f6d30f6fdc04d0aa1e4de839152c50c2b8d65"}, + {file = 
"ijson-3.4.0.post0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339d49f6c5d24051c85d9226be96d2d56e633cb8b7d09dd8099de8d8b51a97e2"}, + {file = "ijson-3.4.0.post0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7206afcb396aaef66c2b066997b4e9d9042c4b7d777f4d994e9cec6d322c2fe6"}, + {file = "ijson-3.4.0.post0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c8dd327da225887194fe8b93f2b3c9c256353e14a6b9eefc940ed17fde38f5b8"}, + {file = "ijson-3.4.0.post0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4810546e66128af51fd4a0c9a640e84e8508e9c15c4f247d8a3e3253b20e1465"}, + {file = "ijson-3.4.0.post0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:103a0838061297d063bca81d724b0958b616f372bd893bbc278320152252c652"}, + {file = "ijson-3.4.0.post0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:40007c977e230e04118b27322f25a72ae342a3d61464b2057fcd9b21eeb7427a"}, + {file = "ijson-3.4.0.post0-cp314-cp314-win32.whl", hash = "sha256:f932969fc1fd4449ca141cf5f47ff357656a154a361f28d9ebca0badc5b02297"}, + {file = "ijson-3.4.0.post0-cp314-cp314-win_amd64.whl", hash = "sha256:3ed19b1e4349240773a8ce4a4bfa450892d4a57949c02c515cd6be5a46b7696a"}, + {file = "ijson-3.4.0.post0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:226447e40ca9340a39ed07d68ea02ee14b52cb4fe649425b256c1f0073531c83"}, + {file = "ijson-3.4.0.post0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c88f0669d45d4b1aa017c9b68d378e7cd15d188dfb6f0209adc78b7f45590a7"}, + {file = "ijson-3.4.0.post0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:56b3089dc28c12492d92cc4896d2be585a89ecae34e25d08c1df88f21815cb50"}, + {file = "ijson-3.4.0.post0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c117321cfa7b749cc1213f9b4c80dc958f0a206df98ec038ae4bcbbdb8463a15"}, + {file = 
"ijson-3.4.0.post0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8311f48db6a33116db5c81682f08b6e2405501a4b4e460193ae69fec3cd1f87a"}, + {file = "ijson-3.4.0.post0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91c61a3e63e04da648737e6b4abd537df1b46fb8cdf3219b072e790bb3c1a46b"}, + {file = "ijson-3.4.0.post0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1709171023ce82651b2f132575c2e6282e47f64ad67bd3260da476418d0e7895"}, + {file = "ijson-3.4.0.post0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:5f0a72b1e3c0f78551670c12b2fdc1bf05f2796254d9c2055ba319bec2216020"}, + {file = "ijson-3.4.0.post0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b982a3597b0439ce9c8f4cfc929d86c6ed43907908be1e8463a34dc35fe5b258"}, + {file = "ijson-3.4.0.post0-cp314-cp314t-win32.whl", hash = "sha256:4e39bfdc36b0b460ef15a06550a6a385c64c81f7ac205ccff39bd45147918912"}, + {file = "ijson-3.4.0.post0-cp314-cp314t-win_amd64.whl", hash = "sha256:17e45262a5ddef39894013fb1548ee7094e444c8389eb1a97f86708b19bea03e"}, + {file = "ijson-3.4.0.post0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:35eb2760a42fd9461358b4be131287587b49ff504fc37fa3014dca6c27c343f4"}, + {file = "ijson-3.4.0.post0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f82ca7abfb3ef3cf2194c71dad634572bcccd62a5dd466649f78fe73d492c860"}, + {file = "ijson-3.4.0.post0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:97f5ef3d839fc24b0ad47e8b31b4751ae72c5d83606e3ee4c92bb25965c03a4f"}, + {file = "ijson-3.4.0.post0-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a2c873742e9f7e21378516217d81d6fa11d34bae860ed364832c00ab1dbf37ed"}, + {file = "ijson-3.4.0.post0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2f8b9ffa2c2dfe3289da9aec4e5ab52684fa2b2da2c853c7891b360ec46fba07"}, + {file = 
"ijson-3.4.0.post0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0634b21188c67e5cf471cc1d30d193d19f521d89e2125ab1fb602aa8ae61e050"}, + {file = "ijson-3.4.0.post0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3752dd6f51ef58a71799de745649deff293e959700f1b7f5b1989618da366f24"}, + {file = "ijson-3.4.0.post0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:57db77f4ea3eca09f519f627d9f9c76eb862b30edef5d899f031feeed94f05a1"}, + {file = "ijson-3.4.0.post0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:435270a4b75667305f6df3226e5224e83cd6906022d7fdcc9df05caae725f796"}, + {file = "ijson-3.4.0.post0-cp39-cp39-win32.whl", hash = "sha256:742c211b004ab51ccad2b301525d8a6eb2cf68a5fb82d78836f3a351eec44d4e"}, + {file = "ijson-3.4.0.post0-cp39-cp39-win_amd64.whl", hash = "sha256:35aaa979da875fa92bea5dc5969b1541b4912b165091761785459a43f0c20946"}, + {file = "ijson-3.4.0.post0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:add9242f886eae844a7410b84aee2bbb8bdc83c624f227cb1fdb2d0476a96cb1"}, + {file = "ijson-3.4.0.post0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:69718ed41710dfcaa7564b0af42abc05875d4f7aaa24627c808867ef32634bc7"}, + {file = "ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:636b6eca96c6c43c04629c6b37fad0181662eaacf9877c71c698485637f752f9"}, + {file = "ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb5e73028f6e63d27b3d286069fe350ed80a4ccc493b022b590fea4bb086710d"}, + {file = "ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:461acf4320219459dabe5ed90a45cb86c9ba8cc6d6db9dad0d9427d42f57794c"}, + {file = "ijson-3.4.0.post0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a0fedf09c0f6ffa2a99e7e7fd9c5f3caf74e655c1ee015a0797383e99382ebc3"}, + {file = "ijson-3.4.0.post0.tar.gz", 
hash = "sha256:9aa02dc70bb245670a6ca7fba737b992aeeb4895360980622f7e568dbf23e41e"}, ] [[package]] From 40893be93c2c7e566134c2df99ad311aeb5a5cdb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 Oct 2025 13:24:16 +0100 Subject: [PATCH 15/72] Bump idna from 3.10 to 3.11 (#19053) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 780d6528dd..175039a69c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -673,14 +673,14 @@ test = ["coverage[toml]", "pretend", "pytest", "pytest-cov"] [[package]] name = "idna" -version = "3.10" +version = "3.11" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, ] [package.extras] From 9d81bb703c2d07d006fb7fdf975f585e72dca642 Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Fri, 24 Oct 2025 14:21:49 -0400 Subject: [PATCH 16/72] Always treat `RETURNING` as supported by SQL engines (#19047) Can do this now that SQLite 3.35.0 added support for `RETURNING`. > The RETURNING syntax has been supported by SQLite since version 3.35.0 (2021-03-12). 
> > *-- https://sqlite.org/lang_returning.html* This also bumps the minimum supported SQLite version according to Synapse's [deprecation policy](https://element-hq.github.io/synapse/latest/deprecation_policy.html#platform-dependencies). Fix https://github.com/element-hq/synapse/issues/17577 --- .ci/scripts/calculate_jobs.py | 14 ++-- .github/workflows/latest_deps.yml | 4 +- .github/workflows/twisted_trunk.yml | 4 +- changelog.d/19047.doc | 1 + changelog.d/19047.misc | 1 + changelog.d/19047.removal | 1 + docs/deprecation_policy.md | 2 +- docs/development/contributing_guide.md | 2 +- synapse/storage/database.py | 39 +++-------- .../storage/databases/main/delayed_events.py | 35 ++++------ .../databases/main/event_federation.py | 68 +++++-------------- .../storage/databases/main/registration.py | 32 ++------- .../storage/databases/main/user_directory.py | 43 ++++-------- synapse/storage/engines/_base.py | 6 -- synapse/storage/engines/postgres.py | 5 -- synapse/storage/engines/sqlite.py | 9 +-- 16 files changed, 80 insertions(+), 186 deletions(-) create mode 100644 changelog.d/19047.doc create mode 100644 changelog.d/19047.misc create mode 100644 changelog.d/19047.removal diff --git a/.ci/scripts/calculate_jobs.py b/.ci/scripts/calculate_jobs.py index 5249acdc5d..f3b1bb1503 100755 --- a/.ci/scripts/calculate_jobs.py +++ b/.ci/scripts/calculate_jobs.py @@ -99,24 +99,24 @@ def set_output(key: str, value: str): # First calculate the various sytest jobs. 
# -# For each type of test we only run on bullseye on PRs +# For each type of test we only run on bookworm on PRs sytest_tests = [ { - "sytest-tag": "bullseye", + "sytest-tag": "bookworm", }, { - "sytest-tag": "bullseye", + "sytest-tag": "bookworm", "postgres": "postgres", }, { - "sytest-tag": "bullseye", + "sytest-tag": "bookworm", "postgres": "multi-postgres", "workers": "workers", }, { - "sytest-tag": "bullseye", + "sytest-tag": "bookworm", "postgres": "multi-postgres", "workers": "workers", "reactor": "asyncio", @@ -127,11 +127,11 @@ def set_output(key: str, value: str): sytest_tests.extend( [ { - "sytest-tag": "bullseye", + "sytest-tag": "bookworm", "reactor": "asyncio", }, { - "sytest-tag": "bullseye", + "sytest-tag": "bookworm", "postgres": "postgres", "reactor": "asyncio", }, diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index a00c52fcb2..526546531a 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -139,9 +139,9 @@ jobs: fail-fast: false matrix: include: - - sytest-tag: bullseye + - sytest-tag: bookworm - - sytest-tag: bullseye + - sytest-tag: bookworm postgres: postgres workers: workers redis: redis diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index 04a8db2cc7..3f14219bbc 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -108,11 +108,11 @@ jobs: if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest container: - # We're using debian:bullseye because it uses Python 3.9 which is our minimum supported Python version. + # We're using bookworm because that's what Debian oldstable is at the time of writing. # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if # they were to be released immediately. 
For simplicity's sake (and to save CI runners) we use the oldest # version, assuming that any incompatibilities on newer versions would also be present on the oldest. - image: matrixdotorg/sytest-synapse:bullseye + image: matrixdotorg/sytest-synapse:bookworm volumes: - ${{ github.workspace }}:/src diff --git a/changelog.d/19047.doc b/changelog.d/19047.doc new file mode 100644 index 0000000000..fee241f2a5 --- /dev/null +++ b/changelog.d/19047.doc @@ -0,0 +1 @@ +Update the link to the Debian oldstable package for SQLite. diff --git a/changelog.d/19047.misc b/changelog.d/19047.misc new file mode 100644 index 0000000000..47f686a158 --- /dev/null +++ b/changelog.d/19047.misc @@ -0,0 +1 @@ +Always treat `RETURNING` as supported by SQL engines, now that the minimum-supported versions of both SQLite and PostgreSQL support it. diff --git a/changelog.d/19047.removal b/changelog.d/19047.removal new file mode 100644 index 0000000000..da7a161868 --- /dev/null +++ b/changelog.d/19047.removal @@ -0,0 +1 @@ +Remove support for SQLite < 3.37.2. diff --git a/docs/deprecation_policy.md b/docs/deprecation_policy.md index 2f3a09723e..06c724d348 100644 --- a/docs/deprecation_policy.md +++ b/docs/deprecation_policy.md @@ -21,7 +21,7 @@ people building from source should ensure they can fetch recent versions of Rust (e.g. by using [rustup](https://rustup.rs/)). The oldest supported version of SQLite is the version -[provided](https://packages.debian.org/bullseye/libsqlite3-0) by +[provided](https://packages.debian.org/oldstable/libsqlite3-0) by [Debian oldstable](https://wiki.debian.org/DebianOldStable). 
diff --git a/docs/development/contributing_guide.md b/docs/development/contributing_guide.md index eb6f04e301..41fff1d6a3 100644 --- a/docs/development/contributing_guide.md +++ b/docs/development/contributing_guide.md @@ -320,7 +320,7 @@ The following command will let you run the integration test with the most common configuration: ```sh -$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bullseye +$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bookworm ``` (Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.) diff --git a/synapse/storage/database.py b/synapse/storage/database.py index 764ca9f229..b7f870bd26 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -1161,36 +1161,17 @@ def simple_insert_returning_txn( SQLite versions that don't support it). """ - if txn.database_engine.supports_returning: - sql = "INSERT INTO %s (%s) VALUES(%s) RETURNING %s" % ( - table, - ", ".join(k for k in values.keys()), - ", ".join("?" for _ in values.keys()), - ", ".join(k for k in returning), - ) - - txn.execute(sql, list(values.values())) - row = txn.fetchone() - assert row is not None - return row - else: - # For old versions of SQLite we do a standard insert and then can - # use `last_insert_rowid` to get at the row we just inserted - DatabasePool.simple_insert_txn( - txn, - table=table, - values=values, - ) - txn.execute("SELECT last_insert_rowid()") - row = txn.fetchone() - assert row is not None - (rowid,) = row + sql = "INSERT INTO %s (%s) VALUES(%s) RETURNING %s" % ( + table, + ", ".join(k for k in values.keys()), + ", ".join("?" 
for _ in values.keys()), + ", ".join(k for k in returning), + ) - row = DatabasePool.simple_select_one_txn( - txn, table=table, keyvalues={"rowid": rowid}, retcols=returning - ) - assert row is not None - return row + txn.execute(sql, list(values.values())) + row = txn.fetchone() + assert row is not None + return row async def simple_insert_many( self, diff --git a/synapse/storage/databases/main/delayed_events.py b/synapse/storage/databases/main/delayed_events.py index 33101327f5..6ad161db33 100644 --- a/synapse/storage/databases/main/delayed_events.py +++ b/synapse/storage/databases/main/delayed_events.py @@ -347,33 +347,28 @@ def process_target_delayed_event_txn( EventDetails, Optional[Timestamp], ]: - sql_cols = ", ".join( - ( - "room_id", - "event_type", - "state_key", - "origin_server_ts", - "content", - "device_id", - ) - ) - sql_update = "UPDATE delayed_events SET is_processed = TRUE" - sql_where = "WHERE delay_id = ? AND user_localpart = ? AND NOT is_processed" - sql_args = (delay_id, user_localpart) txn.execute( + """ + UPDATE delayed_events + SET is_processed = TRUE + WHERE delay_id = ? AND user_localpart = ? 
+ AND NOT is_processed + RETURNING + room_id, + event_type, + state_key, + origin_server_ts, + content, + device_id + """, ( - f"{sql_update} {sql_where} RETURNING {sql_cols}" - if self.database_engine.supports_returning - else f"SELECT {sql_cols} FROM delayed_events {sql_where}" + delay_id, + user_localpart, ), - sql_args, ) row = txn.fetchone() if row is None: raise NotFoundError("Delayed event not found") - elif not self.database_engine.supports_returning: - txn.execute(f"{sql_update} {sql_where}", sql_args) - assert txn.rowcount == 1 event = EventDetails( RoomID.from_string(row[0]), diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index d889e8eceb..0a8571f0c8 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -2040,61 +2040,29 @@ async def remove_received_event_from_staging( Returns: The received_ts of the row that was deleted, if any. """ - if self.db_pool.engine.supports_returning: - def _remove_received_event_from_staging_txn( - txn: LoggingTransaction, - ) -> Optional[int]: - sql = """ - DELETE FROM federation_inbound_events_staging - WHERE origin = ? AND event_id = ? - RETURNING received_ts - """ - - txn.execute(sql, (origin, event_id)) - row = cast(Optional[tuple[int]], txn.fetchone()) - - if row is None: - return None - - return row[0] - - return await self.db_pool.runInteraction( - "remove_received_event_from_staging", - _remove_received_event_from_staging_txn, - db_autocommit=True, - ) + def _remove_received_event_from_staging_txn( + txn: LoggingTransaction, + ) -> Optional[int]: + sql = """ + DELETE FROM federation_inbound_events_staging + WHERE origin = ? AND event_id = ? 
+ RETURNING received_ts + """ - else: + txn.execute(sql, (origin, event_id)) + row = cast(Optional[tuple[int]], txn.fetchone()) - def _remove_received_event_from_staging_txn( - txn: LoggingTransaction, - ) -> Optional[int]: - received_ts = self.db_pool.simple_select_one_onecol_txn( - txn, - table="federation_inbound_events_staging", - keyvalues={ - "origin": origin, - "event_id": event_id, - }, - retcol="received_ts", - allow_none=True, - ) - self.db_pool.simple_delete_txn( - txn, - table="federation_inbound_events_staging", - keyvalues={ - "origin": origin, - "event_id": event_id, - }, - ) + if row is None: + return None - return received_ts + return row[0] - return await self.db_pool.runInteraction( - "remove_received_event_from_staging", - _remove_received_event_from_staging_txn, - ) + return await self.db_pool.runInteraction( + "remove_received_event_from_staging", + _remove_received_event_from_staging_txn, + db_autocommit=True, + ) async def get_next_staged_event_id_for_room( self, diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py index 7ce9bf43e6..bad2d0b63a 100644 --- a/synapse/storage/databases/main/registration.py +++ b/synapse/storage/databases/main/registration.py @@ -2544,31 +2544,13 @@ def user_delete_access_tokens_for_devices_txn( ) args.append(user_id) - if self.database_engine.supports_returning: - sql = f""" - DELETE FROM access_tokens - WHERE {clause} AND user_id = ? 
- RETURNING token, id, device_id - """ - txn.execute(sql, args) - tokens_and_devices = txn.fetchall() - else: - tokens_and_devices = self.db_pool.simple_select_many_txn( - txn, - table="access_tokens", - column="device_id", - iterable=batch_device_ids, - keyvalues={"user_id": user_id}, - retcols=("token", "id", "device_id"), - ) - - self.db_pool.simple_delete_many_txn( - txn, - table="access_tokens", - keyvalues={"user_id": user_id}, - column="device_id", - values=batch_device_ids, - ) + sql = f""" + DELETE FROM access_tokens + WHERE {clause} AND user_id = ? + RETURNING token, id, device_id + """ + txn.execute(sql, args) + tokens_and_devices = txn.fetchall() self._invalidate_cache_and_stream_bulk( txn, diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py index 895d7e6148..7a57beee71 100644 --- a/synapse/storage/databases/main/user_directory.py +++ b/synapse/storage/databases/main/user_directory.py @@ -353,27 +353,19 @@ async def _populate_user_directory_process_users( def _populate_user_directory_process_users_txn( txn: LoggingTransaction, ) -> Optional[int]: - if self.database_engine.supports_returning: - # Note: we use an ORDER BY in the SELECT to force usage of an - # index. Otherwise, postgres does a sequential scan that is - # surprisingly slow (I think due to the fact it will read/skip - # over lots of already deleted rows). - sql = f""" - DELETE FROM {TEMP_TABLE + "_users"} - WHERE user_id IN ( - SELECT user_id FROM {TEMP_TABLE + "_users"} ORDER BY user_id LIMIT ? - ) - RETURNING user_id - """ - txn.execute(sql, (batch_size,)) - user_result = cast(list[tuple[str]], txn.fetchall()) - else: - sql = "SELECT user_id FROM %s ORDER BY user_id LIMIT %s" % ( - TEMP_TABLE + "_users", - str(batch_size), + # Note: we use an ORDER BY in the SELECT to force usage of an + # index. 
Otherwise, postgres does a sequential scan that is + # surprisingly slow (I think due to the fact it will read/skip + # over lots of already deleted rows). + sql = f""" + DELETE FROM {TEMP_TABLE + "_users"} + WHERE user_id IN ( + SELECT user_id FROM {TEMP_TABLE + "_users"} ORDER BY user_id LIMIT ? ) - txn.execute(sql) - user_result = cast(list[tuple[str]], txn.fetchall()) + RETURNING user_id + """ + txn.execute(sql, (batch_size,)) + user_result = cast(list[tuple[str]], txn.fetchall()) if not user_result: return None @@ -432,17 +424,6 @@ def _populate_user_directory_process_users_txn( # Actually insert the users with their profiles into the directory. self._update_profiles_in_user_dir_txn(txn, profiles_to_insert) - # We've finished processing the users. Delete it from the table, if - # we haven't already. - if not self.database_engine.supports_returning: - self.db_pool.simple_delete_many_txn( - txn, - table=TEMP_TABLE + "_users", - column="user_id", - values=users_to_work_on, - keyvalues={}, - ) - # Update the remaining counter. progress["remaining"] -= len(users_to_work_on) self.db_pool.updates._background_update_progress_txn( diff --git a/synapse/storage/engines/_base.py b/synapse/storage/engines/_base.py index 9fec42c2e0..be6981f77c 100644 --- a/synapse/storage/engines/_base.py +++ b/synapse/storage/engines/_base.py @@ -63,12 +63,6 @@ def supports_using_any_list(self) -> bool: """ ... - @property - @abc.abstractmethod - def supports_returning(self) -> bool: - """Do we support the `RETURNING` clause in insert/update/delete?""" - ... 
- @abc.abstractmethod def check_database( self, db_conn: ConnectionType, allow_outdated_version: bool = False diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py index 8a1bbfa0f5..b059b924c2 100644 --- a/synapse/storage/engines/postgres.py +++ b/synapse/storage/engines/postgres.py @@ -193,11 +193,6 @@ def supports_using_any_list(self) -> bool: """Do we support using `a = ANY(?)` and passing a list""" return True - @property - def supports_returning(self) -> bool: - """Do we support the `RETURNING` clause in insert/update/delete?""" - return True - def is_deadlock(self, error: Exception) -> bool: if isinstance(error, psycopg2.DatabaseError): # https://www.postgresql.org/docs/current/static/errcodes-appendix.html diff --git a/synapse/storage/engines/sqlite.py b/synapse/storage/engines/sqlite.py index ac3dc25bb5..b49d230eed 100644 --- a/synapse/storage/engines/sqlite.py +++ b/synapse/storage/engines/sqlite.py @@ -68,11 +68,6 @@ def supports_using_any_list(self) -> bool: """Do we support using `a = ANY(?)` and passing a list""" return False - @property - def supports_returning(self) -> bool: - """Do we support the `RETURNING` clause in insert/update/delete?""" - return sqlite3.sqlite_version_info >= (3, 35, 0) - def check_database( self, db_conn: sqlite3.Connection, allow_outdated_version: bool = False ) -> None: @@ -80,8 +75,8 @@ def check_database( # Synapse is untested against older SQLite versions, and we don't want # to let users upgrade to a version of Synapse with broken support for their # sqlite version, because it risks leaving them with a half-upgraded db. - if sqlite3.sqlite_version_info < (3, 27, 0): - raise RuntimeError("Synapse requires sqlite 3.27 or above.") + if sqlite3.sqlite_version_info < (3, 37, 2): + raise RuntimeError("Synapse requires sqlite 3.37.2 or above.") def check_new_database(self, txn: Cursor) -> None: """Gets called when setting up a brand new database. 
This allows us to From f1695ac20efdbf24543c1ce7b2795264d612b7fb Mon Sep 17 00:00:00 2001 From: Shay Date: Fri, 24 Oct 2025 13:32:16 -0700 Subject: [PATCH 17/72] Add an admin API to get the space hierarchy (#19021) It is often useful when investigating a space to get information about that space and it's children. This PR adds an Admin API to return information about a space and it's children, regardless of room membership. Will not fetch information over federation about remote rooms that the server is not participating in. --- changelog.d/19021.feature | 2 + docs/admin_api/rooms.md | 73 ++++++++ synapse/handlers/room_summary.py | 71 ++++++-- synapse/rest/admin/__init__.py | 2 + synapse/rest/admin/rooms.py | 44 +++++ tests/rest/admin/test_room.py | 304 ++++++++++++++++++++++++++++++- 6 files changed, 475 insertions(+), 21 deletions(-) create mode 100644 changelog.d/19021.feature diff --git a/changelog.d/19021.feature b/changelog.d/19021.feature new file mode 100644 index 0000000000..dea4748769 --- /dev/null +++ b/changelog.d/19021.feature @@ -0,0 +1,2 @@ +Add an [Admin API](https://element-hq.github.io/synapse/latest/usage/administration/admin_api/index.html) +to allow an admin to fetch the space/room hierarchy for a given space. \ No newline at end of file diff --git a/docs/admin_api/rooms.md b/docs/admin_api/rooms.md index 12af87148d..11e787c236 100644 --- a/docs/admin_api/rooms.md +++ b/docs/admin_api/rooms.md @@ -1115,3 +1115,76 @@ Example response: ] } ``` + +# Admin Space Hierarchy Endpoint + +This API allows an admin to fetch the space/room hierarchy for a given space, +returning details about that room and any children the room may have, paginating +over the space tree in a depth-first manner to locate child rooms. This is +functionally similar to the [CS Hierarchy](https://spec.matrix.org/v1.16/client-server-api/#get_matrixclientv1roomsroomidhierarchy) endpoint but does not check for +room membership when returning room summaries. 
+ +The endpoint does not query other servers over federation about remote rooms +that the server has not joined. This is a deliberate trade-off: while this +means it will leave some holes in the hierarchy that we could otherwise +sometimes fill in, it significantly improves the endpoint's response time and +the admin endpoint is designed for managing rooms local to the homeserver +anyway. + +**Parameters** + +The following query parameters are available: + +* `from` - An optional pagination token, provided when there are more rooms to + return than the limit. +* `limit` - Maximum amount of rooms to return. Must be a non-negative integer, + defaults to `50`. +* `max_depth` - The maximum depth in the tree to explore, must be a non-negative + integer. 0 would correspond to just the root room, 1 would include just the + root room's children, etc. If not provided will recurse into the space tree without limit. + +Request: + +```http +GET /_synapse/admin/v1/rooms//hierarchy +``` + +Response: + +```json +{ + "rooms": + [ + { "children_state": [ + { + "content": { + "via": ["local_test_server"] + }, + "origin_server_ts": 1500, + "sender": "@user:test", + "state_key": "!QrMkkqBSwYRIFNFCso:test", + "type": "m.space.child" + } + ], + "name": "space room", + "guest_can_join": false, + "join_rule": "public", + "num_joined_members": 1, + "room_id": "!sPOpNyMHbZAoAOsOFL:test", + "room_type": "m.space", + "world_readable": false + }, + + { + "children_state": [], + "guest_can_join": true, + "join_rule": "invite", + "name": "nefarious", + "num_joined_members": 1, + "room_id": "!QrMkkqBSwYRIFNFCso:test", + "topic": "being bad", + "world_readable": false} + ], + "next_batch": "KUYmRbeSpAoaAIgOKGgyaCEn" +} +``` diff --git a/synapse/handlers/room_summary.py b/synapse/handlers/room_summary.py index a948202056..a3247d3cda 100644 --- a/synapse/handlers/room_summary.py +++ b/synapse/handlers/room_summary.py @@ -116,6 +116,8 @@ def __init__(self, hs: "HomeServer"): str, str, bool, + bool, + 
bool, Optional[int], Optional[int], Optional[str], @@ -133,6 +135,8 @@ async def get_room_hierarchy( requester: Requester, requested_room_id: str, suggested_only: bool = False, + omit_remote_room_hierarchy: bool = False, + admin_skip_room_visibility_check: bool = False, max_depth: Optional[int] = None, limit: Optional[int] = None, from_token: Optional[str] = None, @@ -146,6 +150,11 @@ async def get_room_hierarchy( requested_room_id: The room ID to start the hierarchy at (the "root" room). suggested_only: Whether we should only return children with the "suggested" flag set. + omit_remote_room_hierarchy: Whether to skip reaching out over + federation to get information on rooms which the server + is not currently joined to + admin_skip_room_visibility_check: Whether to skip checking if the room can + be accessed by the requester, used for the admin endpoints. max_depth: The maximum depth in the tree to explore, must be a non-negative integer. @@ -173,6 +182,8 @@ async def get_room_hierarchy( requester.user.to_string(), requested_room_id, suggested_only, + omit_remote_room_hierarchy, + admin_skip_room_visibility_check, max_depth, limit, from_token, @@ -182,6 +193,8 @@ async def get_room_hierarchy( requester.user.to_string(), requested_room_id, suggested_only, + omit_remote_room_hierarchy, + admin_skip_room_visibility_check, max_depth, limit, from_token, @@ -193,6 +206,8 @@ async def _get_room_hierarchy( requester: str, requested_room_id: str, suggested_only: bool = False, + omit_remote_room_hierarchy: bool = False, + admin_skip_room_visibility_check: bool = False, max_depth: Optional[int] = None, limit: Optional[int] = None, from_token: Optional[str] = None, @@ -204,17 +219,18 @@ async def _get_room_hierarchy( local_room = await self._store.is_host_joined( requested_room_id, self._server_name ) - if local_room and not await self._is_local_room_accessible( - requested_room_id, requester - ): - raise UnstableSpecAuthError( - 403, - "User %s not in room %s, and room 
previews are disabled" - % (requester, requested_room_id), - errcode=Codes.NOT_JOINED, - ) + if not admin_skip_room_visibility_check: + if local_room and not await self._is_local_room_accessible( + requested_room_id, requester + ): + raise UnstableSpecAuthError( + 403, + "User %s not in room %s, and room previews are disabled" + % (requester, requested_room_id), + errcode=Codes.NOT_JOINED, + ) - if not local_room: + if not local_room and not omit_remote_room_hierarchy: room_hierarchy = await self._summarize_remote_room_hierarchy( _RoomQueueEntry(requested_room_id, remote_room_hosts or ()), False, @@ -223,12 +239,13 @@ async def _get_room_hierarchy( if not root_room_entry or not await self._is_remote_room_accessible( requester, requested_room_id, root_room_entry.room ): - raise UnstableSpecAuthError( - 403, - "User %s not in room %s, and room previews are disabled" - % (requester, requested_room_id), - errcode=Codes.NOT_JOINED, - ) + if not admin_skip_room_visibility_check: + raise UnstableSpecAuthError( + 403, + "User %s not in room %s, and room previews are disabled" + % (requester, requested_room_id), + errcode=Codes.NOT_JOINED, + ) # If this is continuing a previous session, pull the persisted data. if from_token: @@ -240,13 +257,18 @@ async def _get_room_hierarchy( except StoreError: raise SynapseError(400, "Unknown pagination token", Codes.INVALID_PARAM) - # If the requester, room ID, suggested-only, or max depth were modified - # the session is invalid. + # If the requester, room ID, suggested-only, max depth, + # omit_remote_room_hierarchy, or admin_skip_room_visibility_check + # were modified the session is invalid. 
if ( requester != pagination_session["requester"] or requested_room_id != pagination_session["room_id"] or suggested_only != pagination_session["suggested_only"] or max_depth != pagination_session["max_depth"] + or omit_remote_room_hierarchy + != pagination_session["omit_remote_room_hierarchy"] + or admin_skip_room_visibility_check + != pagination_session["admin_skip_room_visibility_check"] ): raise SynapseError(400, "Unknown pagination token", Codes.INVALID_PARAM) @@ -301,6 +323,7 @@ async def _get_room_hierarchy( None, room_id, suggested_only, + admin_skip_room_visibility_check=admin_skip_room_visibility_check, ) # Otherwise, attempt to use information for federation. @@ -321,7 +344,7 @@ async def _get_room_hierarchy( # If the above isn't true, attempt to fetch the room # information over federation. - else: + elif not omit_remote_room_hierarchy: ( room_entry, children_room_entries, @@ -378,6 +401,8 @@ async def _get_room_hierarchy( "room_id": requested_room_id, "suggested_only": suggested_only, "max_depth": max_depth, + "omit_remote_room_hierarchy": omit_remote_room_hierarchy, + "admin_skip_room_visibility_check": admin_skip_room_visibility_check, # The stored state. "room_queue": [ attr.astuple(room_entry) for room_entry in room_queue @@ -460,6 +485,7 @@ async def _summarize_local_room( room_id: str, suggested_only: bool, include_children: bool = True, + admin_skip_room_visibility_check: bool = False, ) -> Optional["_RoomEntry"]: """ Generate a room entry and a list of event entries for a given room. @@ -476,11 +502,16 @@ async def _summarize_local_room( Otherwise, all children are returned. include_children: Whether to include the events of any children. + admin_skip_room_visibility_check: Whether to skip checking if the room + can be accessed by the requester, used for the admin endpoints. Returns: A room entry if the room should be returned. None, otherwise. 
""" - if not await self._is_local_room_accessible(room_id, requester, origin): + if ( + not admin_skip_room_visibility_check + and not await self._is_local_room_accessible(room_id, requester, origin) + ): return None room_entry = await self._build_room_entry(room_id, for_federation=bool(origin)) diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 5e75dc4c00..bcaba85da3 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -74,6 +74,7 @@ RegistrationTokenRestServlet, ) from synapse.rest.admin.rooms import ( + AdminRoomHierarchy, BlockRoomRestServlet, DeleteRoomStatusByDeleteIdRestServlet, DeleteRoomStatusByRoomIdRestServlet, @@ -342,6 +343,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: ExperimentalFeaturesRestServlet(hs).register(http_server) SuspendAccountRestServlet(hs).register(http_server) ScheduledTasksRestServlet(hs).register(http_server) + AdminRoomHierarchy(hs).register(http_server) EventRestServlet(hs).register(http_server) diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py index 216af29f9b..e1bfca3c03 100644 --- a/synapse/rest/admin/rooms.py +++ b/synapse/rest/admin/rooms.py @@ -63,6 +63,50 @@ logger = logging.getLogger(__name__) +class AdminRoomHierarchy(RestServlet): + """ + Given a room, returns room details on that room and any space children of + the provided room. 
Does not reach out over federation to fetch information about + any remote rooms which the server is not currently participating in + """ + + PATTERNS = admin_patterns("/rooms/(?P[^/]*)/hierarchy$") + + def __init__(self, hs: "HomeServer"): + self._auth = hs.get_auth() + self._room_summary_handler = hs.get_room_summary_handler() + self._store = hs.get_datastores().main + self._storage_controllers = hs.get_storage_controllers() + + async def on_GET( + self, request: SynapseRequest, room_id: str + ) -> tuple[int, JsonDict]: + requester = await self._auth.get_user_by_req(request) + await assert_user_is_admin(self._auth, requester) + + max_depth = parse_integer(request, "max_depth") + limit = parse_integer(request, "limit") + + room_entry_summary = await self._room_summary_handler.get_room_hierarchy( + requester, + room_id, + # We omit details about remote rooms because we only care + # about managing rooms local to the homeserver. This + # also immensely helps with the response time of the + # endpoint since we don't need to reach out over federation. + # There is a trade-off as this will leave holes where + # information about public/peekable remote rooms the + # server is not participating in will be omitted. + omit_remote_room_hierarchy=True, + admin_skip_room_visibility_check=True, + max_depth=max_depth, + limit=limit, + from_token=parse_string(request, "from"), + ) + + return HTTPStatus.OK, room_entry_summary + + class RoomRestV2Servlet(RestServlet): """Delete a room from server asynchronously with a background task. 
diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py index 6bd21630db..5b95262365 100644 --- a/tests/rest/admin/test_room.py +++ b/tests/rest/admin/test_room.py @@ -31,7 +31,7 @@ from twisted.internet.testing import MemoryReactor import synapse.rest.admin -from synapse.api.constants import EventTypes, Membership, RoomTypes +from synapse.api.constants import EventContentFields, EventTypes, Membership, RoomTypes from synapse.api.errors import Codes from synapse.api.room_versions import RoomVersions from synapse.handlers.pagination import ( @@ -56,6 +56,308 @@ ONE_HOUR_IN_S = 3600 +class AdminHierarchyTestCase(unittest.HomeserverTestCase): + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + room.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + # create some users + self.admin_user = self.register_user("admin", "pass", admin=True) + self.admin_user_tok = self.login("admin", "pass") + + self.other_user = self.register_user("user", "pass") + self.other_user_tok = self.login("user", "pass") + + self.third_user = self.register_user("third_user", "pass") + self.third_user_tok = self.login("third_user", "pass") + + # mock out the function which pulls room information in over federation. 
+ self._room_summary_handler = hs.get_room_summary_handler() + self._room_summary_handler._summarize_remote_room_hierarchy = Mock() # type: ignore[method-assign] + + # create some rooms with different options + self.room_id1 = self.helper.create_room_as( + self.other_user, + is_public=False, + tok=self.other_user_tok, + extra_content={"name": "nefarious", "topic": "being bad"}, + ) + + self.room_id2 = self.helper.create_room_as( + self.third_user, + tok=self.third_user_tok, + extra_content={"name": "also nefarious"}, + ) + + self.room_id3 = self.helper.create_room_as( + self.admin_user, + is_public=False, + tok=self.admin_user_tok, + extra_content={ + "name": "not nefarious", + "topic": "happy things", + "creation_content": { + "additional_creators": [self.other_user, self.third_user] + }, + }, + room_version="12", + ) + + self.not_in_space_room_id = self.helper.create_room_as( + self.other_user, + tok=self.other_user_tok, + extra_content={"name": "not related to other rooms"}, + ) + + # create a space room + self.space_room_id = self.helper.create_room_as( + self.other_user, + is_public=True, + extra_content={ + "visibility": "public", + "creation_content": {EventContentFields.ROOM_TYPE: RoomTypes.SPACE}, + "name": "space_room", + }, + tok=self.other_user_tok, + ) + + # and an unjoined remote room + self.remote_room_id = "!remote_room" + + self.room_id_to_human_name_map = { + self.room_id1: "room1", + self.room_id2: "room2", + self.room_id3: "room3", + self.not_in_space_room_id: "room4", + self.space_room_id: "space_room", + self.remote_room_id: "remote_room", + } + + # add three of the rooms to space + for state_key in [self.room_id1, self.room_id2, self.room_id3]: + self.helper.send_state( + self.space_room_id, + EventTypes.SpaceChild, + body={"via": ["local_test_server"]}, + tok=self.other_user_tok, + state_key=state_key, + ) + + # and add remote room to space - ideally we'd add an actual remote + # space with rooms in it but the test framework doesn't 
currently + # support that. Instead we add a room which the server would have to + # reach out over federation to get details about and assert that the + # federation call was not made + self.helper.send_state( + self.space_room_id, + EventTypes.SpaceChild, + body={"via": ["remote_test_server"]}, + tok=self.other_user_tok, + state_key=self.remote_room_id, + ) + + def test_no_auth(self) -> None: + """ + If the requester does not provide authentication, a 401 is returned + """ + + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/rooms/{self.space_room_id}/hierarchy", + ) + + self.assertEqual(401, channel.code, msg=channel.json_body) + self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"]) + + def test_requester_is_no_admin(self) -> None: + """ + If the requester is not a server admin, an error 403 is returned. + """ + + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/rooms/{self.space_room_id}/hierarchy", + access_token=self.other_user_tok, + ) + + self.assertEqual(403, channel.code, msg=channel.json_body) + self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) + + def test_bad_request(self) -> None: + """ + Test that invalid param values raise an error + """ + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/rooms/{self.space_room_id}/hierarchy?limit=ten", + access_token=self.admin_user_tok, + ) + self.assertEqual(400, channel.code, msg=channel.json_body) + self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"]) + + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/rooms/{self.space_room_id}/hierarchy?max_depth=four", + access_token=self.admin_user_tok, + ) + self.assertEqual(400, channel.code, msg=channel.json_body) + self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"]) + + def test_room_summary(self) -> None: + """ + Test that details of room and details of children of room are + provided correctly + """ + + channel = self.make_request( + "GET", + 
f"/_synapse/admin/v1/rooms/{self.space_room_id}/hierarchy", + access_token=self.admin_user_tok, + ) + self.assertEqual(channel.code, 200, msg=channel.json_body) + rooms = channel.json_body["rooms"] + self.assertCountEqual( + { + self.room_id_to_human_name_map.get( + room["room_id"], f"Unknown room: {room['room_id']}" + ) + for room in rooms + }, + {"space_room", "room1", "room2", "room3"}, + ) + + for room_result in rooms: + room_id = room_result["room_id"] + if room_id == self.room_id1: + self.assertEqual(room_result["name"], "nefarious") + self.assertEqual(room_result["topic"], "being bad") + self.assertEqual(room_result["join_rule"], "invite") + self.assertEqual(len(room_result["children_state"]), 0) + self.assertEqual(room_result["world_readable"], False) + self.assertEqual(room_result["guest_can_join"], True) + self.assertEqual(room_result["num_joined_members"], 1) + elif room_id == self.room_id2: + self.assertEqual(room_result["name"], "also nefarious") + self.assertEqual(room_result["join_rule"], "public") + self.assertEqual(len(room_result["children_state"]), 0) + self.assertEqual(room_result["world_readable"], False) + self.assertEqual(room_result["guest_can_join"], False) + self.assertEqual(room_result["num_joined_members"], 1) + elif room_id == self.room_id3: + self.assertEqual(room_result["name"], "not nefarious") + self.assertEqual(room_result["join_rule"], "invite") + self.assertEqual(room_result["topic"], "happy things") + self.assertEqual(len(room_result["children_state"]), 0) + self.assertEqual(room_result["world_readable"], False) + self.assertEqual(room_result["guest_can_join"], True) + self.assertEqual(room_result["num_joined_members"], 1) + elif room_id == self.not_in_space_room_id: + self.fail("this room should not have been returned") + elif room_id == self.space_room_id: + self.assertEqual(room_result["join_rule"], "public") + self.assertEqual(len(room_result["children_state"]), 4) + self.assertEqual(room_result["room_type"], "m.space") + 
self.assertEqual(room_result["world_readable"], False) + self.assertEqual(room_result["guest_can_join"], False) + self.assertEqual(room_result["num_joined_members"], 1) + self.assertEqual(room_result["name"], "space_room") + else: + self.fail("unknown room returned") + + # Assert that a federation function to look up details about + # this room has not been called. We never expect the admin + # hierarchy endpoint to reach out over federation. + self._room_summary_handler._summarize_remote_room_hierarchy.assert_not_called() # type: ignore[attr-defined] + + def test_room_summary_pagination(self) -> None: + """ + Test that details of room and details of children of room are provided + correctly when paginating + """ + + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/rooms/{self.space_room_id}/hierarchy?limit=2", + access_token=self.admin_user_tok, + ) + self.assertEqual(channel.code, 200, msg=channel.json_body) + rooms = channel.json_body["rooms"] + self.assertCountEqual( + { + self.room_id_to_human_name_map.get( + room["room_id"], f"Unknown room: {room['room_id']}" + ) + for room in rooms + }, + {"space_room", "room1"}, + ) + next_batch = channel.json_body["next_batch"] + + channel2 = self.make_request( + "GET", + f"/_synapse/admin/v1/rooms/{self.space_room_id}/hierarchy?from={next_batch}", + access_token=self.admin_user_tok, + ) + self.assertEqual(channel2.code, 200, msg=channel2.json_body) + new_rooms = channel2.json_body["rooms"] + self.assertCountEqual( + { + self.room_id_to_human_name_map.get( + room["room_id"], f"Unknown room: {room['room_id']}" + ) + for room in new_rooms + }, + {"room2", "room3"}, + ) + + rooms_to_check = rooms + new_rooms + for room_result in rooms_to_check: + room_id = room_result["room_id"] + if room_id == self.room_id1: + self.assertEqual(room_result["name"], "nefarious") + self.assertEqual(room_result["topic"], "being bad") + self.assertEqual(room_result["join_rule"], "invite") + 
self.assertEqual(len(room_result["children_state"]), 0) + self.assertEqual(room_result["world_readable"], False) + self.assertEqual(room_result["guest_can_join"], True) + self.assertEqual(room_result["num_joined_members"], 1) + elif room_id == self.room_id2: + self.assertEqual(room_result["name"], "also nefarious") + self.assertEqual(room_result["join_rule"], "public") + self.assertEqual(len(room_result["children_state"]), 0) + self.assertEqual(room_result["world_readable"], False) + self.assertEqual(room_result["guest_can_join"], False) + self.assertEqual(room_result["num_joined_members"], 1) + elif room_id == self.room_id3: + self.assertEqual(room_result["name"], "not nefarious") + self.assertEqual(room_result["join_rule"], "invite") + self.assertEqual(room_result["topic"], "happy things") + self.assertEqual(len(room_result["children_state"]), 0) + self.assertEqual(room_result["world_readable"], False) + self.assertEqual(room_result["guest_can_join"], True) + self.assertEqual(room_result["num_joined_members"], 1) + elif room_id == self.not_in_space_room_id: + self.fail("this room should not have been returned") + elif room_id == self.space_room_id: + self.assertEqual(room_result["join_rule"], "public") + self.assertEqual(len(room_result["children_state"]), 4) + self.assertEqual(room_result["room_type"], "m.space") + self.assertEqual(room_result["world_readable"], False) + self.assertEqual(room_result["guest_can_join"], False) + self.assertEqual(room_result["num_joined_members"], 1) + self.assertEqual(room_result["name"], "space_room") + else: + self.fail("unknown room returned") + + # Assert that a federation function to look up details about + # this room has not been called. We never expect the admin + # hierarchy endpoint to reach out over federation. 
+ self._room_summary_handler._summarize_remote_room_hierarchy.assert_not_called() # type: ignore[attr-defined] + + class DeleteRoomTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets, From f6ef9c129a64b9679f70b9f400041bd03ed98b74 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Oct 2025 16:58:05 +0000 Subject: [PATCH 18/72] Bump stefanzweifel/git-auto-commit-action from 6.0.1 to 7.0.0 (#19052) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/fix_lint.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fix_lint.yaml b/.github/workflows/fix_lint.yaml index 87f46078be..c33481a51e 100644 --- a/.github/workflows/fix_lint.yaml +++ b/.github/workflows/fix_lint.yaml @@ -47,6 +47,6 @@ jobs: - run: cargo fmt continue-on-error: true - - uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v6.0.1 + - uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0 with: commit_message: "Attempt to fix linting" From 634f7cf18b3bf359b5112355073d941b68f08ecc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Oct 2025 16:58:33 +0000 Subject: [PATCH 19/72] Bump types-psycopg2 from 2.9.21.20250915 to 2.9.21.20251012 (#19054) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 175039a69c..3ca3facd3d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3021,14 +3021,14 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.20250915" +version = "2.9.21.20251012" description = "Typing stubs for psycopg2" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = 
"types_psycopg2-2.9.21.20250915-py3-none-any.whl", hash = "sha256:eefe5ccdc693fc086146e84c9ba437bb278efe1ef330b299a0cb71169dc6c55f"}, - {file = "types_psycopg2-2.9.21.20250915.tar.gz", hash = "sha256:bfeb8f54c32490e7b5edc46215ab4163693192bc90407b4a023822de9239f5c8"}, + {file = "types_psycopg2-2.9.21.20251012-py3-none-any.whl", hash = "sha256:712bad5c423fe979e357edbf40a07ca40ef775d74043de72bd4544ca328cc57e"}, + {file = "types_psycopg2-2.9.21.20251012.tar.gz", hash = "sha256:4cdafd38927da0cfde49804f39ab85afd9c6e9c492800e42f1f0c1a1b0312935"}, ] [[package]] From 77c6905805542ae894f58dca762f4bec8365515e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Oct 2025 11:47:23 +0000 Subject: [PATCH 20/72] Bump regex from 1.11.3 to 1.12.2 (#19074) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 321f1c7933..0ac32101c7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1062,9 +1062,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.11.3" +version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b5288124840bee7b386bc413c487869b360b2b4ec421ea56425128692f2a82c" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", @@ -1074,9 +1074,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.11" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "833eb9ce86d40ef33cb1306d8accf7bc8ec2bfea4355cbdebb3df68b40925cad" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ "aho-corasick", "memchr", From 5cfe873146156551720aaeedd68affe8cfc9a72d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Oct 2025 
11:48:44 +0000 Subject: [PATCH 21/72] Bump tokio from 1.47.1 to 1.48.0 (#19076) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 95 +++++++++++------------------------------------------- 1 file changed, 18 insertions(+), 77 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0ac32101c7..a9cdf70ba0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,21 +2,6 @@ # It is not intended for manual editing. version = 3 -[[package]] -name = "addr2line" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" -dependencies = [ - "gimli", -] - -[[package]] -name = "adler2" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" - [[package]] name = "aho-corasick" version = "1.1.3" @@ -50,21 +35,6 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" -[[package]] -name = "backtrace" -version = "0.3.75" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" -dependencies = [ - "addr2line", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", - "windows-targets", -] - [[package]] name = "base64" version = "0.22.1" @@ -341,12 +311,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "gimli" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" - [[package]] name = "h2" version = "0.4.11" @@ -684,17 +648,6 @@ version = "2.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" -[[package]] -name = 
"io-uring" -version = "0.7.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4" -dependencies = [ - "bitflags", - "cfg-if", - "libc", -] - [[package]] name = "ipnet" version = "2.11.0" @@ -784,15 +737,6 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" -[[package]] -name = "miniz_oxide" -version = "0.8.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" -dependencies = [ - "adler2", -] - [[package]] name = "mio" version = "1.0.4" @@ -804,15 +748,6 @@ dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "object" -version = "0.36.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" -dependencies = [ - "memchr", -] - [[package]] name = "once_cell" version = "1.21.3" @@ -1145,12 +1080,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "rustc-demangle" -version = "0.1.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" - [[package]] name = "rustc-hash" version = "2.1.1" @@ -1489,19 +1418,16 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.47.1" +version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ - "backtrace", "bytes", - "io-uring", "libc", "mio", "pin-project-lite", - "slab", "socket2 0.6.0", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -1782,6 +1708,12 @@ 
dependencies = [ "wasm-bindgen", ] +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + [[package]] name = "windows-sys" version = "0.52.0" @@ -1800,6 +1732,15 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + [[package]] name = "windows-targets" version = "0.52.6" From 0d20f762cbd6671c4a9684575cd1145e3bc7e98b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Oct 2025 11:49:26 +0000 Subject: [PATCH 22/72] Bump reqwest from 0.12.23 to 0.12.24 (#19077) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a9cdf70ba0..35f62fe4e9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1026,9 +1026,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "reqwest" -version = "0.12.23" +version = "0.12.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb" +checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" dependencies = [ "base64", "bytes", From cb0ed5ec76bd393173b45f0c8ec4e91fee8d6b3c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Oct 2025 11:53:36 +0000 Subject: [PATCH 23/72] Bump actions/download-artifact from 5.0.0 to 6.0.0 (#19102) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docker.yml | 2 +- 
.github/workflows/release-artifacts.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index c88312f050..09832fae5c 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -95,7 +95,7 @@ jobs: - build steps: - name: Download digests - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: path: ${{ runner.temp }}/digests pattern: digests-* diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index da6996742b..fc291ad771 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -191,7 +191,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download all workflow run artifacts - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 - name: Build a tarball for the debs # We need to merge all the debs uploads into one folder, then compress # that. 
From 66a42d4e54add85f11f63e116eb5702ca6c61778 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Oct 2025 11:54:00 +0000 Subject: [PATCH 24/72] Bump hiredis from 3.2.1 to 3.3.0 (#19103) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 283 ++++++++++++++++++++++++++++------------------------ 1 file changed, 150 insertions(+), 133 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3ca3facd3d..7b1b3b4cbc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -39,7 +39,7 @@ description = "The ultimate Python library in building OAuth and OpenID Connect optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"oidc\" or extra == \"jwt\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\"" files = [ {file = "authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a"}, {file = "authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b"}, @@ -447,7 +447,7 @@ description = "XML bomb protection for Python stdlib modules" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -472,7 +472,7 @@ description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and l optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "elementpath-4.1.5-py3-none-any.whl", 
hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"}, {file = "elementpath-4.1.5.tar.gz", hash = "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"}, @@ -518,122 +518,118 @@ test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock ; python_version < \"3. [[package]] name = "hiredis" -version = "3.2.1" +version = "3.3.0" description = "Python wrapper for hiredis" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"redis\" or extra == \"all\"" -files = [ - {file = "hiredis-3.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:add17efcbae46c5a6a13b244ff0b4a8fa079602ceb62290095c941b42e9d5dec"}, - {file = "hiredis-3.2.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:5fe955cc4f66c57df1ae8e5caf4de2925d43b5efab4e40859662311d1bcc5f54"}, - {file = "hiredis-3.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f9ad63cd9065820a43fb1efb8ed5ae85bb78f03ef5eb53f6bde47914708f5718"}, - {file = "hiredis-3.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e7f9e5fdba08841d78d4e1450cae03a4dbed2eda8a4084673cafa5615ce24a"}, - {file = "hiredis-3.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1dce2508eca5d4e47ef38bc7c0724cb45abcdb0089f95a2ef49baf52882979a8"}, - {file = "hiredis-3.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:186428bf353e4819abae15aa2ad64c3f40499d596ede280fe328abb9e98e72ce"}, - {file = "hiredis-3.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74f2500d90a0494843aba7abcdc3e77f859c502e0892112d708c02e1dcae8f90"}, - {file = "hiredis-3.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32822a94d2fdd1da96c05b22fdeef6d145d8fdbd865ba2f273f45eb949e4a805"}, - {file = "hiredis-3.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ead809fb08dd4fdb5b4b6e2999c834e78c3b0c450a07c3ed88983964432d0c64"}, - {file = 
"hiredis-3.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b90fada20301c3a257e868dd6a4694febc089b2b6d893fa96a3fc6c1f9ab4340"}, - {file = "hiredis-3.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:6d8bff53f526da3d9db86c8668011e4f7ca2958ee3a46c648edab6fe2cd1e709"}, - {file = "hiredis-3.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:043d929ae262d03e1db0f08616e14504a9119c1ff3de13d66f857d85cd45caff"}, - {file = "hiredis-3.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8d470fef39d02dbe5c541ec345cc4ffd7d2baec7d6e59c92bd9d9545dc221829"}, - {file = "hiredis-3.2.1-cp310-cp310-win32.whl", hash = "sha256:efa4c76c45cc8c42228c7989b279fa974580e053b5e6a4a834098b5324b9eafa"}, - {file = "hiredis-3.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:cbac5ec3a620b095c46ef3a8f1f06da9c86c1cdc411d44a5f538876c39a2b321"}, - {file = "hiredis-3.2.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:e4ae0be44cab5e74e6e4c4a93d04784629a45e781ff483b136cc9e1b9c23975c"}, - {file = "hiredis-3.2.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:24647e84c9f552934eb60b7f3d2116f8b64a7020361da9369e558935ca45914d"}, - {file = "hiredis-3.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6fb3e92d1172da8decc5f836bf8b528c0fc9b6d449f1353e79ceeb9dc1801132"}, - {file = "hiredis-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38ba7a32e51e518b6b3e470142e52ed2674558e04d7d73d86eb19ebcb37d7d40"}, - {file = "hiredis-3.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4fc632be73174891d6bb71480247e57b2fd8f572059f0a1153e4d0339e919779"}, - {file = "hiredis-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f03e6839ff21379ad3c195e0700fc9c209e7f344946dea0f8a6d7b5137a2a141"}, - {file = "hiredis-3.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99983873e37c71bb71deb544670ff4f9d6920dab272aaf52365606d87a4d6c73"}, - {file = 
"hiredis-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffd982c419f48e3a57f592678c72474429465bb4bfc96472ec805f5d836523f0"}, - {file = "hiredis-3.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc993f4aa4abc029347f309e722f122e05a3b8a0c279ae612849b5cc9dc69f2d"}, - {file = "hiredis-3.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dde790d420081f18b5949227649ccb3ed991459df33279419a25fcae7f97cd92"}, - {file = "hiredis-3.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b0c8cae7edbef860afcf3177b705aef43e10b5628f14d5baf0ec69668247d08d"}, - {file = "hiredis-3.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e8a90eaca7e1ce7f175584f07a2cdbbcab13f4863f9f355d7895c4d28805f65b"}, - {file = "hiredis-3.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:476031958fa44e245e803827e0787d49740daa4de708fe514370293ce519893a"}, - {file = "hiredis-3.2.1-cp311-cp311-win32.whl", hash = "sha256:eb3f5df2a9593b4b4b676dce3cea53b9c6969fc372875188589ddf2bafc7f624"}, - {file = "hiredis-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:1402e763d8a9fdfcc103bbf8b2913971c0a3f7b8a73deacbda3dfe5f3a9d1e0b"}, - {file = "hiredis-3.2.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:3742d8b17e73c198cabeab11da35f2e2a81999d406f52c6275234592256bf8e8"}, - {file = "hiredis-3.2.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9c2f3176fb617a79f6cccf22cb7d2715e590acb534af6a82b41f8196ad59375d"}, - {file = "hiredis-3.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a8bd46189c7fa46174e02670dc44dfecb60f5bd4b67ed88cb050d8f1fd842f09"}, - {file = "hiredis-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f86ee4488c8575b58139cdfdddeae17f91e9a893ffee20260822add443592e2f"}, - {file = "hiredis-3.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3717832f4a557b2fe7060b9d4a7900e5de287a15595e398c3f04df69019ca69d"}, - {file = 
"hiredis-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5cb12c21fb9e2403d28c4e6a38120164973342d34d08120f2d7009b66785644"}, - {file = "hiredis-3.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:080fda1510bbd389af91f919c11a4f2aa4d92f0684afa4709236faa084a42cac"}, - {file = "hiredis-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1252e10a1f3273d1c6bf2021e461652c2e11b05b83e0915d6eb540ec7539afe2"}, - {file = "hiredis-3.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d9e320e99ab7d2a30dc91ff6f745ba38d39b23f43d345cdee9881329d7b511d6"}, - {file = "hiredis-3.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:641668f385f16550fdd6fdc109b0af6988b94ba2acc06770a5e06a16e88f320c"}, - {file = "hiredis-3.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1e1f44208c39d6c345ff451f82f21e9eeda6fe9af4ac65972cc3eeb58d41f7cb"}, - {file = "hiredis-3.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f882a0d6415fffe1ffcb09e6281d0ba8b1ece470e866612bbb24425bf76cf397"}, - {file = "hiredis-3.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b4e78719a0730ebffe335528531d154bc8867a246418f74ecd88adbc4d938c49"}, - {file = "hiredis-3.2.1-cp312-cp312-win32.whl", hash = "sha256:33c4604d9f79a13b84da79950a8255433fca7edaf292bbd3364fd620864ed7b2"}, - {file = "hiredis-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7b9749375bf9d171aab8813694f379f2cff0330d7424000f5e92890ad4932dc9"}, - {file = "hiredis-3.2.1-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:7cabf7f1f06be221e1cbed1f34f00891a7bdfad05b23e4d315007dd42148f3d4"}, - {file = "hiredis-3.2.1-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:db85cb86f8114c314d0ec6d8de25b060a2590b4713135240d568da4f7dea97ac"}, - {file = "hiredis-3.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c9a592a49b7b8497e4e62c3ff40700d0c7f1a42d145b71e3e23c385df573c964"}, - {file = 
"hiredis-3.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0079ef1e03930b364556b78548e67236ab3def4e07e674f6adfc52944aa972dd"}, - {file = "hiredis-3.2.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d6a290ed45d9c14f4c50b6bda07afb60f270c69b5cb626fd23a4c2fde9e3da1"}, - {file = "hiredis-3.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79dd5fe8c0892769f82949adeb021342ca46871af26e26945eb55d044fcdf0d0"}, - {file = "hiredis-3.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:998a82281a159f4aebbfd4fb45cfe24eb111145206df2951d95bc75327983b58"}, - {file = "hiredis-3.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41fc3cd52368ffe7c8e489fb83af5e99f86008ed7f9d9ba33b35fec54f215c0a"}, - {file = "hiredis-3.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8d10df3575ce09b0fa54b8582f57039dcbdafde5de698923a33f601d2e2a246c"}, - {file = "hiredis-3.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1ab010d04be33735ad8e643a40af0d68a21d70a57b1d0bff9b6a66b28cca9dbf"}, - {file = "hiredis-3.2.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ec3b5f9ea34f70aaba3e061cbe1fa3556fea401d41f5af321b13e326792f3017"}, - {file = "hiredis-3.2.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:158dfb505fff6bffd17f823a56effc0c2a7a8bc4fb659d79a52782f22eefc697"}, - {file = "hiredis-3.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d632cd0ddd7895081be76748e6fb9286f81d2a51c371b516541c6324f2fdac9"}, - {file = "hiredis-3.2.1-cp313-cp313-win32.whl", hash = "sha256:e9726d03e7df068bf755f6d1ecc61f7fc35c6b20363c7b1b96f39a14083df940"}, - {file = "hiredis-3.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:b5b1653ad7263a001f2e907e81a957d6087625f9700fa404f1a2268c0a4f9059"}, - {file = "hiredis-3.2.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:ef27728a8ceaa038ef4b6efc0e4473b7643b5c873c2fff5475e2c8b9c8d2e0d5"}, - {file = 
"hiredis-3.2.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:1039d8d2e1d2a1528ad9f9e289e8aa8eec9bf4b4759be4d453a2ab406a70a800"}, - {file = "hiredis-3.2.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83a8cd0eb6e535c93aad9c21e3e85bcb7dd26d3ff9b8ab095287be86e8af2f59"}, - {file = "hiredis-3.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6fc1e8f78bcdc7e25651b7d96d19b983b843b575904d96642f97ae157797ae4"}, - {file = "hiredis-3.2.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ddfa9a10fda3bea985a3b371a64553731141aaa0a20cbcc62a0e659f05e6c01"}, - {file = "hiredis-3.2.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e789ee008752b9be82a7bed82e36b62053c7cc06a0179a5a403ba5b2acba5bd8"}, - {file = "hiredis-3.2.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bf271877947a0f3eb9dc331688404a2e4cc246bca61bc5a1e2d62da9a1caad8"}, - {file = "hiredis-3.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9ad404fd0fdbdfe74e55ebb0592ab4169eecfe70ccf0db80eedc1d9943dd6d7"}, - {file = "hiredis-3.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:979572c602bdea0c3df255545c8c257f2163dd6c10d1f172268ffa7a6e1287d6"}, - {file = "hiredis-3.2.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f74e3d899be057fb00444ea5f7ae1d7389d393bddf0f3ed698997aa05563483b"}, - {file = "hiredis-3.2.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:a015666d5fdc3ca704f68db9850d0272ddcfb27e9f26a593013383f565ed2ad7"}, - {file = "hiredis-3.2.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:259a3389dfe3390e356c2796b6bc96a778695e9d7d40c82121096a6b8a2dd3c6"}, - {file = "hiredis-3.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:39f469891d29f0522712265de76018ab83a64b85ac4b4f67e1f692cbd42a03f9"}, - {file = "hiredis-3.2.1-cp38-cp38-win32.whl", hash = "sha256:73aa0508f26cd6cb4dfdbe189b28fb3162fd171532e526e90a802363b88027f8"}, - {file = 
"hiredis-3.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:2b910f12d7bcaf5ffc056087fc7b2d23e688f166462c31b73a0799d12891378d"}, - {file = "hiredis-3.2.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:523a241d9f268bc0c7306792f58f9c633185f939a19abc0356c55f078d3901c5"}, - {file = "hiredis-3.2.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:fec453a038c262e18d7de4919220b2916e0b17d1eadd12e7a800f09f78f84f39"}, - {file = "hiredis-3.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e75a49c5927453c316665cfa39f4274081d00ce69b137b393823eb90c66a8371"}, - {file = "hiredis-3.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd974cbe8b3ae8d3e7f60675e6da10383da69f029147c2c93d1a7e44b36d1290"}, - {file = "hiredis-3.2.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12d3b8fff9905e44f357417159d64138a32500dbd0d5cffaddbb2600d3ce33b1"}, - {file = "hiredis-3.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e21985804a40cb91e69e35ae321eb4e3610cd61a2cbc0328ab73a245f608fa1c"}, - {file = "hiredis-3.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e26e2b49a9569f44a2a2d743464ff0786b46fb1124ed33d2a1bd8b1c660c25b"}, - {file = "hiredis-3.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ef1ebf9ee8e0b4a895b86a02a8b7e184b964c43758393532966ecb8a256f37c"}, - {file = "hiredis-3.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c936b690dd31d7af74f707fc9003c500315b4c9ad70fa564aff73d1283b3b37a"}, - {file = "hiredis-3.2.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4909666bcb73270bb806aa00d0eee9e81f7a1aca388aafb4ba7dfcf5d344d23a"}, - {file = "hiredis-3.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d74a2ad25bc91ca9639e4485099852e6263b360b2c3650fdd3cc47762c5db3fa"}, - {file = "hiredis-3.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e99910088df446ee64d64b160835f592fb4d36189fcc948dd204e903d91fffa3"}, - {file = 
"hiredis-3.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:54423bd7af93a773edc6f166341cfb0e5f35ef42ca07b93f568f672a6f445e40"}, - {file = "hiredis-3.2.1-cp39-cp39-win32.whl", hash = "sha256:4a5365cb6d7be82d3c6d523b369bc0bc1a64987e88ed6ecfabadda2aa1cf4fa4"}, - {file = "hiredis-3.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a2eb02b6aaf4f1425a408e892c0378ba6cb6b45b1412c30dd258df1322d88c0"}, - {file = "hiredis-3.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:73913d2fa379e722d17ba52f21ce12dd578140941a08efd73e73b6fab1dea4d8"}, - {file = "hiredis-3.2.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:15a3dff3eca31ecbf3d7d6d104cf1b318dc2b013bad3f4bdb2839cb9ea2e1584"}, - {file = "hiredis-3.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c78258032c2f9fc6f39fee7b07882ce26de281e09178266ce535992572132d95"}, - {file = "hiredis-3.2.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:578d6a881e64e46db065256355594e680202c3bacf3270be3140057171d2c23e"}, - {file = "hiredis-3.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b7f34b170093c077c972b8cc0ceb15d8ff88ad0079751a8ae9733e94d77e733"}, - {file = "hiredis-3.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:291a18b228fc90f6720d178de2fac46522082c96330b4cc2d3dd8cb2c1cb2815"}, - {file = "hiredis-3.2.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f53d2af5a7cd33a4b4d7ba632dce80c17823df6814ef5a8d328ed44c815a68e7"}, - {file = "hiredis-3.2.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:20bdf6dbdf77eb43b98bc53950f7711983042472199245d4c36448e6b4cb460f"}, - {file = "hiredis-3.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f43e5c50d76da15118c72b757216cf26c643d55bb1b3c86cad1ae49173971780"}, - {file = "hiredis-3.2.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e5bb5fe9834851d56c8543e52dcd2ac5275fb6772ebc97876e18c2e05a3300b"}, - {file = "hiredis-3.2.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e348438b6452e3d14dddb95d071fe8eaf6f264f641cba999c10bf6359cf1d2"}, - {file = "hiredis-3.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e305f6c63a2abcbde6ce28958de2bb4dd0fd34c6ab3bde5a4410befd5df8c6b2"}, - {file = "hiredis-3.2.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:33f24b1152f684b54d6b9d09135d849a6df64b6982675e8cf972f8adfa2de9aa"}, - {file = "hiredis-3.2.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:01dd8ea88bf8363751857ca2eb8f13faad0c7d57a6369663d4d1160f225ab449"}, - {file = "hiredis-3.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b16946533535cbb5cc7d4b6fc009d32d22b0f9ac58e8eb6f144637b64f9a61d"}, - {file = "hiredis-3.2.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9a03886cad1076e9f7e9e411c402826a8eac6f56ba426ee84b88e6515574b7b"}, - {file = "hiredis-3.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a4f6340f1c378bce17c195d46288a796fcf213dd3e2a008c2c942b33ab58993"}, - {file = "hiredis-3.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:9d64ddf29016d34e7e3bc4b3d36ca9ac8a94f9b2c13ac4b9d8a486862d91b95c"}, - {file = "hiredis-3.2.1.tar.gz", hash = "sha256:5a5f64479bf04dd829fe7029fad0ea043eac4023abc6e946668cbbec3493a78d"}, +markers = "extra == \"all\" or extra == \"redis\"" +files = [ + {file = "hiredis-3.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:9937d9b69321b393fbace69f55423480f098120bc55a3316e1ca3508c4dbbd6f"}, + {file = "hiredis-3.3.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:50351b77f89ba6a22aff430b993653847f36b71d444509036baa0f2d79d1ebf4"}, + {file = "hiredis-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1d00bce25c813eec45a2f524249f58daf51d38c9d3347f6f643ae53826fc735a"}, + {file = 
"hiredis-3.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ef840d9f142556ed384180ed8cdf14ff875fcae55c980cbe5cec7adca2ef4d8"}, + {file = "hiredis-3.3.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:88bc79d7e9b94d17ed1bd8b7f2815ed0eada376ed5f48751044e5e4d179aa2f2"}, + {file = "hiredis-3.3.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7165c7363e59b258e1875c51f35c0b2b9901e6c691037b487d8a0ace2c137ed2"}, + {file = "hiredis-3.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8c3be446f0c38fbe6863a7cf4522c9a463df6e64bee87c4402e9f6d7d2e7f869"}, + {file = "hiredis-3.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:96f9a27643279853b91a1fb94a88b559e55fdecec86f1fcd5f2561492be52e47"}, + {file = "hiredis-3.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0a5eebb170de1b415c78ae5ca3aee17cff8b885df93c2055d54320e789d838f4"}, + {file = "hiredis-3.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:200678547ac3966bac3e38df188211fdc13d5f21509c23267e7def411710e112"}, + {file = "hiredis-3.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dd9d78c5363a858f9dc5e698e5e1e402b83c00226cba294f977a92c53092b549"}, + {file = "hiredis-3.3.0-cp310-cp310-win32.whl", hash = "sha256:a0d31ff178b913137a7a08c7377e93805914755a15c3585e203d0d74496456c0"}, + {file = "hiredis-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:7b41833c8f0d4c7fbfaa867c8ed9a4e4aaa71d7c54e4806ed62da2d5cd27b40d"}, + {file = "hiredis-3.3.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:63ee6c1ae6a2462a2439eb93c38ab0315cd5f4b6d769c6a34903058ba538b5d6"}, + {file = "hiredis-3.3.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:31eda3526e2065268a8f97fbe3d0e9a64ad26f1d89309e953c80885c511ea2ae"}, + {file = "hiredis-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:a26bae1b61b7bcafe3d0d0c7d012fb66ab3c95f2121dbea336df67e344e39089"}, + {file = "hiredis-3.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9546079f7fd5c50fbff9c791710049b32eebe7f9b94debec1e8b9f4c048cba2"}, + {file = "hiredis-3.3.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ae327fc13b1157b694d53f92d50920c0051e30b0c245f980a7036e299d039ab4"}, + {file = "hiredis-3.3.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4016e50a8be5740a59c5af5252e5ad16c395021a999ad24c6604f0d9faf4d346"}, + {file = "hiredis-3.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c17b473f273465a3d2168a57a5b43846165105ac217d5652a005e14068589ddc"}, + {file = "hiredis-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9ecd9b09b11bd0b8af87d29c3f5da628d2bdc2a6c23d2dd264d2da082bd4bf32"}, + {file = "hiredis-3.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:00fb04eac208cd575d14f246e74a468561081ce235937ab17d77cde73aefc66c"}, + {file = "hiredis-3.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:60814a7d0b718adf3bfe2c32c6878b0e00d6ae290ad8e47f60d7bba3941234a6"}, + {file = "hiredis-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fcbd1a15e935aa323b5b2534b38419511b7909b4b8ee548e42b59090a1b37bb1"}, + {file = "hiredis-3.3.0-cp311-cp311-win32.whl", hash = "sha256:73679607c5a19f4bcfc9cf6eb54480bcd26617b68708ac8b1079da9721be5449"}, + {file = "hiredis-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:30a4df3d48f32538de50648d44146231dde5ad7f84f8f08818820f426840ae97"}, + {file = "hiredis-3.3.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5b8e1d6a2277ec5b82af5dce11534d3ed5dffeb131fd9b210bc1940643b39b5f"}, + {file = "hiredis-3.3.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:c4981de4d335f996822419e8a8b3b87367fcef67dc5fb74d3bff4df9f6f17783"}, + 
{file = "hiredis-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1706480a683e328ae9ba5d704629dee2298e75016aa0207e7067b9c40cecc271"}, + {file = "hiredis-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a95cef9989736ac313639f8f545b76b60b797e44e65834aabbb54e4fad8d6c8"}, + {file = "hiredis-3.3.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca2802934557ccc28a954414c245ba7ad904718e9712cb67c05152cf6b9dd0a3"}, + {file = "hiredis-3.3.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fe730716775f61e76d75810a38ee4c349d3af3896450f1525f5a4034cf8f2ed7"}, + {file = "hiredis-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:749faa69b1ce1f741f5eaf743435ac261a9262e2d2d66089192477e7708a9abc"}, + {file = "hiredis-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:95c9427f2ac3f1dd016a3da4e1161fa9d82f221346c8f3fdd6f3f77d4e28946c"}, + {file = "hiredis-3.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c863ee44fe7bff25e41f3a5105c936a63938b76299b802d758f40994ab340071"}, + {file = "hiredis-3.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2213c7eb8ad5267434891f3241c7776e3bafd92b5933fc57d53d4456247dc542"}, + {file = "hiredis-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a172bae3e2837d74530cd60b06b141005075db1b814d966755977c69bd882ce8"}, + {file = "hiredis-3.3.0-cp312-cp312-win32.whl", hash = "sha256:cb91363b9fd6d41c80df9795e12fffbaf5c399819e6ae8120f414dedce6de068"}, + {file = "hiredis-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:04ec150e95eea3de9ff8bac754978aa17b8bf30a86d4ab2689862020945396b0"}, + {file = "hiredis-3.3.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:b7048b4ec0d5dddc8ddd03da603de0c4b43ef2540bf6e4c54f47d23e3480a4fa"}, + {file = "hiredis-3.3.0-cp313-cp313-macosx_10_15_x86_64.whl", hash = 
"sha256:e5f86ce5a779319c15567b79e0be806e8e92c18bb2ea9153e136312fafa4b7d6"}, + {file = "hiredis-3.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fbdb97a942e66016fff034df48a7a184e2b7dc69f14c4acd20772e156f20d04b"}, + {file = "hiredis-3.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0fb4bea72fe45ff13e93ddd1352b43ff0749f9866263b5cca759a4c960c776f"}, + {file = "hiredis-3.3.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:85b9baf98050e8f43c2826ab46aaf775090d608217baf7af7882596aef74e7f9"}, + {file = "hiredis-3.3.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:69079fb0f0ebb61ba63340b9c4bce9388ad016092ca157e5772eb2818209d930"}, + {file = "hiredis-3.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c17f77b79031ea4b0967d30255d2ae6e7df0603ee2426ad3274067f406938236"}, + {file = "hiredis-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45d14f745fc177bc05fc24bdf20e2b515e9a068d3d4cce90a0fb78d04c9c9d9a"}, + {file = "hiredis-3.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ba063fdf1eff6377a0c409609cbe890389aefddfec109c2d20fcc19cfdafe9da"}, + {file = "hiredis-3.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1799cc66353ad066bfdd410135c951959da9f16bcb757c845aab2f21fc4ef099"}, + {file = "hiredis-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2cbf71a121996ffac82436b6153290815b746afb010cac19b3290a1644381b07"}, + {file = "hiredis-3.3.0-cp313-cp313-win32.whl", hash = "sha256:a7cbbc6026bf03659f0b25e94bbf6e64f6c8c22f7b4bc52fe569d041de274194"}, + {file = "hiredis-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:a8def89dd19d4e2e4482b7412d453dec4a5898954d9a210d7d05f60576cedef6"}, + {file = "hiredis-3.3.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c135bda87211f7af9e2fd4e046ab433c576cd17b69e639a0f5bb2eed5e0e71a9"}, + 
{file = "hiredis-3.3.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2f855c678230aed6fc29b962ce1cc67e5858a785ef3a3fd6b15dece0487a2e60"}, + {file = "hiredis-3.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4059c78a930cbb33c391452ccce75b137d6f89e2eebf6273d75dafc5c2143c03"}, + {file = "hiredis-3.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:334a3f1d14c253bb092e187736c3384203bd486b244e726319bbb3f7dffa4a20"}, + {file = "hiredis-3.3.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd137b147235447b3d067ec952c5b9b95ca54b71837e1b38dbb2ec03b89f24fc"}, + {file = "hiredis-3.3.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8f88f4f2aceb73329ece86a1cb0794fdbc8e6d614cb5ca2d1023c9b7eb432db8"}, + {file = "hiredis-3.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:550f4d1538822fc75ebf8cf63adc396b23d4958bdbbad424521f2c0e3dfcb169"}, + {file = "hiredis-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:54b14211fbd5930fc696f6fcd1f1f364c660970d61af065a80e48a1fa5464dd6"}, + {file = "hiredis-3.3.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9e96f63dbc489fc86f69951e9f83dadb9582271f64f6822c47dcffa6fac7e4a"}, + {file = "hiredis-3.3.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:106e99885d46684d62ab3ec1d6b01573cc0e0083ac295b11aaa56870b536c7ec"}, + {file = "hiredis-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:087e2ef3206361281b1a658b5b4263572b6ba99465253e827796964208680459"}, + {file = "hiredis-3.3.0-cp314-cp314-win32.whl", hash = "sha256:80638ebeab1cefda9420e9fedc7920e1ec7b4f0513a6b23d58c9d13c882f8065"}, + {file = "hiredis-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a68aaf9ba024f4e28cf23df9196ff4e897bd7085872f3a30644dca07fa787816"}, + {file = "hiredis-3.3.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = 
"sha256:f7f80442a32ce51ee5d89aeb5a84ee56189a0e0e875f1a57bbf8d462555ae48f"}, + {file = "hiredis-3.3.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:a1a67530da714954ed50579f4fe1ab0ddbac9c43643b1721c2cb226a50dde263"}, + {file = "hiredis-3.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:616868352e47ab355559adca30f4f3859f9db895b4e7bc71e2323409a2add751"}, + {file = "hiredis-3.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e799b79f3150083e9702fc37e6243c0bd47a443d6eae3f3077b0b3f510d6a145"}, + {file = "hiredis-3.3.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ef1dfb0d2c92c3701655e2927e6bbe10c499aba632c7ea57b6392516df3864b"}, + {file = "hiredis-3.3.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c290da6bc2a57e854c7da9956cd65013483ede935677e84560da3b848f253596"}, + {file = "hiredis-3.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd8c438d9e1728f0085bf9b3c9484d19ec31f41002311464e75b69550c32ffa8"}, + {file = "hiredis-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1bbc6b8a88bbe331e3ebf6685452cebca6dfe6d38a6d4efc5651d7e363ba28bd"}, + {file = "hiredis-3.3.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:55d8c18fe9a05496c5c04e6eccc695169d89bf358dff964bcad95696958ec05f"}, + {file = "hiredis-3.3.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:4ddc79afa76b805d364e202a754666cb3c4d9c85153cbfed522871ff55827838"}, + {file = "hiredis-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8e8a4b8540581dcd1b2b25827a54cfd538e0afeaa1a0e3ca87ad7126965981cc"}, + {file = "hiredis-3.3.0-cp314-cp314t-win32.whl", hash = "sha256:298593bb08487753b3afe6dc38bac2532e9bac8dcee8d992ef9977d539cc6776"}, + {file = "hiredis-3.3.0-cp314-cp314t-win_amd64.whl", hash = 
"sha256:b442b6ab038a6f3b5109874d2514c4edf389d8d8b553f10f12654548808683bc"}, + {file = "hiredis-3.3.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:114c0b9f1b5fad99edae38e747018aead358a4f4e9720cc1876495d78cdb8276"}, + {file = "hiredis-3.3.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:c6d91a5e6904ed7eca21d74b041e03f2ad598dd08a6065b06a776974fe5d003c"}, + {file = "hiredis-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:76374faa075e996c895cbe106ba923852a9f8146f2aa59eba22111c5e5ec6316"}, + {file = "hiredis-3.3.0-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:50a54397bd104c2e2f5b7696bbdab8ba2973d3075e4deb932adb025b8863de91"}, + {file = "hiredis-3.3.0-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:15edee02cc9cc06e07e2bcfae07e283e640cc1aeedd08b4c6934bf1a0113c607"}, + {file = "hiredis-3.3.0-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ff3179a57745d0f8d71fa8bf3ea3944d3f557dcfa4431304497987fecad381dd"}, + {file = "hiredis-3.3.0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bdb7cd9e1e73db78f145a09bb837732790d0912eb963dee5768631faf2ece162"}, + {file = "hiredis-3.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4d3b4e0d4445faf9041c52a98cb5d2b65c4fcaebb2aa02efa7c6517c4917f7e8"}, + {file = "hiredis-3.3.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffea6c407cff532c7599d3ec9e8502c2c865753cebab044f3dfce9afbf71a8df"}, + {file = "hiredis-3.3.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:bcd745a28e1b3216e42680d91e142a42569dfad68a6f40535080c47b0356c796"}, + {file = "hiredis-3.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4c18a97ea55d1a58f5c3adfe236b3e7cccedc6735cbd36ab1c786c52fd823667"}, + {file = "hiredis-3.3.0-cp38-cp38-win32.whl", hash = "sha256:77eacd969e3c6ff50c2b078c27d2a773c652248a5d81af5765a8663478d0bc02"}, + {file = 
"hiredis-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:161a4a595a53475587aef8dc549d0527962879b0c5d62f7947b44ba7e5084b76"}, + {file = "hiredis-3.3.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:1203697a7ebadc7cf873acc189df9e44fcb377b636e6660471707ac8d5bcba68"}, + {file = "hiredis-3.3.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:9a7ea2344d277317160da4911f885bcf7dfd8381b830d76b442f7775b41544b3"}, + {file = "hiredis-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9bd7c9a089cf4e4f4b5a61f412c76293449bac6b0bf92bb49a3892850bd5c899"}, + {file = "hiredis-3.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:294de11e3995128c784534e327d1f9382b88dc5407356465df7934c710e8392d"}, + {file = "hiredis-3.3.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a3aab895358368f81f9546a7cd192b6fb427f785cb1a8853cf9db38df01e9ca"}, + {file = "hiredis-3.3.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:eaf8418e33e23d6d7ef0128eff4c06ab3040d40b9bbc8a24d6265d751a472596"}, + {file = "hiredis-3.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41aea51949142bad4e40badb0396392d7f4394791e4097a0951ab75bcc58ff84"}, + {file = "hiredis-3.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1f9a5f84a8bd29ac5b9953b27e8ba5508396afeabf1d165611a1e31fbd90a0e1"}, + {file = "hiredis-3.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a5f9fde56550ebbe962f437a4c982b0856d03aea7fab09e30fa6c0f9be992b40"}, + {file = "hiredis-3.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c567aab02612d91f3e747fc492100ae894515194f85d6fb6bb68958c0e718721"}, + {file = "hiredis-3.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ca97c5e6f9e9b9f0aed61b70fed2d594ce2f7472905077d2d10b307c50a41008"}, + {file = "hiredis-3.3.0-cp39-cp39-win32.whl", hash = 
"sha256:776dc5769d5eb05e969216de095377ff61c802414a74bd3c24a4ca8526c897ab"}, + {file = "hiredis-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:538a9f5fbb3a8a4ef0c3abd309cccb90cd2ba9976fcc2b44193af9507d005b48"}, + {file = "hiredis-3.3.0.tar.gz", hash = "sha256:105596aad9249634361815c574351f1bd50455dc23b537c2940066c4a9dea685"}, ] [[package]] @@ -870,7 +866,7 @@ description = "Jaeger Python OpenTracing Tracer implementation" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"}, ] @@ -1008,7 +1004,7 @@ description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\"" files = [ {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"}, {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"}, @@ -1024,7 +1020,7 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"url-preview\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"url-preview\"" files = [ {file = "lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388"}, {file = "lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153"}, @@ -1311,7 +1307,7 @@ description = "An LDAP3 auth provider for Synapse" optional = true python-versions = ">=3.7" groups = ["main"] -markers = 
"extra == \"matrix-synapse-ldap3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\"" files = [ {file = "matrix-synapse-ldap3-0.3.0.tar.gz", hash = "sha256:8bb6517173164d4b9cc44f49de411d8cebdb2e705d5dd1ea1f38733c4a009e1d"}, {file = "matrix_synapse_ldap3-0.3.0-py3-none-any.whl", hash = "sha256:8b4d701f8702551e98cc1d8c20dbed532de5613584c08d0df22de376ba99159d"}, @@ -1553,7 +1549,7 @@ description = "OpenTracing API for Python. See documentation at http://opentraci optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"}, ] @@ -1622,6 +1618,8 @@ groups = ["main"] files = [ {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"}, {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"}, {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"}, {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"}, {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"}, @@ -1631,6 +1629,8 @@ files = [ 
{file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"}, {file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"}, {file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"}, {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"}, {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"}, {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"}, @@ -1640,6 +1640,8 @@ files = [ {file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"}, {file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"}, {file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"}, {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"}, {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"}, {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"}, @@ -1652,6 +1654,8 @@ files = [ {file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"}, {file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"}, {file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"}, {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"}, {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"}, {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"}, @@ -1661,6 +1665,8 @@ files = [ {file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = 
"sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"}, {file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"}, {file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"}, {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"}, {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"}, {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"}, @@ -1670,6 +1676,8 @@ files = [ {file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"}, {file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"}, {file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"}, {file = 
"pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"}, {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"}, {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"}, @@ -1679,6 +1687,8 @@ files = [ {file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"}, {file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"}, {file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"}, {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"}, {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"}, {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"}, @@ -1688,6 +1698,8 @@ files = [ {file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"}, {file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = 
"sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"}, {file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"}, {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"}, {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"}, {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"}, @@ -1697,11 +1709,15 @@ files = [ {file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"}, @@ -1739,7 +1755,7 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"postgres\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"postgres\"" files = [ {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = 
"sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, @@ -1747,6 +1763,7 @@ files = [ {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, + {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"}, {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, @@ -1759,7 +1776,7 @@ description = ".. 
image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=mas optional = true python-versions = "*" groups = ["main"] -markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")" +markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")" files = [ {file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"}, ] @@ -1775,7 +1792,7 @@ description = "A Simple library to enable psycopg2 compatability" optional = true python-versions = "*" groups = ["main"] -markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")" +markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")" files = [ {file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"}, ] @@ -2034,7 +2051,7 @@ description = "A development tool to measure, monitor and analyze the memory beh optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"cache-memory\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"cache-memory\"" files = [ {file = "Pympler-1.0.1-py3-none-any.whl", hash = "sha256:d260dda9ae781e1eab6ea15bacb84015849833ba5555f141d2d9b7b7473b307d"}, {file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"}, @@ -2094,7 +2111,7 @@ description = "Python implementation of SAML Version 2 Standard" optional = true python-versions = ">=3.9,<4.0" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"}, {file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"}, @@ -2119,7 
+2136,7 @@ description = "Extensions to the standard Python datetime module" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -2147,7 +2164,7 @@ description = "World timezone definitions, modern and historical" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, @@ -2513,7 +2530,7 @@ description = "Python client for Sentry (https://sentry.io)" optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"sentry\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"sentry\"" files = [ {file = "sentry_sdk-2.34.1-py2.py3-none-any.whl", hash = "sha256:b7a072e1cdc5abc48101d5146e1ae680fa81fe886d8d95aaa25a0b450c818d32"}, {file = "sentry_sdk-2.34.1.tar.gz", hash = "sha256:69274eb8c5c38562a544c3e9f68b5be0a43be4b697f5fd385bf98e4fbe672687"}, @@ -2701,7 +2718,7 @@ description = "Tornado IOLoop Backed Concurrent Futures" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"}, {file = "threadloop-1.0.2.tar.gz", hash = 
"sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"}, @@ -2717,7 +2734,7 @@ description = "Python bindings for the Apache Thrift RPC system" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"}, ] @@ -2779,7 +2796,7 @@ description = "Tornado is a Python web framework and asynchronous networking lib optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "tornado-6.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:f81067dad2e4443b015368b24e802d0083fecada4f0a4572fdb72fc06e54a9a6"}, {file = "tornado-6.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9ac1cbe1db860b3cbb251e795c701c41d343f06a96049d6274e7c77559117e41"}, @@ -2916,7 +2933,7 @@ description = "non-blocking redis client for python" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"redis\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"redis\"" files = [ {file = "txredisapi-1.4.11-py3-none-any.whl", hash = "sha256:ac64d7a9342b58edca13ef267d4fa7637c1aa63f8595e066801c1e8b56b22d0b"}, {file = "txredisapi-1.4.11.tar.gz", hash = "sha256:3eb1af99aefdefb59eb877b1dd08861efad60915e30ad5bf3d5bf6c5cedcdbc6"}, @@ -3162,7 +3179,7 @@ description = "An XML Schema validator and decoder" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "xmlschema-2.4.0-py3-none-any.whl", hash = "sha256:dc87be0caaa61f42649899189aab2fd8e0d567f2cf548433ba7b79278d231a4a"}, {file = "xmlschema-2.4.0.tar.gz", hash = 
"sha256:d74cd0c10866ac609e1ef94a5a69b018ad16e39077bc6393408b40c6babee793"}, From 1a78fc8a65d215959f611c9e4d0643edcf07b68e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Oct 2025 11:55:12 +0000 Subject: [PATCH 25/72] Bump pyyaml from 6.0.2 to 6.0.3 (#19105) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 128 ++++++++++++++++++++++++++++++---------------------- 1 file changed, 74 insertions(+), 54 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7b1b3b4cbc..dd86fe8159 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2185,65 +2185,85 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.2" +version = "6.0.3" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", 
hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = 
"pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash 
= "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = 
"sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = 
"pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, ] [[package]] From e23e7ae48fa75bb6700931f4911e2e5d05072867 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Oct 2025 11:56:29 +0000 Subject: [PATCH 26/72] Bump actions/upload-artifact from 4 to 5 (#19106) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docker.yml | 2 +- .github/workflows/docs-pr.yaml | 2 +- .github/workflows/latest_deps.yml | 2 +- .github/workflows/release-artifacts.yml | 6 +++--- .github/workflows/tests.yml | 4 ++-- .github/workflows/twisted_trunk.yml | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 09832fae5c..987fb76ae1 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -75,7 +75,7 @@ jobs: touch "${{ runner.temp }}/digests/${digest#sha256:}" - name: Upload digest - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: digests-${{ matrix.suffix }} path: ${{ runner.temp }}/digests/* diff --git a/.github/workflows/docs-pr.yaml b/.github/workflows/docs-pr.yaml index a0af38a6c5..6a61dd5fb1 100644 --- a/.github/workflows/docs-pr.yaml +++ b/.github/workflows/docs-pr.yaml @@ -39,7 +39,7 @@ jobs: cp book/welcome_and_overview.html book/index.html - name: Upload Artifact - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: book 
path: book diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index 526546531a..2076a1c1e1 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -173,7 +173,7 @@ jobs: if: ${{ always() }} run: /sytest/scripts/tap_to_gha.pl /logs/results.tap - name: Upload SyTest logs - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 if: ${{ always() }} with: name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }}) diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index fc291ad771..a19bde0a60 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -101,7 +101,7 @@ jobs: echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT" - name: Upload debs as artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }} path: debs/* @@ -154,7 +154,7 @@ jobs: # for, and so need extra build deps. 
CIBW_TEST_SKIP: pp3*-* *i686* *musl* - - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: Wheel-${{ matrix.os }} path: ./wheelhouse/*.whl @@ -175,7 +175,7 @@ jobs: - name: Build sdist run: python -m build --sdist - - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: Sdist path: dist/*.tar.gz diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 6b8cb3c585..f75435bedf 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -585,7 +585,7 @@ jobs: if: ${{ always() }} run: /sytest/scripts/tap_to_gha.pl /logs/results.tap - name: Upload SyTest logs - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 if: ${{ always() }} with: name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }}) @@ -683,7 +683,7 @@ jobs: PGPASSWORD: postgres PGDATABASE: postgres - name: "Upload schema differences" - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }} with: name: Schema dumps diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index 3f14219bbc..11b7bfe143 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -147,7 +147,7 @@ jobs: if: ${{ always() }} run: /sytest/scripts/tap_to_gha.pl /logs/results.tap - name: Upload SyTest logs - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 if: ${{ 
always() }} with: name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }}) From a07dd43ac40cf5c3e0e357227d8d6e48759964e9 Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Tue, 28 Oct 2025 14:11:45 -0400 Subject: [PATCH 27/72] Use Pillow's non-experimental getexif (#19098) It has been available since Pillow 6, and Synapse is now pinned on Pillow >=10.0.1. Found this while looking at Debian-shipped dependencies, and figured this may as well be updated. --- changelog.d/19098.misc | 1 + synapse/media/thumbnailer.py | 11 +---------- 2 files changed, 2 insertions(+), 10 deletions(-) create mode 100644 changelog.d/19098.misc diff --git a/changelog.d/19098.misc b/changelog.d/19098.misc new file mode 100644 index 0000000000..a6933348a3 --- /dev/null +++ b/changelog.d/19098.misc @@ -0,0 +1 @@ +Use Pillow's `Image.getexif` method instead of the experimental `Image._getexif`. diff --git a/synapse/media/thumbnailer.py b/synapse/media/thumbnailer.py index cc2fe7318b..a42d39c319 100644 --- a/synapse/media/thumbnailer.py +++ b/synapse/media/thumbnailer.py @@ -97,16 +97,7 @@ def __init__(self, input_path: str): self.transpose_method = None try: # We don't use ImageOps.exif_transpose since it crashes with big EXIF - # - # Ignore safety: Pillow seems to acknowledge that this method is - # "private, experimental, but generally widely used". Pillow 6 - # includes a public getexif() method (no underscore) that we might - # consider using instead when we can bump that dependency. - # - # At the time of writing, Debian buster (currently oldstable) - # provides version 5.4.1. 
It's expected to EOL in mid-2022, see - # https://wiki.debian.org/DebianReleases#Production_Releases - image_exif = self.image._getexif() # type: ignore + image_exif = self.image.getexif() if image_exif is not None: image_orientation = image_exif.get(EXIF_ORIENTATION_TAG) assert type(image_orientation) is int # noqa: E721 From dc33ef90d373ed7f8abd85fbd38bdddc8d984e27 Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Tue, 28 Oct 2025 18:25:16 -0400 Subject: [PATCH 28/72] Update docs on downstream Debian package (#19100) --- changelog.d/19100.doc | 1 + docs/setup/installation.md | 8 ++------ 2 files changed, 3 insertions(+), 6 deletions(-) create mode 100644 changelog.d/19100.doc diff --git a/changelog.d/19100.doc b/changelog.d/19100.doc new file mode 100644 index 0000000000..a723f34c4f --- /dev/null +++ b/changelog.d/19100.doc @@ -0,0 +1 @@ +Update the list of Debian releases that the downstream Debian package is maintained for. diff --git a/docs/setup/installation.md b/docs/setup/installation.md index 0840f532b0..68f224d33a 100644 --- a/docs/setup/installation.md +++ b/docs/setup/installation.md @@ -87,17 +87,13 @@ file when you upgrade the Debian package to a later version. Andrej Shadura maintains a [`matrix-synapse`](https://packages.debian.org/sid/matrix-synapse) package in the Debian repositories. -For `bookworm` and `sid`, it can be installed simply with: +For `forky` (14) and `sid` (rolling release), it can be installed simply with: ```sh sudo apt install matrix-synapse ``` -Synapse is also available in `bullseye-backports`. Please -see the [Debian documentation](https://backports.debian.org/Instructions/) -for information on how to use backports. - -`matrix-synapse` is no longer maintained for `buster` and older. +The downstream Debian `matrix-synapse` package is not available for `trixie` (13) and older. Consider using the Matrix.org packages (above). 
##### Downstream Ubuntu packages From 7897c8f6af3007b8a537247fe150e390a4202865 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Wed, 29 Oct 2025 12:32:33 +0100 Subject: [PATCH 29/72] Add a docs page with common steps to review the release notes (#19109) --- changelog.d/19109.doc | 1 + docs/SUMMARY.md | 2 ++ .../release_notes_review_checklist.md | 12 ++++++++++++ scripts-dev/release.py | 5 ++++- 4 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 changelog.d/19109.doc create mode 100644 docs/development/internal_documentation/release_notes_review_checklist.md diff --git a/changelog.d/19109.doc b/changelog.d/19109.doc new file mode 100644 index 0000000000..4cce54b486 --- /dev/null +++ b/changelog.d/19109.doc @@ -0,0 +1 @@ +Add [a page](https://element-hq.github.io/synapse/latest/development/internal_documentation/release_notes_review_checklist.html) to the documentation describing the steps the Synapse team takes to review the release notes before publishing them. \ No newline at end of file diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 64869eca8e..926a6eb848 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -116,6 +116,8 @@ - [The Auth Chain Difference Algorithm](auth_chain_difference_algorithm.md) - [Media Repository](media_repository.md) - [Room and User Statistics](room_and_user_statistics.md) + - [Releasing]() + - [Release Notes Review Checklist](development/internal_documentation/release_notes_review_checklist.md) - [Scripts]() # Other diff --git a/docs/development/internal_documentation/release_notes_review_checklist.md b/docs/development/internal_documentation/release_notes_review_checklist.md new file mode 100644 index 0000000000..c3d4d665be --- /dev/null +++ b/docs/development/internal_documentation/release_notes_review_checklist.md @@ -0,0 +1,12 @@ +# Release notes review checklist + +The Synapse release process includes a step to review the changelog before +publishing it. 
The following is a list of common points to check for: + +1. Check whether any similar entries that can be merged together (make sure to include all mentioned PRs at the end of the line, i.e. (#1234, #1235, ...)). +2. Link any MSCXXXX lines to the Matrix Spec Change itself: . +3. Wrap any class names, variable names, etc. in back-ticks, if needed. +4. Hoist any relevant security, deprecation, etc. announcements to the top of this version's changelog for visibility. This includes any announcements in RCs for this release. +5. Check the upgrade notes for any important announcements, and link to them from the changelog if warranted. +6. Quickly skim and check that each entry is in the appropriate section. +7. Entries under the Bugfixes section should ideally state what Synapse version the bug was introduced in. For example: "Fixed a bug introduced in v1.x.y" or if no version can be identified, "Fixed a long-standing bug ...". \ No newline at end of file diff --git a/scripts-dev/release.py b/scripts-dev/release.py index 111c184ccb..262c1503c7 100755 --- a/scripts-dev/release.py +++ b/scripts-dev/release.py @@ -316,7 +316,10 @@ def _prepare() -> None: ) print("Opening the changelog in your browser...") - print("Please ask #synapse-dev to give it a check.") + print( + "Please review it using the release notes review checklist: https://element-hq.github.io/synapse/develop/development/internal_documentation/release_notes_review_checklist.html" + ) + print("And post it in #synapse-dev for cursory review from the team.") click.launch( f"https://github.com/element-hq/synapse/blob/{synapse_repo.active_branch.name}/CHANGES.md" ) From 0417296b9f3d3499bd00807a356809aa34a74559 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 29 Oct 2025 10:23:10 -0500 Subject: [PATCH 30/72] Remove logcontext problems caused by awaiting raw `deferLater(...)` (#19058) This is a normal problem where we `await` a deferred without wrapping it in `make_deferred_yieldable(...)`. 
But I've opted to replace the usage of `deferLater` with something more standard for the Synapse codebase. Part of https://github.com/element-hq/synapse/issues/18905 It's unclear why we're only now seeing these failures happen with the changes from https://github.com/element-hq/synapse/pull/19057 Example failures seen in https://github.com/element-hq/synapse/actions/runs/18477454390/job/52645183606?pr=19057 ``` builtins.AssertionError: Expected `looping_call` callback from the reactor to start with the sentinel logcontext but saw task-_resumable_task-0-IBzAmHUoepQfLnEA. In other words, another task shouldn't have leaked their logcontext to us. ``` --- changelog.d/19058.misc | 1 + synapse/util/task_scheduler.py | 4 ++-- tests/rest/admin/test_room.py | 3 +-- tests/util/test_task_scheduler.py | 9 ++++++--- 4 files changed, 10 insertions(+), 7 deletions(-) create mode 100644 changelog.d/19058.misc diff --git a/changelog.d/19058.misc b/changelog.d/19058.misc new file mode 100644 index 0000000000..15bc4b39bd --- /dev/null +++ b/changelog.d/19058.misc @@ -0,0 +1 @@ +Remove logcontext problems caused by awaiting raw `deferLater(...)`. diff --git a/synapse/util/task_scheduler.py b/synapse/util/task_scheduler.py index f033d37579..22b3bf8c15 100644 --- a/synapse/util/task_scheduler.py +++ b/synapse/util/task_scheduler.py @@ -53,8 +53,8 @@ class TaskScheduler: """ This is a simple task scheduler designed for resumable tasks. Normally, - you'd use `run_in_background` to start a background task or Twisted's - `deferLater` if you want to run it later. + you'd use `run_in_background` to start a background task or `clock.call_later` + if you want to run it later. The issue is that these tasks stop completely and won't resume if Synapse is shut down for any reason. 
diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py index 5b95262365..40b34f4433 100644 --- a/tests/rest/admin/test_room.py +++ b/tests/rest/admin/test_room.py @@ -27,7 +27,6 @@ from parameterized import parameterized -from twisted.internet.task import deferLater from twisted.internet.testing import MemoryReactor import synapse.rest.admin @@ -1163,7 +1162,7 @@ def test_delete_same_room_twice(self) -> None: # Mock PaginationHandler.purge_room to sleep for 100s, so we have time to do a second call # before the purge is over. Note that it doesn't purge anymore, but we don't care. async def purge_room(room_id: str, force: bool) -> None: - await deferLater(self.hs.get_reactor(), 100, lambda: None) + await self.hs.get_clock().sleep(100) self.pagination_handler.purge_room = AsyncMock(side_effect=purge_room) # type: ignore[method-assign] diff --git a/tests/util/test_task_scheduler.py b/tests/util/test_task_scheduler.py index 43c3ce52ea..de9e381489 100644 --- a/tests/util/test_task_scheduler.py +++ b/tests/util/test_task_scheduler.py @@ -20,9 +20,10 @@ # from typing import Optional -from twisted.internet.task import deferLater +from twisted.internet.defer import Deferred from twisted.internet.testing import MemoryReactor +from synapse.logging.context import make_deferred_yieldable from synapse.server import HomeServer from synapse.types import JsonMapping, ScheduledTask, TaskStatus from synapse.util.clock import Clock @@ -87,7 +88,7 @@ async def _sleeping_task( self, task: ScheduledTask ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: # Sleep for a second - await deferLater(self.reactor, 1, lambda: None) + await self.hs.get_clock().sleep(1) return TaskStatus.COMPLETE, None, None def test_schedule_lot_of_tasks(self) -> None: @@ -170,8 +171,10 @@ async def _resumable_task( return TaskStatus.COMPLETE, {"success": True}, None else: await self.task_scheduler.update_task(task.id, result={"in_progress": True}) + # Create a deferred which we will 
never complete + incomplete_d: Deferred = Deferred() # Await forever to simulate an aborted task because of a restart - await deferLater(self.reactor, 2**16, lambda: None) + await make_deferred_yieldable(incomplete_d) # This should never been called return TaskStatus.ACTIVE, None, None From 6facf98a3ac83e03cf9880a057e4321c8be6070f Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 29 Oct 2025 10:28:05 -0500 Subject: [PATCH 31/72] Be mindful of other `SIGHUP` handlers in 3rd-party code (#19095) Be mindful that Synapse can be run alongside other code in the same Python process. We shouldn't clobber other `SIGHUP` handlers as only one can be set at time. (no clobber) ### Background As part of Element's plan to support a light form of vhosting (virtual host) (multiple instances of Synapse in the same Python process), we're currently diving into the details and implications of running multiple instances of Synapse in the same Python process. "Per-tenant logging" tracked internally by https://github.com/element-hq/synapse-small-hosts/issues/48 Relevant to logging as we use a `SIGHUP` to reload log config in Synapse. --- changelog.d/19095.misc | 1 + synapse/app/_base.py | 118 ++++++++++++++++++++++++--------------- synapse/config/logger.py | 4 +- 3 files changed, 74 insertions(+), 49 deletions(-) create mode 100644 changelog.d/19095.misc diff --git a/changelog.d/19095.misc b/changelog.d/19095.misc new file mode 100644 index 0000000000..c9949c9cb5 --- /dev/null +++ b/changelog.d/19095.misc @@ -0,0 +1 @@ +Avoid clobbering other `SIGHUP` handlers in 3rd-party code. 
diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 1954dbc1a0..c0fcf8ca29 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -29,6 +29,7 @@ import warnings from textwrap import indent from threading import Thread +from types import FrameType from typing import ( TYPE_CHECKING, Any, @@ -36,6 +37,7 @@ Callable, NoReturn, Optional, + Union, cast, ) from wsgiref.simple_server import WSGIServer @@ -72,7 +74,6 @@ from synapse.http.site import SynapseSite from synapse.logging.context import LoggingContext, PreserveLoggingContext from synapse.metrics import install_gc_manager, register_threadpool -from synapse.metrics.background_process_metrics import run_as_background_process from synapse.metrics.jemalloc import setup_jemalloc_stats from synapse.module_api.callbacks.spamchecker_callbacks import load_legacy_spam_checkers from synapse.module_api.callbacks.third_party_event_rules_callbacks import ( @@ -108,7 +109,7 @@ def register_sighup( - homeserver_instance_id: str, + hs: "HomeServer", func: Callable[P, None], *args: P.args, **kwargs: P.kwargs, @@ -123,19 +124,25 @@ def register_sighup( *args, **kwargs: args and kwargs to be passed to the target function. """ - _instance_id_to_sighup_callbacks_map.setdefault(homeserver_instance_id, []).append( - (func, args, kwargs) + # Wrap the function so we can run it within a logcontext + def _callback_wrapper(*args: P.args, **kwargs: P.kwargs) -> None: + with LoggingContext(name="sighup", server_name=hs.hostname): + func(*args, **kwargs) + + _instance_id_to_sighup_callbacks_map.setdefault(hs.get_instance_id(), []).append( + (_callback_wrapper, args, kwargs) ) -def unregister_sighups(instance_id: str) -> None: +def unregister_sighups(homeserver_instance_id: str) -> None: """ Unregister all sighup functions associated with this Synapse instance. Args: - instance_id: Unique ID for this Synapse process instance. 
+ homeserver_instance_id: The unique ID for this Synapse process instance to + unregister hooks for (`hs.get_instance_id()`). """ - _instance_id_to_sighup_callbacks_map.pop(instance_id, []) + _instance_id_to_sighup_callbacks_map.pop(homeserver_instance_id, []) def start_worker_reactor( @@ -540,6 +547,61 @@ def refresh_certificate(hs: "HomeServer") -> None: logger.info("Context factories updated.") +_already_setup_sighup_handling = False +""" +Marks whether we've already successfully ran `setup_sighup_handling()`. +""" + + +def setup_sighup_handling() -> None: + """ + Set up SIGHUP handling to call registered callbacks. + + This can be called multiple times safely. + """ + global _already_setup_sighup_handling + # We only need to set things up once per process. + if _already_setup_sighup_handling: + return + + previous_sighup_handler: Union[ + Callable[[int, Optional[FrameType]], Any], int, None + ] = None + + # Set up the SIGHUP machinery. + if hasattr(signal, "SIGHUP"): + + def handle_sighup(*args: Any, **kwargs: Any) -> None: + # Tell systemd our state, if we're using it. This will silently fail if + # we're not using systemd. + sdnotify(b"RELOADING=1") + + if callable(previous_sighup_handler): + previous_sighup_handler(*args, **kwargs) + + for sighup_callbacks in _instance_id_to_sighup_callbacks_map.values(): + for func, args, kwargs in sighup_callbacks: + func(*args, **kwargs) + + sdnotify(b"READY=1") + + # We defer running the sighup handlers until next reactor tick. This + # is so that we're in a sane state, e.g. flushing the logs may fail + # if the sighup happens in the middle of writing a log entry. + def run_sighup(*args: Any, **kwargs: Any) -> None: + # `callFromThread` should be "signal safe" as well as thread + # safe. 
+ reactor.callFromThread(handle_sighup, *args, **kwargs) + + # Register for the SIGHUP signal, chaining any existing handler as there can + # only be one handler per signal and we don't want to clobber any existing + # handlers (like the `multi_synapse` shard process in the context of Synapse Pro + # for small hosts) + previous_sighup_handler = signal.signal(signal.SIGHUP, run_sighup) + + _already_setup_sighup_handling = True + + async def start(hs: "HomeServer", freeze: bool = True) -> None: """ Start a Synapse server or worker. @@ -579,45 +641,9 @@ async def start(hs: "HomeServer", freeze: bool = True) -> None: name="gai_resolver", server_name=server_name, threadpool=resolver_threadpool ) - # Set up the SIGHUP machinery. - if hasattr(signal, "SIGHUP"): - - def handle_sighup(*args: Any, **kwargs: Any) -> "defer.Deferred[None]": - async def _handle_sighup(*args: Any, **kwargs: Any) -> None: - # Tell systemd our state, if we're using it. This will silently fail if - # we're not using systemd. - sdnotify(b"RELOADING=1") - - for sighup_callbacks in _instance_id_to_sighup_callbacks_map.values(): - for func, args, kwargs in sighup_callbacks: - func(*args, **kwargs) - - sdnotify(b"READY=1") - - # It's okay to ignore the linter error here and call - # `run_as_background_process` directly because `_handle_sighup` operates - # outside of the scope of a specific `HomeServer` instance and holds no - # references to it which would prevent a clean shutdown. - return run_as_background_process( # type: ignore[untracked-background-process] - "sighup", - server_name, - _handle_sighup, - *args, - **kwargs, - ) - - # We defer running the sighup handlers until next reactor tick. This - # is so that we're in a sane state, e.g. flushing the logs may fail - # if the sighup happens in the middle of writing a log entry. - def run_sighup(*args: Any, **kwargs: Any) -> None: - # `callFromThread` should be "signal safe" as well as thread - # safe. 
- reactor.callFromThread(handle_sighup, *args, **kwargs) - - signal.signal(signal.SIGHUP, run_sighup) - - register_sighup(hs.get_instance_id(), refresh_certificate, hs) - register_sighup(hs.get_instance_id(), reload_cache_config, hs.config) + setup_sighup_handling() + register_sighup(hs, refresh_certificate, hs) + register_sighup(hs, reload_cache_config, hs.config) # Apply the cache config. hs.config.caches.resize_all_caches() diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 8e355035a9..945236ed07 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -345,9 +345,7 @@ def setup_logging( # Add a SIGHUP handler to reload the logging configuration, if one is available. from synapse.app import _base as appbase - appbase.register_sighup( - hs.get_instance_id(), _reload_logging_config, log_config_path - ) + appbase.register_sighup(hs, _reload_logging_config, log_config_path) # Log immediately so we can grep backwards. logger.warning("***** STARTING SERVER *****") From 32998d07d2250ef4b82fef3aaeca7c83b87f38d8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 Oct 2025 15:39:07 +0000 Subject: [PATCH 32/72] Bump sigstore/cosign-installer from 3.10.0 to 4.0.0 (#19075) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docker.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 987fb76ae1..8bc045dc64 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -120,7 +120,7 @@ jobs: uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 - name: Install Cosign - uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0 + uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0 - name: Calculate docker image tag uses: 
docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0 From e0838c25674d1c46a514ebcb8f14fd058a595b2a Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Wed, 29 Oct 2025 13:15:00 -0400 Subject: [PATCH 33/72] Drop Python 3.9, bump tests/builds to Python 3.10 (#19099) Python 3.9 EOL is on 2025-10-31 --- .ci/scripts/calculate_jobs.py | 10 +++---- .github/workflows/release-artifacts.yml | 2 +- .github/workflows/tests.yml | 6 ++--- build_rust.py | 6 ++--- changelog.d/19099.removal | 1 + docker/editable.Dockerfile | 2 +- docs/development/dependencies.md | 8 +++--- docs/setup/installation.md | 11 +++++--- docs/upgrade.md | 12 +++++++++ mypy.ini | 2 +- poetry.lock | 35 +++---------------------- pyproject.toml | 16 ++++++----- rust/Cargo.toml | 2 +- scripts-dev/build_debian_packages.py | 5 ++-- synapse/__init__.py | 4 +-- tox.ini | 2 +- 16 files changed, 59 insertions(+), 65 deletions(-) create mode 100644 changelog.d/19099.removal diff --git a/.ci/scripts/calculate_jobs.py b/.ci/scripts/calculate_jobs.py index f3b1bb1503..2971b3c5c8 100755 --- a/.ci/scripts/calculate_jobs.py +++ b/.ci/scripts/calculate_jobs.py @@ -36,11 +36,11 @@ def set_output(key: str, value: str): # First calculate the various trial jobs. 
# # For PRs, we only run each type of test with the oldest Python version supported (which -# is Python 3.9 right now) +# is Python 3.10 right now) trial_sqlite_tests = [ { - "python-version": "3.9", + "python-version": "3.10", "database": "sqlite", "extras": "all", } @@ -53,12 +53,12 @@ def set_output(key: str, value: str): "database": "sqlite", "extras": "all", } - for version in ("3.10", "3.11", "3.12", "3.13") + for version in ("3.11", "3.12", "3.13") ) trial_postgres_tests = [ { - "python-version": "3.9", + "python-version": "3.10", "database": "postgres", "postgres-version": "13", "extras": "all", @@ -77,7 +77,7 @@ def set_output(key: str, value: str): trial_no_extra_tests = [ { - "python-version": "3.9", + "python-version": "3.10", "database": "sqlite", "extras": "", } diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index a19bde0a60..f3e0da5aa4 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -145,7 +145,7 @@ jobs: - name: Only build a single wheel on PR if: startsWith(github.ref, 'refs/pull/') - run: echo "CIBW_BUILD="cp39-manylinux_*"" >> $GITHUB_ENV + run: echo "CIBW_BUILD="cp310-manylinux_*"" >> $GITHUB_ENV - name: Build wheels run: python -m cibuildwheel --output-dir wheelhouse diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f75435bedf..93c0e9415f 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -470,7 +470,7 @@ jobs: - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: - python-version: '3.9' + python-version: '3.10' - name: Prepare old deps if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true' @@ -514,7 +514,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["pypy-3.9"] + python-version: ["pypy-3.10"] extras: ["all"] steps: @@ -638,7 +638,7 @@ jobs: strategy: matrix: include: - - python-version: "3.9" + - python-version: "3.10" 
postgres-version: "13" - python-version: "3.13" diff --git a/build_rust.py b/build_rust.py index af7bd2fdc5..a9e9265daf 100644 --- a/build_rust.py +++ b/build_rust.py @@ -27,12 +27,12 @@ def build(setup_kwargs: dict[str, Any]) -> None: setup_kwargs["zip_safe"] = False # We look up the minimum supported Python version with - # `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first Python + # `python_requires` (e.g. ">=3.10.0,<4.0.0") and finding the first Python # version that matches. We then convert that into the `py_limited_api` form, - # e.g. cp39 for Python 3.9. + # e.g. cp310 for Python 3.10. py_limited_api: str python_bounds = SpecifierSet(setup_kwargs["python_requires"]) - for minor_version in itertools.count(start=8): + for minor_version in itertools.count(start=10): if f"3.{minor_version}.0" in python_bounds: py_limited_api = f"cp3{minor_version}" break diff --git a/changelog.d/19099.removal b/changelog.d/19099.removal new file mode 100644 index 0000000000..8279a1c7f9 --- /dev/null +++ b/changelog.d/19099.removal @@ -0,0 +1 @@ +Drop support for Python 3.9. diff --git a/docker/editable.Dockerfile b/docker/editable.Dockerfile index 7e5da4e4f4..b2aff9cb53 100644 --- a/docker/editable.Dockerfile +++ b/docker/editable.Dockerfile @@ -3,7 +3,7 @@ # # Used by `complement.sh`. Not suitable for production use. -ARG PYTHON_VERSION=3.9 +ARG PYTHON_VERSION=3.10 ### ### Stage 0: generate requirements.txt diff --git a/docs/development/dependencies.md b/docs/development/dependencies.md index e381b3d155..1b3348703f 100644 --- a/docs/development/dependencies.md +++ b/docs/development/dependencies.md @@ -79,17 +79,17 @@ phonenumbers = [ We can see this pinned version inside the docker image for that release: ``` -$ docker pull vectorim/synapse:v1.97.0 +$ docker pull matrixdotorg/synapse:latest ... 
-$ docker run --entrypoint pip vectorim/synapse:v1.97.0 show phonenumbers +$ docker run --entrypoint pip matrixdotorg/synapse:latest show phonenumbers Name: phonenumbers -Version: 8.12.44 +Version: 9.0.15 Summary: Python version of Google's common library for parsing, formatting, storing and validating international phone numbers. Home-page: https://github.com/daviddrysdale/python-phonenumbers Author: David Drysdale Author-email: dmd@lurklurk.org License: Apache License 2.0 -Location: /usr/local/lib/python3.9/site-packages +Location: /usr/local/lib/python3.12/site-packages Requires: Required-by: matrix-synapse ``` diff --git a/docs/setup/installation.md b/docs/setup/installation.md index 68f224d33a..786672c689 100644 --- a/docs/setup/installation.md +++ b/docs/setup/installation.md @@ -204,7 +204,7 @@ When following this route please make sure that the [Platform-specific prerequis System requirements: - POSIX-compliant system (tested on Linux & OS X) -- Python 3.9 or later, up to Python 3.13. +- Python 3.10 or later, up to Python 3.13. - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org If building on an uncommon architecture for which pre-built wheels are @@ -307,11 +307,16 @@ sudo dnf group install "Development Tools" ##### Red Hat Enterprise Linux / Rocky Linux / Oracle Linux -*Note: The term "RHEL" below refers to Red Hat Enterprise Linux, Oracle Linux and Rocky Linux. The distributions are 1:1 binary compatible.* +*Note: The term "RHEL" below refers to Red Hat Enterprise Linux, Oracle Linux and Rocky Linux. +The distributions are 1:1 binary compatible.* It's recommended to use the latest Python versions. -RHEL 8 in particular ships with Python 3.6 by default which is EOL and therefore no longer supported by Synapse. RHEL 9 ships with Python 3.9 which is still supported by the Python core team as of this writing. 
However, newer Python versions provide significant performance improvements and they're available in official distributions' repositories. Therefore it's recommended to use them. +RHEL 8 & 9 in particular ship with Python 3.6 & 3.9 respectively by default +which are EOL and therefore no longer supported by Synapse. +However, newer Python versions provide significant performance improvements +and they're available in official distributions' repositories. +Therefore it's recommended to use them. Python 3.11 and 3.12 are available for both RHEL 8 and 9. diff --git a/docs/upgrade.md b/docs/upgrade.md index 63d567505f..faf6cbf8dc 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -117,6 +117,18 @@ each upgrade are complete before moving on to the next upgrade, to avoid stacking them up. You can monitor the currently running background updates with [the Admin API](usage/administration/admin_api/background_updates.html#status). +# Upgrading to v1.142.0 + +## Minimum supported Python version + +The minimum supported Python version has been increased from v3.9 to v3.10. +You will need Python 3.10+ to run Synapse v1.142.0. + +If you use current versions of the +[matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks) +Docker images, no action is required. + + # Upgrading to v1.141.0 ## Docker images now based on Debian `trixie` with Python 3.13 diff --git a/mypy.ini b/mypy.ini index eefe405fe5..d6a3434293 100644 --- a/mypy.ini +++ b/mypy.ini @@ -37,7 +37,7 @@ strict_equality = True # Run mypy type checking with the minimum supported Python version to catch new usage # that isn't backwards-compatible (types, overloads, etc). 
-python_version = 3.9 +python_version = 3.10 files = docker/, diff --git a/poetry.lock b/poetry.lock index dd86fe8159..5a16dd5860 100644 --- a/poetry.lock +++ b/poetry.lock @@ -60,9 +60,6 @@ files = [ {file = "automat-25.4.16.tar.gz", hash = "sha256:0017591a5477066e90d26b0e696ddc143baafd87b588cfac8100bc6be9634de0"}, ] -[package.dependencies] -typing_extensions = {version = "*", markers = "python_version < \"3.10\""} - [package.extras] visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] @@ -510,7 +507,6 @@ files = [ [package.dependencies] gitdb = ">=4.0.1,<5" -typing-extensions = {version = ">=3.10.0.2", markers = "python_version < \"3.10\""} [package.extras] doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"] @@ -806,7 +802,7 @@ description = "Read metadata from Python packages" optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and python_version < \"3.12\" or python_version < \"3.10\"" +markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and python_version < \"3.12\"" files = [ {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, @@ -820,26 +816,6 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\"", "pytest-perf (>=0.9.2)", "pytest-ruff"] -[[package]] -name = "importlib-resources" -version = "5.12.0" -description = "Read resources from 
Python packages" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -markers = "python_version < \"3.10\"" -files = [ - {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"}, - {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"}, -] - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8 ; python_version < \"3.12\"", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\""] - [[package]] name = "incremental" version = "24.7.2" @@ -2846,8 +2822,6 @@ files = [ [package.dependencies] click = "*" -importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} -importlib-resources = {version = ">=5", markers = "python_version < \"3.10\""} jinja2 = "*" tomli = {version = "*", markers = "python_version < \"3.11\""} @@ -2893,7 +2867,6 @@ files = [ [package.dependencies] id = "*" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} keyring = {version = ">=21.2.0", markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\""} packaging = ">=24.0" readme-renderer = ">=35.0" @@ -3220,7 +3193,7 @@ description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and python_version < \"3.12\" or python_version < \"3.10\"" +markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and python_version < 
\"3.12\"" files = [ {file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"}, {file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"}, @@ -3342,5 +3315,5 @@ url-preview = ["lxml"] [metadata] lock-version = "2.1" -python-versions = "^3.9.0" -content-hash = "5d71c862b924bc2af936cb6fef264a023213153543f738af31357deaf6de19b8" +python-versions = "^3.10.0" +content-hash = "0122c5aa55099678f2ba5094ec393ebd814def15213388b33e5f1d7760392ffc" diff --git a/pyproject.toml b/pyproject.toml index 9a57a2b8d1..08b4b8af66 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,7 @@ [tool.ruff] line-length = 88 -target-version = "py39" +target-version = "py310" [tool.ruff.lint] # See https://beta.ruff.rs/docs/rules/#error-e @@ -165,7 +165,7 @@ synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main" update_synapse_database = "synapse._scripts.update_synapse_database:main" [tool.poetry.dependencies] -python = "^3.9.0" +python = "^3.10.0" # Mandatory Dependencies # ---------------------- @@ -201,7 +201,8 @@ bcrypt = ">=3.1.7" # Packagers that already took care of libwebp can lower that down to 5.4.0. Pillow = ">=10.0.1" # We use SortedDict.peekitem(), which was added in sortedcontainers 1.5.2. -sortedcontainers = ">=1.5.2" +# 2.0.5 updates collections.abc imports to avoid Python 3.10 incompatibility. +sortedcontainers = ">=2.0.5" pymacaroons = ">=0.13.0" msgpack = ">=0.5.2" phonenumbers = ">=8.2.0" @@ -217,7 +218,8 @@ netaddr = ">=0.7.18" # end up with a broken installation, with recent MarkupSafe but old Jinja, we # add a lower bound to the Jinja2 dependency. Jinja2 = ">=3.0" -bleach = ">=1.4.3" +# 3.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility. +bleach = ">=3.2.0" # We use `assert_never`, which were added in `typing-extensions` 4.1. 
typing-extensions = ">=4.1" # We enforce that we have a `cryptography` version that bundles an `openssl` @@ -258,10 +260,12 @@ authlib = { version = ">=0.15.1", optional = true } # `contrib/systemd/log_config.yaml`. # Note: systemd-python 231 appears to have been yanked from pypi systemd-python = { version = ">=231", optional = true } -lxml = { version = ">=4.5.2", optional = true } +# 4.6.3 removes usage of _PyGen_Send which is unavailable in CPython as of Python 3.10. +lxml = { version = ">=4.6.3", optional = true } sentry-sdk = { version = ">=0.7.2", optional = true } opentracing = { version = ">=2.2.0", optional = true } -jaeger-client = { version = ">=4.0.0", optional = true } +# 4.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility. +jaeger-client = { version = ">=4.2.0", optional = true } txredisapi = { version = ">=1.4.7", optional = true } hiredis = { version = "*", optional = true } Pympler = { version = "*", optional = true } diff --git a/rust/Cargo.toml b/rust/Cargo.toml index 0706357294..4f0319a7f5 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -34,7 +34,7 @@ pyo3 = { version = "0.25.1", features = [ "macros", "anyhow", "abi3", - "abi3-py39", + "abi3-py310", ] } pyo3-log = "0.12.4" pythonize = "0.25.0" diff --git a/scripts-dev/build_debian_packages.py b/scripts-dev/build_debian_packages.py index f94c5a37fc..60aa8a5796 100755 --- a/scripts-dev/build_debian_packages.py +++ b/scripts-dev/build_debian_packages.py @@ -21,13 +21,12 @@ from typing import Collection, Optional, Sequence # These are expanded inside the dockerfile to be a fully qualified image name. -# e.g. docker.io/library/debian:bullseye +# e.g. 
docker.io/library/debian:bookworm # # If an EOL is forced by a Python version and we're dropping support for it, make sure -# to remove references to the distibution across Synapse (search for "bullseye" for +# to remove references to the distibution across Synapse (search for "bookworm" for # example) DISTS = ( - "debian:bullseye", # (EOL ~2024-07) (our EOL forced by Python 3.9 is 2025-10-05) "debian:bookworm", # (EOL 2026-06) (our EOL forced by Python 3.11 is 2027-10-24) "debian:sid", # (rolling distro, no EOL) "ubuntu:jammy", # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04) diff --git a/synapse/__init__.py b/synapse/__init__.py index d1c306b8f3..2bed060878 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -39,8 +39,8 @@ # Note that we use an (unneeded) variable here so that pyupgrade doesn't nuke the # if-statement completely. py_version = sys.version_info -if py_version < (3, 9): - print("Synapse requires Python 3.9 or above.") +if py_version < (3, 10): + print("Synapse requires Python 3.10 or above.") sys.exit(1) # Allow using the asyncio reactor via env var. 
diff --git a/tox.ini b/tox.ini index a506b5034d..a0e397bbbf 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py39, py310, py311, py312, py313 +envlist = py310, py311, py312, py313 # we require tox>=2.3.2 for the fix to https://github.com/tox-dev/tox/issues/208 minversion = 2.3.2 From 728512918e169dd6a84f3c3a2b359c97fcde9b9a Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 30 Oct 2025 11:17:35 +0100 Subject: [PATCH 34/72] Exclude `.lock` file from `/usr/local` when building docker images (#19107) --- changelog.d/19107.misc | 1 + docker/Dockerfile | 7 ++++++- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 changelog.d/19107.misc diff --git a/changelog.d/19107.misc b/changelog.d/19107.misc new file mode 100644 index 0000000000..38cb9a9b3b --- /dev/null +++ b/changelog.d/19107.misc @@ -0,0 +1 @@ +Prevent uv `/usr/local/.lock` file from appearing in built Synapse docker images. \ No newline at end of file diff --git a/docker/Dockerfile b/docker/Dockerfile index f83486036d..6d10dee1aa 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -179,7 +179,12 @@ LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later OR LicenseRef-Element COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr COPY --from=runtime-deps /install-${TARGETARCH}/var /var -COPY --from=builder /install /usr/local + +# Copy the installed python packages from the builder stage. +# +# uv will generate a `.lock` file when installing packages, which we don't want +# to copy to the final image. 
+COPY --from=builder --exclude=.lock /install /usr/local COPY ./docker/start.py /start.py COPY ./docker/conf /conf From f54ddbcace2a42f656b739ae0520ac1c8f2205a5 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 30 Oct 2025 13:40:53 +0100 Subject: [PATCH 35/72] Prevent duplicate GH releases being created during Synapse release process (#19096) --- .github/workflows/release-artifacts.yml | 17 ++++++----------- changelog.d/19096.misc | 1 + 2 files changed, 7 insertions(+), 11 deletions(-) create mode 100644 changelog.d/19096.misc diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index f3e0da5aa4..4e38c0f35b 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -200,16 +200,11 @@ jobs: mv debs*/* debs/ tar -cvJf debs.tar.xz debs - name: Attach to release - # Pinned to work around https://github.com/softprops/action-gh-release/issues/445 - uses: softprops/action-gh-release@c95fe1489396fe8a9eb87c0abf8aa5b2ef267fda # v0.1.15 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - files: | - Sdist/* - Wheel*/* - debs.tar.xz - # if it's not already published, keep the release as a draft. - draft: true - # mark it as a prerelease if the tag contains 'rc'. - prerelease: ${{ contains(github.ref, 'rc') }} + run: | + gh release upload "${{ github.ref_name }}" \ + Sdist/* \ + Wheel*/* \ + debs.tar.xz \ + --repo ${{ github.repository }} diff --git a/changelog.d/19096.misc b/changelog.d/19096.misc new file mode 100644 index 0000000000..0b7bdf0967 --- /dev/null +++ b/changelog.d/19096.misc @@ -0,0 +1 @@ +Prevent duplicate GitHub draft releases being created during the Synapse release process. 
\ No newline at end of file From 2c4057bf93c55ce0df16a8024f75b46eab70739d Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Thu, 30 Oct 2025 10:21:56 -0500 Subject: [PATCH 36/72] Prevent duplicate logging setup when running multiple Synapse instances (#19067) Be mindful that it's possible to run Synapse multiple times in the same Python process. So we only need to do some part of the logging setup once. - We only need to setup the global log record factory and context filter once - We only need to redirect Twisted logging once ### Background As part of Element's plan to support a light form of vhosting (virtual host) (multiple instances of Synapse in the same Python process), we're currently diving into the details and implications of running multiple instances of Synapse in the same Python process. "Per-tenant logging" tracked internally by https://github.com/element-hq/synapse-small-hosts/issues/48 --- changelog.d/19067.misc | 1 + synapse/config/logger.py | 88 +++++++++++++++++++++++++-------------- synmark/suites/logging.py | 4 +- 3 files changed, 60 insertions(+), 33 deletions(-) create mode 100644 changelog.d/19067.misc diff --git a/changelog.d/19067.misc b/changelog.d/19067.misc new file mode 100644 index 0000000000..560fbfc668 --- /dev/null +++ b/changelog.d/19067.misc @@ -0,0 +1 @@ +Prevent duplicate logging setup when running multiple Synapse instances. diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 945236ed07..1f5c6da3ae 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -198,12 +198,27 @@ def generate_files(self, config: dict[str, Any], config_dir_path: str) -> None: log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file)) -def _setup_stdlib_logging( - config: "HomeServerConfig", log_config_path: Optional[str], logBeginner: LogBeginner -) -> None: +_already_performed_one_time_logging_setup: bool = False +""" +Marks whether we've already successfully ran `one_time_logging_setup()`. 
+""" + + +def one_time_logging_setup(*, logBeginner: LogBeginner = globalLogBeginner) -> None: """ - Set up Python standard library logging. + Perform one-time logging configuration for the Python process. + + For example, we don't need to have multiple log record factories. Once we've + configured it once, we don't need to do it again. + + This matters because multiple Synapse instances can be run in the same Python + process (c.f. Synapse Pro for small hosts) """ + global _already_performed_one_time_logging_setup + + # We only need to set things up once. + if _already_performed_one_time_logging_setup: + return # We add a log record factory that runs all messages through the # LoggingContextFilter so that we get the context *at the time we log* @@ -221,26 +236,6 @@ def factory(*args: Any, **kwargs: Any) -> logging.LogRecord: logging.setLogRecordFactory(factory) - # Configure the logger with the initial configuration. - if log_config_path is None: - log_format = ( - "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s" - " - %(message)s" - ) - - logger = logging.getLogger("") - logger.setLevel(logging.INFO) - logging.getLogger("synapse.storage.SQL").setLevel(logging.INFO) - - formatter = logging.Formatter(log_format) - - handler = logging.StreamHandler() - handler.setFormatter(formatter) - logger.addHandler(handler) - else: - # Load the logging configuration. - _load_logging_config(log_config_path) - # Route Twisted's native logging through to the standard library logging # system. observer = STDLibLogObserver() @@ -281,6 +276,36 @@ def _log(event: dict) -> None: logBeginner.beginLoggingTo([_log], redirectStandardIO=False) + _already_performed_one_time_logging_setup = True + + +def _setup_stdlib_logging( + config: "HomeServerConfig", log_config_path: Optional[str] +) -> None: + """ + Set up Python standard library logging. + """ + + # Configure the logger with the initial configuration. 
+ if log_config_path is None: + log_format = ( + "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s" + " - %(message)s" + ) + + logger = logging.getLogger("") + logger.setLevel(logging.INFO) + logging.getLogger("synapse.storage.SQL").setLevel(logging.INFO) + + formatter = logging.Formatter(log_format) + + handler = logging.StreamHandler() + handler.setFormatter(formatter) + logger.addHandler(handler) + else: + # Load the logging configuration. + _load_logging_config(log_config_path) + def _load_logging_config(log_config_path: str) -> None: """ @@ -318,19 +343,14 @@ def setup_logging( hs: "HomeServer", config: "HomeServerConfig", use_worker_options: bool = False, - logBeginner: LogBeginner = globalLogBeginner, ) -> None: """ Set up the logging subsystem. Args: config: configuration data - use_worker_options: True to use the 'worker_log_config' option instead of 'log_config'. - - logBeginner: The Twisted logBeginner to use. - """ from twisted.internet import reactor @@ -341,11 +361,17 @@ def setup_logging( ) # Perform one-time logging configuration. - _setup_stdlib_logging(config, log_config_path, logBeginner=logBeginner) + one_time_logging_setup() + + # Configure logging. + _setup_stdlib_logging(config, log_config_path) # Add a SIGHUP handler to reload the logging configuration, if one is available. from synapse.app import _base as appbase - appbase.register_sighup(hs, _reload_logging_config, log_config_path) + # We only need to reload the config if there is a log config file path provided to + # reload from. + if log_config_path: + appbase.register_sighup(hs, _reload_logging_config, log_config_path) # Log immediately so we can grep backwards. 
logger.warning("***** STARTING SERVER *****") diff --git a/synmark/suites/logging.py b/synmark/suites/logging.py index cf9c836e06..db77484f4c 100644 --- a/synmark/suites/logging.py +++ b/synmark/suites/logging.py @@ -33,7 +33,7 @@ from twisted.logger import LogBeginner, LogPublisher from twisted.protocols.basic import LineOnlyReceiver -from synapse.config.logger import _setup_stdlib_logging +from synapse.config.logger import _setup_stdlib_logging, one_time_logging_setup from synapse.logging import RemoteHandler from synapse.synapse_rust import reset_logging_config from synapse.types import ISynapseReactor @@ -115,10 +115,10 @@ class _logging: } logger = logging.getLogger("synapse") + one_time_logging_setup(logBeginner=beginner) _setup_stdlib_logging( hs_config, # type: ignore[arg-type] None, - logBeginner=beginner, ) # Force a new logging config without having to load it from a file. From 349599143eaf7f7b415a7a3d859d0488afffa1d1 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 30 Oct 2025 16:22:52 +0100 Subject: [PATCH 37/72] Move reading of multipart response into `try` body (#19062) --- changelog.d/19062.bugfix | 1 + synapse/http/matrixfederationclient.py | 2 +- tests/http/test_matrixfederationclient.py | 59 +++++++++++++++++++++++ 3 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 changelog.d/19062.bugfix diff --git a/changelog.d/19062.bugfix b/changelog.d/19062.bugfix new file mode 100644 index 0000000000..c5231cbbc8 --- /dev/null +++ b/changelog.d/19062.bugfix @@ -0,0 +1 @@ +Fix a bug introduced in 1.111.0 where failed attempts to download authenticated remote media would not be handled correctly. 
\ No newline at end of file diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index d0e47cf8dc..562007c74f 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -1755,6 +1755,7 @@ async def federation_get_file( response, output_stream, boundary, expected_size + 1 ) deferred.addTimeout(self.default_timeout_seconds, self.reactor) + multipart_response = await make_deferred_yieldable(deferred) except BodyExceededMaxSize: msg = "Requested file is too large > %r bytes" % (expected_size,) logger.warning( @@ -1791,7 +1792,6 @@ async def federation_get_file( ) raise - multipart_response = await make_deferred_yieldable(deferred) if not multipart_response.url: assert multipart_response.length is not None length = multipart_response.length diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py index 6accb03b9f..4792bdc9bc 100644 --- a/tests/http/test_matrixfederationclient.py +++ b/tests/http/test_matrixfederationclient.py @@ -414,6 +414,65 @@ def test_authed_media_redirect_response(self) -> None: self.assertEqual(length, len(data)) self.assertEqual(output_stream.getvalue(), data) + @override_config( + { + "federation": { + # Set the timeout to a deterministic value, in case the defaults + # change. + "client_timeout": "10s", + } + } + ) + def test_authed_media_timeout_reading_body(self) -> None: + """ + If the HTTP request is connected, but gets no response before being + timed out, it'll give a RequestSendFailed with can_retry. + + Regression test for https://github.com/element-hq/synapse/issues/19061 + """ + limiter = Ratelimiter( + store=self.hs.get_datastores().main, + clock=self.clock, + cfg=RatelimitSettings(key="", per_second=0.17, burst_count=1048576), + ) + + output_stream = io.BytesIO() + + d = defer.ensureDeferred( + # timeout is set by `client_timeout`, which we override above. 
+ self.cl.federation_get_file( + "testserv:8008", "path", output_stream, limiter, "127.0.0.1", 10000 + ) + ) + + self.pump() + + conn = Mock() + clients = self.reactor.tcpClients + client = clients[0][2].buildProtocol(None) + client.makeConnection(conn) + + # Deferred does not have a result + self.assertNoResult(d) + + # Send it the HTTP response + client.dataReceived( + b"HTTP/1.1 200 OK\r\n" + b"Server: Fake\r\n" + # Set a large content length, prompting the federation client to + # wait to receive the rest of the body. + b"Content-Length: 1000\r\n" + b"Content-Type: multipart/mixed; boundary=6067d4698f8d40a0a794ea7d7379d53a\r\n\r\n" + ) + + # Push by enough to time it out + self.reactor.advance(10.5) + f = self.failureResultOf(d) + + self.assertIsInstance(f.value, RequestSendFailed) + self.assertTrue(f.value.can_retry) + self.assertIsInstance(f.value.inner_exception, defer.TimeoutError) + @parameterized.expand(["get_json", "post_json", "delete_json", "put_json"]) def test_timeout_reading_body(self, method_name: str) -> None: """ From f0aae62f8543fd4e1be295a14d3ecc874c1431ef Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Thu, 30 Oct 2025 11:47:53 -0500 Subject: [PATCH 38/72] Cheaper logcontext debug logs (`random_string_insecure_fast(...)`) (#19094) Follow-up to https://github.com/element-hq/synapse/pull/18966 During the weekly Backend team meeting, it was mentioned that `random_string(...)` was taking a significant amount of CPU on `matrix.org`. This makes sense as it relies on [`secrets.choice(...)`](https://docs.python.org/3/library/secrets.html#secrets.choice), a cryptographically secure function that is inherently computationally expensive. And since https://github.com/element-hq/synapse/pull/18966, we're calling `random_string(...)` as part of a bunch of logcontext utilities. 
Since we don't need cryptographically secure random strings for our debug logs, this PR is introducing a new `random_string_insecure_fast(...)` function that uses [`random.choice(...)`](https://docs.python.org/3/library/random.html#random.choice) which uses pseudo-random numbers that are "both fast and threadsafe". --- changelog.d/19094.misc | 1 + synapse/logging/context.py | 8 ++++---- synapse/util/stringutils.py | 17 +++++++++++++++++ 3 files changed, 22 insertions(+), 4 deletions(-) create mode 100644 changelog.d/19094.misc diff --git a/changelog.d/19094.misc b/changelog.d/19094.misc new file mode 100644 index 0000000000..0d38d17483 --- /dev/null +++ b/changelog.d/19094.misc @@ -0,0 +1 @@ +Use cheaper random string function in logcontext utilities. diff --git a/synapse/logging/context.py b/synapse/logging/context.py index 6a4425ff1d..5b87de6eb3 100644 --- a/synapse/logging/context.py +++ b/synapse/logging/context.py @@ -53,7 +53,7 @@ from twisted.python.threadpool import ThreadPool from synapse.logging.loggers import ExplicitlyConfiguredLogger -from synapse.util.stringutils import random_string +from synapse.util.stringutils import random_string_insecure_fast if TYPE_CHECKING: from synapse.logging.scopecontextmanager import _LogContextScope @@ -657,7 +657,7 @@ def __init__( self, new_context: LoggingContextOrSentinel = SENTINEL_CONTEXT ) -> None: self._new_context = new_context - self._instance_id = random_string(5) + self._instance_id = random_string_insecure_fast(5) def __enter__(self) -> None: logcontext_debug_logger.debug( @@ -859,7 +859,7 @@ def run_in_background( Note that the returned Deferred does not follow the synapse logcontext rules. 
""" - instance_id = random_string(5) + instance_id = random_string_insecure_fast(5) calling_context = current_context() logcontext_debug_logger.debug( "run_in_background(%s): called with logcontext=%s", instance_id, calling_context @@ -1012,7 +1012,7 @@ def make_deferred_yieldable(deferred: "defer.Deferred[T]") -> "defer.Deferred[T] restores the old context once the awaitable completes (execution passes from the reactor back to the code). """ - instance_id = random_string(5) + instance_id = random_string_insecure_fast(5) logcontext_debug_logger.debug( "make_deferred_yieldable(%s): called with logcontext=%s", instance_id, diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py index 6b0d3677da..0dadafbc78 100644 --- a/synapse/util/stringutils.py +++ b/synapse/util/stringutils.py @@ -20,6 +20,7 @@ # # import itertools +import random import re import secrets import string @@ -56,6 +57,10 @@ def random_string(length: int) -> str: """Generate a cryptographically secure string of random letters. Drawn from the characters: `a-z` and `A-Z` + + Because this is generated from cryptographic sources, it takes a notable amount of + effort to generate (computationally expensive). If you don't need cryptographic + security, consider using `random_string_insecure_fast` for better performance. """ return "".join(secrets.choice(string.ascii_letters) for _ in range(length)) @@ -68,6 +73,18 @@ def random_string_with_symbols(length: int) -> str: return "".join(secrets.choice(_string_with_symbols) for _ in range(length)) +def random_string_insecure_fast(length: int) -> str: + """ + Generate a string of random letters (insecure, fast). This is a more performant but + insecure version of `random_string`. + + WARNING: Not for security or cryptographic uses. Use `random_string` instead. 
+ + Drawn from the characters: `a-z` and `A-Z` + """ + return "".join(random.choice(string.ascii_letters) for _ in range(length)) + + def is_ascii(s: bytes) -> bool: try: s.decode("ascii").encode("ascii") From c0b9437ab6511cad351dc79cfabec3b8a1c767fa Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Thu, 30 Oct 2025 11:49:15 -0500 Subject: [PATCH 39/72] Fix lost logcontext when using `timeout_deferred(...)` (#19090) Fix lost logcontext when using `timeout_deferred(...)` and things actually timeout. Fix https://github.com/element-hq/synapse/issues/19087 (our HTTP client times out requests using `timeout_deferred(...)` Fix https://github.com/element-hq/synapse/issues/19066 (`/sync` uses `notifier.wait_for_events()` which uses `timeout_deferred(...)` under the hood) ### When/why did these lost logcontext warnings start happening? ``` synapse.logging.context - 107 - WARNING - sentinel - Expected logging context call_later but found POST-2453 synapse.logging.context - 107 - WARNING - sentinel - Expected logging context call_later was lost ``` In https://github.com/element-hq/synapse/pull/18828, we switched `timeout_deferred(...)` from using `reactor.callLater(...)` to [`clock.call_later(...)`](https://github.com/element-hq/synapse/blob/3b59ac3b69f6a2f73a504699b30313d8dcfe4709/synapse/util/clock.py#L224-L313) under the hood. This meant it started dealing with logcontexts but our `time_it_out()` callback didn't follow our [Synapse logcontext rules](https://github.com/element-hq/synapse/blob/3b59ac3b69f6a2f73a504699b30313d8dcfe4709/docs/log_contexts.md). 
--- changelog.d/19090.bugfix | 1 + synapse/logging/context.py | 22 ++++++++--- synapse/util/async_helpers.py | 3 +- synapse/util/clock.py | 2 +- tests/unittest.py | 3 ++ tests/util/test_async_helpers.py | 68 +++++++++++++++++++++++++++++++- 6 files changed, 89 insertions(+), 10 deletions(-) create mode 100644 changelog.d/19090.bugfix diff --git a/changelog.d/19090.bugfix b/changelog.d/19090.bugfix new file mode 100644 index 0000000000..077dafcbf8 --- /dev/null +++ b/changelog.d/19090.bugfix @@ -0,0 +1 @@ +Fix lost logcontext warnings from timeouts in sync and requests made by Synapse itself. diff --git a/synapse/logging/context.py b/synapse/logging/context.py index 5b87de6eb3..86e994cbb4 100644 --- a/synapse/logging/context.py +++ b/synapse/logging/context.py @@ -896,7 +896,7 @@ def run_in_background( # If the function messes with logcontexts, we can assume it follows the Synapse # logcontext rules (Rules for functions returning awaitables: "If the awaitable # is already complete, the function returns with the same logcontext it started - # with."). If it function doesn't touch logcontexts at all, we can also assume + # with."). If the function doesn't touch logcontexts at all, we can also assume # the logcontext is unchanged. # # Either way, the function should have maintained the calling logcontext, so we @@ -905,11 +905,21 @@ def run_in_background( # to reset the logcontext to the sentinel logcontext as that would run # immediately (remember our goal is to maintain the calling logcontext when we # return). 
- logcontext_debug_logger.debug( - "run_in_background(%s): deferred already completed and the function should have maintained the logcontext %s", - instance_id, - calling_context, - ) + if current_context() != calling_context: + logcontext_error( + "run_in_background(%s): deferred already completed but the function did not maintain the calling logcontext %s (found %s)" + % ( + instance_id, + calling_context, + current_context(), + ) + ) + else: + logcontext_debug_logger.debug( + "run_in_background(%s): deferred already completed (maintained the calling logcontext %s)", + instance_id, + calling_context, + ) return d # Since the function we called may follow the Synapse logcontext rules (Rules for diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index c568b377d2..99e899d1ef 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -808,7 +808,8 @@ def time_it_out() -> None: timed_out[0] = True try: - deferred.cancel() + with PreserveLoggingContext(): + deferred.cancel() except Exception: # if we throw any exception it'll break time outs logger.exception("Canceller failed during timeout") diff --git a/synapse/util/clock.py b/synapse/util/clock.py index 6557582629..5b59cef60a 100644 --- a/synapse/util/clock.py +++ b/synapse/util/clock.py @@ -266,7 +266,7 @@ def wrapped_callback(*args: Any, **kwargs: Any) -> None: # We use `PreserveLoggingContext` to prevent our new `call_later` # logcontext from finishing as soon as we exit this function, in case `f` # returns an awaitable/deferred which would continue running and may try to - # restore the `loop_call` context when it's done (because it's trying to + # restore the `call_later` context when it's done (because it's trying to # adhere to the Synapse logcontext rules.) 
# # This also ensures that we return to the `sentinel` context when we exit diff --git a/tests/unittest.py b/tests/unittest.py index 1007f40456..049a92caaa 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -341,6 +341,9 @@ def logcontext_clean(target: TV) -> TV: """ def logcontext_error(msg: str) -> NoReturn: + # Log so we can still see it in the logs like normal + logger.warning(msg) + # But also fail the test raise AssertionError("logcontext error: %s" % (msg)) patcher = patch("synapse.logging.context.logcontext_error", new=logcontext_error) diff --git a/tests/util/test_async_helpers.py b/tests/util/test_async_helpers.py index a02a2f0cef..8fbee12fb9 100644 --- a/tests/util/test_async_helpers.py +++ b/tests/util/test_async_helpers.py @@ -45,7 +45,7 @@ ) from tests.server import get_clock -from tests.unittest import TestCase +from tests.unittest import TestCase, logcontext_clean logger = logging.getLogger(__name__) @@ -198,7 +198,12 @@ def canceller(_d: Deferred) -> None: self.failureResultOf(timing_out_d, defer.TimeoutError) - async def test_logcontext_is_preserved_on_cancellation(self) -> None: + @logcontext_clean + async def test_logcontext_is_preserved_on_timeout_cancellation(self) -> None: + """ + Test that the logcontext is preserved when we timeout and the deferred is + cancelled. + """ # Sanity check that we start in the sentinel context self.assertEqual(current_context(), SENTINEL_CONTEXT) @@ -270,6 +275,65 @@ def mark_was_cancelled(res: Failure) -> None: # Back to the sentinel context self.assertEqual(current_context(), SENTINEL_CONTEXT) + @logcontext_clean + async def test_logcontext_is_not_lost_when_awaiting_on_timeout_cancellation( + self, + ) -> None: + """ + Test that the logcontext isn't lost when we `await make_deferred_yieldable(...)` + the deferred to complete/timeout and it times out. 
+ """ + + # Sanity check that we start in the sentinel context + self.assertEqual(current_context(), SENTINEL_CONTEXT) + + # Create a deferred which we will never complete + incomplete_d: Deferred = Deferred() + + async def competing_task() -> None: + with LoggingContext( + name="competing", server_name="test_server" + ) as context_competing: + timing_out_d = timeout_deferred( + deferred=incomplete_d, + timeout=1.0, + clock=self.clock, + ) + self.assertNoResult(timing_out_d) + # We should still be in the logcontext we started in + self.assertIs(current_context(), context_competing) + + # Mimic the normal use case to wait for the work to complete or timeout. + # + # In this specific test, we expect the deferred to timeout and raise an + # exception at this point. + await make_deferred_yieldable(timing_out_d) + + self.fail( + "We should not make it to this point as the `timing_out_d` should have been cancelled" + ) + + d = defer.ensureDeferred(competing_task()) + + # Still in the sentinel context + self.assertEqual(current_context(), SENTINEL_CONTEXT) + + # Pump until we trigger the timeout + self.reactor.pump( + # We only need to pump `1.0` (seconds) as we set + # `timeout_deferred(timeout=1.0)` above + (1.0,) + ) + + # Still in the sentinel context + self.assertEqual(current_context(), SENTINEL_CONTEXT) + + # We expect a failure due to the timeout + self.failureResultOf(d, defer.TimeoutError) + + # Back to the sentinel context at the end of the day + self.assertEqual(current_context(), SENTINEL_CONTEXT) + class _TestException(Exception): # pass From 300c5558ab051a978e0abe04df963ef302eb0958 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 30 Oct 2025 22:33:29 +0100 Subject: [PATCH 40/72] Update `check_dependencies` to support markers (#19110) --- changelog.d/19110.misc | 1 + synapse/util/check_dependencies.py | 131 ++++++++++++++++++++++++-- tests/util/test_check_dependencies.py | 52 +++++++++- 3 files changed, 
176 insertions(+), 8 deletions(-) create mode 100644 changelog.d/19110.misc diff --git a/changelog.d/19110.misc b/changelog.d/19110.misc new file mode 100644 index 0000000000..dc45eef17c --- /dev/null +++ b/changelog.d/19110.misc @@ -0,0 +1 @@ +Allow Synapse's runtime dependency checking code to take packaging markers (i.e. `python <= 3.14`) into account when checking dependencies. \ No newline at end of file diff --git a/synapse/util/check_dependencies.py b/synapse/util/check_dependencies.py index 1c79c0be48..715240c8ce 100644 --- a/synapse/util/check_dependencies.py +++ b/synapse/util/check_dependencies.py @@ -28,8 +28,9 @@ import logging from importlib import metadata -from typing import Iterable, NamedTuple, Optional +from typing import Any, Iterable, NamedTuple, Optional, Sequence, cast +from packaging.markers import Marker, Value, Variable, default_environment from packaging.requirements import Requirement DISTRIBUTION_NAME = "matrix-synapse" @@ -65,9 +66,23 @@ def dependencies(self) -> Iterable[str]: VERSION = metadata.version(DISTRIBUTION_NAME) +def _marker_environment(extra: str) -> dict[str, str]: + """Return the marker environment for `extra`, seeded with the current interpreter.""" + + env = cast(dict[str, str], dict(default_environment())) + env["extra"] = extra + return env + + def _is_dev_dependency(req: Requirement) -> bool: - return req.marker is not None and any( - req.marker.evaluate({"extra": e}) for e in DEV_EXTRAS + """Return True if `req` is a development dependency.""" + if req.marker is None: + return False + + marker_extras = _extras_from_marker(req.marker) + return any( + extra in DEV_EXTRAS and req.marker.evaluate(_marker_environment(extra)) + for extra in marker_extras ) @@ -95,6 +110,7 @@ def _generic_dependencies() -> Iterable[Dependency]: """Yield pairs (requirement, must_be_installed).""" requirements = metadata.requires(DISTRIBUTION_NAME) assert requirements is not None + env_no_extra = _marker_environment("") for raw_requirement 
in requirements: req = Requirement(raw_requirement) if _is_dev_dependency(req) or _should_ignore_runtime_requirement(req): @@ -103,7 +119,7 @@ def _generic_dependencies() -> Iterable[Dependency]: # https://packaging.pypa.io/en/latest/markers.html#usage notes that # > Evaluating an extra marker with no environment is an error # so we pass in a dummy empty extra value here. - must_be_installed = req.marker is None or req.marker.evaluate({"extra": ""}) + must_be_installed = req.marker is None or req.marker.evaluate(env_no_extra) yield Dependency(req, must_be_installed) @@ -111,6 +127,8 @@ def _dependencies_for_extra(extra: str) -> Iterable[Dependency]: """Yield additional dependencies needed for a given `extra`.""" requirements = metadata.requires(DISTRIBUTION_NAME) assert requirements is not None + env_no_extra = _marker_environment("") + env_for_extra = _marker_environment(extra) for raw_requirement in requirements: req = Requirement(raw_requirement) if _is_dev_dependency(req): @@ -118,12 +136,84 @@ def _dependencies_for_extra(extra: str) -> Iterable[Dependency]: # Exclude mandatory deps by only selecting deps needed with this extra. 
if ( req.marker is not None - and req.marker.evaluate({"extra": extra}) - and not req.marker.evaluate({"extra": ""}) + and req.marker.evaluate(env_for_extra) + and not req.marker.evaluate(env_no_extra) ): yield Dependency(req, True) +def _values_from_marker_value(value: Value) -> set[str]: + """Extract text values contained in a marker `Value`.""" + + raw: Any = value.value + if isinstance(raw, str): + return {raw} + if isinstance(raw, (tuple, list)): + return {str(item) for item in raw} + return {str(raw)} + + +def _extras_from_marker(marker: Optional[Marker]) -> set[str]: + """Return every `extra` referenced in the supplied marker tree.""" + + extras: set[str] = set() + + if marker is None: + return extras + + def collect(tree: object) -> None: + if isinstance(tree, list): + for item in tree: + collect(item) + elif isinstance(tree, tuple) and len(tree) == 3: + lhs, _op, rhs = tree + if ( + isinstance(lhs, Variable) + and lhs.value == "extra" + and isinstance(rhs, Value) + ): + extras.update(_values_from_marker_value(rhs)) + elif ( + isinstance(rhs, Variable) + and rhs.value == "extra" + and isinstance(lhs, Value) + ): + extras.update(_values_from_marker_value(lhs)) + + collect(marker._markers) + return extras + + +def _extras_to_consider_for_requirement( + marker: Marker, base_candidates: Sequence[str] +) -> set[str]: + """ + Augment `base_candidates` with extras explicitly mentioned in `marker`. + + Markers can mention extras (e.g. `extra == "saml2"`). + """ + + # Avoid modifying the input sequence. + # Use a set to efficiently avoid duplicate extras. 
+ extras = set(base_candidates) + + for candidate in _extras_from_marker(marker): + extras.add(candidate) + + return extras + + +def _marker_applies_for_any_extra(requirement: Requirement, extras: set[str]) -> bool: + """Check whether a requirement's marker matches any evaluated `extra`.""" + + if requirement.marker is None: + return True + + return any( + requirement.marker.evaluate(_marker_environment(extra)) for extra in extras + ) + + def _not_installed(requirement: Requirement, extra: Optional[str] = None) -> str: if extra: return ( @@ -164,7 +254,7 @@ def _no_reported_version(requirement: Requirement, extra: Optional[str] = None) def check_requirements(extra: Optional[str] = None) -> None: """Check Synapse's dependencies are present and correctly versioned. - If provided, `extra` must be the name of an pacakging extra (e.g. "saml2" in + If provided, `extra` must be the name of an packaging extra (e.g. "saml2" in `pip install matrix-synapse[saml2]`). If `extra` is None, this function checks that @@ -174,6 +264,15 @@ def check_requirements(extra: Optional[str] = None) -> None: If `extra` is not None, this function checks that - the dependencies needed for that extra are installed and correctly versioned. + `marker`s are optional attributes on each requirement which specify + conditions under which the requirement applies. For example, a requirement + might only be needed on Windows, or with Python < 3.14. Markers can + additionally mention `extras` themselves, meaning a requirement may not + apply if the marker mentions an extra that the user has not asked for. + + This function skips a requirement when its markers do not apply in the + current environment. + :raises DependencyException: if a dependency is missing or incorrectly versioned. :raises ValueError: if this extra does not exist. 
""" @@ -188,7 +287,25 @@ def check_requirements(extra: Optional[str] = None) -> None: deps_unfulfilled = [] errors = [] + if extra is None: + # Default to all mandatory dependencies (non-dev extras). + # "" means all dependencies that aren't conditional on an extra. + base_extra_candidates: Sequence[str] = ("", *RUNTIME_EXTRAS) + else: + base_extra_candidates = (extra,) + for requirement, must_be_installed in dependencies: + if requirement.marker is not None: + candidate_extras = _extras_to_consider_for_requirement( + requirement.marker, base_extra_candidates + ) + # Skip checking this dependency if the requirement's marker object + # (i.e. `python_version < "3.14" and os_name == "win32"`) does not + # apply for any of the extras we're considering. + if not _marker_applies_for_any_extra(requirement, candidate_extras): + continue + + # Check if the requirement is installed and correctly versioned. try: dist: metadata.Distribution = metadata.distribution(requirement.name) except metadata.PackageNotFoundError: diff --git a/tests/util/test_check_dependencies.py b/tests/util/test_check_dependencies.py index c052ba2b75..ab2e2f6291 100644 --- a/tests/util/test_check_dependencies.py +++ b/tests/util/test_check_dependencies.py @@ -22,9 +22,11 @@ from contextlib import contextmanager from os import PathLike from pathlib import Path -from typing import Generator, Optional, Union +from typing import Generator, Optional, Union, cast from unittest.mock import patch +from packaging.markers import default_environment as packaging_default_environment + from synapse.util.check_dependencies import ( DependencyException, check_requirements, @@ -80,6 +82,22 @@ def mock_distribution(name: str) -> DummyDistribution: ): yield + @contextmanager + def mock_python_version(self, version: str) -> Generator[None, None, None]: + """Override the marker environment to report the supplied `python_version`.""" + + def fake_default_environment() -> dict[str, str]: + env = cast(dict[str, str], 
dict(packaging_default_environment())) + env["python_version"] = version + env["python_full_version"] = f"{version}.0" + return env + + with patch( + "synapse.util.check_dependencies.default_environment", + side_effect=fake_default_environment, + ): + yield + def test_mandatory_dependency(self) -> None: """Complain if a required package is missing or old.""" with patch( @@ -191,3 +209,35 @@ def test_setuptools_rust_ignored(self) -> None: with self.mock_installed_package(old): # We also ignore old versions of setuptools_rust check_requirements() + + def test_python_version_markers_respected(self) -> None: + """ + Tests that python_version markers are properly respected. + + Specifically that older versions of dependencies can be installed in + environments with older Python versions. + """ + requirements = [ + "pydantic ~= 2.8; python_version < '3.14'", + "pydantic ~= 2.12; python_version >= '3.14'", + ] + + with patch( + "synapse.util.check_dependencies.metadata.requires", + return_value=requirements, + ): + with self.mock_python_version("3.9"): + with self.mock_installed_package(DummyDistribution("2.12.3")): + check_requirements() + with self.mock_installed_package(DummyDistribution("2.8.1")): + check_requirements() + with self.mock_installed_package(DummyDistribution("2.7.0")): + self.assertRaises(DependencyException, check_requirements) + + with self.mock_python_version("3.14"): + with self.mock_installed_package(DummyDistribution("2.12.3")): + check_requirements() + with self.mock_installed_package(DummyDistribution("2.8.1")): + self.assertRaises(DependencyException, check_requirements) + with self.mock_installed_package(DummyDistribution("2.7.0")): + self.assertRaises(DependencyException, check_requirements) From 3595ff921f876ee6ccb03623ae93e21f723bd444 Mon Sep 17 00:00:00 2001 From: V02460 Date: Fri, 31 Oct 2025 10:22:22 +0100 Subject: [PATCH 41/72] Pydantic v2 (#19071) Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> 
Co-authored-by: Andrew Morgan --- .github/workflows/tests.yml | 22 - changelog.d/19071.misc | 1 + poetry.lock | 294 ++++++----- pyproject.toml | 13 +- scripts-dev/check_pydantic_models.py | 474 ------------------ scripts-dev/lint.sh | 3 - synapse/_pydantic_compat.py | 104 ---- synapse/api/auth/mas.py | 49 +- synapse/config/_util.py | 4 +- synapse/config/mas.py | 29 +- synapse/config/matrixrtc.py | 13 +- synapse/config/workers.py | 4 +- synapse/events/validator.py | 2 +- synapse/http/servlet.py | 21 +- synapse/rest/admin/users.py | 8 +- synapse/rest/client/account.py | 11 +- synapse/rest/client/devices.py | 24 +- synapse/rest/client/directory.py | 3 +- synapse/rest/client/keys.py | 19 +- synapse/rest/client/reporting.py | 3 +- synapse/rest/client/thread_subscriptions.py | 2 +- synapse/rest/key/v2/remote_key_resource.py | 5 +- synapse/rest/synapse/mas/devices.py | 7 +- synapse/rest/synapse/mas/users.py | 6 +- synapse/storage/background_updates.py | 4 +- synapse/types/handlers/sliding_sync.py | 17 +- synapse/types/rest/client/__init__.py | 119 +++-- synapse/util/events.py | 25 +- synapse/util/pydantic_models.py | 65 +-- tests/config/test_oauth_delegation.py | 11 +- tests/rest/client/test_account.py | 4 +- tests/rest/client/test_models.py | 15 +- .../rest/client/test_thread_subscriptions.py | 46 +- 33 files changed, 422 insertions(+), 1005 deletions(-) create mode 100644 changelog.d/19071.misc delete mode 100755 scripts-dev/check_pydantic_models.py delete mode 100644 synapse/_pydantic_compat.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 93c0e9415f..494543e4b9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -207,26 +207,6 @@ jobs: env: PULL_REQUEST_NUMBER: ${{ github.event.number }} - lint-pydantic: - runs-on: ubuntu-latest - needs: changes - if: ${{ needs.changes.outputs.linting == 'true' }} - - steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - with: - ref: ${{ 
github.event.pull_request.head.sha }} - - name: Install Rust - uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master - with: - toolchain: ${{ env.RUST_VERSION }} - - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 - with: - poetry-version: "2.1.1" - extras: "all" - - run: poetry run scripts-dev/check_pydantic_models.py - lint-clippy: runs-on: ubuntu-latest needs: changes @@ -341,7 +321,6 @@ jobs: - lint-mypy - lint-crlf - lint-newsfile - - lint-pydantic - check-sampleconfig - check-schema-delta - check-lockfile @@ -363,7 +342,6 @@ jobs: lint lint-mypy lint-newsfile - lint-pydantic lint-clippy lint-clippy-nightly lint-rust diff --git a/changelog.d/19071.misc b/changelog.d/19071.misc new file mode 100644 index 0000000000..d0930f339b --- /dev/null +++ b/changelog.d/19071.misc @@ -0,0 +1 @@ +Update pydantic to v2. \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 5a16dd5860..a1f133e164 100644 --- a/poetry.lock +++ b/poetry.lock @@ -6,7 +6,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -39,7 +39,7 @@ description = "The ultimate Python library in building OAuth and OpenID Connect optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\"" +markers = "extra == \"oidc\" or extra == \"jwt\" or extra == \"all\"" files = [ {file = "authlib-1.6.5-py2.py3-none-any.whl", hash = 
"sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a"}, {file = "authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b"}, @@ -444,7 +444,7 @@ description = "XML bomb protection for Python stdlib modules" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["main"] -markers = "extra == \"all\" or extra == \"saml2\"" +markers = "extra == \"saml2\" or extra == \"all\"" files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -469,7 +469,7 @@ description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and l optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"all\" or extra == \"saml2\"" +markers = "extra == \"saml2\" or extra == \"all\"" files = [ {file = "elementpath-4.1.5-py3-none-any.whl", hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"}, {file = "elementpath-4.1.5.tar.gz", hash = "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"}, @@ -519,7 +519,7 @@ description = "Python wrapper for hiredis" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"all\" or extra == \"redis\"" +markers = "extra == \"redis\" or extra == \"all\"" files = [ {file = "hiredis-3.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:9937d9b69321b393fbace69f55423480f098120bc55a3316e1ca3508c4dbbd6f"}, {file = "hiredis-3.3.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:50351b77f89ba6a22aff430b993653847f36b71d444509036baa0f2d79d1ebf4"}, @@ -842,7 +842,7 @@ description = "Jaeger Python OpenTracing Tracer implementation" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"all\" or extra == \"opentracing\"" 
+markers = "extra == \"opentracing\" or extra == \"all\"" files = [ {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"}, ] @@ -980,7 +980,7 @@ description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\"" +markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\"" files = [ {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"}, {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"}, @@ -996,7 +996,7 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"all\" or extra == \"url-preview\"" +markers = "extra == \"url-preview\" or extra == \"all\"" files = [ {file = "lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388"}, {file = "lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153"}, @@ -1283,7 +1283,7 @@ description = "An LDAP3 auth provider for Synapse" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\"" +markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\"" files = [ {file = "matrix-synapse-ldap3-0.3.0.tar.gz", hash = "sha256:8bb6517173164d4b9cc44f49de411d8cebdb2e705d5dd1ea1f38733c4a009e1d"}, {file = "matrix_synapse_ldap3-0.3.0-py3-none-any.whl", hash = "sha256:8b4d701f8702551e98cc1d8c20dbed532de5613584c08d0df22de376ba99159d"}, @@ -1525,7 +1525,7 @@ description = "OpenTracing API for Python. 
See documentation at http://opentraci optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"all\" or extra == \"opentracing\"" +markers = "extra == \"opentracing\" or extra == \"all\"" files = [ {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"}, ] @@ -1731,7 +1731,7 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"all\" or extra == \"postgres\"" +markers = "extra == \"postgres\" or extra == \"all\"" files = [ {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, @@ -1739,7 +1739,6 @@ files = [ {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, - {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"}, {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, @@ -1752,7 +1751,7 @@ description = ".. 
image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=mas optional = true python-versions = "*" groups = ["main"] -markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")" +markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")" files = [ {file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"}, ] @@ -1768,7 +1767,7 @@ description = "A Simple library to enable psycopg2 compatability" optional = true python-versions = "*" groups = ["main"] -markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")" +markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")" files = [ {file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"}, ] @@ -1817,21 +1816,21 @@ files = [ [[package]] name = "pydantic" -version = "2.11.10" +version = "2.12.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["main"] files = [ - {file = "pydantic-2.11.10-py3-none-any.whl", hash = "sha256:802a655709d49bd004c31e865ef37da30b540786a46bfce02333e0e24b5fe29a"}, - {file = "pydantic-2.11.10.tar.gz", hash = "sha256:dc280f0982fbda6c38fada4e476dc0a4f3aeaf9c6ad4c28df68a666ec3c61423"}, + {file = "pydantic-2.12.3-py3-none-any.whl", hash = "sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf"}, + {file = "pydantic-2.12.3.tar.gz", hash = "sha256:1da1c82b0fc140bb0103bc1441ffe062154c8d38491189751ee00fd8ca65ce74"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.33.2" -typing-extensions = ">=4.12.2" -typing-inspection = ">=0.4.0" +pydantic-core = "2.41.4" +typing-extensions = ">=4.14.1" +typing-inspection = ">=0.4.2" [package.extras] email = ["email-validator (>=2.0.0)"] @@ 
-1839,115 +1838,133 @@ timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["main"] files = [ - {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, - {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = 
"sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, - {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, - {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, - 
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", 
hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, - {file = 
"pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, - 
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, - {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, + {file = "pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e"}, + {file = "pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b"}, + {file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd"}, + {file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945"}, + {file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706"}, + {file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba"}, + {file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b"}, + {file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d"}, + {file = 
"pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700"}, + {file = "pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6"}, + {file = "pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9"}, + {file = "pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57"}, + {file = "pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc"}, + {file = "pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80"}, + {file = "pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae"}, + {file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827"}, + {file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f"}, + {file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def"}, + {file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2"}, + {file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8"}, + {file = 
"pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265"}, + {file = "pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c"}, + {file = "pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a"}, + {file = "pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e"}, + {file = "pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03"}, + {file = "pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e"}, + {file = "pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db"}, + {file = "pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887"}, + {file = "pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2"}, + {file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999"}, + {file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4"}, + {file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f"}, + {file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b"}, + {file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47"}, + {file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970"}, + {file = "pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed"}, + {file = "pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8"}, + {file = "pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431"}, + {file = "pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd"}, + {file = "pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff"}, + {file = "pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8"}, + {file = "pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746"}, + {file = "pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced"}, + {file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a"}, + {file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02"}, + {file = 
"pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1"}, + {file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2"}, + {file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84"}, + {file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d"}, + {file = "pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d"}, + {file = "pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2"}, + {file = "pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab"}, + {file = "pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c"}, + {file = "pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4"}, + {file = "pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564"}, + {file = "pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4"}, + {file = "pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2"}, + {file = "pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf"}, + {file = "pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2"}, + {file = "pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89"}, + {file = "pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1"}, + {file = "pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac"}, + {file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554"}, + {file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e"}, + {file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616"}, + {file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af"}, + {file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12"}, + {file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d"}, + {file = "pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad"}, + {file = "pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = 
"sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a"}, + {file = "pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025"}, + {file = "pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e"}, + {file = "pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894"}, + {file = "pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d"}, + {file = "pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da"}, + {file = "pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e"}, + {file = "pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa"}, + {file = "pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d"}, + {file = "pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0"}, + {file = "pydantic_core-2.41.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:646e76293345954acea6966149683047b7b2ace793011922208c8e9da12b0062"}, + {file = "pydantic_core-2.41.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cc8e85a63085a137d286e2791037f5fdfff0aabb8b899483ca9c496dd5797338"}, + {file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:692c622c8f859a17c156492783902d8370ac7e121a611bd6fe92cc71acf9ee8d"}, + {file = 
"pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d1e2906efb1031a532600679b424ef1d95d9f9fb507f813951f23320903adbd7"}, + {file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04e2f7f8916ad3ddd417a7abdd295276a0bf216993d9318a5d61cc058209166"}, + {file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df649916b81822543d1c8e0e1d079235f68acdc7d270c911e8425045a8cfc57e"}, + {file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66c529f862fdba70558061bb936fe00ddbaaa0c647fd26e4a4356ef1d6561891"}, + {file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc3b4c5a1fd3a311563ed866c2c9b62da06cb6398bee186484ce95c820db71cb"}, + {file = "pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6e0fc40d84448f941df9b3334c4b78fe42f36e3bf631ad54c3047a0cdddc2514"}, + {file = "pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:44e7625332683b6c1c8b980461475cde9595eff94447500e80716db89b0da005"}, + {file = "pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:170ee6835f6c71081d031ef1c3b4dc4a12b9efa6a9540f93f95b82f3c7571ae8"}, + {file = "pydantic_core-2.41.4-cp39-cp39-win32.whl", hash = "sha256:3adf61415efa6ce977041ba9745183c0e1f637ca849773afa93833e04b163feb"}, + {file = "pydantic_core-2.41.4-cp39-cp39-win_amd64.whl", hash = "sha256:a238dd3feee263eeaeb7dc44aea4ba1364682c4f9f9467e6af5596ba322c2332"}, + {file = "pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b"}, + {file = "pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42"}, + {file = 
"pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee"}, + {file = "pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c"}, + {file = "pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537"}, + {file = "pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94"}, + {file = "pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c"}, + {file = "pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335"}, + {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00"}, + {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9"}, + {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2"}, + {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258"}, + {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347"}, + {file = 
"pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa"}, + {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a"}, + {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d"}, + {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5"}, + {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2"}, + {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd"}, + {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c"}, + {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405"}, + {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8"}, + {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308"}, + {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f"}, + {file = "pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5"}, ] [package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +typing-extensions = 
">=4.14.1" [[package]] name = "pygithub" @@ -2027,7 +2044,7 @@ description = "A development tool to measure, monitor and analyze the memory beh optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"all\" or extra == \"cache-memory\"" +markers = "extra == \"cache-memory\" or extra == \"all\"" files = [ {file = "Pympler-1.0.1-py3-none-any.whl", hash = "sha256:d260dda9ae781e1eab6ea15bacb84015849833ba5555f141d2d9b7b7473b307d"}, {file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"}, @@ -2087,7 +2104,7 @@ description = "Python implementation of SAML Version 2 Standard" optional = true python-versions = ">=3.9,<4.0" groups = ["main"] -markers = "extra == \"all\" or extra == \"saml2\"" +markers = "extra == \"saml2\" or extra == \"all\"" files = [ {file = "pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"}, {file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"}, @@ -2112,7 +2129,7 @@ description = "Extensions to the standard Python datetime module" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = ["main"] -markers = "extra == \"all\" or extra == \"saml2\"" +markers = "extra == \"saml2\" or extra == \"all\"" files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -2140,7 +2157,7 @@ description = "World timezone definitions, modern and historical" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"all\" or extra == \"saml2\"" +markers = "extra == \"saml2\" or extra == \"all\"" files = [ {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = 
"sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, @@ -2526,7 +2543,7 @@ description = "Python client for Sentry (https://sentry.io)" optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"all\" or extra == \"sentry\"" +markers = "extra == \"sentry\" or extra == \"all\"" files = [ {file = "sentry_sdk-2.34.1-py2.py3-none-any.whl", hash = "sha256:b7a072e1cdc5abc48101d5146e1ae680fa81fe886d8d95aaa25a0b450c818d32"}, {file = "sentry_sdk-2.34.1.tar.gz", hash = "sha256:69274eb8c5c38562a544c3e9f68b5be0a43be4b697f5fd385bf98e4fbe672687"}, @@ -2714,7 +2731,7 @@ description = "Tornado IOLoop Backed Concurrent Futures" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"all\" or extra == \"opentracing\"" +markers = "extra == \"opentracing\" or extra == \"all\"" files = [ {file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"}, {file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"}, @@ -2730,7 +2747,7 @@ description = "Python bindings for the Apache Thrift RPC system" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"all\" or extra == \"opentracing\"" +markers = "extra == \"opentracing\" or extra == \"all\"" files = [ {file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"}, ] @@ -2784,6 +2801,7 @@ files = [ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] +markers = {main = "python_version < \"3.14\""} [[package]] name = "tornado" @@ -2792,7 +2810,7 @@ description = "Tornado is a Python 
web framework and asynchronous networking lib optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"all\" or extra == \"opentracing\"" +markers = "extra == \"opentracing\" or extra == \"all\"" files = [ {file = "tornado-6.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:f81067dad2e4443b015368b24e802d0083fecada4f0a4572fdb72fc06e54a9a6"}, {file = "tornado-6.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9ac1cbe1db860b3cbb251e795c701c41d343f06a96049d6274e7c77559117e41"}, @@ -2926,7 +2944,7 @@ description = "non-blocking redis client for python" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"all\" or extra == \"redis\"" +markers = "extra == \"redis\" or extra == \"all\"" files = [ {file = "txredisapi-1.4.11-py3-none-any.whl", hash = "sha256:ac64d7a9342b58edca13ef267d4fa7637c1aa63f8595e066801c1e8b56b22d0b"}, {file = "txredisapi-1.4.11.tar.gz", hash = "sha256:3eb1af99aefdefb59eb877b1dd08861efad60915e30ad5bf3d5bf6c5cedcdbc6"}, @@ -3110,14 +3128,14 @@ files = [ [[package]] name = "typing-inspection" -version = "0.4.0" +version = "0.4.2" description = "Runtime typing introspection tools" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["main"] files = [ - {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, - {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, ] [package.dependencies] @@ -3172,7 +3190,7 @@ description = "An XML Schema validator and decoder" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"all\" or extra == 
\"saml2\"" +markers = "extra == \"saml2\" or extra == \"all\"" files = [ {file = "xmlschema-2.4.0-py3-none-any.whl", hash = "sha256:dc87be0caaa61f42649899189aab2fd8e0d567f2cf548433ba7b79278d231a4a"}, {file = "xmlschema-2.4.0.tar.gz", hash = "sha256:d74cd0c10866ac609e1ef94a5a69b018ad16e39077bc6393408b40c6babee793"}, @@ -3316,4 +3334,4 @@ url-preview = ["lxml"] [metadata] lock-version = "2.1" python-versions = "^3.10.0" -content-hash = "0122c5aa55099678f2ba5094ec393ebd814def15213388b33e5f1d7760392ffc" +content-hash = "363f8059c998566788b0465c338a3a8aaa56d1e61cc347f2473b687ff34f2a8d" diff --git a/pyproject.toml b/pyproject.toml index 08b4b8af66..5fb0c88b4f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -220,8 +220,8 @@ netaddr = ">=0.7.18" Jinja2 = ">=3.0" # 3.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility. bleach = ">=3.2.0" -# We use `assert_never`, which were added in `typing-extensions` 4.1. -typing-extensions = ">=4.1" +# pydantic 2.12 depends on typing-extensions>=4.14.1 +typing-extensions = ">=4.14.1" # We enforce that we have a `cryptography` version that bundles an `openssl` # with the latest security patches. cryptography = ">=3.4.7" @@ -230,9 +230,10 @@ ijson = ">=3.1.4" matrix-common = "^1.3.0" # We need packaging.verison.Version(...).major added in 20.0. packaging = ">=20.0" -# We support pydantic v1 and pydantic v2 via the pydantic.v1 compat module. -# See https://github.com/matrix-org/synapse/issues/15858 -pydantic = ">=1.7.4, <3" +pydantic = [ + { version = "~=2.8", python = "<3.14" }, + { version = "~=2.12", python = ">=3.14" }, +] # This is for building the rust components during "poetry install", which # currently ignores the `build-system.requires` directive (c.f. @@ -335,8 +336,6 @@ all = [ # can bump versions without having to update the content-hash in the lockfile. # This helps prevents merge conflicts when running a batch of dependabot updates. 
ruff = "0.12.10" -# Type checking only works with the pydantic.v1 compat module from pydantic v2 -pydantic = "^2" # Typechecking lxml-stubs = ">=0.4.0" diff --git a/scripts-dev/check_pydantic_models.py b/scripts-dev/check_pydantic_models.py deleted file mode 100755 index 69c49e258d..0000000000 --- a/scripts-dev/check_pydantic_models.py +++ /dev/null @@ -1,474 +0,0 @@ -#! /usr/bin/env python -# -# This file is licensed under the Affero General Public License (AGPL) version 3. -# -# Copyright 2022 The Matrix.org Foundation C.I.C. -# Copyright (C) 2023 New Vector, Ltd -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# See the GNU Affero General Public License for more details: -# . -# -# Originally licensed under the Apache License, Version 2.0: -# . -# -# [This file includes modifications made by New Vector Limited] -# -# -""" -A script which enforces that Synapse always uses strict types when defining a Pydantic -model. - -Pydantic does not yet offer a strict mode, but it is planned for pydantic v2. See - - https://github.com/pydantic/pydantic/issues/1098 - https://pydantic-docs.helpmanual.io/blog/pydantic-v2/#strict-mode - -until then, this script is a best effort to stop us from introducing type coersion bugs -(like the infamous stringy power levels fixed in room version 10). 
-""" - -import argparse -import contextlib -import functools -import importlib -import logging -import os -import pkgutil -import sys -import textwrap -import traceback -import unittest.mock -from contextlib import contextmanager -from typing import ( - Any, - Callable, - Generator, - TypeVar, -) - -from parameterized import parameterized -from typing_extensions import ParamSpec - -from synapse._pydantic_compat import ( - BaseModel as PydanticBaseModel, - conbytes, - confloat, - conint, - constr, - get_args, -) - -logger = logging.getLogger(__name__) - -CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: list[Callable] = [ - constr, - conbytes, - conint, - confloat, -] - -TYPES_THAT_PYDANTIC_WILL_COERCE_TO = [ - str, - bytes, - int, - float, - bool, -] - - -P = ParamSpec("P") -R = TypeVar("R") - - -class ModelCheckerException(Exception): - """Dummy exception. Allows us to detect unwanted types during a module import.""" - - -class MissingStrictInConstrainedTypeException(ModelCheckerException): - factory_name: str - - def __init__(self, factory_name: str): - self.factory_name = factory_name - - -class FieldHasUnwantedTypeException(ModelCheckerException): - message: str - - def __init__(self, message: str): - self.message = message - - -def make_wrapper(factory: Callable[P, R]) -> Callable[P, R]: - """We patch `constr` and friends with wrappers that enforce strict=True.""" - - @functools.wraps(factory) - def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: - if "strict" not in kwargs: - raise MissingStrictInConstrainedTypeException(factory.__name__) - if not kwargs["strict"]: - raise MissingStrictInConstrainedTypeException(factory.__name__) - return factory(*args, **kwargs) - - return wrapper - - -def field_type_unwanted(type_: Any) -> bool: - """Very rough attempt to detect if a type is unwanted as a Pydantic annotation. 
- - At present, we exclude types which will coerce, or any generic type involving types - which will coerce.""" - logger.debug("Is %s unwanted?") - if type_ in TYPES_THAT_PYDANTIC_WILL_COERCE_TO: - logger.debug("yes") - return True - logger.debug("Maybe. Subargs are %s", get_args(type_)) - rv = any(field_type_unwanted(t) for t in get_args(type_)) - logger.debug("Conclusion: %s %s unwanted", type_, "is" if rv else "is not") - return rv - - -class PatchedBaseModel(PydanticBaseModel): - """A patched version of BaseModel that inspects fields after models are defined. - - We complain loudly if we see an unwanted type. - - Beware: ModelField.type_ is presumably private; this is likely to be very brittle. - """ - - @classmethod - def __init_subclass__(cls: type[PydanticBaseModel], **kwargs: object): - for field in cls.__fields__.values(): - # Note that field.type_ and field.outer_type are computed based on the - # annotation type, see pydantic.fields.ModelField._type_analysis - if field_type_unwanted(field.outer_type_): - # TODO: this only reports the first bad field. Can we find all bad ones - # and report them all? - raise FieldHasUnwantedTypeException( - f"{cls.__module__}.{cls.__qualname__} has field '{field.name}' " - f"with unwanted type `{field.outer_type_}`" - ) - - -@contextmanager -def monkeypatch_pydantic() -> Generator[None, None, None]: - """Patch pydantic with our snooping versions of BaseModel and the con* functions. - - If the snooping functions see something they don't like, they'll raise a - ModelCheckingException instance. - """ - with contextlib.ExitStack() as patches: - # Most Synapse code ought to import the patched objects directly from - # `pydantic`. But we also patch their containing modules `pydantic.main` and - # `pydantic.types` for completeness. 
- patch_basemodel = unittest.mock.patch( - "synapse._pydantic_compat.BaseModel", new=PatchedBaseModel - ) - patches.enter_context(patch_basemodel) - for factory in CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: - wrapper: Callable = make_wrapper(factory) - patch = unittest.mock.patch( - f"synapse._pydantic_compat.{factory.__name__}", new=wrapper - ) - patches.enter_context(patch) - yield - - -def format_model_checker_exception(e: ModelCheckerException) -> str: - """Work out which line of code caused e. Format the line in a human-friendly way.""" - # TODO. FieldHasUnwantedTypeException gives better error messages. Can we ditch the - # patches of constr() etc, and instead inspect fields to look for ConstrainedStr - # with strict=False? There is some difficulty with the inheritance hierarchy - # because StrictStr < ConstrainedStr < str. - if isinstance(e, FieldHasUnwantedTypeException): - return e.message - elif isinstance(e, MissingStrictInConstrainedTypeException): - frame_summary = traceback.extract_tb(e.__traceback__)[-2] - return ( - f"Missing `strict=True` from {e.factory_name}() call \n" - + traceback.format_list([frame_summary])[0].lstrip() - ) - else: - raise ValueError(f"Unknown exception {e}") from e - - -def lint() -> int: - """Try to import all of Synapse and see if we spot any Pydantic type coercions. 
- - Print any problems, then return a status code suitable for sys.exit.""" - failures = do_lint() - if failures: - print(f"Found {len(failures)} problem(s)") - for failure in sorted(failures): - print(failure) - return os.EX_DATAERR if failures else os.EX_OK - - -def do_lint() -> set[str]: - """Try to import all of Synapse and see if we spot any Pydantic type coercions.""" - failures = set() - - with monkeypatch_pydantic(): - logger.debug("Importing synapse") - try: - # TODO: make "synapse" an argument so we can target this script at - # a subpackage - module = importlib.import_module("synapse") - except ModelCheckerException as e: - logger.warning("Bad annotation found when importing synapse") - failures.add(format_model_checker_exception(e)) - return failures - - try: - logger.debug("Fetching subpackages") - module_infos = list( - pkgutil.walk_packages(module.__path__, f"{module.__name__}.") - ) - except ModelCheckerException as e: - logger.warning("Bad annotation found when looking for modules to import") - failures.add(format_model_checker_exception(e)) - return failures - - for module_info in module_infos: - logger.debug("Importing %s", module_info.name) - try: - importlib.import_module(module_info.name) - except ModelCheckerException as e: - logger.warning( - "Bad annotation found when importing %s", module_info.name - ) - failures.add(format_model_checker_exception(e)) - - return failures - - -def run_test_snippet(source: str) -> None: - """Exec a snippet of source code in an isolated environment.""" - # To emulate `source` being called at the top level of the module, - # the globals and locals we provide apparently have to be the same mapping. - # - # > Remember that at the module level, globals and locals are the same dictionary. - # > If exec gets two separate objects as globals and locals, the code will be - # > executed as if it were embedded in a class definition. 
- globals_: dict[str, object] - locals_: dict[str, object] - globals_ = locals_ = {} - exec(textwrap.dedent(source), globals_, locals_) - - -class TestConstrainedTypesPatch(unittest.TestCase): - def test_expression_without_strict_raises(self) -> None: - with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException): - run_test_snippet( - """ - try: - from pydantic.v1 import constr - except ImportError: - from pydantic import constr - constr() - """ - ) - - def test_called_as_module_attribute_raises(self) -> None: - with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException): - run_test_snippet( - """ - import pydantic - pydantic.constr() - """ - ) - - def test_wildcard_import_raises(self) -> None: - with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException): - run_test_snippet( - """ - try: - from pydantic.v1 import * - except ImportError: - from pydantic import * - constr() - """ - ) - - def test_alternative_import_raises(self) -> None: - with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException): - run_test_snippet( - """ - try: - from pydantic.v1.types import constr - except ImportError: - from pydantic.types import constr - constr() - """ - ) - - def test_alternative_import_attribute_raises(self) -> None: - with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException): - run_test_snippet( - """ - try: - from pydantic.v1 import types as pydantic_types - except ImportError: - from pydantic import types as pydantic_types - pydantic_types.constr() - """ - ) - - def test_kwarg_but_no_strict_raises(self) -> None: - with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException): - run_test_snippet( - """ - try: - from pydantic.v1 import constr - except ImportError: - from pydantic import constr - constr(min_length=10) - """ - ) - - def test_kwarg_strict_False_raises(self) -> None: - with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException): - run_test_snippet( - """ - try: - from pydantic.v1 import 
constr - except ImportError: - from pydantic import constr - constr(strict=False) - """ - ) - - def test_kwarg_strict_True_doesnt_raise(self) -> None: - with monkeypatch_pydantic(): - run_test_snippet( - """ - try: - from pydantic.v1 import constr - except ImportError: - from pydantic import constr - constr(strict=True) - """ - ) - - def test_annotation_without_strict_raises(self) -> None: - with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException): - run_test_snippet( - """ - try: - from pydantic.v1 import constr - except ImportError: - from pydantic import constr - x: constr() - """ - ) - - def test_field_annotation_without_strict_raises(self) -> None: - with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException): - run_test_snippet( - """ - try: - from pydantic.v1 import BaseModel, conint - except ImportError: - from pydantic import BaseModel, conint - class C: - x: conint() - """ - ) - - -class TestFieldTypeInspection(unittest.TestCase): - @parameterized.expand( - [ - ("str",), - ("bytes"), - ("int",), - ("float",), - ("bool"), - ("Optional[str]",), - ("Union[None, str]",), - ("list[str]",), - ("list[list[str]]",), - ("dict[StrictStr, str]",), - ("dict[str, StrictStr]",), - ("TypedDict('D', x=int)",), - ] - ) - def test_field_holding_unwanted_type_raises(self, annotation: str) -> None: - with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException): - run_test_snippet( - f""" - from typing import * - try: - from pydantic.v1 import * - except ImportError: - from pydantic import * - class C(BaseModel): - f: {annotation} - """ - ) - - @parameterized.expand( - [ - ("StrictStr",), - ("StrictBytes"), - ("StrictInt",), - ("StrictFloat",), - ("StrictBool"), - ("constr(strict=True, min_length=10)",), - ("Optional[StrictStr]",), - ("Union[None, StrictStr]",), - ("list[StrictStr]",), - ("list[list[StrictStr]]",), - ("dict[StrictStr, StrictStr]",), - ("TypedDict('D', x=StrictInt)",), - ] - ) - def 
test_field_holding_accepted_type_doesnt_raise(self, annotation: str) -> None: - with monkeypatch_pydantic(): - run_test_snippet( - f""" - from typing import * - try: - from pydantic.v1 import * - except ImportError: - from pydantic import * - class C(BaseModel): - f: {annotation} - """ - ) - - def test_field_holding_str_raises_with_alternative_import(self) -> None: - with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException): - run_test_snippet( - """ - try: - from pydantic.v1.main import BaseModel - except ImportError: - from pydantic.main import BaseModel - class C(BaseModel): - f: str - """ - ) - - -parser = argparse.ArgumentParser() -parser.add_argument("mode", choices=["lint", "test"], default="lint", nargs="?") -parser.add_argument("-v", "--verbose", action="store_true") - - -if __name__ == "__main__": - args = parser.parse_args(sys.argv[1:]) - logging.basicConfig( - format="%(asctime)s %(name)s:%(lineno)d %(levelname)s %(message)s", - level=logging.DEBUG if args.verbose else logging.INFO, - ) - # suppress logs we don't care about - logging.getLogger("xmlschema").setLevel(logging.WARNING) - if args.mode == "lint": - sys.exit(lint()) - elif args.mode == "test": - unittest.main(argv=sys.argv[:1]) diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh index 7096100a3e..d5e10d4292 100755 --- a/scripts-dev/lint.sh +++ b/scripts-dev/lint.sh @@ -134,9 +134,6 @@ fi # Ensure the formatting of Rust code. cargo-fmt -# Ensure all Pydantic models use strict types. -./scripts-dev/check_pydantic_models.py lint - # Ensure type hints are correct. mypy diff --git a/synapse/_pydantic_compat.py b/synapse/_pydantic_compat.py deleted file mode 100644 index a520c0e897..0000000000 --- a/synapse/_pydantic_compat.py +++ /dev/null @@ -1,104 +0,0 @@ -# -# This file is licensed under the Affero General Public License (AGPL) version 3. 
-# -# Copyright 2023 Maxwell G -# Copyright (C) 2023 New Vector, Ltd -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# See the GNU Affero General Public License for more details: -# . -# -# Originally licensed under the Apache License, Version 2.0: -# . -# -# [This file includes modifications made by New Vector Limited] -# -# - -from typing import TYPE_CHECKING - -from packaging.version import Version - -try: - from pydantic import __version__ as pydantic_version -except ImportError: - import importlib.metadata - - pydantic_version = importlib.metadata.version("pydantic") - -HAS_PYDANTIC_V2: bool = Version(pydantic_version).major == 2 - -if TYPE_CHECKING or HAS_PYDANTIC_V2: - from pydantic.v1 import ( - AnyHttpUrl, - BaseModel, - Extra, - Field, - FilePath, - MissingError, - PydanticValueError, - StrictBool, - StrictInt, - StrictStr, - ValidationError, - conbytes, - confloat, - conint, - constr, - parse_obj_as, - root_validator, - validator, - ) - from pydantic.v1.error_wrappers import ErrorWrapper - from pydantic.v1.typing import get_args -else: - from pydantic import ( - AnyHttpUrl, - BaseModel, - Extra, - Field, - FilePath, - MissingError, - PydanticValueError, - StrictBool, - StrictInt, - StrictStr, - ValidationError, - conbytes, - confloat, - conint, - constr, - parse_obj_as, - root_validator, - validator, - ) - from pydantic.error_wrappers import ErrorWrapper - from pydantic.typing import get_args - -__all__ = ( - "HAS_PYDANTIC_V2", - "AnyHttpUrl", - "BaseModel", - "constr", - "conbytes", - "conint", - "confloat", - "ErrorWrapper", - "Extra", - "Field", - "FilePath", - "get_args", - "MissingError", - "parse_obj_as", - "PydanticValueError", - "StrictBool", - "StrictInt", - "StrictStr", - "ValidationError", - "validator", - "root_validator", -) 
diff --git a/synapse/api/auth/mas.py b/synapse/api/auth/mas.py index 325d264161..f2b218e34f 100644 --- a/synapse/api/auth/mas.py +++ b/synapse/api/auth/mas.py @@ -16,14 +16,16 @@ from typing import TYPE_CHECKING, Optional from urllib.parse import urlencode -from synapse._pydantic_compat import ( +from pydantic import ( + AnyHttpUrl, BaseModel, - Extra, + ConfigDict, StrictBool, StrictInt, StrictStr, ValidationError, ) + from synapse.api.auth.base import BaseAuth from synapse.api.errors import ( AuthError, @@ -63,8 +65,7 @@ class ServerMetadata(BaseModel): - class Config: - extra = Extra.allow + model_config = ConfigDict(extra="allow") issuer: StrictStr account_management_uri: StrictStr @@ -73,14 +74,12 @@ class Config: class IntrospectionResponse(BaseModel): retrieved_at_ms: StrictInt active: StrictBool - scope: Optional[StrictStr] - username: Optional[StrictStr] - sub: Optional[StrictStr] - device_id: Optional[StrictStr] - expires_in: Optional[StrictInt] - - class Config: - extra = Extra.allow + scope: Optional[StrictStr] = None + username: Optional[StrictStr] = None + sub: Optional[StrictStr] = None + device_id: Optional[StrictStr] = None + expires_in: Optional[StrictInt] = None + model_config = ConfigDict(extra="allow") def get_scope_set(self) -> set[str]: if not self.scope: @@ -148,11 +147,33 @@ def __init__(self, hs: "HomeServer"): @property def _metadata_url(self) -> str: - return f"{self._config.endpoint.rstrip('/')}/.well-known/openid-configuration" + return str( + AnyHttpUrl.build( + scheme=self._config.endpoint.scheme, + username=self._config.endpoint.username, + password=self._config.endpoint.password, + host=self._config.endpoint.host or "", + port=self._config.endpoint.port, + path=".well-known/openid-configuration", + query=None, + fragment=None, + ) + ) @property def _introspection_endpoint(self) -> str: - return f"{self._config.endpoint.rstrip('/')}/oauth2/introspect" + return str( + AnyHttpUrl.build( + scheme=self._config.endpoint.scheme, + 
username=self._config.endpoint.username, + password=self._config.endpoint.password, + host=self._config.endpoint.host or "", + port=self._config.endpoint.port, + path="oauth2/introspect", + query=None, + fragment=None, + ) + ) async def _load_metadata(self) -> ServerMetadata: response = await self._http_client.get_json(self._metadata_url) diff --git a/synapse/config/_util.py b/synapse/config/_util.py index 3e239c525e..e09c68ebd4 100644 --- a/synapse/config/_util.py +++ b/synapse/config/_util.py @@ -21,8 +21,8 @@ from typing import Any, TypeVar import jsonschema +from pydantic import BaseModel, TypeAdapter, ValidationError -from synapse._pydantic_compat import BaseModel, ValidationError, parse_obj_as from synapse.config._base import ConfigError from synapse.types import JsonDict, StrSequence @@ -93,7 +93,7 @@ def parse_and_validate_mapping( try: # type-ignore: mypy doesn't like constructing `Dict[str, model_type]` because # `model_type` is a runtime variable. Pydantic is fine with this. - instances = parse_obj_as(dict[str, model_type], config) # type: ignore[valid-type] + instances = TypeAdapter(dict[str, model_type]).validate_python(config) # type: ignore[valid-type] except ValidationError as e: raise ConfigError(str(e)) from e return instances diff --git a/synapse/config/mas.py b/synapse/config/mas.py index fe0d326f7a..53cf500e95 100644 --- a/synapse/config/mas.py +++ b/synapse/config/mas.py @@ -15,15 +15,17 @@ from typing import Any, Optional -from synapse._pydantic_compat import ( +from pydantic import ( AnyHttpUrl, Field, FilePath, StrictBool, StrictStr, ValidationError, - validator, + model_validator, ) +from typing_extensions import Self + from synapse.config.experimental import read_secret_from_file_once from synapse.types import JsonDict from synapse.util.pydantic_models import ParseModel @@ -33,27 +35,24 @@ class MasConfigModel(ParseModel): enabled: StrictBool = False - endpoint: AnyHttpUrl = Field(default="http://localhost:8080") + endpoint: AnyHttpUrl = 
AnyHttpUrl("http://localhost:8080") secret: Optional[StrictStr] = Field(default=None) secret_path: Optional[FilePath] = Field(default=None) - @validator("secret") - def validate_secret_is_set_if_enabled(cls, v: Any, values: dict) -> Any: - if values.get("enabled", False) and not values.get("secret_path") and not v: + @model_validator(mode="after") + def verify_secret(self) -> Self: + if not self.enabled: + return self + if not self.secret and not self.secret_path: raise ValueError( - "You must set a `secret` or `secret_path` when enabling Matrix Authentication Service integration." + "You must set a `secret` or `secret_path` when enabling the Matrix " + "Authentication Service integration." ) - - return v - - @validator("secret_path") - def validate_secret_path_is_set_if_enabled(cls, v: Any, values: dict) -> Any: - if values.get("secret"): + if self.secret and self.secret_path: raise ValueError( "`secret` and `secret_path` cannot be set at the same time." ) - - return v + return self class MasConfig(Config): diff --git a/synapse/config/matrixrtc.py b/synapse/config/matrixrtc.py index 7844d8f398..74fd7cad81 100644 --- a/synapse/config/matrixrtc.py +++ b/synapse/config/matrixrtc.py @@ -17,9 +17,9 @@ from typing import Any, Optional -from pydantic import ValidationError +from pydantic import Field, StrictStr, ValidationError, model_validator +from typing_extensions import Self -from synapse._pydantic_compat import Field, StrictStr, validator from synapse.types import JsonDict from synapse.util.pydantic_models import ParseModel @@ -32,14 +32,13 @@ class TransportConfigModel(ParseModel): livekit_service_url: Optional[StrictStr] = Field(default=None) """An optional livekit service URL. 
Only required if type is "livekit".""" - @validator("livekit_service_url", always=True) - def validate_livekit_service_url(cls, v: Any, values: dict) -> Any: - if values.get("type") == "livekit" and not v: + @model_validator(mode="after") + def validate_livekit_service_url(self) -> Self: + if self.type == "livekit" and not self.livekit_service_url: raise ValueError( "You must set a `livekit_service_url` when using the 'livekit' transport." ) - - return v + return self class MatrixRtcConfigModel(ParseModel): diff --git a/synapse/config/workers.py b/synapse/config/workers.py index da7148b3a1..90f8c72412 100644 --- a/synapse/config/workers.py +++ b/synapse/config/workers.py @@ -25,12 +25,12 @@ from typing import Any, Optional, Union import attr - -from synapse._pydantic_compat import ( +from pydantic import ( StrictBool, StrictInt, StrictStr, ) + from synapse.config._base import ( Config, ConfigError, diff --git a/synapse/events/validator.py b/synapse/events/validator.py index 6fb52f82c1..c2cecd0fcb 100644 --- a/synapse/events/validator.py +++ b/synapse/events/validator.py @@ -22,8 +22,8 @@ from typing import Union, cast import jsonschema +from pydantic import Field, StrictBool, StrictStr -from synapse._pydantic_compat import Field, StrictBool, StrictStr from synapse.api.constants import ( MAX_ALIAS_LENGTH, EventContentFields, diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index 66694e0607..bca93fb036 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -35,15 +35,10 @@ overload, ) +from pydantic import BaseModel, ValidationError + from twisted.web.server import Request -from synapse._pydantic_compat import ( - BaseModel, - ErrorWrapper, - MissingError, - PydanticValueError, - ValidationError, -) from synapse.api.errors import Codes, SynapseError from synapse.http import redact_uri from synapse.http.server import HttpServer @@ -897,20 +892,20 @@ def validate_json_object(content: JsonDict, model_type: type[Model]) -> Model: if it wasn't 
a JSON object. """ try: - instance = model_type.parse_obj(content) + instance = model_type.model_validate(content) except ValidationError as e: + err_type = e.errors()[0]["type"] + # Choose a matrix error code. The catch-all is BAD_JSON, but we try to find a # more specific error if possible (which occasionally helps us to be spec- # compliant) This is a bit awkward because the spec's error codes aren't very # clear-cut: BAD_JSON arguably overlaps with MISSING_PARAM and INVALID_PARAM. errcode = Codes.BAD_JSON - raw_errors = e.raw_errors - if len(raw_errors) == 1 and isinstance(raw_errors[0], ErrorWrapper): - raw_error = raw_errors[0].exc - if isinstance(raw_error, MissingError): + if e.error_count() == 1: + if err_type == "missing": errcode = Codes.MISSING_PARAM - elif isinstance(raw_error, PydanticValueError): + elif err_type == "value_error": errcode = Codes.INVALID_PARAM raise SynapseError(HTTPStatus.BAD_REQUEST, str(e), errcode=errcode) diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index e29b0d36e0..3eab53e5a2 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -26,8 +26,8 @@ from typing import TYPE_CHECKING, Optional, Union import attr +from pydantic import StrictBool, StrictInt, StrictStr -from synapse._pydantic_compat import StrictBool, StrictInt, StrictStr from synapse.api.constants import Direction from synapse.api.errors import Codes, NotFoundError, SynapseError from synapse.http.servlet import ( @@ -1476,9 +1476,9 @@ def __init__(self, hs: "HomeServer"): class PostBody(RequestBodyModel): rooms: list[StrictStr] - reason: Optional[StrictStr] - limit: Optional[StrictInt] - use_admin: Optional[StrictBool] + reason: Optional[StrictStr] = None + limit: Optional[StrictInt] = None + use_admin: Optional[StrictBool] = None async def on_POST( self, request: SynapseRequest, user_id: str diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py index 8f2f54f750..f928a8a3f4 100644 --- 
a/synapse/rest/client/account.py +++ b/synapse/rest/client/account.py @@ -25,10 +25,11 @@ from urllib.parse import urlparse import attr +from pydantic import StrictBool, StrictStr, StringConstraints +from typing_extensions import Annotated from twisted.web.server import Request -from synapse._pydantic_compat import StrictBool, StrictStr, constr from synapse.api.constants import LoginType from synapse.api.errors import ( Codes, @@ -162,11 +163,9 @@ def __init__(self, hs: "HomeServer"): class PostBody(RequestBodyModel): auth: Optional[AuthenticationData] = None logout_devices: StrictBool = True - if TYPE_CHECKING: - # workaround for https://github.com/samuelcolvin/pydantic/issues/156 - new_password: Optional[StrictStr] = None - else: - new_password: Optional[constr(max_length=512, strict=True)] = None + new_password: Optional[ + Annotated[str, StringConstraints(max_length=512, strict=True)] + ] = None @interactive_auth_handler async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: diff --git a/synapse/rest/client/devices.py b/synapse/rest/client/devices.py index 092406b994..e20e49d48b 100644 --- a/synapse/rest/client/devices.py +++ b/synapse/rest/client/devices.py @@ -24,7 +24,8 @@ from http import HTTPStatus from typing import TYPE_CHECKING, Optional -from synapse._pydantic_compat import Extra, StrictStr +from pydantic import ConfigDict, StrictStr + from synapse.api import errors from synapse.api.errors import NotFoundError, SynapseError, UnrecognizedRequestError from synapse.http.server import HttpServer @@ -94,7 +95,7 @@ def __init__(self, hs: "HomeServer"): self.auth_handler = hs.get_auth_handler() class PostBody(RequestBodyModel): - auth: Optional[AuthenticationData] + auth: Optional[AuthenticationData] = None devices: list[StrictStr] @interactive_auth_handler @@ -108,7 +109,7 @@ async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: # TODO: Can/should we remove this fallback now? 
# deal with older clients which didn't pass a JSON dict # the same as those that pass an empty dict - body = self.PostBody.parse_obj({}) + body = self.PostBody.model_validate({}) else: raise e @@ -172,7 +173,7 @@ async def on_GET( return 200, device class DeleteBody(RequestBodyModel): - auth: Optional[AuthenticationData] + auth: Optional[AuthenticationData] = None @interactive_auth_handler async def on_DELETE( @@ -188,7 +189,7 @@ async def on_DELETE( # TODO: can/should we remove this fallback now? # deal with older clients which didn't pass a JSON dict # the same as those that pass an empty dict - body = self.DeleteBody.parse_obj({}) + body = self.DeleteBody.model_validate({}) else: raise @@ -217,7 +218,7 @@ async def on_DELETE( return 200, {} class PutBody(RequestBodyModel): - display_name: Optional[StrictStr] + display_name: Optional[StrictStr] = None async def on_PUT( self, request: SynapseRequest, device_id: str @@ -247,8 +248,7 @@ class DehydratedDeviceDataModel(RequestBodyModel): Expects other freeform fields. Use .dict() to access them. 
""" - class Config: - extra = Extra.allow + model_config = ConfigDict(extra="allow") algorithm: StrictStr @@ -316,7 +316,7 @@ async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: class PutBody(RequestBodyModel): device_data: DehydratedDeviceDataModel - initial_device_display_name: Optional[StrictStr] + initial_device_display_name: Optional[StrictStr] = None async def on_PUT(self, request: SynapseRequest) -> tuple[int, JsonDict]: submission = parse_and_validate_json_object_from_request(request, self.PutBody) @@ -391,7 +391,7 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main class PostBody(RequestBodyModel): - next_batch: Optional[StrictStr] + next_batch: Optional[StrictStr] = None async def on_POST( self, request: SynapseRequest, device_id: str @@ -539,9 +539,7 @@ class PutBody(RequestBodyModel): device_data: DehydratedDeviceDataModel device_id: StrictStr initial_device_display_name: Optional[StrictStr] - - class Config: - extra = Extra.allow + model_config = ConfigDict(extra="allow") async def on_PUT(self, request: SynapseRequest) -> tuple[int, JsonDict]: submission = parse_and_validate_json_object_from_request(request, self.PutBody) diff --git a/synapse/rest/client/directory.py b/synapse/rest/client/directory.py index eccada67be..943674bbb1 100644 --- a/synapse/rest/client/directory.py +++ b/synapse/rest/client/directory.py @@ -22,9 +22,10 @@ import logging from typing import TYPE_CHECKING, Literal, Optional +from pydantic import StrictStr + from twisted.web.server import Request -from synapse._pydantic_compat import StrictStr from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError from synapse.http.server import HttpServer from synapse.http.servlet import ( diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index 1f71359d55..b87b9bd68a 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -26,13 +26,8 @@ from http import HTTPStatus from typing import 
TYPE_CHECKING, Any, Mapping, Optional, Union -from typing_extensions import Self +from pydantic import StrictBool, StrictStr, field_validator -from synapse._pydantic_compat import ( - StrictBool, - StrictStr, - validator, -) from synapse.api.auth.mas import MasDelegatedAuth from synapse.api.errors import ( Codes, @@ -164,7 +159,7 @@ class KeyObject(RequestBodyModel): device_keys: Optional[DeviceKeys] = None """Identity keys for the device. May be absent if no new identity keys are required.""" - fallback_keys: Optional[Mapping[StrictStr, Union[StrictStr, KeyObject]]] + fallback_keys: Optional[Mapping[StrictStr, Union[StrictStr, KeyObject]]] = None """ The public key which should be used if the device's one-time keys are exhausted. The fallback key is not deleted once used, but should be @@ -180,8 +175,9 @@ class KeyObject(RequestBodyModel): May be absent if a new fallback key is not required. """ - @validator("fallback_keys", pre=True) - def validate_fallback_keys(cls: Self, v: Any) -> Any: + @field_validator("fallback_keys", mode="before") + @classmethod + def validate_fallback_keys(cls, v: Any) -> Any: if v is None: return v if not isinstance(v, dict): @@ -206,8 +202,9 @@ def validate_fallback_keys(cls: Self, v: Any) -> Any: https://spec.matrix.org/v1.16/client-server-api/#key-algorithms. 
""" - @validator("one_time_keys", pre=True) - def validate_one_time_keys(cls: Self, v: Any) -> Any: + @field_validator("one_time_keys", mode="before") + @classmethod + def validate_one_time_keys(cls, v: Any) -> Any: if v is None: return v if not isinstance(v, dict): diff --git a/synapse/rest/client/reporting.py b/synapse/rest/client/reporting.py index f11f6b7b77..0c594b9f3f 100644 --- a/synapse/rest/client/reporting.py +++ b/synapse/rest/client/reporting.py @@ -23,7 +23,8 @@ from http import HTTPStatus from typing import TYPE_CHECKING -from synapse._pydantic_compat import StrictStr +from pydantic import StrictStr + from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError from synapse.http.server import HttpServer from synapse.http.servlet import ( diff --git a/synapse/rest/client/thread_subscriptions.py b/synapse/rest/client/thread_subscriptions.py index f879c7589c..d02f2cb48a 100644 --- a/synapse/rest/client/thread_subscriptions.py +++ b/synapse/rest/client/thread_subscriptions.py @@ -50,7 +50,7 @@ def __init__(self, hs: "HomeServer"): self.handler = hs.get_thread_subscriptions_handler() class PutBody(RequestBodyModel): - automatic: Optional[AnyEventId] + automatic: Optional[AnyEventId] = None """ If supplied, the event ID of an event giving rise to this automatic subscription. 
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index 51cb077496..e8b0b31210 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -23,11 +23,11 @@ import re from typing import TYPE_CHECKING, Mapping, Optional +from pydantic import ConfigDict, StrictInt, StrictStr from signedjson.sign import sign_json from twisted.web.server import Request -from synapse._pydantic_compat import Extra, StrictInt, StrictStr from synapse.crypto.keyring import ServerKeyFetcher from synapse.http.server import HttpServer from synapse.http.servlet import ( @@ -48,8 +48,7 @@ class _KeyQueryCriteriaDataModel(RequestBodyModel): - class Config: - extra = Extra.allow + model_config = ConfigDict(extra="allow") minimum_valid_until_ts: Optional[StrictInt] diff --git a/synapse/rest/synapse/mas/devices.py b/synapse/rest/synapse/mas/devices.py index 654fed8c03..eac51de44c 100644 --- a/synapse/rest/synapse/mas/devices.py +++ b/synapse/rest/synapse/mas/devices.py @@ -17,7 +17,8 @@ from http import HTTPStatus from typing import TYPE_CHECKING, Optional -from synapse._pydantic_compat import StrictStr +from pydantic import StrictStr + from synapse.api.errors import NotFoundError from synapse.http.servlet import parse_and_validate_json_object_from_request from synapse.types import JsonDict, UserID @@ -52,7 +53,7 @@ def __init__(self, hs: "HomeServer"): class PostBody(RequestBodyModel): localpart: StrictStr device_id: StrictStr - display_name: Optional[StrictStr] + display_name: Optional[StrictStr] = None async def _async_render_POST( self, request: "SynapseRequest" @@ -176,7 +177,7 @@ def __init__(self, hs: "HomeServer"): class PostBody(RequestBodyModel): localpart: StrictStr - devices: set[StrictStr] + devices: list[str] async def _async_render_POST( self, request: "SynapseRequest" diff --git a/synapse/rest/synapse/mas/users.py b/synapse/rest/synapse/mas/users.py index a802887270..f52c4bb167 100644 
--- a/synapse/rest/synapse/mas/users.py +++ b/synapse/rest/synapse/mas/users.py @@ -17,7 +17,8 @@ from http import HTTPStatus from typing import TYPE_CHECKING, Any, Optional, TypedDict -from synapse._pydantic_compat import StrictBool, StrictStr, root_validator +from pydantic import StrictBool, StrictStr, model_validator + from synapse.api.errors import NotFoundError, SynapseError from synapse.http.servlet import ( parse_and_validate_json_object_from_request, @@ -111,7 +112,8 @@ class PostBody(RequestBodyModel): unset_emails: StrictBool = False set_emails: Optional[list[StrictStr]] = None - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod def validate_exclusive(cls, values: Any) -> Any: if "unset_displayname" in values and "set_displayname" in values: raise ValueError( diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index ce213050a9..1c17d4d609 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -35,8 +35,8 @@ ) import attr +from pydantic import BaseModel -from synapse._pydantic_compat import BaseModel from synapse.storage.engines import PostgresEngine from synapse.storage.types import Connection, Cursor from synapse.types import JsonDict, StrCollection @@ -954,7 +954,7 @@ async def validate_constraint_and_delete_in_background( # match the constraint. # 3. We try re-validating the constraint. 
- parsed_progress = ValidateConstraintProgress.parse_obj(progress) + parsed_progress = ValidateConstraintProgress.model_validate(progress) if parsed_progress.state == ValidateConstraintProgress.State.check: return_columns = ", ".join(unique_columns) diff --git a/synapse/types/handlers/sliding_sync.py b/synapse/types/handlers/sliding_sync.py index aef7db8e98..c83b534e00 100644 --- a/synapse/types/handlers/sliding_sync.py +++ b/synapse/types/handlers/sliding_sync.py @@ -32,8 +32,8 @@ ) import attr +from pydantic import ConfigDict -from synapse._pydantic_compat import Extra from synapse.api.constants import EventTypes from synapse.events import EventBase from synapse.types import ( @@ -65,15 +65,12 @@ class SlidingSyncConfig(SlidingSyncBody): user: UserID requester: Requester - - # Pydantic config - class Config: - # By default, ignore fields that we don't recognise. - extra = Extra.ignore - # By default, don't allow fields to be reassigned after parsing. - allow_mutation = False - # Allow custom types like `UserID` to be used in the model - arbitrary_types_allowed = True + model_config = ConfigDict( + extra="ignore", + frozen=True, + # Allow custom types like `UserID` to be used in the model. 
+ arbitrary_types_allowed=True, + ) class OperationType(Enum): diff --git a/synapse/types/rest/client/__init__.py b/synapse/types/rest/client/__init__.py index 4940fabd12..865c2ba532 100644 --- a/synapse/types/rest/client/__init__.py +++ b/synapse/types/rest/client/__init__.py @@ -18,18 +18,21 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import TYPE_CHECKING, Optional, Union +from typing import Optional, Union -from synapse._pydantic_compat import ( - Extra, +from pydantic import ( + ConfigDict, Field, StrictBool, StrictInt, StrictStr, - conint, - constr, - validator, + StringConstraints, + field_validator, + model_validator, ) +from pydantic_core import PydanticCustomError +from typing_extensions import Annotated, Self + from synapse.types.rest import RequestBodyModel from synapse.util.threepids import validate_email @@ -44,39 +47,36 @@ class AuthenticationData(RequestBodyModel): `.dict(exclude_unset=True)` to access them. """ - class Config: - extra = Extra.allow + model_config = ConfigDict(extra="allow") session: Optional[StrictStr] = None type: Optional[StrictStr] = None -if TYPE_CHECKING: - ClientSecretStr = StrictStr -else: - # See also assert_valid_client_secret() - ClientSecretStr = constr( - regex="[0-9a-zA-Z.=_-]", # noqa: F722 +# See also assert_valid_client_secret() +ClientSecretStr = Annotated[ + str, + StringConstraints( + pattern="[0-9a-zA-Z.=_-]", min_length=1, max_length=255, strict=True, - ) + ), +] class ThreepidRequestTokenBody(RequestBodyModel): client_secret: ClientSecretStr - id_server: Optional[StrictStr] - id_access_token: Optional[StrictStr] - next_link: Optional[StrictStr] + id_server: Optional[StrictStr] = None + id_access_token: Optional[StrictStr] = None + next_link: Optional[StrictStr] = None send_attempt: StrictInt - @validator("id_access_token", always=True) - def token_required_for_identity_server( - cls, token: Optional[str], values: dict[str, object] - ) -> Optional[str]: - if 
values.get("id_server") is not None and token is None: + @model_validator(mode="after") + def token_required_for_identity_server(self) -> Self: + if self.id_server is not None and self.id_access_token is None: raise ValueError("id_access_token is required if an id_server is supplied.") - return token + return self class EmailRequestTokenBody(ThreepidRequestTokenBody): @@ -87,14 +87,21 @@ class EmailRequestTokenBody(ThreepidRequestTokenBody): # know the exact spelling (eg. upper and lower case) of address in the database. # Without this, an email stored in the database as "foo@bar.com" would cause # user requests for "FOO@bar.com" to raise a Not Found error. - _email_validator = validator("email", allow_reuse=True)(validate_email) + @field_validator("email") + @classmethod + def _email_validator(cls, email: StrictStr) -> StrictStr: + try: + return validate_email(email) + except ValueError as e: + # To ensure backward compatibility of HTTP error codes, we return a + # Pydantic error with the custom, unrecognized error type + # "email_custom_err_type" instead of the default error type + # "value_error". This results in the more generic BAD_JSON HTTP + # error instead of the more specific INVALID_PARAM one. 
+ raise PydanticCustomError("email_custom_err_type", str(e), None) from e -if TYPE_CHECKING: - ISO3116_1_Alpha_2 = StrictStr -else: - # Per spec: two-letter uppercase ISO-3166-1-alpha-2 - ISO3116_1_Alpha_2 = constr(regex="[A-Z]{2}", strict=True) +ISO3116_1_Alpha_2 = Annotated[str, StringConstraints(pattern="[A-Z]{2}", strict=True)] class MsisdnRequestTokenBody(ThreepidRequestTokenBody): @@ -144,12 +151,10 @@ class CommonRoomParameters(RequestBodyModel): (Max 1000 messages) """ - required_state: list[tuple[StrictStr, StrictStr]] - # mypy workaround via https://github.com/pydantic/pydantic/issues/156#issuecomment-1130883884 - if TYPE_CHECKING: - timeline_limit: int - else: - timeline_limit: conint(le=1000, strict=True) # type: ignore[valid-type] + required_state: list[ + Annotated[tuple[StrictStr, StrictStr], Field(strict=False)] + ] + timeline_limit: Annotated[int, Field(le=1000, strict=True)] class SlidingSyncList(CommonRoomParameters): """ @@ -251,13 +256,17 @@ class Filters(RequestBodyModel): tags: Optional[list[StrictStr]] = None not_tags: Optional[list[StrictStr]] = None - # mypy workaround via https://github.com/pydantic/pydantic/issues/156#issuecomment-1130883884 - if TYPE_CHECKING: - ranges: Optional[list[tuple[int, int]]] = None - else: - ranges: Optional[ - list[tuple[conint(ge=0, strict=True), conint(ge=0, strict=True)]] - ] = None # type: ignore[valid-type] + ranges: Optional[ + list[ + Annotated[ + tuple[ + Annotated[int, Field(ge=0, strict=True)], + Annotated[int, Field(ge=0, strict=True)], + ], + Field(strict=False), + ] + ] + ] = None slow_get_all_rooms: Optional[StrictBool] = False filters: Optional[Filters] = None @@ -286,7 +295,8 @@ class ToDeviceExtension(RequestBodyModel): limit: StrictInt = 100 since: Optional[StrictStr] = None - @validator("since") + @field_validator("since") + @classmethod def since_token_check( cls, value: Optional[StrictStr] ) -> Optional[StrictStr]: @@ -382,22 +392,21 @@ class 
ThreadSubscriptionsExtension(RequestBodyModel): receipts: Optional[ReceiptsExtension] = None typing: Optional[TypingExtension] = None thread_subscriptions: Optional[ThreadSubscriptionsExtension] = Field( - alias="io.element.msc4308.thread_subscriptions" + None, alias="io.element.msc4308.thread_subscriptions" ) - conn_id: Optional[StrictStr] - - # mypy workaround via https://github.com/pydantic/pydantic/issues/156#issuecomment-1130883884 - if TYPE_CHECKING: - lists: Optional[dict[str, SlidingSyncList]] = None - else: - lists: Optional[dict[constr(max_length=64, strict=True), SlidingSyncList]] = ( - None # type: ignore[valid-type] - ) + conn_id: Optional[StrictStr] = None + lists: Optional[ + dict[ + Annotated[str, StringConstraints(max_length=64, strict=True)], + SlidingSyncList, + ] + ] = None room_subscriptions: Optional[dict[StrictStr, RoomSubscription]] = None extensions: Optional[Extensions] = None - @validator("lists") + @field_validator("lists") + @classmethod def lists_length_check( cls, value: Optional[dict[str, SlidingSyncList]] ) -> Optional[dict[str, SlidingSyncList]]: diff --git a/synapse/util/events.py b/synapse/util/events.py index e41799b1f7..4a1aa28ce4 100644 --- a/synapse/util/events.py +++ b/synapse/util/events.py @@ -15,7 +15,8 @@ from typing import Any, Optional -from synapse._pydantic_compat import Field, StrictStr, ValidationError, validator +from pydantic import Field, StrictStr, ValidationError, field_validator + from synapse.types import JsonDict from synapse.util.pydantic_models import ParseModel from synapse.util.stringutils import random_string @@ -40,7 +41,7 @@ class MTextRepresentation(ParseModel): """ body: StrictStr - mimetype: Optional[StrictStr] + mimetype: Optional[StrictStr] = None class MTopic(ParseModel): @@ -52,7 +53,7 @@ class MTopic(ParseModel): See `TopicContentBlock` in the Matrix specification. 
""" - m_text: Optional[list[MTextRepresentation]] = Field(alias="m.text") + m_text: Optional[list[MTextRepresentation]] = Field(None, alias="m.text") """ An ordered array of textual representations in different mimetypes. """ @@ -60,16 +61,17 @@ class MTopic(ParseModel): # Because "Receivers SHOULD use the first representation in the array that they # understand.", we ignore invalid representations in the `m.text` field and use # what we can. - @validator("m_text", pre=True) + @field_validator("m_text", mode="before") + @classmethod def ignore_invalid_representations( cls, m_text: Any ) -> Optional[list[MTextRepresentation]]: - if not isinstance(m_text, list): - raise ValueError("m.text must be a list") + if not isinstance(m_text, (list, tuple)): + raise ValueError("m.text must be a list or a tuple") representations = [] for element in m_text: try: - representations.append(MTextRepresentation.parse_obj(element)) + representations.append(MTextRepresentation.model_validate(element)) except ValidationError: continue return representations @@ -85,17 +87,18 @@ class TopicContent(ParseModel): The topic in plain text. """ - m_topic: Optional[MTopic] = Field(alias="m.topic") + m_topic: Optional[MTopic] = Field(None, alias="m.topic") """ Textual representation of the room topic in different mimetypes. """ # We ignore invalid `m.topic` fields as we can always fall back to the plain-text # `topic` field. 
- @validator("m_topic", pre=True) + @field_validator("m_topic", mode="before") + @classmethod def ignore_invalid_m_topic(cls, m_topic: Any) -> Optional[MTopic]: try: - return MTopic.parse_obj(m_topic) + return MTopic.model_validate(m_topic) except ValidationError: return None @@ -114,7 +117,7 @@ def get_plain_text_topic_from_event_content(content: JsonDict) -> Optional[str]: """ try: - topic_content = TopicContent.parse_obj(content) + topic_content = TopicContent.model_validate(content, strict=False) except ValidationError: return None diff --git a/synapse/util/pydantic_models.py b/synapse/util/pydantic_models.py index 4880709501..e1e2d8b99f 100644 --- a/synapse/util/pydantic_models.py +++ b/synapse/util/pydantic_models.py @@ -13,18 +13,20 @@ # # -import re -from typing import Any, Callable, Generator +from typing import Annotated, Union -from synapse._pydantic_compat import BaseModel, Extra, StrictStr +from pydantic import AfterValidator, BaseModel, ConfigDict, StrictStr, StringConstraints + +from synapse.api.errors import SynapseError from synapse.types import EventID class ParseModel(BaseModel): """A custom version of Pydantic's BaseModel which - - ignores unknown fields and - - does not allow fields to be overwritten after construction, + - ignores unknown fields, + - does not allow fields to be overwritten after construction and + - enables strict mode, but otherwise uses Pydantic's default behaviour. @@ -36,48 +38,19 @@ class ParseModel(BaseModel): https://pydantic-docs.helpmanual.io/usage/model_config/#change-behaviour-globally """ - class Config: - # By default, ignore fields that we don't recognise. - extra = Extra.ignore - # By default, don't allow fields to be reassigned after parsing. - allow_mutation = False - - -class AnyEventId(StrictStr): - """ - A validator for strings that need to be an Event ID. - - Accepts any valid grammar of Event ID from any room version. 
- """ - - EVENT_ID_HASH_ROOM_VERSION_3_PLUS = re.compile( - r"^([a-zA-Z0-9-_]{43}|[a-zA-Z0-9+/]{43})$" - ) + model_config = ConfigDict(extra="ignore", frozen=True, strict=True) - @classmethod - def __get_validators__(cls) -> Generator[Callable[..., Any], Any, Any]: - yield from super().__get_validators__() # type: ignore - yield cls.validate_event_id - @classmethod - def validate_event_id(cls, value: str) -> str: - if not value.startswith("$"): - raise ValueError("Event ID must start with `$`") +def validate_event_id_v1_and_2(value: str) -> str: + try: + EventID.from_string(value) + except SynapseError as e: + raise ValueError from e + return value - if ":" in value: - # Room versions 1 and 2 - EventID.from_string(value) # throws on fail - else: - # Room versions 3+: event ID is $ + a base64 sha256 hash - # Room version 3 is base64, 4+ are base64Url - # In both cases, the base64 is unpadded. - # refs: - # - https://spec.matrix.org/v1.15/rooms/v3/ e.g. $acR1l0raoZnm60CBwAVgqbZqoO/mYU81xysh1u7XcJk - # - https://spec.matrix.org/v1.15/rooms/v4/ e.g. 
$Rqnc-F-dvnEYJTyHq_iKxU2bZ1CI92-kuZq3a5lr5Zg - b64_hash = value[1:] - if cls.EVENT_ID_HASH_ROOM_VERSION_3_PLUS.fullmatch(b64_hash) is None: - raise ValueError( - "Event ID must either have a domain part or be a valid hash" - ) - return value +EventIdV1And2 = Annotated[StrictStr, AfterValidator(validate_event_id_v1_and_2)] +EventIdV3Plus = Annotated[ + StrictStr, StringConstraints(pattern=r"^\$([a-zA-Z0-9-_]{43}|[a-zA-Z0-9+/]{43})$") +] +AnyEventId = Union[EventIdV1And2, EventIdV3Plus] diff --git a/tests/config/test_oauth_delegation.py b/tests/config/test_oauth_delegation.py index 85e0a3b6b6..6105ca2b04 100644 --- a/tests/config/test_oauth_delegation.py +++ b/tests/config/test_oauth_delegation.py @@ -21,6 +21,7 @@ import os import tempfile +from pathlib import Path from unittest.mock import Mock from synapse.config import ConfigError @@ -309,7 +310,9 @@ def test_endpoint_has_to_be_a_url(self) -> None: def test_secret_and_secret_path_are_mutually_exclusive(self) -> None: with tempfile.NamedTemporaryFile() as f: self.config_dict["matrix_authentication_service"]["secret"] = "verysecret" - self.config_dict["matrix_authentication_service"]["secret_path"] = f.name + self.config_dict["matrix_authentication_service"]["secret_path"] = Path( + f.name + ) with self.assertRaises(ConfigError): self.parse_config() @@ -317,13 +320,15 @@ def test_secret_path_loads_secret(self) -> None: with tempfile.NamedTemporaryFile(buffering=0) as f: f.write(b"53C237") del self.config_dict["matrix_authentication_service"]["secret"] - self.config_dict["matrix_authentication_service"]["secret_path"] = f.name + self.config_dict["matrix_authentication_service"]["secret_path"] = Path( + f.name + ) config = self.parse_config() self.assertEqual(config.mas.secret(), "53C237") def test_secret_path_must_exist(self) -> None: del self.config_dict["matrix_authentication_service"]["secret"] - self.config_dict["matrix_authentication_service"]["secret_path"] = ( + 
self.config_dict["matrix_authentication_service"]["secret_path"] = Path( "/not/a/valid/file" ) with self.assertRaises(ConfigError): diff --git a/tests/rest/client/test_account.py b/tests/rest/client/test_account.py index c4c62c7800..03474d7400 100644 --- a/tests/rest/client/test_account.py +++ b/tests/rest/client/test_account.py @@ -1201,7 +1201,9 @@ def _request_token_invalid_email( self.assertEqual( HTTPStatus.BAD_REQUEST, channel.code, msg=channel.result["body"] ) - self.assertEqual(expected_errcode, channel.json_body["errcode"]) + self.assertEqual( + expected_errcode, channel.json_body["errcode"], msg=channel.result["body"] + ) self.assertIn(expected_error, channel.json_body["error"]) def _validate_token(self, link: str) -> None: diff --git a/tests/rest/client/test_models.py b/tests/rest/client/test_models.py index 75479e6235..f297856830 100644 --- a/tests/rest/client/test_models.py +++ b/tests/rest/client/test_models.py @@ -21,7 +21,8 @@ import unittest as stdlib_unittest from typing import Literal -from synapse._pydantic_compat import BaseModel, ValidationError +from pydantic import BaseModel, ValidationError + from synapse.types.rest.client import EmailRequestTokenBody @@ -35,16 +36,16 @@ def test_accepts_valid_medium_string(self) -> None: This is arguably more of a test of a class that inherits from str and Enum simultaneously. 
""" - model = self.Model.parse_obj({"medium": "email"}) + model = self.Model.model_validate({"medium": "email"}) self.assertEqual(model.medium, "email") def test_rejects_invalid_medium_value(self) -> None: with self.assertRaises(ValidationError): - self.Model.parse_obj({"medium": "interpretive_dance"}) + self.Model.model_validate({"medium": "interpretive_dance"}) def test_rejects_invalid_medium_type(self) -> None: with self.assertRaises(ValidationError): - self.Model.parse_obj({"medium": 123}) + self.Model.model_validate({"medium": 123}) class EmailRequestTokenBodyTestCase(stdlib_unittest.TestCase): @@ -56,14 +57,14 @@ class EmailRequestTokenBodyTestCase(stdlib_unittest.TestCase): def test_token_required_if_id_server_provided(self) -> None: with self.assertRaises(ValidationError): - EmailRequestTokenBody.parse_obj( + EmailRequestTokenBody.model_validate( { **self.base_request, "id_server": "identity.wonderland.com", } ) with self.assertRaises(ValidationError): - EmailRequestTokenBody.parse_obj( + EmailRequestTokenBody.model_validate( { **self.base_request, "id_server": "identity.wonderland.com", @@ -73,7 +74,7 @@ def test_token_required_if_id_server_provided(self) -> None: def test_token_typechecked_when_id_server_provided(self) -> None: with self.assertRaises(ValidationError): - EmailRequestTokenBody.parse_obj( + EmailRequestTokenBody.model_validate( { **self.base_request, "id_server": "identity.wonderland.com", diff --git a/tests/rest/client/test_thread_subscriptions.py b/tests/rest/client/test_thread_subscriptions.py index 5aae07ef50..87c477cbb5 100644 --- a/tests/rest/client/test_thread_subscriptions.py +++ b/tests/rest/client/test_thread_subscriptions.py @@ -111,7 +111,7 @@ def test_subscribe_manual_then_automatic(self) -> None: {}, access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.OK) + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) # Assert the subscription was saved channel = self.make_request( @@ -119,8 +119,8 @@ def 
test_subscribe_manual_then_automatic(self) -> None: f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription", access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.OK) - self.assertEqual(channel.json_body, {"automatic": False}) + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) + self.assertEqual(channel.json_body, {"automatic": False}, channel.json_body) # Now also register an automatic subscription; it should not # override the manual subscription @@ -130,7 +130,7 @@ def test_subscribe_manual_then_automatic(self) -> None: {"automatic": self.threaded_events[0]}, access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.OK) + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) # Assert the manual subscription was not overridden channel = self.make_request( @@ -138,8 +138,8 @@ def test_subscribe_manual_then_automatic(self) -> None: f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription", access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.OK) - self.assertEqual(channel.json_body, {"automatic": False}) + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) + self.assertEqual(channel.json_body, {"automatic": False}, channel.json_body) def test_subscribe_automatic_then_manual(self) -> None: """Test subscribing to a thread, first an automatic subscription then a manual subscription. 
@@ -160,8 +160,8 @@ def test_subscribe_automatic_then_manual(self) -> None: f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription", access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.OK) - self.assertEqual(channel.json_body, {"automatic": True}) + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) + self.assertEqual(channel.json_body, {"automatic": True}, channel.json_body) # Now also register a manual subscription channel = self.make_request( @@ -170,7 +170,7 @@ def test_subscribe_automatic_then_manual(self) -> None: {}, access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.OK) + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) # Assert the manual subscription was not overridden channel = self.make_request( @@ -178,8 +178,8 @@ def test_subscribe_automatic_then_manual(self) -> None: f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription", access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.OK) - self.assertEqual(channel.json_body, {"automatic": False}) + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) + self.assertEqual(channel.json_body, {"automatic": False}, channel.json_body) def test_unsubscribe(self) -> None: """Test subscribing to a thread, then unsubscribing.""" @@ -191,7 +191,7 @@ def test_unsubscribe(self) -> None: }, access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.OK) + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) # Assert the subscription was saved channel = self.make_request( @@ -199,23 +199,23 @@ def test_unsubscribe(self) -> None: f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription", access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.OK) - self.assertEqual(channel.json_body, {"automatic": True}) + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) + self.assertEqual(channel.json_body, {"automatic": True}, channel.json_body) channel = 
self.make_request( "DELETE", f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription", access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.OK) + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) channel = self.make_request( "GET", f"{PREFIX}/{self.room_id}/thread/{self.root_event_id}/subscription", access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.NOT_FOUND) - self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND") + self.assertEqual(channel.code, HTTPStatus.NOT_FOUND, channel.json_body) + self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND", channel.json_body) def test_set_thread_subscription_nonexistent_thread(self) -> None: """Test setting subscription settings for a nonexistent thread.""" @@ -225,8 +225,8 @@ def test_set_thread_subscription_nonexistent_thread(self) -> None: {}, access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.NOT_FOUND) - self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND") + self.assertEqual(channel.code, HTTPStatus.NOT_FOUND, channel.json_body) + self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND", channel.json_body) def test_set_thread_subscription_no_access(self) -> None: """Test that a user can't set thread subscription for a thread they can't access.""" @@ -239,8 +239,8 @@ def test_set_thread_subscription_no_access(self) -> None: {}, access_token=no_access_token, ) - self.assertEqual(channel.code, HTTPStatus.NOT_FOUND) - self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND") + self.assertEqual(channel.code, HTTPStatus.NOT_FOUND, channel.json_body) + self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND", channel.json_body) def test_invalid_body(self) -> None: """Test that sending invalid subscription settings is rejected.""" @@ -251,7 +251,7 @@ def test_invalid_body(self) -> None: {"automatic": True}, access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST) + 
self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body) channel = self.make_request( "PUT", @@ -260,7 +260,7 @@ def test_invalid_body(self) -> None: {"automatic": "$malformedEventId"}, access_token=self.token, ) - self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST) + self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body) def test_auto_subscribe_cause_event_not_in_thread(self) -> None: """ From 07e79805725880e88cb58e3e5565f9b48d0770ed Mon Sep 17 00:00:00 2001 From: V02460 Date: Fri, 31 Oct 2025 13:09:13 +0100 Subject: [PATCH 42/72] =?UTF-8?q?Fix=20Rust=E2=80=99s=20confusing=20lifeti?= =?UTF-8?q?me=20lint=20(#19118)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> --- changelog.d/19118.misc | 1 + rust/src/http_client.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/19118.misc diff --git a/changelog.d/19118.misc b/changelog.d/19118.misc new file mode 100644 index 0000000000..672ed45573 --- /dev/null +++ b/changelog.d/19118.misc @@ -0,0 +1 @@ +Fix a lint error related to lifetimes in Rust 1.90. \ No newline at end of file diff --git a/rust/src/http_client.rs b/rust/src/http_client.rs index e67dae169f..ca4bf1590b 100644 --- a/rust/src/http_client.rs +++ b/rust/src/http_client.rs @@ -137,7 +137,7 @@ fn get_runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult = OnceCell::new(); /// Access to the `twisted.internet.defer` module. -fn defer(py: Python<'_>) -> PyResult<&Bound> { +fn defer(py: Python<'_>) -> PyResult<&Bound<'_, PyAny>> { Ok(DEFER .get_or_try_init(|| py.import("twisted.internet.defer").map(Into::into))? 
.bind(py)) From 3ccc5184e0fe1f00dec69293c097d513da54a410 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 31 Oct 2025 13:16:47 +0000 Subject: [PATCH 43/72] Fix schema lint script to understand `CREATE TABLE IF NOT EXISTS` (#19020) The schema lint tries to make sure we don't add or remove indices in schema files (rather than as background updates), *unless* the table was created in the same schema file. The regex to pull out the `CREATE TABLE` SQL incorrectly didn't recognise `IF NOT EXISTS`. There is a test delta file that shows that we accept different types of `CREATE TABLE` and `CREATE INDEX` statements, as well as an index creation that doesn't have a matching create table (to show that we do still catch it). The test delta should be removed before merge. --- changelog.d/19020.misc | 1 + scripts-dev/check_schema_delta.py | 20 +++++++++++++++----- 2 files changed, 16 insertions(+), 5 deletions(-) create mode 100644 changelog.d/19020.misc diff --git a/changelog.d/19020.misc b/changelog.d/19020.misc new file mode 100644 index 0000000000..f5775ff194 --- /dev/null +++ b/changelog.d/19020.misc @@ -0,0 +1 @@ +Fix CI linter for schema delta files to correctly handle all types of `CREATE TABLE` syntax. 
diff --git a/scripts-dev/check_schema_delta.py b/scripts-dev/check_schema_delta.py index 7b2dec25d4..dd96c904bb 100755 --- a/scripts-dev/check_schema_delta.py +++ b/scripts-dev/check_schema_delta.py @@ -11,9 +11,13 @@ import git SCHEMA_FILE_REGEX = re.compile(r"^synapse/storage/schema/(.*)/delta/(.*)/(.*)$") -INDEX_CREATION_REGEX = re.compile(r"CREATE .*INDEX .*ON ([a-z_]+)", flags=re.IGNORECASE) -INDEX_DELETION_REGEX = re.compile(r"DROP .*INDEX ([a-z_]+)", flags=re.IGNORECASE) -TABLE_CREATION_REGEX = re.compile(r"CREATE .*TABLE ([a-z_]+)", flags=re.IGNORECASE) +INDEX_CREATION_REGEX = re.compile( + r"CREATE .*INDEX .*ON ([a-z_0-9]+)", flags=re.IGNORECASE +) +INDEX_DELETION_REGEX = re.compile(r"DROP .*INDEX ([a-z_0-9]+)", flags=re.IGNORECASE) +TABLE_CREATION_REGEX = re.compile( + r"CREATE .*TABLE.* ([a-z_0-9]+)\s*\(", flags=re.IGNORECASE +) # The base branch we want to check against. We use the main development branch # on the assumption that is what we are developing against. @@ -173,11 +177,14 @@ def main(force_colors: bool) -> None: clause = match.group() click.secho( - f"Found delta with index deletion: '{clause}' in {delta_file}\nThese should be in background updates.", + f"Found delta with index deletion: '{clause}' in {delta_file}", fg="red", bold=True, color=force_colors, ) + click.secho( + " ↪ These should be in background updates.", + ) return_code = 1 # Check for index creation, which is only allowed for tables we've @@ -188,11 +195,14 @@ def main(force_colors: bool) -> None: table_name = match.group(1) if table_name not in created_tables: click.secho( - f"Found delta with index creation: '{clause}' in {delta_file}\nThese should be in background updates.", + f"Found delta with index creation for existing table: '{clause}' in {delta_file}", fg="red", bold=True, color=force_colors, ) + click.secho( + " ↪ These should be in background updates (or the table should be created in the same delta).", + ) return_code = 1 
click.get_current_context().exit(return_code) From 41a2762e588013887682cdd5a997f6d5b8aa6c2b Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 31 Oct 2025 10:12:05 -0500 Subject: [PATCH 44/72] Be mindful of other logging context filters in 3rd-party code (#19068) Be mindful that Synapse can be run alongside other code in the same Python process. We shouldn't overwrite fields on given log record unless we know it's relevant to Synapse. (no clobber) ### Background As part of Element's plan to support a light form of vhosting (virtual host) (multiple instances of Synapse in the same Python process), we're currently diving into the details and implications of running multiple instances of Synapse in the same Python process. "Per-tenant logging" tracked internally by https://github.com/element-hq/synapse-small-hosts/issues/48 --- changelog.d/19068.misc | 1 + synapse/logging/context.py | 40 ++++++++++++++++++++++++++++++++++---- 2 files changed, 37 insertions(+), 4 deletions(-) create mode 100644 changelog.d/19068.misc diff --git a/changelog.d/19068.misc b/changelog.d/19068.misc new file mode 100644 index 0000000000..9e5c34b608 --- /dev/null +++ b/changelog.d/19068.misc @@ -0,0 +1 @@ +Be mindful of other logging context filters in 3rd-party code and avoid overwriting log record fields unless we know the log record is relevant to Synapse. diff --git a/synapse/logging/context.py b/synapse/logging/context.py index 86e994cbb4..919493d1a3 100644 --- a/synapse/logging/context.py +++ b/synapse/logging/context.py @@ -604,25 +604,57 @@ def __init__( self._default_request = request def filter(self, record: logging.LogRecord) -> Literal[True]: - """Add each fields from the logging contexts to the record. + """ + Add each field from the logging context to the record. + + Please be mindful of 3rd-party code outside of Synapse (like in the case of + Synapse Pro for small hosts) as this is running as a global log record filter. 
+ Other code may have set their own attributes on the record and the log record + may not be relevant to Synapse at all so we should not mangle it. + + We can have some defaults but we should avoid overwriting existing attributes on + any log record unless we actually have a Synapse logcontext (not just the + default sentinel logcontext). + Returns: True to include the record in the log output. """ context = current_context() record.request = self._default_request - record.server_name = "unknown_server_from_no_context" + + # Avoid overwriting an existing `server_name` on the record. This is running in + # the context of a global log record filter so there may be 3rd-party code that + # adds their own `server_name` and we don't want to interfere with that + # (clobber). + if not hasattr(record, "server_name"): + record.server_name = "unknown_server_from_no_logcontext" # context should never be None, but if it somehow ends up being, then # we end up in a death spiral of infinite loops, so let's check, for # robustness' sake. if context is not None: - record.server_name = context.server_name + + def safe_set(attr: str, value: Any) -> None: + """ + Only write the attribute if it hasn't already been set or we actually have + a Synapse logcontext (indicating that this log record is relevant to + Synapse). + """ + if context is not SENTINEL_CONTEXT or not hasattr(record, attr): + setattr(record, attr, value) + + safe_set("server_name", context.server_name) + # Logging is interested in the request ID. Note that for backwards # compatibility this is stored as the "request" on the record. - record.request = str(context) + safe_set("request", str(context)) # Add some data from the HTTP request. request = context.request + # The sentinel logcontext has no request so if we get past this point, we + # know we have some actual Synapse logcontext and don't need to worry about + # using `safe_set`. We'll consider this an optimization since this is a + # pretty hot-path. 
if request is None: return True From 69bab78b440fc2f89c513a8e556bfcca1ee7a8d8 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Mon, 3 Nov 2025 12:53:59 +0100 Subject: [PATCH 45/72] Python 3.14 support (#19055) Co-authored-by: Eric Eastwood --- .ci/scripts/calculate_jobs.py | 4 +- .github/workflows/tests.yml | 2 +- Cargo.lock | 29 ++- changelog.d/19055.misc | 1 + poetry.lock | 313 +++++++++++++-------------- pyproject.toml | 4 + rust/Cargo.toml | 6 +- rust/src/events/internal_metadata.rs | 4 +- rust/src/http_client.rs | 8 +- rust/src/rendezvous/mod.rs | 4 +- 10 files changed, 184 insertions(+), 191 deletions(-) create mode 100644 changelog.d/19055.misc diff --git a/.ci/scripts/calculate_jobs.py b/.ci/scripts/calculate_jobs.py index 2971b3c5c8..87fbc7a266 100755 --- a/.ci/scripts/calculate_jobs.py +++ b/.ci/scripts/calculate_jobs.py @@ -53,7 +53,7 @@ def set_output(key: str, value: str): "database": "sqlite", "extras": "all", } - for version in ("3.11", "3.12", "3.13") + for version in ("3.11", "3.12", "3.13", "3.14") ) trial_postgres_tests = [ @@ -68,7 +68,7 @@ def set_output(key: str, value: str): if not IS_PR: trial_postgres_tests.append( { - "python-version": "3.13", + "python-version": "3.14", "database": "postgres", "postgres-version": "17", "extras": "all", diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 494543e4b9..4f38ab0690 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -619,7 +619,7 @@ jobs: - python-version: "3.10" postgres-version: "13" - - python-version: "3.13" + - python-version: "3.14" postgres-version: "17" services: diff --git a/Cargo.lock b/Cargo.lock index 35f62fe4e9..a057c812af 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -814,9 +814,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.25.1" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8970a78afe0628a3e3430376fc5fd76b6b45c4d43360ffd6cdd40bdde72b682a" +checksum = "7ba0117f4212101ee6544044dae45abe1083d30ce7b29c4b5cbdfa2354e07383" dependencies = [ "anyhow", "indoc", @@ -832,19 +832,18 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.25.1" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "458eb0c55e7ece017adeba38f2248ff3ac615e53660d7c71a238d7d2a01c7598" +checksum = "4fc6ddaf24947d12a9aa31ac65431fb1b851b8f4365426e182901eabfb87df5f" dependencies = [ - "once_cell", "target-lexicon", ] [[package]] name = "pyo3-ffi" -version = "0.25.1" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7114fe5457c61b276ab77c5055f206295b812608083644a5c5b2640c3102565c" +checksum = "025474d3928738efb38ac36d4744a74a400c901c7596199e20e45d98eb194105" dependencies = [ "libc", "pyo3-build-config", @@ -852,9 +851,9 @@ dependencies = [ [[package]] name = "pyo3-log" -version = "0.12.4" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45192e5e4a4d2505587e27806c7b710c231c40c56f3bfc19535d0bb25df52264" +checksum = "d359e20231345f21a3b5b6aea7e73f4dc97e1712ef3bfe2d88997ac6a308d784" dependencies = [ "arc-swap", "log", @@ -863,9 +862,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.25.1" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8725c0a622b374d6cb051d11a0983786448f7785336139c3c94f5aa6bef7e50" +checksum = "2e64eb489f22fe1c95911b77c44cc41e7c19f3082fc81cce90f657cdc42ffded" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -875,9 +874,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.25.1" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4109984c22491085343c05b0dbc54ddc405c3cf7b4374fc533f5c3313a572ccc" +checksum = 
"100246c0ecf400b475341b8455a9213344569af29a3c841d29270e53102e0fcf" dependencies = [ "heck", "proc-macro2", @@ -888,9 +887,9 @@ dependencies = [ [[package]] name = "pythonize" -version = "0.25.0" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597907139a488b22573158793aa7539df36ae863eba300c75f3a0d65fc475e27" +checksum = "11e06e4cff9be2bbf2bddf28a486ae619172ea57e79787f856572878c62dcfe2" dependencies = [ "pyo3", "serde", diff --git a/changelog.d/19055.misc b/changelog.d/19055.misc new file mode 100644 index 0000000000..61e626cc9b --- /dev/null +++ b/changelog.d/19055.misc @@ -0,0 +1 @@ +Add support for Python 3.14. \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index a1f133e164..ce8b9ef6ee 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1594,8 +1594,6 @@ groups = ["main"] files = [ {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"}, {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"}, - {file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"}, - {file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"}, {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"}, {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"}, {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"}, @@ -1605,8 +1603,6 @@ files = 
[ {file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"}, {file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"}, {file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"}, - {file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"}, - {file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"}, {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"}, {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"}, {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"}, @@ -1616,8 +1612,6 @@ files = [ {file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"}, {file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"}, {file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"}, - {file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"}, - {file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"}, {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"}, {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"}, {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"}, @@ -1630,8 +1624,6 @@ files = [ {file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"}, {file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"}, {file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"}, - {file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"}, - {file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"}, {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"}, {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"}, {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"}, @@ -1641,8 +1633,6 @@ files = [ {file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = 
"sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"}, {file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"}, {file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"}, - {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"}, - {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"}, {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"}, {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"}, {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"}, @@ -1652,8 +1642,6 @@ files = [ {file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"}, {file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"}, {file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"}, - {file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"}, - {file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"}, {file = 
"pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"}, {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"}, {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"}, @@ -1663,8 +1651,6 @@ files = [ {file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"}, {file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"}, {file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"}, - {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"}, - {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"}, {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"}, {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"}, {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"}, @@ -1674,8 +1660,6 @@ files = [ {file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"}, {file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = 
"sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"}, {file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"}, - {file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"}, - {file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"}, {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"}, {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"}, {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"}, @@ -1685,15 +1669,11 @@ files = [ {file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"}, - {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"}, - {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"}, - {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"}, - {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"}, @@ -2369,109 +2349,127 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.8.10" +version = "0.28.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" groups = ["main", "dev"] files = [ - {file = "rpds_py-0.8.10-cp310-cp310-macosx_10_7_x86_64.whl", hash = 
"sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711"}, - {file = "rpds_py-0.8.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c"}, - {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8"}, - {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0"}, - {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451"}, - {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0"}, - {file = "rpds_py-0.8.10-cp310-none-win32.whl", hash = "sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84"}, - {file = "rpds_py-0.8.10-cp310-none-win_amd64.whl", hash = "sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e"}, - {file = 
"rpds_py-0.8.10-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7"}, - {file = "rpds_py-0.8.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169"}, - {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb"}, - {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe"}, - {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0"}, - {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8"}, - {file = "rpds_py-0.8.10-cp311-none-win32.whl", hash = "sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9"}, - {file = "rpds_py-0.8.10-cp311-none-win_amd64.whl", hash = 
"sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055"}, - {file = "rpds_py-0.8.10-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2"}, - {file = "rpds_py-0.8.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292"}, - {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58"}, - {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346"}, - {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7"}, - {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4"}, - {file = "rpds_py-0.8.10-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7"}, - {file = 
"rpds_py-0.8.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7"}, - {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873"}, - {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52"}, - {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38"}, - {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b"}, - {file = "rpds_py-0.8.10-cp38-none-win32.whl", hash = "sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6"}, - {file = "rpds_py-0.8.10-cp38-none-win_amd64.whl", hash = "sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6"}, - {file = "rpds_py-0.8.10-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8"}, - {file = 
"rpds_py-0.8.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4"}, - {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8"}, - {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d"}, - {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a"}, - {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2"}, - {file = "rpds_py-0.8.10-cp39-none-win32.whl", hash = "sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49"}, - {file = "rpds_py-0.8.10-cp39-none-win_amd64.whl", hash = "sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068"}, - {file 
= "rpds_py-0.8.10-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1"}, - {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017"}, - {file = 
"rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c"}, - {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7"}, - {file = 
"rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b"}, - {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734"}, - {file = "rpds_py-0.8.10.tar.gz", hash = "sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4"}, + {file = "rpds_py-0.28.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7b6013db815417eeb56b2d9d7324e64fcd4fa289caeee6e7a78b2e11fc9b438a"}, + {file = "rpds_py-0.28.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a4c6b05c685c0c03f80dabaeb73e74218c49deea965ca63f76a752807397207"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4794c6c3fbe8f9ac87699b131a1f26e7b4abcf6d828da46a3a52648c7930eba"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:2e8456b6ee5527112ff2354dd9087b030e3429e43a74f480d4a5ca79d269fd85"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:beb880a9ca0a117415f241f66d56025c02037f7c4efc6fe59b5b8454f1eaa50d"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6897bebb118c44b38c9cb62a178e09f1593c949391b9a1a6fe777ccab5934ee7"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1b553dd06e875249fd43efd727785efb57a53180e0fde321468222eabbeaafa"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:f0b2044fdddeea5b05df832e50d2a06fe61023acb44d76978e1b060206a8a476"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05cf1e74900e8da73fa08cc76c74a03345e5a3e37691d07cfe2092d7d8e27b04"}, + {file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:efd489fec7c311dae25e94fe7eeda4b3d06be71c68f2cf2e8ef990ffcd2cd7e8"}, + {file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ada7754a10faacd4f26067e62de52d6af93b6d9542f0df73c57b9771eb3ba9c4"}, + {file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c2a34fd26588949e1e7977cfcbb17a9a42c948c100cab890c6d8d823f0586457"}, + {file = "rpds_py-0.28.0-cp310-cp310-win32.whl", hash = "sha256:f9174471d6920cbc5e82a7822de8dfd4dcea86eb828b04fc8c6519a77b0ee51e"}, + {file = "rpds_py-0.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:6e32dd207e2c4f8475257a3540ab8a93eff997abfa0a3fdb287cae0d6cd874b8"}, + {file = "rpds_py-0.28.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:03065002fd2e287725d95fbc69688e0c6daf6c6314ba38bdbaa3895418e09296"}, + {file = "rpds_py-0.28.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28ea02215f262b6d078daec0b45344c89e161eab9526b0d898221d96fdda5f27"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:25dbade8fbf30bcc551cb352376c0ad64b067e4fc56f90e22ba70c3ce205988c"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c03002f54cc855860bfdc3442928ffdca9081e73b5b382ed0b9e8efe6e5e205"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9699fa7990368b22032baf2b2dce1f634388e4ffc03dfefaaac79f4695edc95"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9b06fe1a75e05e0713f06ea0c89ecb6452210fd60e2f1b6ddc1067b990e08d9"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9f83e7b326a3f9ec3ef84cda98fb0a74c7159f33e692032233046e7fd15da2"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:0d3259ea9ad8743a75a43eb7819324cdab393263c91be86e2d1901ee65c314e0"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a7548b345f66f6695943b4ef6afe33ccd3f1b638bd9afd0f730dd255c249c9e"}, + {file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9a40040aa388b037eb39416710fbcce9443498d2eaab0b9b45ae988b53f5c67"}, + {file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f60c7ea34e78c199acd0d3cda37a99be2c861dd2b8cf67399784f70c9f8e57d"}, + {file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1571ae4292649100d743b26d5f9c63503bb1fedf538a8f29a98dce2d5ba6b4e6"}, + {file = "rpds_py-0.28.0-cp311-cp311-win32.whl", hash = "sha256:5cfa9af45e7c1140af7321fa0bef25b386ee9faa8928c80dc3a5360971a29e8c"}, + {file = "rpds_py-0.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd8d86b5d29d1b74100982424ba53e56033dc47720a6de9ba0259cf81d7cecaa"}, + {file = "rpds_py-0.28.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e27d3a5709cc2b3e013bf93679a849213c79ae0573f9b894b284b55e729e120"}, + {file = "rpds_py-0.28.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:6b4f28583a4f247ff60cd7bdda83db8c3f5b05a7a82ff20dd4b078571747708f"}, + {file = "rpds_py-0.28.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d678e91b610c29c4b3d52a2c148b641df2b4676ffe47c59f6388d58b99cdc424"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e819e0e37a44a78e1383bf1970076e2ccc4dc8c2bbaa2f9bd1dc987e9afff628"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5ee514e0f0523db5d3fb171f397c54875dbbd69760a414dccf9d4d7ad628b5bd"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3fa06d27fdcee47f07a39e02862da0100cb4982508f5ead53ec533cd5fe55e"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:46959ef2e64f9e4a41fc89aa20dbca2b85531f9a72c21099a3360f35d10b0d5a"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8455933b4bcd6e83fde3fefc987a023389c4b13f9a58c8d23e4b3f6d13f78c84"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:ad50614a02c8c2962feebe6012b52f9802deec4263946cddea37aaf28dd25a66"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5deca01b271492553fdb6c7fd974659dce736a15bae5dad7ab8b93555bceb28"}, + {file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:735f8495a13159ce6a0d533f01e8674cec0c57038c920495f87dcb20b3ddb48a"}, + {file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:961ca621ff10d198bbe6ba4957decca61aa2a0c56695384c1d6b79bf61436df5"}, + {file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2374e16cc9131022e7d9a8f8d65d261d9ba55048c78f3b6e017971a4f5e6353c"}, + {file = "rpds_py-0.28.0-cp312-cp312-win32.whl", hash = "sha256:d15431e334fba488b081d47f30f091e5d03c18527c325386091f31718952fe08"}, + {file = 
"rpds_py-0.28.0-cp312-cp312-win_amd64.whl", hash = "sha256:a410542d61fc54710f750d3764380b53bf09e8c4edbf2f9141a82aa774a04f7c"}, + {file = "rpds_py-0.28.0-cp312-cp312-win_arm64.whl", hash = "sha256:1f0cfd1c69e2d14f8c892b893997fa9a60d890a0c8a603e88dca4955f26d1edd"}, + {file = "rpds_py-0.28.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e9e184408a0297086f880556b6168fa927d677716f83d3472ea333b42171ee3b"}, + {file = "rpds_py-0.28.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:edd267266a9b0448f33dc465a97cfc5d467594b600fe28e7fa2f36450e03053a"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85beb8b3f45e4e32f6802fb6cd6b17f615ef6c6a52f265371fb916fae02814aa"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d2412be8d00a1b895f8ad827cc2116455196e20ed994bb704bf138fe91a42724"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf128350d384b777da0e68796afdcebc2e9f63f0e9f242217754e647f6d32491"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2036d09b363aa36695d1cc1a97b36865597f4478470b0697b5ee9403f4fe399"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8e1e9be4fa6305a16be628959188e4fd5cd6f1b0e724d63c6d8b2a8adf74ea6"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:0a403460c9dd91a7f23fc3188de6d8977f1d9603a351d5db6cf20aaea95b538d"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d7366b6553cdc805abcc512b849a519167db8f5e5c3472010cd1228b224265cb"}, + {file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b43c6a3726efd50f18d8120ec0551241c38785b68952d240c45ea553912ac41"}, + {file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:0cb7203c7bc69d7c1585ebb33a2e6074492d2fc21ad28a7b9d40457ac2a51ab7"}, + {file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7a52a5169c664dfb495882adc75c304ae1d50df552fbd68e100fdc719dee4ff9"}, + {file = "rpds_py-0.28.0-cp313-cp313-win32.whl", hash = "sha256:2e42456917b6687215b3e606ab46aa6bca040c77af7df9a08a6dcfe8a4d10ca5"}, + {file = "rpds_py-0.28.0-cp313-cp313-win_amd64.whl", hash = "sha256:e0a0311caedc8069d68fc2bf4c9019b58a2d5ce3cd7cb656c845f1615b577e1e"}, + {file = "rpds_py-0.28.0-cp313-cp313-win_arm64.whl", hash = "sha256:04c1b207ab8b581108801528d59ad80aa83bb170b35b0ddffb29c20e411acdc1"}, + {file = "rpds_py-0.28.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f296ea3054e11fc58ad42e850e8b75c62d9a93a9f981ad04b2e5ae7d2186ff9c"}, + {file = "rpds_py-0.28.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5a7306c19b19005ad98468fcefeb7100b19c79fc23a5f24a12e06d91181193fa"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5d9b86aa501fed9862a443c5c3116f6ead8bc9296185f369277c42542bd646b"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e5bbc701eff140ba0e872691d573b3d5d30059ea26e5785acba9132d10c8c31d"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5690671cd672a45aa8616d7374fdf334a1b9c04a0cac3c854b1136e92374fe"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9f1d92ecea4fa12f978a367c32a5375a1982834649cdb96539dcdc12e609ab1a"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d252db6b1a78d0a3928b6190156042d54c93660ce4d98290d7b16b5296fb7cc"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d61b355c3275acb825f8777d6c4505f42b5007e357af500939d4a35b19177259"}, + {file = 
"rpds_py-0.28.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:acbe5e8b1026c0c580d0321c8aae4b0a1e1676861d48d6e8c6586625055b606a"}, + {file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8aa23b6f0fc59b85b4c7d89ba2965af274346f738e8d9fc2455763602e62fd5f"}, + {file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7b14b0c680286958817c22d76fcbca4800ddacef6f678f3a7c79a1fe7067fe37"}, + {file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bcf1d210dfee61a6c86551d67ee1031899c0fdbae88b2d44a569995d43797712"}, + {file = "rpds_py-0.28.0-cp313-cp313t-win32.whl", hash = "sha256:3aa4dc0fdab4a7029ac63959a3ccf4ed605fee048ba67ce89ca3168da34a1342"}, + {file = "rpds_py-0.28.0-cp313-cp313t-win_amd64.whl", hash = "sha256:7b7d9d83c942855e4fdcfa75d4f96f6b9e272d42fffcb72cd4bb2577db2e2907"}, + {file = "rpds_py-0.28.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:dcdcb890b3ada98a03f9f2bb108489cdc7580176cb73b4f2d789e9a1dac1d472"}, + {file = "rpds_py-0.28.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f274f56a926ba2dc02976ca5b11c32855cbd5925534e57cfe1fda64e04d1add2"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fe0438ac4a29a520ea94c8c7f1754cdd8feb1bc490dfda1bfd990072363d527"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a358a32dd3ae50e933347889b6af9a1bdf207ba5d1a3f34e1a38cd3540e6733"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e80848a71c78aa328fefaba9c244d588a342c8e03bda518447b624ea64d1ff56"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f586db2e209d54fe177e58e0bc4946bea5fb0102f150b1b2f13de03e1f0976f8"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5ae8ee156d6b586e4292491e885d41483136ab994e719a13458055bec14cf370"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:a805e9b3973f7e27f7cab63a6b4f61d90f2e5557cff73b6e97cd5b8540276d3d"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5d3fd16b6dc89c73a4da0b4ac8b12a7ecc75b2864b95c9e5afed8003cb50a728"}, + {file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6796079e5d24fdaba6d49bda28e2c47347e89834678f2bc2c1b4fc1489c0fb01"}, + {file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:76500820c2af232435cbe215e3324c75b950a027134e044423f59f5b9a1ba515"}, + {file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bbdc5640900a7dbf9dd707fe6388972f5bbd883633eb68b76591044cfe346f7e"}, + {file = "rpds_py-0.28.0-cp314-cp314-win32.whl", hash = "sha256:adc8aa88486857d2b35d75f0640b949759f79dc105f50aa2c27816b2e0dd749f"}, + {file = "rpds_py-0.28.0-cp314-cp314-win_amd64.whl", hash = "sha256:66e6fa8e075b58946e76a78e69e1a124a21d9a48a5b4766d15ba5b06869d1fa1"}, + {file = "rpds_py-0.28.0-cp314-cp314-win_arm64.whl", hash = "sha256:a6fe887c2c5c59413353b7c0caff25d0e566623501ccfff88957fa438a69377d"}, + {file = "rpds_py-0.28.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7a69df082db13c7070f7b8b1f155fa9e687f1d6aefb7b0e3f7231653b79a067b"}, + {file = "rpds_py-0.28.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b1cde22f2c30ebb049a9e74c5374994157b9b70a16147d332f89c99c5960737a"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5338742f6ba7a51012ea470bd4dc600a8c713c0c72adaa0977a1b1f4327d6592"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1460ebde1bcf6d496d80b191d854adedcc619f84ff17dc1c6d550f58c9efbba"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e3eb248f2feba84c692579257a043a7699e28a77d86c77b032c1d9fbb3f0219c"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3bbba5def70b16cd1c1d7255666aad3b290fbf8d0fe7f9f91abafb73611a91"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3114f4db69ac5a1f32e7e4d1cbbe7c8f9cf8217f78e6e002cedf2d54c2a548ed"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:4b0cb8a906b1a0196b863d460c0222fb8ad0f34041568da5620f9799b83ccf0b"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf681ac76a60b667106141e11a92a3330890257e6f559ca995fbb5265160b56e"}, + {file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1e8ee6413cfc677ce8898d9cde18cc3a60fc2ba756b0dec5b71eb6eb21c49fa1"}, + {file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b3072b16904d0b5572a15eb9d31c1954e0d3227a585fc1351aa9878729099d6c"}, + {file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b670c30fd87a6aec281c3c9896d3bae4b205fd75d79d06dc87c2503717e46092"}, + {file = "rpds_py-0.28.0-cp314-cp314t-win32.whl", hash = "sha256:8014045a15b4d2b3476f0a287fcc93d4f823472d7d1308d47884ecac9e612be3"}, + {file = "rpds_py-0.28.0-cp314-cp314t-win_amd64.whl", hash = "sha256:7a4e59c90d9c27c561eb3160323634a9ff50b04e4f7820600a2beb0ac90db578"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f5e7101145427087e493b9c9b959da68d357c28c562792300dd21a095118ed16"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:31eb671150b9c62409a888850aaa8e6533635704fe2b78335f9aaf7ff81eec4d"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b55c1f64482f7d8bd39942f376bfdf2f6aec637ee8c805b5041e14eeb771db"}, + {file = 
"rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24743a7b372e9a76171f6b69c01aedf927e8ac3e16c474d9fe20d552a8cb45c7"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:389c29045ee8bbb1627ea190b4976a310a295559eaf9f1464a1a6f2bf84dde78"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23690b5827e643150cf7b49569679ec13fe9a610a15949ed48b85eb7f98f34ec"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f0c9266c26580e7243ad0d72fc3e01d6b33866cfab5084a6da7576bcf1c4f72"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4c6c4db5d73d179746951486df97fd25e92396be07fc29ee8ff9a8f5afbdfb27"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3b695a8fa799dd2cfdb4804b37096c5f6dba1ac7f48a7fbf6d0485bcd060316"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:6aa1bfce3f83baf00d9c5fcdbba93a3ab79958b4c7d7d1f55e7fe68c20e63912"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b0f9dceb221792b3ee6acb5438eb1f02b0cb2c247796a72b016dcc92c6de829"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5d0145edba8abd3db0ab22b5300c99dc152f5c9021fab861be0f0544dc3cbc5f"}, + {file = "rpds_py-0.28.0.tar.gz", hash = "sha256:abd4df20485a0983e2ca334a216249b6186d6e3c1627e106651943dbdb791aea"}, ] [[package]] @@ -3242,54 +3240,45 @@ test = ["zope.testrunner"] [[package]] name = "zope-interface" -version = "7.1.0" +version = "8.0.1" description = "Interfaces for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "zope.interface-7.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:2bd9e9f366a5df08ebbdc159f8224904c1c5ce63893984abb76954e6fbe4381a"}, - {file = "zope.interface-7.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:661d5df403cd3c5b8699ac480fa7f58047a3253b029db690efa0c3cf209993ef"}, - {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91b6c30689cfd87c8f264acb2fc16ad6b3c72caba2aec1bf189314cf1a84ca33"}, - {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b6a4924f5bad9fe21d99f66a07da60d75696a136162427951ec3cb223a5570d"}, - {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a3c00b35f6170be5454b45abe2719ea65919a2f09e8a6e7b1362312a872cd3"}, - {file = "zope.interface-7.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:b936d61dbe29572fd2cfe13e30b925e5383bed1aba867692670f5a2a2eb7b4e9"}, - {file = "zope.interface-7.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ac20581fc6cd7c754f6dff0ae06fedb060fa0e9ea6309d8be8b2701d9ea51c4"}, - {file = "zope.interface-7.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:848b6fa92d7c8143646e64124ed46818a0049a24ecc517958c520081fd147685"}, - {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec1ef1fdb6f014d5886b97e52b16d0f852364f447d2ab0f0c6027765777b6667"}, - {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bcff5c09d0215f42ba64b49205a278e44413d9bf9fa688fd9e42bfe472b5f4f"}, - {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07add15de0cc7e69917f7d286b64d54125c950aeb43efed7a5ea7172f000fbc1"}, - {file = "zope.interface-7.1.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:9940d5bc441f887c5f375ec62bcf7e7e495a2d5b1da97de1184a88fb567f06af"}, - {file = "zope.interface-7.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f245d039f72e6f802902375755846f5de1ee1e14c3e8736c078565599bcab621"}, - {file = "zope.interface-7.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6159e767d224d8f18deff634a1d3722e68d27488c357f62ebeb5f3e2f5288b1f"}, - {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e956b1fd7f3448dd5e00f273072e73e50dfafcb35e4227e6d5af208075593c9"}, - {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff115ef91c0eeac69cd92daeba36a9d8e14daee445b504eeea2b1c0b55821984"}, - {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec001798ab62c3fc5447162bf48496ae9fba02edc295a9e10a0b0c639a6452e"}, - {file = "zope.interface-7.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:124149e2d42067b9c6597f4dafdc7a0983d0163868f897b7bb5dc850b14f9a87"}, - {file = "zope.interface-7.1.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:9733a9a0f94ef53d7aa64661811b20875b5bc6039034c6e42fb9732170130573"}, - {file = "zope.interface-7.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5fcf379b875c610b5a41bc8a891841533f98de0520287d7f85e25386cd10d3e9"}, - {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0a45b5af9f72c805ee668d1479480ca85169312211bed6ed18c343e39307d5f"}, - {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af4a12b459a273b0b34679a5c3dc5e34c1847c3dd14a628aa0668e19e638ea2"}, - {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a735f82d2e3ed47ca01a20dfc4c779b966b16352650a8036ab3955aad151ed8a"}, - {file = "zope.interface-7.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:5501e772aff595e3c54266bc1bfc5858e8f38974ce413a8f1044aae0f32a83a3"}, - {file = "zope.interface-7.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec59fe53db7d32abb96c6d4efeed84aab4a7c38c62d7a901a9b20c09dd936e7a"}, - {file = "zope.interface-7.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e53c291debef523b09e1fe3dffe5f35dde164f1c603d77f770b88a1da34b7ed6"}, - {file = "zope.interface-7.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:711eebc77f2092c6a8b304bad0b81a6ce3cf5490b25574e7309fbc07d881e3af"}, - {file = "zope.interface-7.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a00ead2e24c76436e1b457a5132d87f83858330f6c923640b7ef82d668525d1"}, - {file = "zope.interface-7.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e28ea0bc4b084fc93a483877653a033062435317082cdc6388dec3438309faf"}, - {file = "zope.interface-7.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:27cfb5205d68b12682b6e55ab8424662d96e8ead19550aad0796b08dd2c9a45e"}, - {file = "zope.interface-7.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e3e48f3dea21c147e1b10c132016cb79af1159facca9736d231694ef5a740a8"}, - {file = "zope.interface-7.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a99240b1d02dc469f6afbe7da1bf617645e60290c272968f4e53feec18d7dce8"}, - {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc8a318162123eddbdf22fcc7b751288ce52e4ad096d3766ff1799244352449d"}, - {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7b25db127db3e6b597c5f74af60309c4ad65acd826f89609662f0dc33a54728"}, - {file = 
"zope.interface-7.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a29ac607e970b5576547f0e3589ec156e04de17af42839eedcf478450687317"}, - {file = "zope.interface-7.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:a14c9decf0eb61e0892631271d500c1e306c7b6901c998c7035e194d9150fdd1"}, - {file = "zope_interface-7.1.0.tar.gz", hash = "sha256:3f005869a1a05e368965adb2075f97f8ee9a26c61898a9e52a9764d93774f237"}, + {file = "zope_interface-8.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fd7195081b8637eeed8d73e4d183b07199a1dc738fb28b3de6666b1b55662570"}, + {file = "zope_interface-8.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f7c4bc4021108847bce763673ce70d0716b08dfc2ba9889e7bad46ac2b3bb924"}, + {file = "zope_interface-8.0.1-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:758803806b962f32c87b31bb18c298b022965ba34fe532163831cc39118c24ab"}, + {file = "zope_interface-8.0.1-cp310-cp310-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f8e88f35f86bbe8243cad4b2972deef0fdfca0a0723455abbebdc83bbab96b69"}, + {file = "zope_interface-8.0.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7844765695937d9b0d83211220b72e2cf6ac81a08608ad2b58f2c094af498d83"}, + {file = "zope_interface-8.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:64fa7b206dd9669f29d5c1241a768bebe8ab1e8a4b63ee16491f041e058c09d0"}, + {file = "zope_interface-8.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4bd01022d2e1bce4a4a4ed9549edb25393c92e607d7daa6deff843f1f68b479d"}, + {file = "zope_interface-8.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:29be8db8b712d94f1c05e24ea230a879271d787205ba1c9a6100d1d81f06c69a"}, + {file = "zope_interface-8.0.1-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = 
"sha256:51ae1b856565b30455b7879fdf0a56a88763b401d3f814fa9f9542d7410dbd7e"}, + {file = "zope_interface-8.0.1-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d2e7596149cb1acd1d4d41b9f8fe2ffc0e9e29e2e91d026311814181d0d9efaf"}, + {file = "zope_interface-8.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b2737c11c34fb9128816759864752d007ec4f987b571c934c30723ed881a7a4f"}, + {file = "zope_interface-8.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:cf66e4bf731aa7e0ced855bb3670e8cda772f6515a475c6a107bad5cb6604103"}, + {file = "zope_interface-8.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:115f27c1cc95ce7a517d960ef381beedb0a7ce9489645e80b9ab3cbf8a78799c"}, + {file = "zope_interface-8.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af655c573b84e3cb6a4f6fd3fbe04e4dc91c63c6b6f99019b3713ef964e589bc"}, + {file = "zope_interface-8.0.1-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:23f82ef9b2d5370750cc1bf883c3b94c33d098ce08557922a3fbc7ff3b63dfe1"}, + {file = "zope_interface-8.0.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35a1565d5244997f2e629c5c68715b3d9d9036e8df23c4068b08d9316dcb2822"}, + {file = "zope_interface-8.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:029ea1db7e855a475bf88d9910baab4e94d007a054810e9007ac037a91c67c6f"}, + {file = "zope_interface-8.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0beb3e7f7dc153944076fcaf717a935f68d39efa9fce96ec97bafcc0c2ea6cab"}, + {file = "zope_interface-8.0.1-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:c7cc027fc5c61c5d69e5080c30b66382f454f43dc379c463a38e78a9c6bab71a"}, + {file = "zope_interface-8.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fcf9097ff3003b7662299f1c25145e15260ec2a27f9a9e69461a585d79ca8552"}, + {file = 
"zope_interface-8.0.1-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6d965347dd1fb9e9a53aa852d4ded46b41ca670d517fd54e733a6b6a4d0561c2"}, + {file = "zope_interface-8.0.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9a3b8bb77a4b89427a87d1e9eb969ab05e38e6b4a338a9de10f6df23c33ec3c2"}, + {file = "zope_interface-8.0.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:87e6b089002c43231fb9afec89268391bcc7a3b66e76e269ffde19a8112fb8d5"}, + {file = "zope_interface-8.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:64a43f5280aa770cbafd0307cb3d1ff430e2a1001774e8ceb40787abe4bb6658"}, + {file = "zope_interface-8.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b84464a9fcf801289fa8b15bfc0829e7855d47fb4a8059555effc6f2d1d9a613"}, + {file = "zope_interface-8.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7b915cf7e747b5356d741be79a153aa9107e8923bc93bcd65fc873caf0fb5c50"}, + {file = "zope_interface-8.0.1-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:110c73ddf974b369ef3c6e7b0d87d44673cf4914eba3fe8a33bfb21c6c606ad8"}, + {file = "zope_interface-8.0.1-cp39-cp39-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9e9bdca901c1bcc34e438001718512c65b3b8924aabcd732b6e7a7f0cd715f17"}, + {file = "zope_interface-8.0.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bbd22d4801ad3e8ec704ba9e3e6a4ac2e875e4d77e363051ccb76153d24c5519"}, + {file = "zope_interface-8.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:a0016ca85f93b938824e2f9a43534446e95134a2945b084944786e1ace2020bc"}, + {file = "zope_interface-8.0.1.tar.gz", hash = "sha256:eba5610d042c3704a48222f7f7c6ab5b243ed26f917e2bc69379456b115e02d1"}, ] -[package.dependencies] -setuptools = "*" - [package.extras] docs = ["Sphinx", "furo", 
"repoze.sphinx.autointerface"] test = ["coverage[toml]", "zope.event", "zope.testing"] @@ -3334,4 +3323,4 @@ url-preview = ["lxml"] [metadata] lock-version = "2.1" python-versions = "^3.10.0" -content-hash = "363f8059c998566788b0465c338a3a8aaa56d1e61cc347f2473b687ff34f2a8d" +content-hash = "262051340e8b5daac02d0bb61a145a609984d76732423131bdbbeb052329f168" diff --git a/pyproject.toml b/pyproject.toml index 5fb0c88b4f..01fbfd8efb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -171,6 +171,10 @@ python = "^3.10.0" # ---------------------- # we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0 jsonschema = ">=3.0.0" +# 0.25.0 is the first version to support Python 3.14. +# We can remove this once https://github.com/python-jsonschema/jsonschema/issues/1426 is fixed +# and included in a release. +rpds-py = ">=0.25.0" # We choose 2.0 as a lower bound: the most recent backwards incompatible release. # It seems generally available, judging by https://pkgs.org/search/?q=immutabledict immutabledict = ">=2.0" diff --git a/rust/Cargo.toml b/rust/Cargo.toml index 4f0319a7f5..e8321d159b 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -30,14 +30,14 @@ http = "1.1.0" lazy_static = "1.4.0" log = "0.4.17" mime = "0.3.17" -pyo3 = { version = "0.25.1", features = [ +pyo3 = { version = "0.26.0", features = [ "macros", "anyhow", "abi3", "abi3-py310", ] } -pyo3-log = "0.12.4" -pythonize = "0.25.0" +pyo3-log = "0.13.1" +pythonize = "0.26.0" regex = "1.6.0" sha2 = "0.10.8" serde = { version = "1.0.144", features = ["derive"] } diff --git a/rust/src/events/internal_metadata.rs b/rust/src/events/internal_metadata.rs index 4711fc540f..fa40fdcfad 100644 --- a/rust/src/events/internal_metadata.rs +++ b/rust/src/events/internal_metadata.rs @@ -41,7 +41,7 @@ use pyo3::{ pybacked::PyBackedStr, pyclass, pymethods, types::{PyAnyMethods, PyDict, PyDictMethods, PyString}, - Bound, IntoPyObject, PyAny, PyObject, PyResult, Python, + Bound, IntoPyObject, Py, PyAny, PyResult, Python, 
}; use crate::UnwrapInfallible; @@ -289,7 +289,7 @@ impl EventInternalMetadata { /// Get a dict holding the data stored in the `internal_metadata` column in the database. /// /// Note that `outlier` and `stream_ordering` are stored in separate columns so are not returned here. - fn get_dict(&self, py: Python<'_>) -> PyResult { + fn get_dict(&self, py: Python<'_>) -> PyResult> { let dict = PyDict::new(py); for entry in &self.data { diff --git a/rust/src/http_client.rs b/rust/src/http_client.rs index ca4bf1590b..4bd80c8e04 100644 --- a/rust/src/http_client.rs +++ b/rust/src/http_client.rs @@ -134,7 +134,7 @@ fn get_runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult = OnceCell::new(); +static DEFER: OnceCell> = OnceCell::new(); /// Access to the `twisted.internet.defer` module. fn defer(py: Python<'_>) -> PyResult<&Bound<'_, PyAny>> { @@ -165,7 +165,7 @@ pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> #[pyclass] struct HttpClient { client: reqwest::Client, - reactor: PyObject, + reactor: Py, } #[pymethods] @@ -237,7 +237,7 @@ impl HttpClient { return Err(HttpResponseException::new(status, buffer)); } - let r = Python::with_gil(|py| buffer.into_pyobject(py).map(|o| o.unbind()))?; + let r = Python::attach(|py| buffer.into_pyobject(py).map(|o| o.unbind()))?; Ok(r) }) @@ -270,7 +270,7 @@ where handle.spawn(async move { let res = task.await; - Python::with_gil(move |py| { + Python::attach(move |py| { // Flatten the panic into standard python error let res = match res { Ok(r) => r, diff --git a/rust/src/rendezvous/mod.rs b/rust/src/rendezvous/mod.rs index 3148e0f67a..848b5035bb 100644 --- a/rust/src/rendezvous/mod.rs +++ b/rust/src/rendezvous/mod.rs @@ -29,7 +29,7 @@ use pyo3::{ exceptions::PyValueError, pyclass, pymethods, types::{PyAnyMethods, PyModule, PyModuleMethods}, - Bound, IntoPyObject, Py, PyAny, PyObject, PyResult, Python, + Bound, IntoPyObject, Py, PyAny, PyResult, Python, }; use ulid::Ulid; @@ -56,7 +56,7 @@ fn 
prepare_headers(headers: &mut HeaderMap, session: &Session) { #[pyclass] struct RendezvousHandler { base: Uri, - clock: PyObject, + clock: Py, sessions: BTreeMap, capacity: usize, max_content_length: u64, From bc926bd99eaf4882625b8262d07ea55a89675647 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 16:38:19 +0000 Subject: [PATCH 46/72] Bump ruff from 0.12.10 to 0.14.3 (#19124) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 107 +++++++++++++++++++++++++++++-------------------- pyproject.toml | 2 +- 2 files changed, 65 insertions(+), 44 deletions(-) diff --git a/poetry.lock b/poetry.lock index ce8b9ef6ee..4996517afc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -39,7 +39,7 @@ description = "The ultimate Python library in building OAuth and OpenID Connect optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"oidc\" or extra == \"jwt\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\"" files = [ {file = "authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a"}, {file = "authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b"}, @@ -444,7 +444,7 @@ description = "XML bomb protection for Python stdlib modules" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -469,7 +469,7 @@ description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and l 
optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "elementpath-4.1.5-py3-none-any.whl", hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"}, {file = "elementpath-4.1.5.tar.gz", hash = "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"}, @@ -519,7 +519,7 @@ description = "Python wrapper for hiredis" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"redis\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"redis\"" files = [ {file = "hiredis-3.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:9937d9b69321b393fbace69f55423480f098120bc55a3316e1ca3508c4dbbd6f"}, {file = "hiredis-3.3.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:50351b77f89ba6a22aff430b993653847f36b71d444509036baa0f2d79d1ebf4"}, @@ -842,7 +842,7 @@ description = "Jaeger Python OpenTracing Tracer implementation" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"}, ] @@ -980,7 +980,7 @@ description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\"" files = [ {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"}, {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"}, @@ -996,7 +996,7 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li optional = 
true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"url-preview\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"url-preview\"" files = [ {file = "lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388"}, {file = "lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153"}, @@ -1283,7 +1283,7 @@ description = "An LDAP3 auth provider for Synapse" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\"" files = [ {file = "matrix-synapse-ldap3-0.3.0.tar.gz", hash = "sha256:8bb6517173164d4b9cc44f49de411d8cebdb2e705d5dd1ea1f38733c4a009e1d"}, {file = "matrix_synapse_ldap3-0.3.0-py3-none-any.whl", hash = "sha256:8b4d701f8702551e98cc1d8c20dbed532de5613584c08d0df22de376ba99159d"}, @@ -1525,7 +1525,7 @@ description = "OpenTracing API for Python. 
See documentation at http://opentraci optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"}, ] @@ -1594,6 +1594,8 @@ groups = ["main"] files = [ {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"}, {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"}, {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"}, {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"}, {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"}, @@ -1603,6 +1605,8 @@ files = [ {file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"}, {file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"}, {file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"}, + {file = 
"pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"}, {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"}, {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"}, {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"}, @@ -1612,6 +1616,8 @@ files = [ {file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"}, {file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"}, {file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"}, {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"}, {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"}, {file = 
"pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"}, @@ -1624,6 +1630,8 @@ files = [ {file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"}, {file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"}, {file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"}, {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"}, {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"}, {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"}, @@ -1633,6 +1641,8 @@ files = [ {file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"}, {file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"}, {file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"}, {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"}, {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"}, {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"}, @@ -1642,6 +1652,8 @@ files = [ {file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"}, {file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"}, {file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"}, {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"}, {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"}, {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"}, @@ -1651,6 +1663,8 @@ files = [ {file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"}, {file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"}, {file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"}, {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"}, {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"}, {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"}, @@ -1660,6 +1674,8 @@ files = [ {file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"}, {file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"}, {file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"}, + {file = 
"pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"}, {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"}, {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"}, {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"}, @@ -1669,11 +1685,15 @@ files = [ {file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"}, {file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"}, {file = 
"pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"}, {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"}, @@ -1711,7 +1731,7 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"postgres\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"postgres\"" files = [ {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, @@ -1719,6 +1739,7 @@ files = [ {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, {file = 
"psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, + {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"}, {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, @@ -1731,7 +1752,7 @@ description = ".. image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=mas optional = true python-versions = "*" groups = ["main"] -markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")" +markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")" files = [ {file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"}, ] @@ -1747,7 +1768,7 @@ description = "A Simple library to enable psycopg2 compatability" optional = true python-versions = "*" groups = ["main"] -markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")" +markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")" files = [ {file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"}, ] @@ -2024,7 +2045,7 @@ description = "A development tool to measure, monitor and analyze the memory beh optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"cache-memory\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"cache-memory\"" files = [ {file = "Pympler-1.0.1-py3-none-any.whl", hash = 
"sha256:d260dda9ae781e1eab6ea15bacb84015849833ba5555f141d2d9b7b7473b307d"}, {file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"}, @@ -2084,7 +2105,7 @@ description = "Python implementation of SAML Version 2 Standard" optional = true python-versions = ">=3.9,<4.0" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"}, {file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"}, @@ -2109,7 +2130,7 @@ description = "Extensions to the standard Python datetime module" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -2137,7 +2158,7 @@ description = "World timezone definitions, modern and historical" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, @@ -2474,31 +2495,31 @@ files = [ [[package]] name = "ruff" -version = "0.12.10" +version = "0.14.3" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "ruff-0.12.10-py3-none-linux_armv6l.whl", hash = "sha256:8b593cb0fb55cc8692dac7b06deb29afda78c721c7ccfed22db941201b7b8f7b"}, - {file = "ruff-0.12.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ebb7333a45d56efc7c110a46a69a1b32365d5c5161e7244aaf3aa20ce62399c1"}, - {file = "ruff-0.12.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d59e58586829f8e4a9920788f6efba97a13d1fa320b047814e8afede381c6839"}, - {file = "ruff-0.12.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:822d9677b560f1fdeab69b89d1f444bf5459da4aa04e06e766cf0121771ab844"}, - {file = "ruff-0.12.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b4a64f4062a50c75019c61c7017ff598cb444984b638511f48539d3a1c98db"}, - {file = "ruff-0.12.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c6f4064c69d2542029b2a61d39920c85240c39837599d7f2e32e80d36401d6e"}, - {file = "ruff-0.12.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:059e863ea3a9ade41407ad71c1de2badfbe01539117f38f763ba42a1206f7559"}, - {file = "ruff-0.12.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1bef6161e297c68908b7218fa6e0e93e99a286e5ed9653d4be71e687dff101cf"}, - {file = "ruff-0.12.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4f1345fbf8fb0531cd722285b5f15af49b2932742fc96b633e883da8d841896b"}, - {file = "ruff-0.12.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f68433c4fbc63efbfa3ba5db31727db229fa4e61000f452c540474b03de52a9"}, - {file = "ruff-0.12.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:141ce3d88803c625257b8a6debf4a0473eb6eed9643a6189b68838b43e78165a"}, - {file = "ruff-0.12.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f3fc21178cd44c98142ae7590f42ddcb587b8e09a3b849cbc84edb62ee95de60"}, - {file = "ruff-0.12.10-py3-none-musllinux_1_2_armv7l.whl", hash = 
"sha256:7d1a4e0bdfafcd2e3e235ecf50bf0176f74dd37902f241588ae1f6c827a36c56"}, - {file = "ruff-0.12.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e67d96827854f50b9e3e8327b031647e7bcc090dbe7bb11101a81a3a2cbf1cc9"}, - {file = "ruff-0.12.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ae479e1a18b439c59138f066ae79cc0f3ee250712a873d00dbafadaad9481e5b"}, - {file = "ruff-0.12.10-py3-none-win32.whl", hash = "sha256:9de785e95dc2f09846c5e6e1d3a3d32ecd0b283a979898ad427a9be7be22b266"}, - {file = "ruff-0.12.10-py3-none-win_amd64.whl", hash = "sha256:7837eca8787f076f67aba2ca559cefd9c5cbc3a9852fd66186f4201b87c1563e"}, - {file = "ruff-0.12.10-py3-none-win_arm64.whl", hash = "sha256:cc138cc06ed9d4bfa9d667a65af7172b47840e1a98b02ce7011c391e54635ffc"}, - {file = "ruff-0.12.10.tar.gz", hash = "sha256:189ab65149d11ea69a2d775343adf5f49bb2426fc4780f65ee33b423ad2e47f9"}, + {file = "ruff-0.14.3-py3-none-linux_armv6l.whl", hash = "sha256:876b21e6c824f519446715c1342b8e60f97f93264012de9d8d10314f8a79c371"}, + {file = "ruff-0.14.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b6fd8c79b457bedd2abf2702b9b472147cd860ed7855c73a5247fa55c9117654"}, + {file = "ruff-0.14.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:71ff6edca490c308f083156938c0c1a66907151263c4abdcb588602c6e696a14"}, + {file = "ruff-0.14.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:786ee3ce6139772ff9272aaf43296d975c0217ee1b97538a98171bf0d21f87ed"}, + {file = "ruff-0.14.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cd6291d0061811c52b8e392f946889916757610d45d004e41140d81fb6cd5ddc"}, + {file = "ruff-0.14.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a497ec0c3d2c88561b6d90f9c29f5ae68221ac00d471f306fa21fa4264ce5fcd"}, + {file = "ruff-0.14.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e231e1be58fc568950a04fbe6887c8e4b85310e7889727e2b81db205c45059eb"}, + {file = 
"ruff-0.14.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:469e35872a09c0e45fecf48dd960bfbce056b5db2d5e6b50eca329b4f853ae20"}, + {file = "ruff-0.14.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d6bc90307c469cb9d28b7cfad90aaa600b10d67c6e22026869f585e1e8a2db0"}, + {file = "ruff-0.14.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2f8a0bbcffcfd895df39c9a4ecd59bb80dca03dc43f7fb63e647ed176b741e"}, + {file = "ruff-0.14.3-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:678fdd7c7d2d94851597c23ee6336d25f9930b460b55f8598e011b57c74fd8c5"}, + {file = "ruff-0.14.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1ec1ac071e7e37e0221d2f2dbaf90897a988c531a8592a6a5959f0603a1ecf5e"}, + {file = "ruff-0.14.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:afcdc4b5335ef440d19e7df9e8ae2ad9f749352190e96d481dc501b753f0733e"}, + {file = "ruff-0.14.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:7bfc42f81862749a7136267a343990f865e71fe2f99cf8d2958f684d23ce3dfa"}, + {file = "ruff-0.14.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a65e448cfd7e9c59fae8cf37f9221585d3354febaad9a07f29158af1528e165f"}, + {file = "ruff-0.14.3-py3-none-win32.whl", hash = "sha256:f3d91857d023ba93e14ed2d462ab62c3428f9bbf2b4fbac50a03ca66d31991f7"}, + {file = "ruff-0.14.3-py3-none-win_amd64.whl", hash = "sha256:d7b7006ac0756306db212fd37116cce2bd307e1e109375e1c6c106002df0ae5f"}, + {file = "ruff-0.14.3-py3-none-win_arm64.whl", hash = "sha256:26eb477ede6d399d898791d01961e16b86f02bc2486d0d1a7a9bb2379d055dc1"}, + {file = "ruff-0.14.3.tar.gz", hash = "sha256:4ff876d2ab2b161b6de0aa1f5bd714e8e9b4033dc122ee006925fbacc4f62153"}, ] [[package]] @@ -2541,7 +2562,7 @@ description = "Python client for Sentry (https://sentry.io)" optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"sentry\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"sentry\"" files = [ {file = 
"sentry_sdk-2.34.1-py2.py3-none-any.whl", hash = "sha256:b7a072e1cdc5abc48101d5146e1ae680fa81fe886d8d95aaa25a0b450c818d32"}, {file = "sentry_sdk-2.34.1.tar.gz", hash = "sha256:69274eb8c5c38562a544c3e9f68b5be0a43be4b697f5fd385bf98e4fbe672687"}, @@ -2729,7 +2750,7 @@ description = "Tornado IOLoop Backed Concurrent Futures" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"}, {file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"}, @@ -2745,7 +2766,7 @@ description = "Python bindings for the Apache Thrift RPC system" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"}, ] @@ -2808,7 +2829,7 @@ description = "Tornado is a Python web framework and asynchronous networking lib optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "tornado-6.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:f81067dad2e4443b015368b24e802d0083fecada4f0a4572fdb72fc06e54a9a6"}, {file = "tornado-6.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9ac1cbe1db860b3cbb251e795c701c41d343f06a96049d6274e7c77559117e41"}, @@ -2942,7 +2963,7 @@ description = "non-blocking redis client for python" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"redis\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"redis\"" files = [ {file = "txredisapi-1.4.11-py3-none-any.whl", 
hash = "sha256:ac64d7a9342b58edca13ef267d4fa7637c1aa63f8595e066801c1e8b56b22d0b"}, {file = "txredisapi-1.4.11.tar.gz", hash = "sha256:3eb1af99aefdefb59eb877b1dd08861efad60915e30ad5bf3d5bf6c5cedcdbc6"}, @@ -3188,7 +3209,7 @@ description = "An XML Schema validator and decoder" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "xmlschema-2.4.0-py3-none-any.whl", hash = "sha256:dc87be0caaa61f42649899189aab2fd8e0d567f2cf548433ba7b79278d231a4a"}, {file = "xmlschema-2.4.0.tar.gz", hash = "sha256:d74cd0c10866ac609e1ef94a5a69b018ad16e39077bc6393408b40c6babee793"}, @@ -3323,4 +3344,4 @@ url-preview = ["lxml"] [metadata] lock-version = "2.1" python-versions = "^3.10.0" -content-hash = "262051340e8b5daac02d0bb61a145a609984d76732423131bdbbeb052329f168" +content-hash = "2a891bc466355554d5c5873e7f8592e4f693de4d0f734ddb55f8a55bb4e529df" diff --git a/pyproject.toml b/pyproject.toml index 01fbfd8efb..f530666e45 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -339,7 +339,7 @@ all = [ # failing on new releases. Keeping lower bounds loose here means that dependabot # can bump versions without having to update the content-hash in the lockfile. # This helps prevents merge conflicts when running a batch of dependabot updates. -ruff = "0.12.10" +ruff = "0.14.3" # Typechecking lxml-stubs = ">=0.4.0" From e00a41183724f62661423720aa271ef74fee6ea0 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 3 Nov 2025 11:18:56 -0600 Subject: [PATCH 47/72] Move exception handling up the stack (avoid `exit(1)` in our composable functions) (#19116) Move exception handling up the stack (avoid `exit(1)` in our composable functions) Relevant to Synapse Pro for small hosts as we don't want to exit the entire Python process and affect all homeserver tenants. 
### Background As part of Element's plan to support a light form of vhosting (virtual host) (multiple instances of Synapse in the same Python process) (c.f Synapse Pro for small hosts), we're currently diving into the details and implications of running multiple instances of Synapse in the same Python process. "Clean tenant provisioning" tracked internally by https://github.com/element-hq/synapse-small-hosts/issues/48 --- changelog.d/19116.misc | 1 + synapse/app/generic_worker.py | 16 ++++++++-------- synapse/app/homeserver.py | 10 +++++----- 3 files changed, 14 insertions(+), 13 deletions(-) create mode 100644 changelog.d/19116.misc diff --git a/changelog.d/19116.misc b/changelog.d/19116.misc new file mode 100644 index 0000000000..2291d0781a --- /dev/null +++ b/changelog.d/19116.misc @@ -0,0 +1 @@ +Move exception handling up the stack (avoid `exit(1)` in our composable functions). diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 8f512c1577..1a7bedaac5 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -364,14 +364,11 @@ def start(config: HomeServerConfig) -> None: # Start the tracer init_tracer(hs) # noqa - try: - hs.setup() + hs.setup() - # Ensure the replication streamer is always started in case we write to any - # streams. Will no-op if no streams can be written to by this worker. - hs.get_replication_streamer() - except Exception as e: - handle_startup_exception(e) + # Ensure the replication streamer is always started in case we write to any + # streams. Will no-op if no streams can be written to by this worker. 
+ hs.get_replication_streamer() async def start() -> None: await _base.start(hs) @@ -388,7 +385,10 @@ async def start() -> None: def main() -> None: homeserver_config = load_config(sys.argv[1:]) with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - start(homeserver_config) + try: + start(homeserver_config) + except Exception as e: + handle_startup_exception(e) if __name__ == "__main__": diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 023a0d877f..9fd65b2718 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -414,10 +414,7 @@ def setup( # Start the tracer init_tracer(hs) # noqa - try: - hs.setup() - except Exception as e: - handle_startup_exception(e) + hs.setup() async def _start_when_reactor_running() -> None: # TODO: Feels like this should be moved somewhere else. @@ -464,7 +461,10 @@ def main() -> None: # check base requirements check_requirements() hs = create_homeserver(homeserver_config) - setup(hs) + try: + setup(hs) + except Exception as e: + handle_startup_exception(e) # redirect stdio to the logs, if configured. if not hs.config.logging.no_redirect_stdio: From f02ac5a4d5ca5297a14a01ff88f450dd8e757b43 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 17:19:20 +0000 Subject: [PATCH 48/72] Bump markdown-it-py from 3.0.0 to 4.0.0 (#19123) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4996517afc..72f784110f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1163,14 +1163,14 @@ test = ["coverage[toml] (>=7.2.5)", "mypy (>=1.2.0)", "pytest (>=7.3.0)", "pytes [[package]] name = "markdown-it-py" -version = "3.0.0" +version = "4.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" groups = ["dev"] files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, + {file = "markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147"}, + {file = "markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3"}, ] [package.dependencies] @@ -1178,13 +1178,12 @@ mdurl = ">=0.1,<1.0" [package.extras] benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "markdown-it-pyrs", "mistletoe (>=1.0,<2.0)", "mistune (>=3.0,<4.0)", "panflute (>=2.3,<3.0)"] linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] +plugins = ["mdit-py-plugins (>=0.5.0)"] profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] +rtd = ["ipykernel", "jupyter_sphinx", "mdit-py-plugins (>=0.5.0)", "myst-parser", "pyyaml", "sphinx", "sphinx-book-theme (>=1.0,<2.0)", "sphinx-copybutton", "sphinx-design"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions", "requests"] [[package]] name = "markupsafe" From a7107458c6c7bc6de7c1056c21196c259c4a2ea4 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 3 Nov 2025 12:04:43 -0600 Subject: [PATCH 49/72] Refactor app entrypoints (avoid `exit(1)` in our composable functions) (#19121) - Move `register_start` (calls 
`os._exit(1)`) out of `setup` (our composable function) - We want to avoid `exit(...)` because we use these composable functions in Synapse Pro for small hosts where we have multiple Synapse instances running in the same process. We don't want a problem from one homeserver tenant causing the entire Python process to exit and affect all of the other homeserver tenants. - Continuation of https://github.com/element-hq/synapse/pull/19116 - Align our app entrypoints: `homeserver` (main), `generic_worker` (worker), and `admin_cmd` ### Background As part of Element's plan to support a light form of vhosting (virtual host) (multiple instances of Synapse in the same Python process) (c.f Synapse Pro for small hosts), we're currently diving into the details and implications of running multiple instances of Synapse in the same Python process. "Clean tenant provisioning" tracked internally by https://github.com/element-hq/synapse-small-hosts/issues/48 --- changelog.d/19121.misc | 1 + synapse/app/_base.py | 2 +- synapse/app/admin_cmd.py | 83 +++++++++++++++++++++------- synapse/app/appservice.py | 9 +-- synapse/app/client_reader.py | 9 +-- synapse/app/event_creator.py | 9 +-- synapse/app/federation_reader.py | 9 +-- synapse/app/federation_sender.py | 9 +-- synapse/app/frontend_proxy.py | 9 +-- synapse/app/generic_worker.py | 95 ++++++++++++++++++++++++++++---- synapse/app/homeserver.py | 90 ++++++++++++++++++++---------- synapse/app/media_repository.py | 9 +-- synapse/app/pusher.py | 9 +-- synapse/app/synchrotron.py | 9 +-- synapse/app/user_dir.py | 8 +-- 15 files changed, 230 insertions(+), 130 deletions(-) create mode 100644 changelog.d/19121.misc diff --git a/changelog.d/19121.misc b/changelog.d/19121.misc new file mode 100644 index 0000000000..cb1fb8f024 --- /dev/null +++ b/changelog.d/19121.misc @@ -0,0 +1 @@ +Refactor and align app entrypoints (avoid `exit(1)` in our composable functions). 
diff --git a/synapse/app/_base.py b/synapse/app/_base.py index c0fcf8ca29..e5f4cfb0e6 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -602,7 +602,7 @@ def run_sighup(*args: Any, **kwargs: Any) -> None: _already_setup_sighup_handling = True -async def start(hs: "HomeServer", freeze: bool = True) -> None: +async def start(hs: "HomeServer", *, freeze: bool = True) -> None: """ Start a Synapse server or worker. diff --git a/synapse/app/admin_cmd.py b/synapse/app/admin_cmd.py index b5b1edac0a..dac603de88 100644 --- a/synapse/app/admin_cmd.py +++ b/synapse/app/admin_cmd.py @@ -64,7 +64,7 @@ from synapse.storage.databases.main.stream import StreamWorkerStore from synapse.storage.databases.main.tags import TagsWorkerStore from synapse.storage.databases.main.user_erasure_store import UserErasureWorkerStore -from synapse.types import JsonMapping, StateMap +from synapse.types import ISynapseReactor, JsonMapping, StateMap from synapse.util.logcontext import LoggingContext logger = logging.getLogger("synapse.app.admin_cmd") @@ -289,7 +289,21 @@ def load_config(argv_options: list[str]) -> tuple[HomeServerConfig, argparse.Nam return config, args -def start(config: HomeServerConfig, args: argparse.Namespace) -> None: +def create_homeserver( + config: HomeServerConfig, + reactor: Optional[ISynapseReactor] = None, +) -> AdminCmdServer: + """ + Create a homeserver instance for the Synapse admin command process. + + Args: + config: The configuration for the homeserver. + reactor: Optionally provide a reactor to use. Can be useful in different + scenarios that you want control over the reactor, such as tests. + + Returns: + A homeserver instance. 
+ """ if config.worker.worker_app is not None: assert config.worker.worker_app == "synapse.app.admin_cmd" @@ -312,33 +326,62 @@ def start(config: HomeServerConfig, args: argparse.Namespace) -> None: synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts - ss = AdminCmdServer( + admin_command_server = AdminCmdServer( config.server.server_name, config=config, + reactor=reactor, ) - setup_logging(ss, config, use_worker_options=True) - - ss.setup() + return admin_command_server - # We use task.react as the basic run command as it correctly handles tearing - # down the reactor when the deferreds resolve and setting the return value. - # We also make sure that `_base.start` gets run before we actually run the - # command. - async def run() -> None: - with LoggingContext(name="command", server_name=config.server.server_name): - await _base.start(ss) - await args.func(ss, args) +def setup(admin_command_server: AdminCmdServer) -> None: + """ + Setup a `AdminCmdServer` instance. - _base.start_worker_reactor( - "synapse-admin-cmd", - config, - run_command=lambda: task.react(lambda _reactor: defer.ensureDeferred(run())), + Args: + admin_command_server: The homeserver to setup. + """ + setup_logging( + admin_command_server, admin_command_server.config, use_worker_options=True ) + admin_command_server.setup() -if __name__ == "__main__": + +async def start(admin_command_server: AdminCmdServer, args: argparse.Namespace) -> None: + """ + Should be called once the reactor is running. + + Args: + admin_command_server: The homeserver to setup. + args: Command line arguments. + """ + # This needs a logcontext unlike other entrypoints because we're not using + # `register_start(...)` to run this function. + with LoggingContext(name="start", server_name=admin_command_server.hostname): + # We make sure that `_base.start` gets run before we actually run the command. 
+ await _base.start(admin_command_server) + # Run the command + await args.func(admin_command_server, args) + + +def main() -> None: homeserver_config, args = load_config(sys.argv[1:]) with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - start(homeserver_config, args) + admin_command_server = create_homeserver(homeserver_config) + setup(admin_command_server) + + _base.start_worker_reactor( + "synapse-admin-cmd", + admin_command_server.config, + # We use task.react as the basic run command as it correctly handles tearing + # down the reactor when the deferreds resolve and setting the return value. + run_command=lambda: task.react( + lambda _reactor: defer.ensureDeferred(start(admin_command_server, args)) + ), + ) + + +if __name__ == "__main__": + main() diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py index 823e1908b5..d251d0ab64 100644 --- a/synapse/app/appservice.py +++ b/synapse/app/appservice.py @@ -19,16 +19,11 @@ # # -import sys - -from synapse.app.generic_worker import load_config, start -from synapse.util.logcontext import LoggingContext +from synapse.app.generic_worker import main as worker_main def main() -> None: - homeserver_config = load_config(sys.argv[1:]) - with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - start(homeserver_config) + worker_main() if __name__ == "__main__": diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index 823e1908b5..d251d0ab64 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -19,16 +19,11 @@ # # -import sys - -from synapse.app.generic_worker import load_config, start -from synapse.util.logcontext import LoggingContext +from synapse.app.generic_worker import main as worker_main def main() -> None: - homeserver_config = load_config(sys.argv[1:]) - with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - start(homeserver_config) + worker_main() if __name__ == 
"__main__": diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py index 1a9b0ad155..5b18578bd0 100644 --- a/synapse/app/event_creator.py +++ b/synapse/app/event_creator.py @@ -18,16 +18,11 @@ # # -import sys - -from synapse.app.generic_worker import load_config, start -from synapse.util.logcontext import LoggingContext +from synapse.app.generic_worker import main as worker_main def main() -> None: - homeserver_config = load_config(sys.argv[1:]) - with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - start(homeserver_config) + worker_main() if __name__ == "__main__": diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 823e1908b5..d251d0ab64 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -19,16 +19,11 @@ # # -import sys - -from synapse.app.generic_worker import load_config, start -from synapse.util.logcontext import LoggingContext +from synapse.app.generic_worker import main as worker_main def main() -> None: - homeserver_config = load_config(sys.argv[1:]) - with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - start(homeserver_config) + worker_main() if __name__ == "__main__": diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index 823e1908b5..d251d0ab64 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -19,16 +19,11 @@ # # -import sys - -from synapse.app.generic_worker import load_config, start -from synapse.util.logcontext import LoggingContext +from synapse.app.generic_worker import main as worker_main def main() -> None: - homeserver_config = load_config(sys.argv[1:]) - with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - start(homeserver_config) + worker_main() if __name__ == "__main__": diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index 823e1908b5..d251d0ab64 100644 --- 
a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -19,16 +19,11 @@ # # -import sys - -from synapse.app.generic_worker import load_config, start -from synapse.util.logcontext import LoggingContext +from synapse.app.generic_worker import main as worker_main def main() -> None: - homeserver_config = load_config(sys.argv[1:]) - with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - start(homeserver_config) + worker_main() if __name__ == "__main__": diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 1a7bedaac5..a1dde368d4 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -21,6 +21,7 @@ # import logging import sys +from typing import Optional from twisted.web.resource import Resource @@ -111,6 +112,7 @@ from synapse.storage.databases.main.ui_auth import UIAuthWorkerStore from synapse.storage.databases.main.user_directory import UserDirectoryStore from synapse.storage.databases.main.user_erasure_store import UserErasureWorkerStore +from synapse.types import ISynapseReactor from synapse.util.httpresourcetree import create_resource_tree logger = logging.getLogger("synapse.app.generic_worker") @@ -332,7 +334,30 @@ def load_config(argv_options: list[str]) -> HomeServerConfig: return config -def start(config: HomeServerConfig) -> None: +def create_homeserver( + config: HomeServerConfig, + reactor: Optional[ISynapseReactor] = None, +) -> GenericWorkerServer: + """ + Create a homeserver instance for the Synapse worker process. + + Our composable functions (`create_homeserver`, `setup`, `start`) should not exit the + Python process (call `exit(...)`) and instead raise exceptions which can be handled + by the caller as desired. This doesn't matter for the normal case of one Synapse + instance running in the Python process (as we're only affecting ourselves), but is + important when we have multiple Synapse homeserver tenants running in the same + Python process (c.f. 
Synapse Pro for small hosts) as we don't want some problem from + one tenant stopping the rest of the tenants. + + Args: + config: The configuration for the homeserver. + reactor: Optionally provide a reactor to use. Can be useful in different + scenarios that you want control over the reactor, such as tests. + + Returns: + A homeserver instance. + """ + # For backwards compatibility let any of the old app names. assert config.worker.worker_app in ( "synapse.app.appservice", @@ -357,9 +382,29 @@ def start(config: HomeServerConfig) -> None: hs = GenericWorkerServer( config.server.server_name, config=config, + reactor=reactor, ) - setup_logging(hs, config, use_worker_options=True) + return hs + + +def setup(hs: GenericWorkerServer) -> None: + """ + Setup a `GenericWorkerServer` (worker) instance. + + Our composable functions (`create_homeserver`, `setup`, `start`) should not exit the + Python process (call `exit(...)`) and instead raise exceptions which can be handled + by the caller as desired. This doesn't matter for the normal case of one Synapse + instance running in the Python process (as we're only affecting ourselves), but is + important when we have multiple Synapse homeserver tenants running in the same + Python process (c.f. Synapse Pro for small hosts) as we don't want some problem from + one tenant stopping the rest of the tenants. + + Args: + hs: The homeserver to setup. + """ + + setup_logging(hs, hs.config, use_worker_options=True) # Start the tracer init_tracer(hs) # noqa @@ -370,26 +415,56 @@ def start(config: HomeServerConfig) -> None: # streams. Will no-op if no streams can be written to by this worker. hs.get_replication_streamer() - async def start() -> None: - await _base.start(hs) - register_start(hs, start) +async def start( + hs: GenericWorkerServer, + *, + freeze: bool = True, +) -> None: + """ + Should be called once the reactor is running. 
+ + Our composable functions (`create_homeserver`, `setup`, `start`) should not exit the + Python process (call `exit(...)`) and instead raise exceptions which can be handled + by the caller as desired. This doesn't matter for the normal case of one Synapse + instance running in the Python process (as we're only affecting ourselves), but is + important when we have multiple Synapse homeserver tenants running in the same + Python process (c.f. Synapse Pro for small hosts) as we don't want some problem from + one tenant stopping the rest of the tenants. - # redirect stdio to the logs, if configured. - if not hs.config.logging.no_redirect_stdio: - redirect_stdio_to_logs() + Args: + hs: The homeserver to setup. + freeze: whether to freeze the homeserver base objects in the garbage collector. + May improve garbage collection performance by marking objects with an effectively + static lifetime as frozen so they don't need to be considered for cleanup. + If you ever want to `shutdown` the homeserver, this needs to be + False otherwise the homeserver cannot be garbage collected after `shutdown`. + """ - _base.start_worker_reactor("synapse-generic-worker", config) + await _base.start(hs, freeze=freeze) def main() -> None: homeserver_config = load_config(sys.argv[1:]) + + # Create a logging context as soon as possible so we can start associating + # everything with this homeserver. with LoggingContext(name="main", server_name=homeserver_config.server.server_name): + # redirect stdio to the logs, if configured. 
+ if not homeserver_config.logging.no_redirect_stdio: + redirect_stdio_to_logs() + + hs = create_homeserver(homeserver_config) try: - start(homeserver_config) + setup(hs) except Exception as e: handle_startup_exception(e) + # Register a callback to be invoked once the reactor is running + register_start(hs, start, hs) + + _base.start_worker_reactor("synapse-generic-worker", homeserver_config) + if __name__ == "__main__": main() diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 9fd65b2718..3807a18ab7 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -71,7 +71,6 @@ from synapse.server import HomeServer from synapse.storage import DataStore from synapse.types import ISynapseReactor -from synapse.util.check_dependencies import check_requirements from synapse.util.httpresourcetree import create_resource_tree from synapse.util.module_loader import load_module @@ -356,6 +355,14 @@ def create_homeserver( """ Create a homeserver instance for the Synapse main process. + Our composable functions (`create_homeserver`, `setup`, `start`) should not exit the + Python process (call `exit(...)`) and instead raise exceptions which can be handled + by the caller as desired. This doesn't matter for the normal case of one Synapse + instance running in the Python process (as we're only affecting ourselves), but is + important when we have multiple Synapse homeserver tenants running in the same + Python process (c.f. Synapse Pro for small hosts) as we don't want some problem from + one tenant stopping the rest of the tenants. + Args: config: The configuration for the homeserver. reactor: Optionally provide a reactor to use. Can be useful in different @@ -388,22 +395,20 @@ def create_homeserver( def setup( hs: SynapseHomeServer, - *, - freeze: bool = True, ) -> None: """ - Setup a Synapse homeserver instance given a configuration. + Setup a `SynapseHomeServer` (main) instance. 
+ + Our composable functions (`create_homeserver`, `setup`, `start`) should not exit the + Python process (call `exit(...)`) and instead raise exceptions which can be handled + by the caller as desired. This doesn't matter for the normal case of one Synapse + instance running in the Python process (as we're only affecting ourselves), but is + important when we have multiple Synapse homeserver tenants running in the same + Python process (c.f. Synapse Pro for small hosts) as we don't want some problem from + one tenant stopping the rest of the tenants. Args: hs: The homeserver to setup. - freeze: whether to freeze the homeserver base objects in the garbage collector. - May improve garbage collection performance by marking objects with an effectively - static lifetime as frozen so they don't need to be considered for cleanup. - If you ever want to `shutdown` the homeserver, this needs to be - False otherwise the homeserver cannot be garbage collected after `shutdown`. - - Returns: - A homeserver instance. """ setup_logging(hs, hs.config, use_worker_options=False) @@ -416,22 +421,44 @@ def setup( hs.setup() - async def _start_when_reactor_running() -> None: - # TODO: Feels like this should be moved somewhere else. - # - # Load the OIDC provider metadatas, if OIDC is enabled. - if hs.config.oidc.oidc_enabled: - oidc = hs.get_oidc_handler() - # Loading the provider metadata also ensures the provider config is valid. - await oidc.load_metadata() - await _base.start(hs, freeze) +async def start( + hs: SynapseHomeServer, + *, + freeze: bool = True, +) -> None: + """ + Should be called once the reactor is running. + + Our composable functions (`create_homeserver`, `setup`, `start`) should not exit the + Python process (call `exit(...)`) and instead raise exceptions which can be handled + by the caller as desired. 
This doesn't matter for the normal case of one Synapse + instance running in the Python process (as we're only affecting ourselves), but is + important when we have multiple Synapse homeserver tenants running in the same + Python process (c.f. Synapse Pro for small hosts) as we don't want some problem from + one tenant stopping the rest of the tenants. - # TODO: Feels like this should be moved somewhere else. - hs.get_datastores().main.db_pool.updates.start_doing_background_updates() + Args: + hs: The homeserver to setup. + freeze: whether to freeze the homeserver base objects in the garbage collector. + May improve garbage collection performance by marking objects with an effectively + static lifetime as frozen so they don't need to be considered for cleanup. + If you ever want to `shutdown` the homeserver, this needs to be + False otherwise the homeserver cannot be garbage collected after `shutdown`. + """ + + # TODO: Feels like this should be moved somewhere else. + # + # Load the OIDC provider metadatas, if OIDC is enabled. + if hs.config.oidc.oidc_enabled: + oidc = hs.get_oidc_handler() + # Loading the provider metadata also ensures the provider config is valid. + await oidc.load_metadata() - # Register a callback to be invoked once the reactor is running - register_start(hs, _start_when_reactor_running) + await _base.start(hs, freeze=freeze) + + # TODO: Feels like this should be moved somewhere else. + hs.get_datastores().main.db_pool.updates.start_doing_background_updates() def start_reactor( @@ -457,18 +484,21 @@ def start_reactor( def main() -> None: homeserver_config = load_or_generate_config(sys.argv[1:]) + # Create a logging context as soon as possible so we can start associating + # everything with this homeserver. with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - # check base requirements - check_requirements() + # redirect stdio to the logs, if configured. 
+ if not homeserver_config.logging.no_redirect_stdio: + redirect_stdio_to_logs() + hs = create_homeserver(homeserver_config) try: setup(hs) except Exception as e: handle_startup_exception(e) - # redirect stdio to the logs, if configured. - if not hs.config.logging.no_redirect_stdio: - redirect_stdio_to_logs() + # Register a callback to be invoked once the reactor is running + register_start(hs, start, hs) start_reactor(homeserver_config) diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py index 823e1908b5..d251d0ab64 100644 --- a/synapse/app/media_repository.py +++ b/synapse/app/media_repository.py @@ -19,16 +19,11 @@ # # -import sys - -from synapse.app.generic_worker import load_config, start -from synapse.util.logcontext import LoggingContext +from synapse.app.generic_worker import main as worker_main def main() -> None: - homeserver_config = load_config(sys.argv[1:]) - with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - start(homeserver_config) + worker_main() if __name__ == "__main__": diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py index 823e1908b5..d251d0ab64 100644 --- a/synapse/app/pusher.py +++ b/synapse/app/pusher.py @@ -19,16 +19,11 @@ # # -import sys - -from synapse.app.generic_worker import load_config, start -from synapse.util.logcontext import LoggingContext +from synapse.app.generic_worker import main as worker_main def main() -> None: - homeserver_config = load_config(sys.argv[1:]) - with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - start(homeserver_config) + worker_main() if __name__ == "__main__": diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py index 823e1908b5..d251d0ab64 100644 --- a/synapse/app/synchrotron.py +++ b/synapse/app/synchrotron.py @@ -19,16 +19,11 @@ # # -import sys - -from synapse.app.generic_worker import load_config, start -from synapse.util.logcontext import LoggingContext +from 
synapse.app.generic_worker import main as worker_main def main() -> None: - homeserver_config = load_config(sys.argv[1:]) - with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - start(homeserver_config) + worker_main() if __name__ == "__main__": diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py index f64d82e41f..2c47d9f4fd 100644 --- a/synapse/app/user_dir.py +++ b/synapse/app/user_dir.py @@ -19,16 +19,12 @@ # # -import sys -from synapse.app.generic_worker import load_config, start -from synapse.util.logcontext import LoggingContext +from synapse.app.generic_worker import main as worker_main def main() -> None: - homeserver_config = load_config(sys.argv[1:]) - with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - start(homeserver_config) + worker_main() if __name__ == "__main__": From 2c5deb800ec2dba7fa1e8559811b698a72201807 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 19:18:16 +0000 Subject: [PATCH 50/72] Bump icu_segmenter from 2.0.0 to 2.0.1 (#19126) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a057c812af..3ff8f2c477 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -589,9 +589,9 @@ dependencies = [ [[package]] name = "icu_segmenter" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e185fc13b6401c138cf40db12b863b35f5edf31b88192a545857b41aeaf7d3d3" +checksum = "38e30e593cf9c3ca2f51aa312eb347cd1ba95715e91a842ec3fc9058eab2af4b" dependencies = [ "core_maths", "displaydoc", From 4f9dc3b6134f2073111e29d105ff1b4736b88ad8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 19:19:06 +0000 Subject: [PATCH 51/72] Bump psycopg2 from 2.9.10 to 
2.9.11 (#19125) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/poetry.lock b/poetry.lock index 72f784110f..1de977f15c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1725,23 +1725,20 @@ twisted = ["twisted"] [[package]] name = "psycopg2" -version = "2.9.10" +version = "2.9.11" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] markers = "extra == \"all\" or extra == \"postgres\"" files = [ - {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, - {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, - {file = "psycopg2-2.9.10-cp311-cp311-win32.whl", hash = "sha256:47c4f9875125344f4c2b870e41b6aad585901318068acd01de93f3677a6522c2"}, - {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, - {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, - {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, - {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"}, - {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, - {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, - {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, + {file = 
"psycopg2-2.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:103e857f46bb76908768ead4e2d0ba1d1a130e7b8ed77d3ae91e8b33481813e8"}, + {file = "psycopg2-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:210daed32e18f35e3140a1ebe059ac29209dd96468f2f7559aa59f75ee82a5cb"}, + {file = "psycopg2-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:e03e4a6dbe87ff81540b434f2e5dc2bddad10296db5eea7bdc995bf5f4162938"}, + {file = "psycopg2-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:8dc379166b5b7d5ea66dcebf433011dfc51a7bb8a5fc12367fa05668e5fc53c8"}, + {file = "psycopg2-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:f10a48acba5fe6e312b891f290b4d2ca595fc9a06850fe53320beac353575578"}, + {file = "psycopg2-2.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:6ecddcf573777536bddfefaea8079ce959287798c8f5804bee6933635d538924"}, + {file = "psycopg2-2.9.11.tar.gz", hash = "sha256:964d31caf728e217c697ff77ea69c2ba0865fa41ec20bb00f0977e62fdcc52e3"}, ] [[package]] From e02a6f5e5d354e0862b3e018c0ef69007647f8d3 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 3 Nov 2025 14:07:10 -0600 Subject: [PATCH 52/72] Fix lost logcontext on `HomeServer.shutdown()` (#19108) Same fix as https://github.com/element-hq/synapse/pull/19090 Spawning from working on clean tenant deprovisioning in the Synapse Pro for small hosts project (https://github.com/element-hq/synapse-small-hosts/pull/204). --- changelog.d/19108.bugfix | 1 + synapse/server.py | 4 +++- tests/app/test_homeserver_shutdown.py | 23 ++++++++++++++++++----- 3 files changed, 22 insertions(+), 6 deletions(-) create mode 100644 changelog.d/19108.bugfix diff --git a/changelog.d/19108.bugfix b/changelog.d/19108.bugfix new file mode 100644 index 0000000000..a2afe19f41 --- /dev/null +++ b/changelog.d/19108.bugfix @@ -0,0 +1 @@ +Fix lost logcontext when using `HomeServer.shutdown()`. 
diff --git a/synapse/server.py b/synapse/server.py index 2c252ce86f..766515c930 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -143,6 +143,7 @@ SimpleHttpClient, ) from synapse.http.matrixfederationclient import MatrixFederationHttpClient +from synapse.logging.context import PreserveLoggingContext from synapse.media.media_repository import MediaRepository from synapse.metrics import ( all_later_gauges_to_clean_up_on_shutdown, @@ -507,7 +508,8 @@ async def shutdown(self) -> None: for background_process in list(self._background_processes): try: - background_process.cancel() + with PreserveLoggingContext(): + background_process.cancel() except Exception: pass self._background_processes.clear() diff --git a/tests/app/test_homeserver_shutdown.py b/tests/app/test_homeserver_shutdown.py index d8119ba310..f127e5571d 100644 --- a/tests/app/test_homeserver_shutdown.py +++ b/tests/app/test_homeserver_shutdown.py @@ -22,6 +22,7 @@ import weakref from synapse.app.homeserver import SynapseHomeServer +from synapse.logging.context import LoggingContext from synapse.storage.background_updates import UpdaterStatus from tests.server import ( @@ -29,7 +30,7 @@ get_clock, setup_test_homeserver, ) -from tests.unittest import HomeserverTestCase +from tests.unittest import HomeserverTestCase, logcontext_clean class HomeserverCleanShutdownTestCase(HomeserverTestCase): @@ -44,6 +45,7 @@ def setUp(self) -> None: # closed in a timely manner during shutdown. Simulating this behaviour in a unit test # won't be as good as a proper integration test in complement. + @logcontext_clean def test_clean_homeserver_shutdown(self) -> None: """Ensure the `SynapseHomeServer` can be fully shutdown and garbage collected""" self.reactor, self.clock = get_clock() @@ -63,8 +65,13 @@ def test_clean_homeserver_shutdown(self) -> None: # we use in tests doesn't handle this properly (see doc comment) cleanup_test_reactor_system_event_triggers(self.reactor) - # Cleanup the homeserver. 
- self.get_success(self.hs.shutdown()) + async def shutdown() -> None: + # Use a logcontext just to double-check that we don't mangle the logcontext + # during shutdown. + with LoggingContext(name="hs_shutdown", server_name=self.hs.hostname): + await self.hs.shutdown() + + self.get_success(shutdown()) # Cleanup the internal reference in our test case del self.hs @@ -114,6 +121,7 @@ def test_clean_homeserver_shutdown(self) -> None: # # to generate the result. # objgraph.show_backrefs(synapse_hs, max_depth=10, too_many=10) + @logcontext_clean def test_clean_homeserver_shutdown_mid_background_updates(self) -> None: """Ensure the `SynapseHomeServer` can be fully shutdown and garbage collected before background updates have completed""" @@ -141,8 +149,13 @@ def test_clean_homeserver_shutdown_mid_background_updates(self) -> None: # Ensure the background updates are not complete. self.assertNotEqual(store.db_pool.updates.get_status(), UpdaterStatus.COMPLETE) - # Cleanup the homeserver. - self.get_success(self.hs.shutdown()) + async def shutdown() -> None: + # Use a logcontext just to double-check that we don't mangle the logcontext + # during shutdown. + with LoggingContext(name="hs_shutdown", server_name=self.hs.hostname): + await self.hs.shutdown() + + self.get_success(shutdown()) # Cleanup the internal reference in our test case del self.hs From 891acfd502b5abd60fffdc161d332244e2e1462d Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 3 Nov 2025 15:23:22 -0600 Subject: [PATCH 53/72] Move `oidc.load_metadata()` startup into `_base.start()` (#19056) Slightly related to ["clean-tenant provisioning"](https://github.com/element-hq/synapse-small-hosts/issues/221) as making startup cleaner, makes it more clear how to handle clean provisioning. 
--- changelog.d/19056.misc | 1 + synapse/app/_base.py | 10 ++++++++++ synapse/app/homeserver.py | 8 -------- 3 files changed, 11 insertions(+), 8 deletions(-) create mode 100644 changelog.d/19056.misc diff --git a/changelog.d/19056.misc b/changelog.d/19056.misc new file mode 100644 index 0000000000..f3a1b4e66e --- /dev/null +++ b/changelog.d/19056.misc @@ -0,0 +1 @@ +Move `oidc.load_metadata()` startup into `_base.start()`. diff --git a/synapse/app/_base.py b/synapse/app/_base.py index e5f4cfb0e6..2de5bdb51e 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -648,6 +648,16 @@ async def start(hs: "HomeServer", *, freeze: bool = True) -> None: # Apply the cache config. hs.config.caches.resize_all_caches() + # Load the OIDC provider metadatas, if OIDC is enabled. + if hs.config.oidc.oidc_enabled: + oidc = hs.get_oidc_handler() + # Loading the provider metadata also ensures the provider config is valid. + # + # FIXME: It feels a bit strange to validate and block on startup as one of these + # OIDC providers could be temporarily unavailable and cause Synapse to be unable + # to start. + await oidc.load_metadata() + # Load the certificate from disk. refresh_certificate(hs) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3807a18ab7..fb937c63c1 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -447,14 +447,6 @@ async def start( False otherwise the homeserver cannot be garbage collected after `shutdown`. """ - # TODO: Feels like this should be moved somewhere else. - # - # Load the OIDC provider metadatas, if OIDC is enabled. - if hs.config.oidc.oidc_enabled: - oidc = hs.get_oidc_handler() - # Loading the provider metadata also ensures the provider config is valid. - await oidc.load_metadata() - await _base.start(hs, freeze=freeze) # TODO: Feels like this should be moved somewhere else. 
From db00925ae77f134872b5e7cb26a1cacb31281334 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 3 Nov 2025 16:16:23 -0600 Subject: [PATCH 54/72] Redirect `stdout`/`stderr` to logs after initialization (#19131) This regressed in https://github.com/element-hq/synapse/pull/19121. I moved things in https://github.com/element-hq/synapse/pull/19121 because I thought that it made sense to redirect anything printed to `stdout`/`stderr` to the logs as early as possible. But we actually want to log any immediately apparent problems during initialization to `stderr` in the terminal so that they are obvious and visible to the operator. Now, I've moved `redirect_stdio_to_logs()` back to where it was previously along with some proper comment context for why we have it there. --- changelog.d/19131.misc | 1 + synapse/app/admin_cmd.py | 1 + synapse/app/generic_worker.py | 13 +++++++++---- synapse/app/homeserver.py | 13 +++++++++---- 4 files changed, 20 insertions(+), 8 deletions(-) create mode 100644 changelog.d/19131.misc diff --git a/changelog.d/19131.misc b/changelog.d/19131.misc new file mode 100644 index 0000000000..cb1fb8f024 --- /dev/null +++ b/changelog.d/19131.misc @@ -0,0 +1 @@ +Refactor and align app entrypoints (avoid `exit(1)` in our composable functions). 
diff --git a/synapse/app/admin_cmd.py b/synapse/app/admin_cmd.py index dac603de88..193482b7fc 100644 --- a/synapse/app/admin_cmd.py +++ b/synapse/app/admin_cmd.py @@ -369,6 +369,7 @@ async def start(admin_command_server: AdminCmdServer, args: argparse.Namespace) def main() -> None: homeserver_config, args = load_config(sys.argv[1:]) with LoggingContext(name="main", server_name=homeserver_config.server.server_name): + # Initialize and setup the homeserver admin_command_server = create_homeserver(homeserver_config) setup(admin_command_server) diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index a1dde368d4..0a4abd1839 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -450,16 +450,21 @@ def main() -> None: # Create a logging context as soon as possible so we can start associating # everything with this homeserver. with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - # redirect stdio to the logs, if configured. - if not homeserver_config.logging.no_redirect_stdio: - redirect_stdio_to_logs() - + # Initialize and setup the homeserver hs = create_homeserver(homeserver_config) try: setup(hs) except Exception as e: handle_startup_exception(e) + # For problems immediately apparent during initialization, we want to log to + # stderr in the terminal so that they are obvious and visible to the operator. + # + # Now that we're past the initialization stage, we can redirect anything printed + # to stdio to the logs, if configured. 
+ if not homeserver_config.logging.no_redirect_stdio: + redirect_stdio_to_logs() + # Register a callback to be invoked once the reactor is running register_start(hs, start, hs) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index fb937c63c1..bd51aad9ab 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -479,16 +479,21 @@ def main() -> None: # Create a logging context as soon as possible so we can start associating # everything with this homeserver. with LoggingContext(name="main", server_name=homeserver_config.server.server_name): - # redirect stdio to the logs, if configured. - if not homeserver_config.logging.no_redirect_stdio: - redirect_stdio_to_logs() - + # Initialize and setup the homeserver hs = create_homeserver(homeserver_config) try: setup(hs) except Exception as e: handle_startup_exception(e) + # For problems immediately apparent during initialization, we want to log to + # stderr in the terminal so that they are obvious and visible to the operator. + # + # Now that we're past the initialization stage, we can redirect anything printed + # to stdio to the logs, if configured. + if not homeserver_config.logging.no_redirect_stdio: + redirect_stdio_to_logs() + # Register a callback to be invoked once the reactor is running register_start(hs, start, hs) From 08f570f5f5668a9f4f3fad4669da3a34e9704566 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Tue, 4 Nov 2025 13:32:49 +0100 Subject: [PATCH 55/72] Fix "There is no current event loop in thread" error in tests (#19134) --- changelog.d/19134.bugfix | 1 + tests/events/test_auto_accept_invites.py | 17 ++++------------- 2 files changed, 5 insertions(+), 13 deletions(-) create mode 100644 changelog.d/19134.bugfix diff --git a/changelog.d/19134.bugfix b/changelog.d/19134.bugfix new file mode 100644 index 0000000000..61e626cc9b --- /dev/null +++ b/changelog.d/19134.bugfix @@ -0,0 +1 @@ +Add support for Python 3.14. 
\ No newline at end of file diff --git a/tests/events/test_auto_accept_invites.py b/tests/events/test_auto_accept_invites.py index d3842e72d7..623ec67ed6 100644 --- a/tests/events/test_auto_accept_invites.py +++ b/tests/events/test_auto_accept_invites.py @@ -19,9 +19,8 @@ # # import asyncio -from asyncio import Future from http import HTTPStatus -from typing import Any, Awaitable, Optional, TypeVar, cast +from typing import Any, Optional, TypeVar, cast from unittest.mock import Mock import attr @@ -787,18 +786,10 @@ def membership(self) -> str: async def make_awaitable(value: T) -> T: - return value - - -def make_multiple_awaitable(result: TV) -> Awaitable[TV]: """ - Makes an awaitable, suitable for mocking an `async` function. - This uses Futures as they can be awaited multiple times so can be returned - to multiple callers. + Makes a fresh awaitable, suitable for mocking an `async` function. """ - future: Future[TV] = Future() - future.set_result(result) - return future + return value def create_module( @@ -809,7 +800,7 @@ def create_module( module_api = Mock(spec=ModuleApi) module_api.is_mine.side_effect = lambda a: a.split(":")[1] == "test" module_api.worker_name = worker_name - module_api.sleep.return_value = make_multiple_awaitable(None) + module_api.sleep.return_value = lambda *_args, **_kwargs: make_awaitable(None) module_api.get_userinfo_by_id.return_value = UserInfo( user_id=UserID.from_string("@user:test"), is_admin=False, From 5408101d21a08c42359737643a6cdab5021c1eb4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 4 Nov 2025 12:44:57 +0000 Subject: [PATCH 56/72] Speed up pruning of ratelimiter (#19129) I noticed this in some profiling. Basically, we prune the ratelimiters by copying and iterating over every entry every 60 seconds. Instead, let's use a wheel timer to track when we should potentially prune a given key, and then we a) check fewer keys, and b) can run more frequently. 
Hopefully this should mean we don't have a large pause everytime we prune a ratelimiter with lots of keys. Also fixes a bug where we didn't prune entries that were added via `record_action` and never subsequently updated. This affected the media and joins-per-room ratelimiter. --- changelog.d/19129.misc | 1 + synapse/api/ratelimiting.py | 71 ++++++++++++++++++---- tests/api/test_ratelimiting.py | 15 +++++ tests/federation/test_federation_server.py | 4 +- tests/handlers/test_room_member.py | 8 +-- 5 files changed, 80 insertions(+), 19 deletions(-) create mode 100644 changelog.d/19129.misc diff --git a/changelog.d/19129.misc b/changelog.d/19129.misc new file mode 100644 index 0000000000..117dbfadea --- /dev/null +++ b/changelog.d/19129.misc @@ -0,0 +1 @@ +Speed up pruning of ratelimiters. diff --git a/synapse/api/ratelimiting.py b/synapse/api/ratelimiting.py index 1a43bdff23..ee0e9181ce 100644 --- a/synapse/api/ratelimiting.py +++ b/synapse/api/ratelimiting.py @@ -27,6 +27,7 @@ from synapse.storage.databases.main import DataStore from synapse.types import Requester from synapse.util.clock import Clock +from synapse.util.wheel_timer import WheelTimer if TYPE_CHECKING: # To avoid circular imports: @@ -92,9 +93,14 @@ def __init__( # * The number of tokens currently in the bucket, # * The time point when the bucket was last completely empty, and # * The rate_hz (leak rate) of this particular bucket. - self.actions: dict[Hashable, tuple[float, float, float]] = {} + self.actions: dict[Hashable, tuple[int, float, float]] = {} - self.clock.looping_call(self._prune_message_counts, 60 * 1000) + # Records when actions should potentially be pruned. Note that we don't + # need to be accurate here, as this is just a cleanup job of `actions` + # and doesn't affect correctness. 
+ self._timer: WheelTimer[Hashable] = WheelTimer() + + self.clock.looping_call(self._prune_message_counts, 15 * 1000) def _get_key( self, requester: Optional[Requester], key: Optional[Hashable] @@ -109,9 +115,9 @@ def _get_key( def _get_action_counts( self, key: Hashable, time_now_s: float - ) -> tuple[float, float, float]: + ) -> tuple[int, float, float]: """Retrieve the action counts, with a fallback representing an empty bucket.""" - return self.actions.get(key, (0.0, time_now_s, 0.0)) + return self.actions.get(key, (0, time_now_s, self.rate_hz)) async def can_do_action( self, @@ -217,8 +223,11 @@ async def can_do_action( allowed = True action_count = action_count + n_actions - if update: - self.actions[key] = (action_count, time_start, rate_hz) + # Only record the action if we're allowed to perform it. + if allowed and update: + self._record_action_inner( + key, action_count, time_start, rate_hz, time_now_s + ) if rate_hz > 0: # Find out when the count of existing actions expires @@ -264,7 +273,37 @@ def record_action( key = self._get_key(requester, key) time_now_s = _time_now_s if _time_now_s is not None else self.clock.time() action_count, time_start, rate_hz = self._get_action_counts(key, time_now_s) - self.actions[key] = (action_count + n_actions, time_start, rate_hz) + self._record_action_inner( + key, action_count + n_actions, time_start, rate_hz, time_now_s + ) + + def _record_action_inner( + self, + key: Hashable, + action_count: int, + time_start: float, + rate_hz: float, + time_now_s: float, + ) -> None: + """Helper to atomically update the action count for a given key.""" + prune_time_s = time_start + action_count / rate_hz + + # If the prune time is in the past, we can just remove the entry rather + # than inserting and immediately pruning. 
+ if prune_time_s <= time_now_s: + self.actions.pop(key, None) + return + + self.actions[key] = (action_count, time_start, rate_hz) + + # We need to make sure that we only call prune *after* the entry + # expires, otherwise the scheduled prune may not actually prune it. This + # is just a cleanup job, so it doesn't matter if entries aren't pruned + # immediately after they expire. Hence we schedule the prune a little + # after the entry is due to expire. + prune_time_s += 0.1 + + self._timer.insert(int(time_now_s * 1000), key, int(prune_time_s * 1000)) def _prune_message_counts(self) -> None: """Remove message count entries that have not exceeded their defined @@ -272,18 +311,24 @@ def _prune_message_counts(self) -> None: """ time_now_s = self.clock.time() - # We create a copy of the key list here as the dictionary is modified during - # the loop - for key in list(self.actions.keys()): - action_count, time_start, rate_hz = self.actions[key] + # Pull out all the keys that *might* need pruning. We still need to + # verify they haven't since been updated. 
+ to_prune = self._timer.fetch(int(time_now_s * 1000)) + + for key in to_prune: + value = self.actions.get(key) + if value is None: + continue + + action_count, time_start, rate_hz = value # Rate limit = "seconds since we started limiting this action" * rate_hz # If this limit has not been exceeded, wipe our record of this action time_delta = time_now_s - time_start if action_count - time_delta * rate_hz > 0: continue - else: - del self.actions[key] + + del self.actions[key] async def ratelimit( self, diff --git a/tests/api/test_ratelimiting.py b/tests/api/test_ratelimiting.py index 2e45d4e4d2..34369a8746 100644 --- a/tests/api/test_ratelimiting.py +++ b/tests/api/test_ratelimiting.py @@ -228,6 +228,21 @@ def test_pruning(self) -> None: self.assertNotIn("test_id_1", limiter.actions) + def test_pruning_record_action(self) -> None: + """Test that entries added by record_action also get pruned.""" + limiter = Ratelimiter( + store=self.hs.get_datastores().main, + clock=self.clock, + cfg=RatelimitSettings(key="", per_second=0.1, burst_count=1), + ) + limiter.record_action(None, key="test_id_1", n_actions=1, _time_now_s=0) + + self.assertIn("test_id_1", limiter.actions) + + self.reactor.advance(60) + + self.assertNotIn("test_id_1", limiter.actions) + def test_db_user_override(self) -> None: """Test that users that have ratelimiting disabled in the DB aren't ratelimited. 
diff --git a/tests/federation/test_federation_server.py b/tests/federation/test_federation_server.py index 509f1f1e82..b1371d0ac7 100644 --- a/tests/federation/test_federation_server.py +++ b/tests/federation/test_federation_server.py @@ -462,7 +462,7 @@ def test_send_join_partial_state(self) -> None: ) self.assertEqual(r[("m.room.member", joining_user)].membership, "join") - @override_config({"rc_joins_per_room": {"per_second": 0, "burst_count": 3}}) + @override_config({"rc_joins_per_room": {"per_second": 0.1, "burst_count": 3}}) def test_make_join_respects_room_join_rate_limit(self) -> None: # In the test setup, two users join the room. Since the rate limiter burst # count is 3, a new make_join request to the room should be accepted. @@ -484,7 +484,7 @@ def test_make_join_respects_room_join_rate_limit(self) -> None: ) self.assertEqual(channel.code, HTTPStatus.TOO_MANY_REQUESTS, channel.json_body) - @override_config({"rc_joins_per_room": {"per_second": 0, "burst_count": 3}}) + @override_config({"rc_joins_per_room": {"per_second": 0.1, "burst_count": 3}}) def test_send_join_contributes_to_room_join_rate_limit_and_is_limited(self) -> None: # Make two make_join requests up front. (These are rate limited, but do not # contribute to the rate limit.) diff --git a/tests/handlers/test_room_member.py b/tests/handlers/test_room_member.py index 92c7c36602..8f9e27603e 100644 --- a/tests/handlers/test_room_member.py +++ b/tests/handlers/test_room_member.py @@ -50,7 +50,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.intially_unjoined_room_id = f"!example:{self.OTHER_SERVER_NAME}" - @override_config({"rc_joins_per_room": {"per_second": 0, "burst_count": 2}}) + @override_config({"rc_joins_per_room": {"per_second": 0.1, "burst_count": 2}}) def test_local_user_local_joins_contribute_to_limit_and_are_limited(self) -> None: # The rate limiter has accumulated one token from Alice's join after the create # event. 
@@ -76,7 +76,7 @@ def test_local_user_local_joins_contribute_to_limit_and_are_limited(self) -> Non by=0.5, ) - @override_config({"rc_joins_per_room": {"per_second": 0, "burst_count": 2}}) + @override_config({"rc_joins_per_room": {"per_second": 0.1, "burst_count": 2}}) def test_local_user_profile_edits_dont_contribute_to_limit(self) -> None: # The rate limiter has accumulated one token from Alice's join after the create # event. Alice should still be able to change her displayname. @@ -100,7 +100,7 @@ def test_local_user_profile_edits_dont_contribute_to_limit(self) -> None: ) ) - @override_config({"rc_joins_per_room": {"per_second": 0, "burst_count": 1}}) + @override_config({"rc_joins_per_room": {"per_second": 0.1, "burst_count": 1}}) def test_remote_joins_contribute_to_rate_limit(self) -> None: # Join once, to fill the rate limiter bucket. # @@ -248,7 +248,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.room_id = self.helper.create_room_as(self.alice, tok=self.alice_token) self.intially_unjoined_room_id = "!example:otherhs" - @override_config({"rc_joins_per_room": {"per_second": 0, "burst_count": 2}}) + @override_config({"rc_joins_per_room": {"per_second": 0.01, "burst_count": 2}}) def test_local_users_joining_on_another_worker_contribute_to_rate_limit( self, ) -> None: From 2760d153488109c0c694a2dc5151ef7c260c9b36 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 4 Nov 2025 13:34:46 +0000 Subject: [PATCH 57/72] 1.142.0rc1 --- CHANGES.md | 97 +++++++++++++++++++++++++++++++ changelog.d/19020.misc | 1 - changelog.d/19021.feature | 2 - changelog.d/19046.misc | 1 - changelog.d/19047.doc | 1 - changelog.d/19047.misc | 1 - changelog.d/19047.removal | 1 - changelog.d/19055.misc | 1 - changelog.d/19056.misc | 1 - changelog.d/19058.misc | 1 - changelog.d/19062.bugfix | 1 - changelog.d/19067.misc | 1 - changelog.d/19068.misc | 1 - changelog.d/19071.misc | 1 - changelog.d/19073.doc | 1 - changelog.d/19079.bugfix | 1 - 
changelog.d/19080.misc | 1 - changelog.d/19081.misc | 1 - changelog.d/19085.misc | 1 - changelog.d/19088.misc | 1 - changelog.d/19089.misc | 1 - changelog.d/19090.bugfix | 1 - changelog.d/19092.misc | 1 - changelog.d/19094.misc | 1 - changelog.d/19095.misc | 1 - changelog.d/19096.misc | 1 - changelog.d/19098.misc | 1 - changelog.d/19099.removal | 1 - changelog.d/19100.doc | 1 - changelog.d/19107.misc | 1 - changelog.d/19108.bugfix | 1 - changelog.d/19109.doc | 1 - changelog.d/19110.misc | 1 - changelog.d/19116.misc | 1 - changelog.d/19118.misc | 1 - changelog.d/19121.misc | 1 - changelog.d/19129.misc | 1 - changelog.d/19131.misc | 1 - changelog.d/19134.bugfix | 1 - debian/changelog | 6 ++ pyproject.toml | 2 +- schema/synapse-config.schema.yaml | 2 +- 42 files changed, 105 insertions(+), 41 deletions(-) delete mode 100644 changelog.d/19020.misc delete mode 100644 changelog.d/19021.feature delete mode 100644 changelog.d/19046.misc delete mode 100644 changelog.d/19047.doc delete mode 100644 changelog.d/19047.misc delete mode 100644 changelog.d/19047.removal delete mode 100644 changelog.d/19055.misc delete mode 100644 changelog.d/19056.misc delete mode 100644 changelog.d/19058.misc delete mode 100644 changelog.d/19062.bugfix delete mode 100644 changelog.d/19067.misc delete mode 100644 changelog.d/19068.misc delete mode 100644 changelog.d/19071.misc delete mode 100644 changelog.d/19073.doc delete mode 100644 changelog.d/19079.bugfix delete mode 100644 changelog.d/19080.misc delete mode 100644 changelog.d/19081.misc delete mode 100644 changelog.d/19085.misc delete mode 100644 changelog.d/19088.misc delete mode 100644 changelog.d/19089.misc delete mode 100644 changelog.d/19090.bugfix delete mode 100644 changelog.d/19092.misc delete mode 100644 changelog.d/19094.misc delete mode 100644 changelog.d/19095.misc delete mode 100644 changelog.d/19096.misc delete mode 100644 changelog.d/19098.misc delete mode 100644 changelog.d/19099.removal delete mode 100644 
changelog.d/19100.doc delete mode 100644 changelog.d/19107.misc delete mode 100644 changelog.d/19108.bugfix delete mode 100644 changelog.d/19109.doc delete mode 100644 changelog.d/19110.misc delete mode 100644 changelog.d/19116.misc delete mode 100644 changelog.d/19118.misc delete mode 100644 changelog.d/19121.misc delete mode 100644 changelog.d/19129.misc delete mode 100644 changelog.d/19131.misc delete mode 100644 changelog.d/19134.bugfix diff --git a/CHANGES.md b/CHANGES.md index eead7e35cd..2578bcdbc3 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,100 @@ +# Synapse 1.142.0rc1 (2025-11-04) + +## Dropped support for Python 3.9 + +This release drops support for Python 3.9, in line with our [dependency +deprecation +policy](https://element-hq.github.io/synapse/latest/deprecation_policy.html#platform-dependencies), +as it is now [end of life](https://endoflife.date/python). + + +## Deprecation of MacOS Python wheels + +The team has decided to deprecate and eventually stop publishing python wheels +for MacOS. This is a burden on the team, and we're not aware of any parties +that use them. Synapse docker images will continue to work on MacOS, as will +building Synapse from source (though note this requires a Rust compiler). + +At present, publishing MacOS Python wheels will continue for the next release +(1.143.0), but will not be available after that (1.144.0+). If you do make use +of these wheels downstream, please reach out to us in +[#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd +love to hear from you! + +## Features + +- Add support for Python 3.14. ([\#19055](https://github.com/element-hq/synapse/issues/19055), [\#19134](https://github.com/element-hq/synapse/issues/19134)) +- Add an [Admin API](https://element-hq.github.io/synapse/latest/usage/administration/admin_api/index.html) + to allow an admin to fetch the space/room hierarchy for a given space. 
([\#19021](https://github.com/element-hq/synapse/issues/19021)) + +## Bugfixes + +- Fix a bug introduced in 1.111.0 where failed attempts to download authenticated remote media would not be handled correctly. ([\#19062](https://github.com/element-hq/synapse/issues/19062)) +- Update the `oidc_session_no_samesite` cookie to have the `Secure` attribute, so the only difference between it and the paired `oidc_session` cookie, is the configuration of the `SameSite` attribute as described in the comments / cookie names. Contributed by @kieranlane. ([\#19079](https://github.com/element-hq/synapse/issues/19079)) +- Fix a bug introduced in 1.140.0 where lost logcontext warnings would be emitted from timeouts in sync and requests made by Synapse itself. ([\#19090](https://github.com/element-hq/synapse/issues/19090)) +- Fix a bug introduced in 1.140.0 where lost logcontext warnings were emitted when using `HomeServer.shutdown()`. ([\#19108](https://github.com/element-hq/synapse/issues/19108)) + +## Improved Documentation + +- Update the link to the Debian oldstable package for SQLite. ([\#19047](https://github.com/element-hq/synapse/issues/19047)) +- Point out additional Redis configuration options available in the worker docs. Contributed by @servisbryce. ([\#19073](https://github.com/element-hq/synapse/issues/19073)) +- Update the list of Debian releases that the downstream Debian package is maintained for. ([\#19100](https://github.com/element-hq/synapse/issues/19100)) +- Add [a page](https://element-hq.github.io/synapse/latest/development/internal_documentation/release_notes_review_checklist.html) to the documentation describing the steps the Synapse team takes to review the release notes before publishing them. ([\#19109](https://github.com/element-hq/synapse/issues/19109)) + +## Deprecations and Removals + +- Drop support for Python 3.9. ([\#19099](https://github.com/element-hq/synapse/issues/19099)) +- Remove support for SQLite < 3.37.2.
([\#19047](https://github.com/element-hq/synapse/issues/19047)) + +## Internal Changes + +- Fix CI linter for schema delta files to correctly handle all types of `CREATE TABLE` syntax. ([\#19020](https://github.com/element-hq/synapse/issues/19020)) +- Use type hinting generics in standard collections, as per [PEP 585](https://peps.python.org/pep-0585/), added in Python 3.9. ([\#19046](https://github.com/element-hq/synapse/issues/19046)) +- Always treat `RETURNING` as supported by SQL engines, now that the minimum-supported versions of both SQLite and PostgreSQL support it. ([\#19047](https://github.com/element-hq/synapse/issues/19047)) +- Move `oidc.load_metadata()` startup into `_base.start()`. ([\#19056](https://github.com/element-hq/synapse/issues/19056)) +- Remove logcontext problems caused by awaiting raw `deferLater(...)`. ([\#19058](https://github.com/element-hq/synapse/issues/19058)) +- Prevent duplicate logging setup when running multiple Synapse instances. ([\#19067](https://github.com/element-hq/synapse/issues/19067)) +- Be mindful of other logging context filters in 3rd-party code and avoid overwriting log record fields unless we know the log record is relevant to Synapse. ([\#19068](https://github.com/element-hq/synapse/issues/19068)) +- Update pydantic to v2. ([\#19071](https://github.com/element-hq/synapse/issues/19071)) +- Update deprecated code in the release script to prevent a warning message from being printed. ([\#19080](https://github.com/element-hq/synapse/issues/19080)) +- Update the deprecated poetry development dependencies group name in `pyproject.toml`. ([\#19081](https://github.com/element-hq/synapse/issues/19081)) +- Remove `pp38*` skip selector from cibuildwheel to silence warning. ([\#19085](https://github.com/element-hq/synapse/issues/19085)) +- Don't immediately exit the release script if the checkout is dirty. Instead, allow the user to clear the dirty changes and retry. 
([\#19088](https://github.com/element-hq/synapse/issues/19088)) +- Update the release script's generated announcement text to include a title and extra text for RC's. ([\#19089](https://github.com/element-hq/synapse/issues/19089)) +- Fix lints on main branch. ([\#19092](https://github.com/element-hq/synapse/issues/19092)) +- Use cheaper random string function in logcontext utilities. ([\#19094](https://github.com/element-hq/synapse/issues/19094)) +- Avoid clobbering other `SIGHUP` handlers in 3rd-party code. ([\#19095](https://github.com/element-hq/synapse/issues/19095)) +- Prevent duplicate GitHub draft releases being created during the Synapse release process. ([\#19096](https://github.com/element-hq/synapse/issues/19096)) +- Use Pillow's `Image.getexif` method instead of the experimental `Image._getexif`. ([\#19098](https://github.com/element-hq/synapse/issues/19098)) +- Prevent uv `/usr/local/.lock` file from appearing in built Synapse docker images. ([\#19107](https://github.com/element-hq/synapse/issues/19107)) +- Allow Synapse's runtime dependency checking code to take packaging markers (i.e. `python <= 3.14`) into account when checking dependencies. ([\#19110](https://github.com/element-hq/synapse/issues/19110)) +- Move exception handling up the stack (avoid `exit(1)` in our composable functions). ([\#19116](https://github.com/element-hq/synapse/issues/19116)) +- Fix a lint error related to lifetimes in Rust 1.90. ([\#19118](https://github.com/element-hq/synapse/issues/19118)) +- Refactor and align app entrypoints (avoid `exit(1)` in our composable functions). ([\#19121](https://github.com/element-hq/synapse/issues/19121), [\#19131](https://github.com/element-hq/synapse/issues/19131)) +- Speed up pruning of ratelimiters. ([\#19129](https://github.com/element-hq/synapse/issues/19129)) + + + +### Updates to locked dependencies + +* Bump actions/download-artifact from 5.0.0 to 6.0.0. 
([\#19102](https://github.com/element-hq/synapse/issues/19102)) +* Bump actions/upload-artifact from 4 to 5. ([\#19106](https://github.com/element-hq/synapse/issues/19106)) +* Bump hiredis from 3.2.1 to 3.3.0. ([\#19103](https://github.com/element-hq/synapse/issues/19103)) +* Bump icu_segmenter from 2.0.0 to 2.0.1. ([\#19126](https://github.com/element-hq/synapse/issues/19126)) +* Bump idna from 3.10 to 3.11. ([\#19053](https://github.com/element-hq/synapse/issues/19053)) +* Bump ijson from 3.4.0 to 3.4.0.post0. ([\#19051](https://github.com/element-hq/synapse/issues/19051)) +* Bump markdown-it-py from 3.0.0 to 4.0.0. ([\#19123](https://github.com/element-hq/synapse/issues/19123)) +* Bump msgpack from 1.1.1 to 1.1.2. ([\#19050](https://github.com/element-hq/synapse/issues/19050)) +* Bump psycopg2 from 2.9.10 to 2.9.11. ([\#19125](https://github.com/element-hq/synapse/issues/19125)) +* Bump pyyaml from 6.0.2 to 6.0.3. ([\#19105](https://github.com/element-hq/synapse/issues/19105)) +* Bump regex from 1.11.3 to 1.12.2. ([\#19074](https://github.com/element-hq/synapse/issues/19074)) +* Bump reqwest from 0.12.23 to 0.12.24. ([\#19077](https://github.com/element-hq/synapse/issues/19077)) +* Bump ruff from 0.12.10 to 0.14.3. ([\#19124](https://github.com/element-hq/synapse/issues/19124)) +* Bump sigstore/cosign-installer from 3.10.0 to 4.0.0. ([\#19075](https://github.com/element-hq/synapse/issues/19075)) +* Bump stefanzweifel/git-auto-commit-action from 6.0.1 to 7.0.0. ([\#19052](https://github.com/element-hq/synapse/issues/19052)) +* Bump tokio from 1.47.1 to 1.48.0. ([\#19076](https://github.com/element-hq/synapse/issues/19076)) +* Bump types-psycopg2 from 2.9.21.20250915 to 2.9.21.20251012. 
([\#19054](https://github.com/element-hq/synapse/issues/19054)) + # Synapse 1.141.0 (2025-10-29) ## Deprecation of MacOS Python wheels diff --git a/changelog.d/19020.misc b/changelog.d/19020.misc deleted file mode 100644 index f5775ff194..0000000000 --- a/changelog.d/19020.misc +++ /dev/null @@ -1 +0,0 @@ -Fix CI linter for schema delta files to correctly handle all types of `CREATE TABLE` syntax. diff --git a/changelog.d/19021.feature b/changelog.d/19021.feature deleted file mode 100644 index dea4748769..0000000000 --- a/changelog.d/19021.feature +++ /dev/null @@ -1,2 +0,0 @@ -Add an [Admin API](https://element-hq.github.io/synapse/latest/usage/administration/admin_api/index.html) -to allow an admin to fetch the space/room hierarchy for a given space. \ No newline at end of file diff --git a/changelog.d/19046.misc b/changelog.d/19046.misc deleted file mode 100644 index 4013804f7f..0000000000 --- a/changelog.d/19046.misc +++ /dev/null @@ -1 +0,0 @@ -Use type hinting generics in standard collections, as per PEP 585, added in Python 3.9. diff --git a/changelog.d/19047.doc b/changelog.d/19047.doc deleted file mode 100644 index fee241f2a5..0000000000 --- a/changelog.d/19047.doc +++ /dev/null @@ -1 +0,0 @@ -Update the link to the Debian oldstable package for SQLite. diff --git a/changelog.d/19047.misc b/changelog.d/19047.misc deleted file mode 100644 index 47f686a158..0000000000 --- a/changelog.d/19047.misc +++ /dev/null @@ -1 +0,0 @@ -Always treat `RETURNING` as supported by SQL engines, now that the minimum-supported versions of both SQLite and PostgreSQL support it. diff --git a/changelog.d/19047.removal b/changelog.d/19047.removal deleted file mode 100644 index da7a161868..0000000000 --- a/changelog.d/19047.removal +++ /dev/null @@ -1 +0,0 @@ -Remove support for SQLite < 3.37.2. 
diff --git a/changelog.d/19055.misc b/changelog.d/19055.misc deleted file mode 100644 index 61e626cc9b..0000000000 --- a/changelog.d/19055.misc +++ /dev/null @@ -1 +0,0 @@ -Add support for Python 3.14. \ No newline at end of file diff --git a/changelog.d/19056.misc b/changelog.d/19056.misc deleted file mode 100644 index f3a1b4e66e..0000000000 --- a/changelog.d/19056.misc +++ /dev/null @@ -1 +0,0 @@ -Move `oidc.load_metadata()` startup into `_base.start()`. diff --git a/changelog.d/19058.misc b/changelog.d/19058.misc deleted file mode 100644 index 15bc4b39bd..0000000000 --- a/changelog.d/19058.misc +++ /dev/null @@ -1 +0,0 @@ -Remove logcontext problems caused by awaiting raw `deferLater(...)`. diff --git a/changelog.d/19062.bugfix b/changelog.d/19062.bugfix deleted file mode 100644 index c5231cbbc8..0000000000 --- a/changelog.d/19062.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug introduced in 1.111.0 where failed attempts to download authenticated remote media would not be handled correctly. \ No newline at end of file diff --git a/changelog.d/19067.misc b/changelog.d/19067.misc deleted file mode 100644 index 560fbfc668..0000000000 --- a/changelog.d/19067.misc +++ /dev/null @@ -1 +0,0 @@ -Prevent duplicate logging setup when running multiple Synapse instances. diff --git a/changelog.d/19068.misc b/changelog.d/19068.misc deleted file mode 100644 index 9e5c34b608..0000000000 --- a/changelog.d/19068.misc +++ /dev/null @@ -1 +0,0 @@ -Be mindful of other logging context filters in 3rd-party code and avoid overwriting log record fields unless we know the log record is relevant to Synapse. diff --git a/changelog.d/19071.misc b/changelog.d/19071.misc deleted file mode 100644 index d0930f339b..0000000000 --- a/changelog.d/19071.misc +++ /dev/null @@ -1 +0,0 @@ -Update pydantic to v2. 
\ No newline at end of file diff --git a/changelog.d/19073.doc b/changelog.d/19073.doc deleted file mode 100644 index 6bbaaba99e..0000000000 --- a/changelog.d/19073.doc +++ /dev/null @@ -1 +0,0 @@ -Point out additional Redis configuration options available in the worker docs. Contributed by @servisbryce. diff --git a/changelog.d/19079.bugfix b/changelog.d/19079.bugfix deleted file mode 100644 index a7d9800d1d..0000000000 --- a/changelog.d/19079.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix the `oidc_session_no_samesite` cookie to have the `Secure` attribute, so the only difference between it and the paired `oidc_session` cookie, is the configuration of the `SameSite` attribute as described in the comments / cookie names. Contributed by @kieranlane. \ No newline at end of file diff --git a/changelog.d/19080.misc b/changelog.d/19080.misc deleted file mode 100644 index c738be3fe9..0000000000 --- a/changelog.d/19080.misc +++ /dev/null @@ -1 +0,0 @@ -Update deprecated code in the release script to prevent a warning message from being printed. \ No newline at end of file diff --git a/changelog.d/19081.misc b/changelog.d/19081.misc deleted file mode 100644 index 8518840fb6..0000000000 --- a/changelog.d/19081.misc +++ /dev/null @@ -1 +0,0 @@ -Update the deprecated poetry development dependencies group name in `pyproject.toml`. \ No newline at end of file diff --git a/changelog.d/19085.misc b/changelog.d/19085.misc deleted file mode 100644 index d48fad9d5d..0000000000 --- a/changelog.d/19085.misc +++ /dev/null @@ -1 +0,0 @@ -Remove `pp38*` skip selector from cibuildwheel to silence warning. \ No newline at end of file diff --git a/changelog.d/19088.misc b/changelog.d/19088.misc deleted file mode 100644 index 3224b3697d..0000000000 --- a/changelog.d/19088.misc +++ /dev/null @@ -1 +0,0 @@ -Don't immediately exit the release script if the checkout is dirty. Instead, allow the user to clear the dirty changes and retry. 
\ No newline at end of file diff --git a/changelog.d/19089.misc b/changelog.d/19089.misc deleted file mode 100644 index 81c8775fd0..0000000000 --- a/changelog.d/19089.misc +++ /dev/null @@ -1 +0,0 @@ -Update the release script's generated announcement text to include a title and extra text for RC's. \ No newline at end of file diff --git a/changelog.d/19090.bugfix b/changelog.d/19090.bugfix deleted file mode 100644 index 077dafcbf8..0000000000 --- a/changelog.d/19090.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix lost logcontext warnings from timeouts in sync and requests made by Synapse itself. diff --git a/changelog.d/19092.misc b/changelog.d/19092.misc deleted file mode 100644 index c5060c1c8b..0000000000 --- a/changelog.d/19092.misc +++ /dev/null @@ -1 +0,0 @@ -Fix lints on main branch. diff --git a/changelog.d/19094.misc b/changelog.d/19094.misc deleted file mode 100644 index 0d38d17483..0000000000 --- a/changelog.d/19094.misc +++ /dev/null @@ -1 +0,0 @@ -Use cheaper random string function in logcontext utilities. diff --git a/changelog.d/19095.misc b/changelog.d/19095.misc deleted file mode 100644 index c9949c9cb5..0000000000 --- a/changelog.d/19095.misc +++ /dev/null @@ -1 +0,0 @@ -Avoid clobbering other `SIGHUP` handlers in 3rd-party code. diff --git a/changelog.d/19096.misc b/changelog.d/19096.misc deleted file mode 100644 index 0b7bdf0967..0000000000 --- a/changelog.d/19096.misc +++ /dev/null @@ -1 +0,0 @@ -Prevent duplicate GitHub draft releases being created during the Synapse release process. \ No newline at end of file diff --git a/changelog.d/19098.misc b/changelog.d/19098.misc deleted file mode 100644 index a6933348a3..0000000000 --- a/changelog.d/19098.misc +++ /dev/null @@ -1 +0,0 @@ -Use Pillow's `Image.getexif` method instead of the experimental `Image._getexif`. 
diff --git a/changelog.d/19099.removal b/changelog.d/19099.removal deleted file mode 100644 index 8279a1c7f9..0000000000 --- a/changelog.d/19099.removal +++ /dev/null @@ -1 +0,0 @@ -Drop support for Python 3.9. diff --git a/changelog.d/19100.doc b/changelog.d/19100.doc deleted file mode 100644 index a723f34c4f..0000000000 --- a/changelog.d/19100.doc +++ /dev/null @@ -1 +0,0 @@ -Update the list of Debian releases that the downstream Debian package is maintained for. diff --git a/changelog.d/19107.misc b/changelog.d/19107.misc deleted file mode 100644 index 38cb9a9b3b..0000000000 --- a/changelog.d/19107.misc +++ /dev/null @@ -1 +0,0 @@ -Prevent uv `/usr/local/.lock` file from appearing in built Synapse docker images. \ No newline at end of file diff --git a/changelog.d/19108.bugfix b/changelog.d/19108.bugfix deleted file mode 100644 index a2afe19f41..0000000000 --- a/changelog.d/19108.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix lost logcontext when using `HomeServer.shutdown()`. diff --git a/changelog.d/19109.doc b/changelog.d/19109.doc deleted file mode 100644 index 4cce54b486..0000000000 --- a/changelog.d/19109.doc +++ /dev/null @@ -1 +0,0 @@ -Add [a page](https://element-hq.github.io/synapse/latest/development/internal_documentation/release_notes_review_checklist.html) to the documentation describing the steps the Synapse team takes to review the release notes before publishing them. \ No newline at end of file diff --git a/changelog.d/19110.misc b/changelog.d/19110.misc deleted file mode 100644 index dc45eef17c..0000000000 --- a/changelog.d/19110.misc +++ /dev/null @@ -1 +0,0 @@ -Allow Synapse's runtime dependency checking code to take packaging markers (i.e. `python <= 3.14`) into account when checking dependencies. 
\ No newline at end of file diff --git a/changelog.d/19116.misc b/changelog.d/19116.misc deleted file mode 100644 index 2291d0781a..0000000000 --- a/changelog.d/19116.misc +++ /dev/null @@ -1 +0,0 @@ -Move exception handling up the stack (avoid `exit(1)` in our composable functions). diff --git a/changelog.d/19118.misc b/changelog.d/19118.misc deleted file mode 100644 index 672ed45573..0000000000 --- a/changelog.d/19118.misc +++ /dev/null @@ -1 +0,0 @@ -Fix a lint error related to lifetimes in Rust 1.90. \ No newline at end of file diff --git a/changelog.d/19121.misc b/changelog.d/19121.misc deleted file mode 100644 index cb1fb8f024..0000000000 --- a/changelog.d/19121.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor and align app entrypoints (avoid `exit(1)` in our composable functions). diff --git a/changelog.d/19129.misc b/changelog.d/19129.misc deleted file mode 100644 index 117dbfadea..0000000000 --- a/changelog.d/19129.misc +++ /dev/null @@ -1 +0,0 @@ -Speed up pruning of ratelimiters. diff --git a/changelog.d/19131.misc b/changelog.d/19131.misc deleted file mode 100644 index cb1fb8f024..0000000000 --- a/changelog.d/19131.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor and align app entrypoints (avoid `exit(1)` in our composable functions). diff --git a/changelog.d/19134.bugfix b/changelog.d/19134.bugfix deleted file mode 100644 index 61e626cc9b..0000000000 --- a/changelog.d/19134.bugfix +++ /dev/null @@ -1 +0,0 @@ -Add support for Python 3.14. \ No newline at end of file diff --git a/debian/changelog b/debian/changelog index 14278968a8..78c3a9e54c 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.142.0~rc1) stable; urgency=medium + + * New Synapse release 1.142.0rc1. + + -- Synapse Packaging team Tue, 04 Nov 2025 13:20:15 +0000 + matrix-synapse-py3 (1.141.0) stable; urgency=medium * New Synapse release 1.141.0. 
diff --git a/pyproject.toml b/pyproject.toml index f530666e45..25a9bfb746 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -107,7 +107,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.141.0" +version = "1.142.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial" diff --git a/schema/synapse-config.schema.yaml b/schema/synapse-config.schema.yaml index 75a9a0aac5..98204a724c 100644 --- a/schema/synapse-config.schema.yaml +++ b/schema/synapse-config.schema.yaml @@ -1,5 +1,5 @@ $schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json -$id: https://element-hq.github.io/synapse/schema/synapse/v1.141/synapse-config.schema.json +$id: https://element-hq.github.io/synapse/schema/synapse/v1.142/synapse-config.schema.json type: object properties: modules: From b2237ff4f1b12b813d872252670b02e076d357ae Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 4 Nov 2025 13:40:58 +0000 Subject: [PATCH 58/72] Add sqlite deprecation to changelog and upgrade notes --- CHANGES.md | 8 ++++++++ docs/upgrade.md | 10 +++++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 2578bcdbc3..a7369d0159 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -7,6 +7,14 @@ deprecation policy](https://element-hq.github.io/synapse/latest/deprecation_policy.html#platform-dependencies), as it is now [end of life](https://endoflife.date/python). +## SQLite 3.40.0+ is now required. + +The minimum supported SQLite version has been increased from 3.27.0 to 3.40.0. + +If you use current versions of the +[matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks) +Docker images, no action is required. 
+ ## Deprecation of MacOS Python wheels diff --git a/docs/upgrade.md b/docs/upgrade.md index faf6cbf8dc..d38d07ca81 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -119,7 +119,7 @@ stacking them up. You can monitor the currently running background updates with # Upgrading to v1.142.0 -## Minimum supported Python version +## Python 3.10+ is now required The minimum supported Python version has been increased from v3.9 to v3.10. You will need Python 3.10+ to run Synapse v1.142.0. @@ -128,6 +128,14 @@ If you use current versions of the [matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks) Docker images, no action is required. +## SQLite 3.40.0+ is now required. + +The minimum supported SQLite version has been increased from 3.27.0 to 3.40.0. + +If you use current versions of the +[matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks) +Docker images, no action is required. + # Upgrading to v1.141.0 From d888126372ed171e5795a753fc6b5ba99bd5005e Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 4 Nov 2025 14:05:51 +0000 Subject: [PATCH 59/72] Drop period from title --- CHANGES.md | 2 +- docs/upgrade.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index a7369d0159..298b134014 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -7,7 +7,7 @@ deprecation policy](https://element-hq.github.io/synapse/latest/deprecation_policy.html#platform-dependencies), as it is now [end of life](https://endoflife.date/python). -## SQLite 3.40.0+ is now required. +## SQLite 3.40.0+ is now required The minimum supported SQLite version has been increased from 3.27.0 to 3.40.0. diff --git a/docs/upgrade.md b/docs/upgrade.md index d38d07ca81..b3121a01a0 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -128,7 +128,7 @@ If you use current versions of the [matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks) Docker images, no action is required. 
-## SQLite 3.40.0+ is now required. +## SQLite 3.40.0+ is now required The minimum supported SQLite version has been increased from 3.27.0 to 3.40.0. From 4bbde142dc5837619748ae900f60c6b9d068de71 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Tue, 4 Nov 2025 17:20:01 +0100 Subject: [PATCH 60/72] Skip building Python 3.9 wheels with cibuildwheel (#19119) --- .github/workflows/release-artifacts.yml | 6 ++++-- changelog.d/19119.misc | 1 + 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 changelog.d/19119.misc diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index 4e38c0f35b..7458d64726 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -141,7 +141,7 @@ jobs: python-version: "3.x" - name: Install cibuildwheel - run: python -m pip install cibuildwheel==3.0.0 + run: python -m pip install cibuildwheel==3.2.1 - name: Only build a single wheel on PR if: startsWith(github.ref, 'refs/pull/') @@ -152,7 +152,9 @@ jobs: env: # Skip testing for platforms which various libraries don't have wheels # for, and so need extra build deps. - CIBW_TEST_SKIP: pp3*-* *i686* *musl* + # + # cp39-*: Python 3.9 is EOL. + CIBW_TEST_SKIP: pp3*-* cp39-* *i686* *musl* - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: diff --git a/changelog.d/19119.misc b/changelog.d/19119.misc new file mode 100644 index 0000000000..93f512ae7e --- /dev/null +++ b/changelog.d/19119.misc @@ -0,0 +1 @@ +Manually skip building Python 3.9 wheels, to prevent errors in the release workflow. 
\ No newline at end of file From 5d71034f816a22c98b9af2c2c71f4ea6ee581190 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 4 Nov 2025 16:21:50 +0000 Subject: [PATCH 61/72] 1.142.0rc2 --- CHANGES.md | 12 +++++++++++- changelog.d/19119.misc | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 4 files changed, 18 insertions(+), 3 deletions(-) delete mode 100644 changelog.d/19119.misc diff --git a/CHANGES.md b/CHANGES.md index 298b134014..3b78672fdc 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,4 +1,4 @@ -# Synapse 1.142.0rc1 (2025-11-04) +# Synapse 1.142.0rc2 (2025-11-04) ## Dropped support for Python 3.9 @@ -29,6 +29,16 @@ of these wheels downstream, please reach out to us in [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd love to hear from you! + +## Internal Changes + +- Manually skip building Python 3.9 wheels, to prevent errors in the release workflow. ([\#19119](https://github.com/element-hq/synapse/issues/19119)) + + + + +# Synapse 1.142.0rc1 (2025-11-04) + ## Features - Add support for Python 3.14. ([\#19055](https://github.com/element-hq/synapse/issues/19055), [\#19134](https://github.com/element-hq/synapse/issues/19134)) diff --git a/changelog.d/19119.misc b/changelog.d/19119.misc deleted file mode 100644 index 93f512ae7e..0000000000 --- a/changelog.d/19119.misc +++ /dev/null @@ -1 +0,0 @@ -Manually skip building Python 3.9 wheels, to prevent errors in the release workflow. \ No newline at end of file diff --git a/debian/changelog b/debian/changelog index 78c3a9e54c..764315d66a 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.142.0~rc2) stable; urgency=medium + + * New Synapse release 1.142.0rc2. + + -- Synapse Packaging team Tue, 04 Nov 2025 16:21:30 +0000 + matrix-synapse-py3 (1.142.0~rc1) stable; urgency=medium * New Synapse release 1.142.0rc1. 
diff --git a/pyproject.toml b/pyproject.toml index 25a9bfb746..8b63fae048 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -107,7 +107,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.142.0rc1" +version = "1.142.0rc2" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial" From 0cbb2a15e0dbaad0aa528b289fc7afa462854ae2 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Tue, 4 Nov 2025 18:38:25 +0100 Subject: [PATCH 62/72] Don't build free-threaded wheels (#19140) Fixes https://github.com/element-hq/synapse/issues/19139. --- .github/workflows/release-artifacts.yml | 3 ++- changelog.d/19140.misc | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog.d/19140.misc diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index 7458d64726..d346aeb597 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -154,7 +154,8 @@ jobs: # for, and so need extra build deps. # # cp39-*: Python 3.9 is EOL. - CIBW_TEST_SKIP: pp3*-* cp39-* *i686* *musl* + # cp3??t-*: Free-threaded builds are not currently supported. + CIBW_TEST_SKIP: pp3*-* cp39-* cp3??t-* *i686* *musl* - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: diff --git a/changelog.d/19140.misc b/changelog.d/19140.misc new file mode 100644 index 0000000000..b4ae41c457 --- /dev/null +++ b/changelog.d/19140.misc @@ -0,0 +1 @@ +Update release scripts to prevent building wheels for free-threaded Python, as Synapse does not currently support it. 
\ No newline at end of file From 2fd8d88b424bf003ce8cb2ec74ac5b48ebff0cd8 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 4 Nov 2025 17:39:28 +0000 Subject: [PATCH 63/72] 1.142.0rc3 --- CHANGES.md | 10 +++++++++- changelog.d/19140.misc | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 4 files changed, 16 insertions(+), 3 deletions(-) delete mode 100644 changelog.d/19140.misc diff --git a/CHANGES.md b/CHANGES.md index 3b78672fdc..ab9b72e2a8 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,4 +1,4 @@ -# Synapse 1.142.0rc2 (2025-11-04) +# Synapse 1.142.0rc3 (2025-11-04) ## Dropped support for Python 3.9 @@ -30,6 +30,14 @@ of these wheels downstream, please reach out to us in love to hear from you! +## Internal Changes + +- Update release scripts to prevent building wheels for free-threaded Python, as Synapse does not currently support it. ([\#19140](https://github.com/element-hq/synapse/issues/19140)) + + +# Synapse 1.142.0rc2 (2025-11-04) + + ## Internal Changes - Manually skip building Python 3.9 wheels, to prevent errors in the release workflow. ([\#19119](https://github.com/element-hq/synapse/issues/19119)) diff --git a/changelog.d/19140.misc b/changelog.d/19140.misc deleted file mode 100644 index b4ae41c457..0000000000 --- a/changelog.d/19140.misc +++ /dev/null @@ -1 +0,0 @@ -Update release scripts to prevent building wheels for free-threaded Python, as Synapse does not currently support it. \ No newline at end of file diff --git a/debian/changelog b/debian/changelog index 764315d66a..0dae012858 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.142.0~rc3) stable; urgency=medium + + * New Synapse release 1.142.0rc3. + + -- Synapse Packaging team Tue, 04 Nov 2025 17:39:11 +0000 + matrix-synapse-py3 (1.142.0~rc2) stable; urgency=medium * New Synapse release 1.142.0rc2. 
diff --git a/pyproject.toml b/pyproject.toml index 8b63fae048..991cb3e7f3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -107,7 +107,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.142.0rc2" +version = "1.142.0rc3" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial" From 18f1d28a498c8ac18a7f8b3fdb778af2f0954fc4 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 7 Nov 2025 10:41:05 +0000 Subject: [PATCH 64/72] 1.142.0rc1 regression fix: Allow coercing a `str` to a `FilePath` in `MasConfigModel` (#19144) --- changelog.d/19144.bugfix | 1 + synapse/config/mas.py | 3 ++- synapse/util/pydantic_models.py | 7 +++++++ 3 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 changelog.d/19144.bugfix diff --git a/changelog.d/19144.bugfix b/changelog.d/19144.bugfix new file mode 100644 index 0000000000..3efec8080b --- /dev/null +++ b/changelog.d/19144.bugfix @@ -0,0 +1 @@ +Fix a bug introduced in 1.142.0rc1 where any attempt to configure `matrix_authentication_service.secret_path` would prevent the homeserver from starting up. \ No newline at end of file diff --git a/synapse/config/mas.py b/synapse/config/mas.py index 53cf500e95..c3e2630f2c 100644 --- a/synapse/config/mas.py +++ b/synapse/config/mas.py @@ -37,7 +37,8 @@ class MasConfigModel(ParseModel): enabled: StrictBool = False endpoint: AnyHttpUrl = AnyHttpUrl("http://localhost:8080") secret: Optional[StrictStr] = Field(default=None) - secret_path: Optional[FilePath] = Field(default=None) + # We set `strict=False` to allow `str` instances. 
+ secret_path: Optional[FilePath] = Field(default=None, strict=False) @model_validator(mode="after") def verify_secret(self) -> Self: diff --git a/synapse/util/pydantic_models.py b/synapse/util/pydantic_models.py index e1e2d8b99f..506063d1a1 100644 --- a/synapse/util/pydantic_models.py +++ b/synapse/util/pydantic_models.py @@ -30,6 +30,13 @@ class ParseModel(BaseModel): but otherwise uses Pydantic's default behaviour. + Strict mode can adversely affect some types of fields, and should be disabled + for a field if: + + - the field's type is a `Path` or `FilePath`. Strict mode will refuse to + coerce from `str` (likely what the yaml parser will produce) to `FilePath`, + raising a `ValidationError`. + For now, ignore unknown fields. In the future, we could change this so that unknown config values cause a ValidationError, provided the error messages are meaningful to server operators. From 5d4a73149919b52a6f286e8964121ccb2467620c Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 7 Nov 2025 10:54:55 +0000 Subject: [PATCH 65/72] 1.142.0rc4 --- CHANGES.md | 9 +++++++++ changelog.d/19144.bugfix | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 4 files changed, 16 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/19144.bugfix diff --git a/CHANGES.md b/CHANGES.md index ab9b72e2a8..7bb0ece7e7 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +# Synapse 1.142.0rc4 (2025-11-07) + +## Bugfixes + +- Fix a bug introduced in 1.142.0rc1 where any attempt to configure `matrix_authentication_service.secret_path` would prevent the homeserver from starting up. 
([\#19144](https://github.com/element-hq/synapse/issues/19144)) + + + + # Synapse 1.142.0rc3 (2025-11-04) ## Dropped support for Python 3.9 diff --git a/changelog.d/19144.bugfix b/changelog.d/19144.bugfix deleted file mode 100644 index 3efec8080b..0000000000 --- a/changelog.d/19144.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug introduced in 1.142.0rc1 where any attempt to configure `matrix_authentication_service.secret_path` would prevent the homeserver from starting up. \ No newline at end of file diff --git a/debian/changelog b/debian/changelog index 0dae012858..6531a59569 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.142.0~rc4) stable; urgency=medium + + * New Synapse release 1.142.0rc4. + + -- Synapse Packaging team Fri, 07 Nov 2025 10:54:42 +0000 + matrix-synapse-py3 (1.142.0~rc3) stable; urgency=medium * New Synapse release 1.142.0rc3. diff --git a/pyproject.toml b/pyproject.toml index 991cb3e7f3..f81c1e6baf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -107,7 +107,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.142.0rc3" +version = "1.142.0rc4" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial" From 72073d82ae339700a07486d32f2d2c29c67cce4c Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Fri, 7 Nov 2025 11:20:20 +0000 Subject: [PATCH 66/72] Move important messages to the top of the changelog --- CHANGES.md | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 7bb0ece7e7..8fbd50f20e 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,14 +1,5 @@ # Synapse 1.142.0rc4 (2025-11-07) -## Bugfixes - -- Fix a bug introduced in 1.142.0rc1 where any attempt to configure `matrix_authentication_service.secret_path` would prevent the homeserver from starting up. 
([\#19144](https://github.com/element-hq/synapse/issues/19144)) - - - - -# Synapse 1.142.0rc3 (2025-11-04) - ## Dropped support for Python 3.9 This release drops support for Python 3.9, in line with our [dependency @@ -38,6 +29,14 @@ of these wheels downstream, please reach out to us in [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd love to hear from you! +## Bugfixes + +- Fix a bug introduced in 1.142.0rc1 where any attempt to configure `matrix_authentication_service.secret_path` would prevent the homeserver from starting up. ([\#19144](https://github.com/element-hq/synapse/issues/19144)) + + + + +# Synapse 1.142.0rc3 (2025-11-04) ## Internal Changes From 39f8e288614c05987ea8e083e39a7e25a3b5fb73 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Mon, 10 Nov 2025 13:05:03 +0000 Subject: [PATCH 67/72] Update `cibuildwheel` config to stop building Python 3.9 and free-threaded wheels (#19154) --- .github/workflows/release-artifacts.yml | 12 +++++++----- changelog.d/19154.misc | 1 + pyproject.toml | 22 ++++++++++++++++------ 3 files changed, 24 insertions(+), 11 deletions(-) create mode 100644 changelog.d/19154.misc diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index d346aeb597..c88546c3bf 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -150,12 +150,14 @@ jobs: - name: Build wheels run: python -m cibuildwheel --output-dir wheelhouse env: - # Skip testing for platforms which various libraries don't have wheels - # for, and so need extra build deps. + # The platforms that we build for are determined by the + # `tool.cibuildwheel.skip` option in `pyproject.toml`. + + # We skip testing wheels for the following platforms in CI: # - # cp39-*: Python 3.9 is EOL. - # cp3??t-*: Free-threaded builds are not currently supported. 
- CIBW_TEST_SKIP: pp3*-* cp39-* cp3??t-* *i686* *musl* + # pp3*-* (PyPy wheels) broke in CI (TODO: investigate). + # musl: (TODO: investigate). + CIBW_TEST_SKIP: pp3*-* *musl* - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: diff --git a/changelog.d/19154.misc b/changelog.d/19154.misc new file mode 100644 index 0000000000..7d865a1ace --- /dev/null +++ b/changelog.d/19154.misc @@ -0,0 +1 @@ +Properly stop building wheels for Python 3.9 and free-threaded CPython. \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index f81c1e6baf..f6289cf519 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -392,13 +392,23 @@ build-backend = "poetry.core.masonry.api" [tool.cibuildwheel] # Skip unsupported platforms (by us or by Rust). -# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets. +# +# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the +# list of supported build targets. +# +# Also see `.github/workflows/release-artifacts.yml` for the list of +# architectures we build for (based on the runner OS types we use), as well as +# the platforms we exclude from testing in CI. +# # We skip: -# - CPython 3.8: EOLed -# - musllinux i686: excluded to reduce number of wheels we build. -# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677 -skip = "cp38* *-musllinux_i686" -# Enable non-default builds. +# - free-threaded cpython builds: these are not currently supported. +# - cp38: Python 3.8 is end-of-life. +# - cp39: Python 3.9 is end-of-life. +# - i686: We don't support 32-bit platforms. +skip = "cp3??t-* cp38-* cp39-* *i686*" +# Enable non-default builds. See the list of available options: +# https://cibuildwheel.pypa.io/en/stable/options#enable +# # "pypy" used to be included by default up until cibuildwheel 3. 
enable = "pypy" From 8feb862ff6ca4cc6ffeb1815aaf9b2051eabed99 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 11 Nov 2025 09:46:48 +0000 Subject: [PATCH 68/72] 1.142.0 --- CHANGES.md | 11 ++++++++++- changelog.d/19154.misc | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 4 files changed, 17 insertions(+), 3 deletions(-) delete mode 100644 changelog.d/19154.misc diff --git a/CHANGES.md b/CHANGES.md index 8fbd50f20e..b30ea24b0d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,4 +1,4 @@ -# Synapse 1.142.0rc4 (2025-11-07) +# Synapse 1.142.0 (2025-11-11) ## Dropped support for Python 3.9 @@ -29,6 +29,15 @@ of these wheels downstream, please reach out to us in [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd love to hear from you! +## Internal Changes + +- Properly stop building wheels for Python 3.9 and free-threaded CPython. ([\#19154](https://github.com/element-hq/synapse/issues/19154)) + + + + +# Synapse 1.142.0rc4 (2025-11-07) + ## Bugfixes - Fix a bug introduced in 1.142.0rc1 where any attempt to configure `matrix_authentication_service.secret_path` would prevent the homeserver from starting up. ([\#19144](https://github.com/element-hq/synapse/issues/19144)) diff --git a/changelog.d/19154.misc b/changelog.d/19154.misc deleted file mode 100644 index 7d865a1ace..0000000000 --- a/changelog.d/19154.misc +++ /dev/null @@ -1 +0,0 @@ -Properly stop building wheels for Python 3.9 and free-threaded CPython. \ No newline at end of file diff --git a/debian/changelog b/debian/changelog index 6531a59569..1c3b9dff46 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.142.0) stable; urgency=medium + + * New Synapse release 1.142.0. + + -- Synapse Packaging team Tue, 11 Nov 2025 09:45:51 +0000 + matrix-synapse-py3 (1.142.0~rc4) stable; urgency=medium * New Synapse release 1.142.0rc4. 
diff --git a/pyproject.toml b/pyproject.toml index f6289cf519..97bf21d8b9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -107,7 +107,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.142.0rc4" +version = "1.142.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial" From d01a8abc45a55412ceb245cc2587d49a9ca98a13 Mon Sep 17 00:00:00 2001 From: Devon Hudson Date: Tue, 18 Nov 2025 18:45:33 +0000 Subject: [PATCH 69/72] Allow subpaths in MAS endpoints (#19186) Fixes #19184 ### Pull Request Checklist * [X] Pull request is based on the develop branch * [X] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. 
* [X] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --- changelog.d/19186.bugfix | 1 + synapse/api/auth/mas.py | 27 +++---------------------- tests/handlers/test_oauth_delegation.py | 26 ++++++++++++++++++++++++ 3 files changed, 30 insertions(+), 24 deletions(-) create mode 100644 changelog.d/19186.bugfix diff --git a/changelog.d/19186.bugfix b/changelog.d/19186.bugfix new file mode 100644 index 0000000000..e5ef1bf6d8 --- /dev/null +++ b/changelog.d/19186.bugfix @@ -0,0 +1 @@ +Fix regression preventing subpaths in MAS endpoints. diff --git a/synapse/api/auth/mas.py b/synapse/api/auth/mas.py index f2b218e34f..44aecb052b 100644 --- a/synapse/api/auth/mas.py +++ b/synapse/api/auth/mas.py @@ -17,7 +17,6 @@ from urllib.parse import urlencode from pydantic import ( - AnyHttpUrl, BaseModel, ConfigDict, StrictBool, @@ -147,33 +146,13 @@ def __init__(self, hs: "HomeServer"): @property def _metadata_url(self) -> str: - return str( - AnyHttpUrl.build( - scheme=self._config.endpoint.scheme, - username=self._config.endpoint.username, - password=self._config.endpoint.password, - host=self._config.endpoint.host or "", - port=self._config.endpoint.port, - path=".well-known/openid-configuration", - query=None, - fragment=None, - ) + return ( + f"{str(self._config.endpoint).rstrip('/')}/.well-known/openid-configuration" ) @property def _introspection_endpoint(self) -> str: - return str( - AnyHttpUrl.build( - scheme=self._config.endpoint.scheme, - username=self._config.endpoint.username, - password=self._config.endpoint.password, - host=self._config.endpoint.host or "", - port=self._config.endpoint.port, - path="oauth2/introspect", - query=None, - fragment=None, - ) - ) + return f"{str(self._config.endpoint).rstrip('/')}/oauth2/introspect" async def _load_metadata(self) -> ServerMetadata: response = await 
self._http_client.get_json(self._metadata_url) diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index 43004bfc69..dbaee3c51e 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -1057,6 +1057,32 @@ def test_cached_expired_introspection(self) -> None: self.assertEqual(self.server.calls, 1) +class MasAuthDelegationWithSubpath(MasAuthDelegation): + """Test MAS delegation when the MAS server is hosted on a subpath.""" + + def default_config(self) -> dict[str, Any]: + config = super().default_config() + # Override the endpoint to include a subpath + config["matrix_authentication_service"]["endpoint"] = ( + self.server.endpoint + "auth/path/" + ) + return config + + def test_introspection_endpoint_uses_subpath(self) -> None: + """Test that the introspection endpoint correctly uses the configured subpath.""" + expected_introspection_url = ( + self.server.endpoint + "auth/path/oauth2/introspect" + ) + self.assertEqual(self._auth._introspection_endpoint, expected_introspection_url) + + def test_metadata_url_uses_subpath(self) -> None: + """Test that the metadata URL correctly uses the configured subpath.""" + expected_metadata_url = ( + self.server.endpoint + "auth/path/.well-known/openid-configuration" + ) + self.assertEqual(self._auth._metadata_url, expected_metadata_url) + + @parameterized_class( ("config",), [ From 46efbae4c396125a7aa87c683506896888bac9de Mon Sep 17 00:00:00 2001 From: Devon Hudson Date: Tue, 18 Nov 2025 12:26:53 -0700 Subject: [PATCH 70/72] 1.142.1 --- CHANGES.md | 9 +++++++++ changelog.d/19186.bugfix | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 4 files changed, 16 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/19186.bugfix diff --git a/CHANGES.md b/CHANGES.md index b30ea24b0d..5f72855bc0 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +# Synapse 1.142.1 (2025-11-18) + +## Bugfixes + +- Fixed a bug introduced in v1.142.0 
preventing subpaths in MAS endpoints from working. ([\#19186](https://github.com/element-hq/synapse/issues/19186)) + + + + # Synapse 1.142.0 (2025-11-11) ## Dropped support for Python 3.9 diff --git a/changelog.d/19186.bugfix b/changelog.d/19186.bugfix deleted file mode 100644 index e5ef1bf6d8..0000000000 --- a/changelog.d/19186.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix regression preventing subpaths in MAS endpoints. diff --git a/debian/changelog b/debian/changelog index 1c3b9dff46..e0b6cfecea 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.142.1) stable; urgency=medium + + * New Synapse release 1.142.1. + + -- Synapse Packaging team Tue, 18 Nov 2025 12:25:23 -0700 + matrix-synapse-py3 (1.142.0) stable; urgency=medium * New Synapse release 1.142.0. diff --git a/pyproject.toml b/pyproject.toml index 97bf21d8b9..bbf26ef0cb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -107,7 +107,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.142.0" +version = "1.142.1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial" From 067a4a049cad459601c7dcfb349ef4ce32fc70bb Mon Sep 17 00:00:00 2001 From: Jason Little Date: Mon, 24 Nov 2025 10:01:43 -0600 Subject: [PATCH 71/72] Remove CI lint testing for pydantic models Now that Pydantic V2 is fully enabled, this isn't needed any more(and was in fact deleted from upstream) --- .github/workflows/famedly-tests.yml | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/.github/workflows/famedly-tests.yml b/.github/workflows/famedly-tests.yml index fac74cdd6d..8e5735479a 100644 --- a/.github/workflows/famedly-tests.yml +++ b/.github/workflows/famedly-tests.yml @@ -115,21 +115,6 @@ jobs: - name: Check line endings run: scripts-dev/check_line_terminators.sh - lint-pydantic: - runs-on: ubuntu-latest - - steps: - - 
uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - - uses: Swatinem/rust-cache@68b3cb7503c78e67dae8373749990a220eb65352 - - uses: matrix-org/setup-python-poetry@v2 - with: - poetry-version: "2.1.1" - python-version: "3.13" - extras: "all" - - run: poetry run scripts-dev/check_pydantic_models.py - #lint-clippy: # runs-on: ubuntu-latest @@ -175,7 +160,6 @@ jobs: - lint - lint-mypy - lint-crlf - - lint-pydantic - check-sampleconfig #- check-schema-delta - check-lockfile From 4ce5dad39256355f79a14e88213cad2f0e702a84 Mon Sep 17 00:00:00 2001 From: Jason Little Date: Mon, 24 Nov 2025 09:20:48 -0600 Subject: [PATCH 72/72] Famedly Release v1.142.1_1 --- CHANGES.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 2778013b77..8dabc707e2 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -5,6 +5,8 @@ - Fixed a bug introduced in v1.142.0 preventing subpaths in MAS endpoints from working. ([\#19186](https://github.com/element-hq/synapse/issues/19186)) +### Famedly additions for v1.142.1_1 +- docs: update contributing guidelines ([\#209](https://github.com/famedly/synapse/pull/209)) (FrenchGithubUser) # Synapse 1.142.0 (2025-11-11)