diff --git a/.github/workflows/deploy-staging.yml b/.github/workflows/deploy-staging.yml index 6b5f881a1..af3bbd7be 100644 --- a/.github/workflows/deploy-staging.yml +++ b/.github/workflows/deploy-staging.yml @@ -39,18 +39,11 @@ jobs: - '.github/workflows/deploy-staging.yml' - '.github/workflows/test.yml' - # ====================== RUN TESTS ====================== - run-tests: - name: Run Tests - needs: detect-changes - if: "!contains(github.event.head_commit.message, '[skip ci]')" - uses: ./.github/workflows/test.yml - # ====================== BUILD FRONTEND TEST ====================== build-frontend-test: name: Build Frontend (Test) runs-on: ubuntu-latest - needs: [detect-changes, run-tests] + needs: [detect-changes] if: | !contains(github.event.head_commit.message, '[skip ci]') && success() && @@ -105,7 +98,7 @@ jobs: build-backend-test: name: Build Backend (Test) runs-on: ubuntu-latest - needs: [detect-changes, run-tests] + needs: [detect-changes] if: | !contains(github.event.head_commit.message, '[skip ci]') && success() && diff --git a/.pre-commit-frontend-wrapper.sh b/.pre-commit-frontend-wrapper.sh index d1870d74b..bd79e5cee 100755 --- a/.pre-commit-frontend-wrapper.sh +++ b/.pre-commit-frontend-wrapper.sh @@ -35,7 +35,7 @@ fi echo "→ Running TypeScript (type checking)..." if command -v bun &> /dev/null; then - bun run tsc --noEmit || { + bun run tsc -b || { echo "TypeScript errors found. Please fix before committing." exit 1 } diff --git a/README.md b/README.md index 320f274d4..18840bda7 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,7 @@ [](https://github.com/dbca-wa/science-projects/actions/workflows/test.yml)   + A project management and approval system for scientific research projects. 
@@ -207,7 +208,7 @@ The CI/CD pipeline uses a modular approach with reusable workflows: **test.yml** (reusable workflow): - Called by deploy-staging.yml and deploy-prod.yml -- Frontend tests (2-way sharding, ~2 min) +- Frontend tests (2-way sharding, ~2 min) - includes accessibility tests - Backend tests (4-way sharding, ~10 min) - Coverage combining and validation - Path-based execution (only test changed code) diff --git a/backend/Dockerfile b/backend/Dockerfile index 3a99fd7d0..5878cd956 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -184,4 +184,4 @@ EXPOSE 8000 # CMD curl -f http://127.0.0.1:8000/health/ || exit 1 # Launch production server -CMD ["gunicorn", "config.wsgi", "--bind", "0.0.0.0:8000", "--timeout", "300", "--graceful-timeout", "90", "--max-requests", "2048", "--workers", "4", "--preload"] +CMD ["gunicorn", "config.wsgi", "--bind", "0.0.0.0:8000", "--timeout", "300", "--graceful-timeout", "90", "--max-requests", "2048", "--workers", "4", "--preload", "--worker-tmp-dir", "/tmp"] diff --git a/backend/caretakers/management/commands/migrate_caretaker_data.py b/backend/caretakers/management/commands/migrate_caretaker_data.py index c725d3bde..46ce389ce 100644 --- a/backend/caretakers/management/commands/migrate_caretaker_data.py +++ b/backend/caretakers/management/commands/migrate_caretaker_data.py @@ -13,7 +13,6 @@ from django.core.management.base import BaseCommand from django.db import connection, transaction -from psycopg2 import sql class Command(BaseCommand): @@ -139,10 +138,9 @@ def _table_exists(self, table_name): def _get_table_count(self, table_name): """Get the number of records in a table.""" with connection.cursor() as cursor: - # Use SQL identifier to prevent injection (table_name is hardcoded but bandit flags it) - cursor.execute( - sql.SQL("SELECT COUNT(*) FROM {}").format(sql.Identifier(table_name)) - ) + # Use string formatting with validated table name (table_name is hardcoded in callers) + # This is safe because table_name 
only comes from hardcoded strings in this file + cursor.execute(f"SELECT COUNT(*) FROM {table_name}") # nosec B608 return cursor.fetchone()[0] def _migrate_data(self): @@ -152,8 +150,7 @@ def _migrate_data(self): """ with connection.cursor() as cursor: # Insert new records that don't exist in destination - cursor.execute( - """ + cursor.execute(""" INSERT INTO caretakers_caretaker (id, user_id, caretaker_id, end_date, reason, notes, created_at, updated_at) SELECT @@ -172,13 +169,11 @@ def _migrate_data(self): AND dest.caretaker_id = src.caretaker_id ) ON CONFLICT (user_id, caretaker_id) DO NOTHING; - """ - ) + """) copied_count = cursor.rowcount # Update existing records to ensure data is current - cursor.execute( - """ + cursor.execute(""" UPDATE caretakers_caretaker dest SET end_date = src.end_date, @@ -196,8 +191,7 @@ def _migrate_data(self): dest.created_at IS DISTINCT FROM src.created_at OR dest.updated_at IS DISTINCT FROM src.updated_at ); - """ - ) + """) updated_count = cursor.rowcount return copied_count, updated_count diff --git a/backend/config/cache_settings.py b/backend/config/cache_settings.py index dd174ee5e..6d7aa814c 100644 --- a/backend/config/cache_settings.py +++ b/backend/config/cache_settings.py @@ -6,30 +6,117 @@ Note: Organisational Edge proxy (Nginx/Varnish/Fastly) handles HTTP-level caching. This Redis cache is for application-level caching of user-specific data that cannot be cached at the HTTP level. 
+ +Redis is optional: +- If REDIS_URL is set and Redis is reachable: Caching and throttling enabled +- If Redis unavailable: Caching and throttling disabled (graceful degradation) +- Tests always use dummy cache (no Redis required) """ +import logging import os -# Redis Cache Configuration -CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": os.environ.get("REDIS_URL", "redis://127.0.0.1:6379/1"), - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - "SOCKET_CONNECT_TIMEOUT": 5, # seconds - "SOCKET_TIMEOUT": 5, # seconds - "RETRY_ON_TIMEOUT": True, - "MAX_CONNECTIONS": 50, - "CONNECTION_POOL_KWARGS": { - "max_connections": 50, - "retry_on_timeout": True, - }, - }, - "KEY_PREFIX": "spms", - "TIMEOUT": 300, # 5 minutes default TTL +logger = logging.getLogger(__name__) + +# Track Redis availability globally for other modules +REDIS_AVAILABLE = False + + +def get_cache_config(): + """ + Get cache configuration - Redis if available, dummy cache otherwise. + + Behavior: + - Tests: Always use dummy cache (no Redis needed) + - Production/Staging/Development: Use Redis if REDIS_URL set and connectable + - If Redis unavailable: Use dummy cache and log clearly + + Sets global REDIS_AVAILABLE flag for other components (e.g., throttling). 
+ + Returns: + dict: Django CACHES configuration + """ + global REDIS_AVAILABLE + + redis_url = os.environ.get("REDIS_URL") + is_testing = os.environ.get("PYTEST_RUNNING", "0") == "1" + environment = os.environ.get("ENVIRONMENT", "development") + + # Always use dummy cache for tests + if is_testing: + logger.info("CACHE: Using dummy cache for tests (Redis not required)") + REDIS_AVAILABLE = False + return { + "default": { + "BACKEND": "django.core.cache.backends.dummy.DummyCache", + } + } + + # Try Redis if URL is provided + if redis_url: + try: + # Test Redis connection with short timeout + import redis + + client = redis.from_url(redis_url, socket_connect_timeout=2) + client.ping() + client.close() + + logger.info(f"CACHE: Redis connected successfully ({redis_url})") + logger.info("CACHE: Caching ENABLED") + logger.info("CACHE: Rate limiting ENABLED") + REDIS_AVAILABLE = True + + return { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": redis_url, + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + "SOCKET_CONNECT_TIMEOUT": 5, + "SOCKET_TIMEOUT": 5, + "RETRY_ON_TIMEOUT": True, + "MAX_CONNECTIONS": 50, + "CONNECTION_POOL_KWARGS": { + "max_connections": 50, + "retry_on_timeout": True, + }, + }, + "KEY_PREFIX": "spms", + "TIMEOUT": 300, # 5 minutes default TTL + } + } + except Exception as e: + logger.warning(f"CACHE: Redis connection failed ({redis_url}): {e}") + logger.warning("CACHE: Caching DISABLED (Redis unavailable)") + logger.warning("CACHE: Rate limiting DISABLED (Redis unavailable)") + if environment in ["staging", "production"]: + logger.warning( + f"CACHE: Redis is recommended for {environment}. " + "See documentation for setup instructions." 
+ ) + REDIS_AVAILABLE = False + else: + logger.info("CACHE: REDIS_URL not set") + logger.info("CACHE: Caching DISABLED (no Redis configured)") + logger.info("CACHE: Rate limiting DISABLED (no Redis configured)") + if environment in ["staging", "production"]: + logger.info( + f"CACHE: Redis is recommended for {environment}. " + "See documentation for setup instructions." + ) + REDIS_AVAILABLE = False + + # Use dummy cache when Redis unavailable + return { + "default": { + "BACKEND": "django.core.cache.backends.dummy.DummyCache", + } } -} + + +# Export cache configuration +CACHES = get_cache_config() # Cache Key Patterns # Use these patterns for consistent cache key generation across the application diff --git a/backend/config/settings.py b/backend/config/settings.py index 4e67e57f9..3d72be098 100644 --- a/backend/config/settings.py +++ b/backend/config/settings.py @@ -278,7 +278,44 @@ "django.middleware.clickjacking.XFrameOptionsMiddleware", ] +TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + }, + }, +] + +WSGI_APPLICATION = "config.wsgi.application" + +# endregion ======================================================================================== + +# region Cache Configuration ====================================================== +# Import cache keys, TTL values, and Redis availability flag +from config.cache_settings import ( # noqa: E402, F401 + CACHE_KEYS, + CACHE_TTL, + CACHES, + REDIS_AVAILABLE, +) + +# endregion ======================================================================================== + +# region Logs and Tracking ======================================================================= +# Initialize logger early for use in 
configuration +LOGGER = logging.getLogger(__name__) + +# endregion ======================================================================================== +# region REST Framework Configuration ============================================= REST_FRAMEWORK = { "DEFAULT_PERMISSION_CLASSES": [ "rest_framework.permissions.IsAuthenticated", @@ -286,6 +323,7 @@ "DEFAULT_AUTHENTICATION_CLASSES": [ "rest_framework.authentication.SessionAuthentication", ], + # Throttle configuration (always present, but only enforced if Redis is available) "DEFAULT_THROTTLE_CLASSES": [ "rest_framework.throttling.AnonRateThrottle", "rest_framework.throttling.UserRateThrottle", @@ -301,45 +339,18 @@ }, } -TEMPLATES = [ - { - "BACKEND": "django.template.backends.django.DjangoTemplates", - "DIRS": [], - "APP_DIRS": True, - "OPTIONS": { - "context_processors": [ - "django.template.context_processors.debug", - "django.template.context_processors.request", - "django.contrib.auth.context_processors.auth", - "django.contrib.messages.context_processors.messages", - ], - }, - }, -] - -WSGI_APPLICATION = "config.wsgi.application" - -# endregion ======================================================================================== - -# region Cache Configuration ====================================================== -# Import cache keys and TTL values (needed by services) -from config.cache_settings import CACHE_KEYS, CACHE_TTL # noqa: E402, F401 - -# Use dummy cache for tests, Redis for production/development -if os.environ.get("PYTEST_RUNNING"): - # Dummy cache for tests (no Redis required) - CACHES = { - "default": { - "BACKEND": "django.core.cache.backends.dummy.DummyCache", - } - } +# Log throttling status based on Redis availability +if REDIS_AVAILABLE: + LOGGER.info("THROTTLING: Rate limiting ENABLED (Redis available)") else: - # Redis cache for production/development - from config.cache_settings import CACHES # noqa: E402, F401 + # Throttling configuration is present but won't be enforced 
without Redis + LOGGER.info( + "THROTTLING: Rate limiting DISABLED (Redis unavailable - throttling requires cache backend)" + ) # endregion ======================================================================================== -# region Logs and Tracking ======================================================================= +# region Sentry Configuration ===================================================== # Initialize Sentry only if SENTRY_URL is provided (optional) SENTRY_URL = env("SENTRY_URL", default=None) if ENVIRONMENT != "development" and SENTRY_URL: @@ -435,8 +446,6 @@ def format(self, record: LogRecord) -> str: }, } -LOGGER = logging.getLogger(__name__) - # endregion ======================================================================================== # region Django Debug Toolbar (Development Only) ====================================== diff --git a/backend/poetry.lock b/backend/poetry.lock index 99608f4c1..6757edd42 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -17,14 +17,14 @@ tests = ["mypy (>=1.14.0)", "pytest", "pytest-asyncio"] [[package]] name = "autoflake" -version = "2.3.1" +version = "2.3.3" description = "Removes unused imports and unused variables" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" groups = ["dev"] files = [ - {file = "autoflake-2.3.1-py3-none-any.whl", hash = "sha256:3ae7495db9084b7b32818b4140e6dc4fc280b712fb414f5b8fe57b0a8e85a840"}, - {file = "autoflake-2.3.1.tar.gz", hash = "sha256:c98b75dc5b0a86459c4f01a1d32ac7eb4338ec4317a4469515ff1e687ecd909e"}, + {file = "autoflake-2.3.3-py3-none-any.whl", hash = "sha256:a51a3412aff16135ee5b3ec25922459fef10c1f23ce6d6c4977188df859e8b53"}, + {file = "autoflake-2.3.3.tar.gz", hash = "sha256:c24809541e23999f7a7b0d2faadf15deb0bc04cdde49728a2fd943a0c8055504"}, ] [package.dependencies] @@ -612,14 +612,14 @@ files = [ [[package]] name = "dj-database-url" -version = "3.1.0" +version = "3.1.2" description = "Use Database URLs in your Django 
Application." optional = false python-versions = ">=3.10" groups = ["main"] files = [ - {file = "dj_database_url-3.1.0-py3-none-any.whl", hash = "sha256:155a56fbbecbaaf1348ccd73bf29138b4c9988363ba08261a0f0145e392e638c"}, - {file = "dj_database_url-3.1.0.tar.gz", hash = "sha256:d80218426b83f9302c8d27d4fccf52de5cf0cab179f0645fb2839f37605d1353"}, + {file = "dj_database_url-3.1.2-py3-none-any.whl", hash = "sha256:544e015fee3efa5127a1eb1cca465f4ace578265b3671fe61d0ed7dbafb5ec8a"}, + {file = "dj_database_url-3.1.2.tar.gz", hash = "sha256:63c20e4bbaa51690dfd4c8d189521f6bf6bc9da9fcdb23d95d2ee8ee87f9ec62"}, ] [package.dependencies] @@ -767,14 +767,14 @@ tzdata = ["tzdata"] [[package]] name = "filelock" -version = "3.23.0" +version = "3.24.3" description = "A platform independent file lock." optional = false python-versions = ">=3.10" groups = ["main", "dev"] files = [ - {file = "filelock-3.23.0-py3-none-any.whl", hash = "sha256:4203c3f43983c7c95e4bbb68786f184f6acb7300899bf99d686bb82d526bdf62"}, - {file = "filelock-3.23.0.tar.gz", hash = "sha256:f64442f6f4707b9385049bb490be0bc48e3ab8e74ad27d4063435252917f4d4b"}, + {file = "filelock-3.24.3-py3-none-any.whl", hash = "sha256:426e9a4660391f7f8a810d71b0555bce9008b0a1cc342ab1f6947d37639e002d"}, + {file = "filelock-3.24.3.tar.gz", hash = "sha256:011a5644dc937c22699943ebbfc46e969cdde3e171470a6e40b9533e5a72affa"}, ] [[package]] @@ -831,14 +831,14 @@ files = [ [[package]] name = "hypothesis" -version = "6.151.6" +version = "6.151.9" description = "The property-based testing library for Python" optional = false python-versions = ">=3.10" groups = ["dev"] files = [ - {file = "hypothesis-6.151.6-py3-none-any.whl", hash = "sha256:4e6e933a98c6f606b3e0ada97a750e7fff12277a40260b9300a05e7a5c3c5e2e"}, - {file = "hypothesis-6.151.6.tar.gz", hash = "sha256:755decfa326c8c97a4c8766fe40509985003396442138554b0ae824f9584318f"}, + {file = "hypothesis-6.151.9-py3-none-any.whl", hash = 
"sha256:7b7220585c67759b1b1ef839b1e6e9e3d82ed468cfc1ece43c67184848d7edd9"}, + {file = "hypothesis-6.151.9.tar.gz", hash = "sha256:2f284428dda6c3c48c580de0e18470ff9c7f5ef628a647ee8002f38c3f9097ca"}, ] [package.dependencies] @@ -1199,60 +1199,60 @@ files = [ [[package]] name = "pandas" -version = "3.0.0" +version = "3.0.1" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.11" groups = ["main"] files = [ - {file = "pandas-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d64ce01eb9cdca96a15266aa679ae50212ec52757c79204dbc7701a222401850"}, - {file = "pandas-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:613e13426069793aa1ec53bdcc3b86e8d32071daea138bbcf4fa959c9cdaa2e2"}, - {file = "pandas-3.0.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0192fee1f1a8e743b464a6607858ee4b071deb0b118eb143d71c2a1d170996d5"}, - {file = "pandas-3.0.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f0b853319dec8d5e0c8b875374c078ef17f2269986a78168d9bd57e49bf650ae"}, - {file = "pandas-3.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:707a9a877a876c326ae2cb640fbdc4ef63b0a7b9e2ef55c6df9942dcee8e2af9"}, - {file = "pandas-3.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:afd0aa3d0b5cda6e0b8ffc10dbcca3b09ef3cbcd3fe2b27364f85fdc04e1989d"}, - {file = "pandas-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:113b4cca2614ff7e5b9fee9b6f066618fe73c5a83e99d721ffc41217b2bf57dd"}, - {file = "pandas-3.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c14837eba8e99a8da1527c0280bba29b0eb842f64aa94982c5e21227966e164b"}, - {file = "pandas-3.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9803b31f5039b3c3b10cc858c5e40054adb4b29b4d81cb2fd789f4121c8efbcd"}, - {file = "pandas-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14c2a4099cd38a1d18ff108168ea417909b2dea3bd1ebff2ccf28ddb6a74d740"}, - {file = 
"pandas-3.0.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d257699b9a9960e6125686098d5714ac59d05222bef7a5e6af7a7fd87c650801"}, - {file = "pandas-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:69780c98f286076dcafca38d8b8eee1676adf220199c0a39f0ecbf976b68151a"}, - {file = "pandas-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4a66384f017240f3858a4c8a7cf21b0591c3ac885cddb7758a589f0f71e87ebb"}, - {file = "pandas-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be8c515c9bc33989d97b89db66ea0cececb0f6e3c2a87fcc8b69443a6923e95f"}, - {file = "pandas-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a453aad8c4f4e9f166436994a33884442ea62aa8b27d007311e87521b97246e1"}, - {file = "pandas-3.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:da768007b5a33057f6d9053563d6b74dd6d029c337d93c6d0d22a763a5c2ecc0"}, - {file = "pandas-3.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b78d646249b9a2bc191040988c7bb524c92fa8534fb0898a0741d7e6f2ffafa6"}, - {file = "pandas-3.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bc9cba7b355cb4162442a88ce495e01cb605f17ac1e27d6596ac963504e0305f"}, - {file = "pandas-3.0.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c9a1a149aed3b6c9bf246033ff91e1b02d529546c5d6fb6b74a28fea0cf4c70"}, - {file = "pandas-3.0.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95683af6175d884ee89471842acfca29172a85031fccdabc35e50c0984470a0e"}, - {file = "pandas-3.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1fbbb5a7288719e36b76b4f18d46ede46e7f916b6c8d9915b756b0a6c3f792b3"}, - {file = "pandas-3.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e8b9808590fa364416b49b2a35c1f4cf2785a6c156935879e57f826df22038e"}, - {file = "pandas-3.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:98212a38a709feb90ae658cb6227ea3657c22ba8157d4b8f913cd4c950de5e7e"}, - {file = 
"pandas-3.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:177d9df10b3f43b70307a149d7ec49a1229a653f907aa60a48f1877d0e6be3be"}, - {file = "pandas-3.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2713810ad3806767b89ad3b7b69ba153e1c6ff6d9c20f9c2140379b2a98b6c98"}, - {file = "pandas-3.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:15d59f885ee5011daf8335dff47dcb8a912a27b4ad7826dc6cbe809fd145d327"}, - {file = "pandas-3.0.0-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24e6547fb64d2c92665dd2adbfa4e85fa4fd70a9c070e7cfb03b629a0bbab5eb"}, - {file = "pandas-3.0.0-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:48ee04b90e2505c693d3f8e8f524dab8cb8aaf7ddcab52c92afa535e717c4812"}, - {file = "pandas-3.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:66f72fb172959af42a459e27a8d8d2c7e311ff4c1f7db6deb3b643dbc382ae08"}, - {file = "pandas-3.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4a4a400ca18230976724a5066f20878af785f36c6756e498e94c2a5e5d57779c"}, - {file = "pandas-3.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:940eebffe55528074341a5a36515f3e4c5e25e958ebbc764c9502cfc35ba3faa"}, - {file = "pandas-3.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:597c08fb9fef0edf1e4fa2f9828dd27f3d78f9b8c9b4a748d435ffc55732310b"}, - {file = "pandas-3.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:447b2d68ac5edcbf94655fe909113a6dba6ef09ad7f9f60c80477825b6c489fe"}, - {file = "pandas-3.0.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:debb95c77ff3ed3ba0d9aa20c3a2f19165cc7956362f9873fce1ba0a53819d70"}, - {file = "pandas-3.0.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fedabf175e7cd82b69b74c30adbaa616de301291a5231138d7242596fc296a8d"}, - {file = "pandas-3.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:412d1a89aab46889f3033a386912efcdfa0f1131c5705ff5b668dda88305e986"}, - {file = 
"pandas-3.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e979d22316f9350c516479dd3a92252be2937a9531ed3a26ec324198a99cdd49"}, - {file = "pandas-3.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:083b11415b9970b6e7888800c43c82e81a06cd6b06755d84804444f0007d6bb7"}, - {file = "pandas-3.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:5db1e62cb99e739fa78a28047e861b256d17f88463c76b8dafc7c1338086dca8"}, - {file = "pandas-3.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:697b8f7d346c68274b1b93a170a70974cdc7d7354429894d5927c1effdcccd73"}, - {file = "pandas-3.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8cb3120f0d9467ed95e77f67a75e030b67545bcfa08964e349252d674171def2"}, - {file = "pandas-3.0.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33fd3e6baa72899746b820c31e4b9688c8e1b7864d7aec2de7ab5035c285277a"}, - {file = "pandas-3.0.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8942e333dc67ceda1095227ad0febb05a3b36535e520154085db632c40ad084"}, - {file = "pandas-3.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:783ac35c4d0fe0effdb0d67161859078618b1b6587a1af15928137525217a721"}, - {file = "pandas-3.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:125eb901e233f155b268bbef9abd9afb5819db74f0e677e89a61b246228c71ac"}, - {file = "pandas-3.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b86d113b6c109df3ce0ad5abbc259fe86a1bd4adfd4a31a89da42f84f65509bb"}, - {file = "pandas-3.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:1c39eab3ad38f2d7a249095f0a3d8f8c22cc0f847e98ccf5bbe732b272e2d9fa"}, - {file = "pandas-3.0.0.tar.gz", hash = "sha256:0facf7e87d38f721f0af46fe70d97373a37701b1c09f7ed7aeeb292ade5c050f"}, + {file = "pandas-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de09668c1bf3b925c07e5762291602f0d789eca1b3a781f99c1c78f6cac0e7ea"}, + {file = "pandas-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:24ba315ba3d6e5806063ac6eb717504e499ce30bd8c236d8693a5fd3f084c796"}, + {file = "pandas-3.0.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:406ce835c55bac912f2a0dcfaf27c06d73c6b04a5dde45f1fd3169ce31337389"}, + {file = "pandas-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:830994d7e1f31dd7e790045235605ab61cff6c94defc774547e8b7fdfbff3dc7"}, + {file = "pandas-3.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a64ce8b0f2de1d2efd2ae40b0abe7f8ae6b29fbfb3812098ed5a6f8e235ad9bf"}, + {file = "pandas-3.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9832c2c69da24b602c32e0c7b1b508a03949c18ba08d4d9f1c1033426685b447"}, + {file = "pandas-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:84f0904a69e7365f79a0c77d3cdfccbfb05bf87847e3a51a41e1426b0edb9c79"}, + {file = "pandas-3.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:4a68773d5a778afb31d12e34f7dd4612ab90de8c6fb1d8ffe5d4a03b955082a1"}, + {file = "pandas-3.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:476f84f8c20c9f5bc47252b66b4bb25e1a9fc2fa98cead96744d8116cb85771d"}, + {file = "pandas-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ab749dfba921edf641d4036c4c21c0b3ea70fea478165cb98a998fb2a261955"}, + {file = "pandas-3.0.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8e36891080b87823aff3640c78649b91b8ff6eea3c0d70aeabd72ea43ab069b"}, + {file = "pandas-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:532527a701281b9dd371e2f582ed9094f4c12dd9ffb82c0c54ee28d8ac9520c4"}, + {file = "pandas-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:356e5c055ed9b0da1580d465657bc7d00635af4fd47f30afb23025352ba764d1"}, + {file = "pandas-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9d810036895f9ad6345b8f2a338dd6998a74e8483847403582cab67745bff821"}, + {file = "pandas-3.0.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:536232a5fe26dd989bd633e7a0c450705fdc86a207fec7254a55e9a22950fe43"}, + {file = "pandas-3.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:0f463ebfd8de7f326d38037c7363c6dacb857c5881ab8961fb387804d6daf2f7"}, + {file = "pandas-3.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5272627187b5d9c20e55d27caf5f2cd23e286aba25cadf73c8590e432e2b7262"}, + {file = "pandas-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:661e0f665932af88c7877f31da0dc743fe9c8f2524bdffe23d24fdcb67ef9d56"}, + {file = "pandas-3.0.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:75e6e292ff898679e47a2199172593d9f6107fd2dd3617c22c2946e97d5df46e"}, + {file = "pandas-3.0.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1ff8cf1d2896e34343197685f432450ec99a85ba8d90cce2030c5eee2ef98791"}, + {file = "pandas-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eca8b4510f6763f3d37359c2105df03a7a221a508f30e396a51d0713d462e68a"}, + {file = "pandas-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:06aff2ad6f0b94a17822cf8b83bbb563b090ed82ff4fe7712db2ce57cd50d9b8"}, + {file = "pandas-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:9fea306c783e28884c29057a1d9baa11a349bbf99538ec1da44c8476563d1b25"}, + {file = "pandas-3.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:a8d37a43c52917427e897cb2e429f67a449327394396a81034a4449b99afda59"}, + {file = "pandas-3.0.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d54855f04f8246ed7b6fc96b05d4871591143c46c0b6f4af874764ed0d2d6f06"}, + {file = "pandas-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e1b677accee34a09e0dc2ce5624e4a58a1870ffe56fc021e9caf7f23cd7668f"}, + {file = "pandas-3.0.1-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a9cabbdcd03f1b6cd254d6dda8ae09b0252524be1592594c00b7895916cb1324"}, + {file = "pandas-3.0.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:5ae2ab1f166668b41e770650101e7090824fd34d17915dd9cd479f5c5e0065e9"}, + {file = "pandas-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6bf0603c2e30e2cafac32807b06435f28741135cb8697eae8b28c7d492fc7d76"}, + {file = "pandas-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c426422973973cae1f4a23e51d4ae85974f44871b24844e4f7de752dd877098"}, + {file = "pandas-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b03f91ae8c10a85c1613102c7bef5229b5379f343030a3ccefeca8a33414cf35"}, + {file = "pandas-3.0.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:99d0f92ed92d3083d140bf6b97774f9f13863924cf3f52a70711f4e7588f9d0a"}, + {file = "pandas-3.0.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3b66857e983208654294bb6477b8a63dee26b37bdd0eb34d010556e91261784f"}, + {file = "pandas-3.0.1-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56cf59638bf24dc9bdf2154c81e248b3289f9a09a6d04e63608c159022352749"}, + {file = "pandas-3.0.1-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1a9f55e0f46951874b863d1f3906dcb57df2d9be5c5847ba4dfb55b2c815249"}, + {file = "pandas-3.0.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1849f0bba9c8a2fb0f691d492b834cc8dadf617e29015c66e989448d58d011ee"}, + {file = "pandas-3.0.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3d288439e11b5325b02ae6e9cc83e6805a62c40c5a6220bea9beb899c073b1c"}, + {file = "pandas-3.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:93325b0fe372d192965f4cca88d97667f49557398bbf94abdda3bf1b591dbe66"}, + {file = "pandas-3.0.1-cp314-cp314-win_arm64.whl", hash = "sha256:97ca08674e3287c7148f4858b01136f8bdfe7202ad25ad04fec602dd1d29d132"}, + {file = "pandas-3.0.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:58eeb1b2e0fb322befcf2bbc9ba0af41e616abadb3d3414a6bc7167f6cbfce32"}, + {file = "pandas-3.0.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cd9af1276b5ca9e298bd79a26bda32fa9cc87ed095b2a9a60978d2ca058eaf87"}, + {file = 
"pandas-3.0.1-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94f87a04984d6b63788327cd9f79dda62b7f9043909d2440ceccf709249ca988"}, + {file = "pandas-3.0.1-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85fe4c4df62e1e20f9db6ebfb88c844b092c22cd5324bdcf94bfa2fc1b391221"}, + {file = "pandas-3.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:331ca75a2f8672c365ae25c0b29e46f5ac0c6551fdace8eec4cd65e4fac271ff"}, + {file = "pandas-3.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:15860b1fdb1973fffade772fdb931ccf9b2f400a3f5665aef94a00445d7d8dd5"}, + {file = "pandas-3.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:44f1364411d5670efa692b146c748f4ed013df91ee91e9bec5677fb1fd58b937"}, + {file = "pandas-3.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:108dd1790337a494aa80e38def654ca3f0968cf4f362c85f44c15e471667102d"}, + {file = "pandas-3.0.1.tar.gz", hash = "sha256:4186a699674af418f655dbd420ed87f50d56b4cd6603784279d9eef6627823c8"}, ] [package.dependencies] @@ -1418,14 +1418,14 @@ xmp = ["defusedxml"] [[package]] name = "platformdirs" -version = "4.8.0" +version = "4.9.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.10" groups = ["main", "dev"] files = [ - {file = "platformdirs-4.8.0-py3-none-any.whl", hash = "sha256:1c1328b4d2ea997bbcb904175a9bde14e824a3fa79f751ea3888d63d7d727557"}, - {file = "platformdirs-4.8.0.tar.gz", hash = "sha256:c1d4a51ab04087041dd602707fbe7ee8b62b64e590f30e336e5c99c2d0c542d2"}, + {file = "platformdirs-4.9.2-py3-none-any.whl", hash = "sha256:9170634f126f8efdae22fb58ae8a0eaa86f38365bc57897a6c4f781d1f5875bd"}, + {file = "platformdirs-4.9.2.tar.gz", hash = "sha256:9a33809944b9db043ad67ca0db94b14bf452cc6aeaac46a88ea55b26e2e9d291"}, ] [[package]] @@ -1465,14 +1465,14 @@ virtualenv = ">=20.10.0" [[package]] name = "psycopg" -version = "3.3.2" +version = "3.3.3" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.10" groups = ["main"] files = [ - {file = "psycopg-3.3.2-py3-none-any.whl", hash = "sha256:3e94bc5f4690247d734599af56e51bae8e0db8e4311ea413f801fef82b14a99b"}, - {file = "psycopg-3.3.2.tar.gz", hash = "sha256:707a67975ee214d200511177a6a80e56e654754c9afca06a7194ea6bbfde9ca7"}, + {file = "psycopg-3.3.3-py3-none-any.whl", hash = "sha256:f96525a72bcfade6584ab17e89de415ff360748c766f0106959144dcbb38c698"}, + {file = "psycopg-3.3.3.tar.gz", hash = "sha256:5e9a47458b3c1583326513b2556a2a9473a1001a56c9efe9e587245b43148dd9"}, ] [package.dependencies] @@ -1480,76 +1480,76 @@ typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.3.2) ; implementation_name != \"pypy\""] -c = ["psycopg-c (==3.3.2) ; implementation_name != \"pypy\""] -dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "cython-lint (>=0.16)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.19.0)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "types-shapely (>=2.0)", "wheel (>=0.37)"] +binary = ["psycopg-binary 
(==3.3.3) ; implementation_name != \"pypy\""] +c = ["psycopg-c (==3.3.3) ; implementation_name != \"pypy\""] +dev = ["ast-comments (>=1.1.2)", "black (>=26.1.0)", "codespell (>=2.2)", "cython-lint (>=0.16)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.19.0)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "types-shapely (>=2.0)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] test = ["anyio (>=4.0)", "mypy (>=1.19.0) ; implementation_name != \"pypy\"", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] [[package]] name = "psycopg-binary" -version = "3.3.2" +version = "3.3.3" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false python-versions = ">=3.10" groups = ["main"] files = [ - {file = "psycopg_binary-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0768c5f32934bb52a5df098317eca9bdcf411de627c5dca2ee57662b64b54b41"}, - {file = "psycopg_binary-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:09b3014013f05cd89828640d3a1db5f829cc24ad8fa81b6e42b2c04685a0c9d4"}, - {file = "psycopg_binary-3.3.2-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:3789d452a9d17a841c7f4f97bbcba51a21f957ea35641a4c98507520e6b6a068"}, - {file = "psycopg_binary-3.3.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:44e89938d36acc4495735af70a886d206a5bfdc80258f95b69b52f68b2968d9e"}, - {file = "psycopg_binary-3.3.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90ed9da805e52985b0202aed4f352842c907c6b4fc6c7c109c6e646c32e2f43b"}, - {file = "psycopg_binary-3.3.2-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c3a9ccdfee4ae59cf9bf1822777e763bc097ed208f4901e21537fca1070e1391"}, - {file = 
"psycopg_binary-3.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:de9173f8cc0efd88ac2a89b3b6c287a9a0011cdc2f53b2a12c28d6fd55f9f81c"}, - {file = "psycopg_binary-3.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0611f4822674f3269e507a307236efb62ae5a828fcfc923ac85fe22ca19fd7c8"}, - {file = "psycopg_binary-3.3.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:522b79c7db547767ca923e441c19b97a2157f2f494272a119c854bba4804e186"}, - {file = "psycopg_binary-3.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1ea41c0229f3f5a3844ad0857a83a9f869aa7b840448fa0c200e6bcf85d33d19"}, - {file = "psycopg_binary-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:8ea05b499278790a8fa0ff9854ab0de2542aca02d661ddff94e830df971ff640"}, - {file = "psycopg_binary-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:94503b79f7da0b65c80d0dbb2f81dd78b300319ec2435d5e6dcf9622160bc2fa"}, - {file = "psycopg_binary-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07a5f030e0902ec3e27d0506ceb01238c0aecbc73ecd7fa0ee55f86134600b5b"}, - {file = "psycopg_binary-3.3.2-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e09d0d93d35c134704a2cb2b15f81ffc8174fd602f3e08f7b1a3d8896156cf0"}, - {file = "psycopg_binary-3.3.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:649c1d33bedda431e0c1df646985fbbeb9274afa964e1aef4be053c0f23a2924"}, - {file = "psycopg_binary-3.3.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5774272f754605059521ff037a86e680342e3847498b0aa86b0f3560c70963c"}, - {file = "psycopg_binary-3.3.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d391b70c9cc23f6e1142729772a011f364199d2c5ddc0d596f5f43316fbf982d"}, - {file = "psycopg_binary-3.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f3f601f32244a677c7b029ec39412db2772ad04a28bc2cbb4b1f0931ed0ffad7"}, - {file = "psycopg_binary-3.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash 
= "sha256:0ae60e910531cfcc364a8f615a7941cac89efeb3f0fffe0c4824a6d11461eef7"}, - {file = "psycopg_binary-3.3.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7c43a773dd1a481dbb2fe64576aa303d80f328cce0eae5e3e4894947c41d1da7"}, - {file = "psycopg_binary-3.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5a327327f1188b3fbecac41bf1973a60b86b2eb237db10dc945bd3dc97ec39e4"}, - {file = "psycopg_binary-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:136c43f185244893a527540307167f5d3ef4e08786508afe45d6f146228f5aa9"}, - {file = "psycopg_binary-3.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a9387ab615f929e71ef0f4a8a51e986fa06236ccfa9f3ec98a88f60fbf230634"}, - {file = "psycopg_binary-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3ff7489df5e06c12d1829544eaec64970fe27fe300f7cf04c8495fe682064688"}, - {file = "psycopg_binary-3.3.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:9742580ecc8e1ac45164e98d32ca6df90da509c2d3ff26be245d94c430f92db4"}, - {file = "psycopg_binary-3.3.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d45acedcaa58619355f18e0f42af542fcad3fd84ace4b8355d3a5dea23318578"}, - {file = "psycopg_binary-3.3.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d88f32ff8c47cb7f4e7e7a9d1747dcee6f3baa19ed9afa9e5694fd2fb32b61ed"}, - {file = "psycopg_binary-3.3.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:59d0163c4617a2c577cb34afbed93d7a45b8c8364e54b2bd2020ff25d5f5f860"}, - {file = "psycopg_binary-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e750afe74e6c17b2c7046d2c3e3173b5a3f6080084671c8aa327215323df155b"}, - {file = "psycopg_binary-3.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f26f113013c4dcfbfe9ced57b5bad2035dda1a7349f64bf726021968f9bccad3"}, - {file = "psycopg_binary-3.3.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = 
"sha256:8309ee4569dced5e81df5aa2dcd48c7340c8dee603a66430f042dfbd2878edca"}, - {file = "psycopg_binary-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c6464150e25b68ae3cb04c4e57496ea11ebfaae4d98126aea2f4702dd43e3c12"}, - {file = "psycopg_binary-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:716a586f99bbe4f710dc58b40069fcb33c7627e95cc6fc936f73c9235e07f9cf"}, - {file = "psycopg_binary-3.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fc5a189e89cbfff174588665bb18d28d2d0428366cc9dae5864afcaa2e57380b"}, - {file = "psycopg_binary-3.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:083c2e182be433f290dc2c516fd72b9b47054fcd305cce791e0a50d9e93e06f2"}, - {file = "psycopg_binary-3.3.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:ac230e3643d1c436a2dfb59ca84357dfc6862c9f372fc5dbd96bafecae581f9f"}, - {file = "psycopg_binary-3.3.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d8c899a540f6c7585cee53cddc929dd4d2db90fd828e37f5d4017b63acbc1a5d"}, - {file = "psycopg_binary-3.3.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:50ff10ab8c0abdb5a5451b9315538865b50ba64c907742a1385fdf5f5772b73e"}, - {file = "psycopg_binary-3.3.2-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:23d2594af848c1fd3d874a9364bef50730124e72df7bb145a20cb45e728c50ed"}, - {file = "psycopg_binary-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ea4fe6b4ead3bbbe27244ea224fcd1f53cb119afc38b71a2f3ce570149a03e30"}, - {file = "psycopg_binary-3.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:742ce48cde825b8e52fb1a658253d6d1ff66d152081cbc76aa45e2986534858d"}, - {file = "psycopg_binary-3.3.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e22bf6b54df994aff37ab52695d635f1ef73155e781eee1f5fa75bc08b58c8da"}, - {file = "psycopg_binary-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:8db9034cde3bcdafc66980f0130813f5c5d19e74b3f2a19fb3cfbc25ad113121"}, - {file = "psycopg_binary-3.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:df65174c7cf6b05ea273ce955927d3270b3a6e27b0b12762b009ce6082b8d3fc"}, - {file = "psycopg_binary-3.3.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:9ca24062cd9b2270e4d77576042e9cc2b1d543f09da5aba1f1a3d016cea28390"}, - {file = "psycopg_binary-3.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c749770da0947bc972e512f35366dd4950c0e34afad89e60b9787a37e97cb443"}, - {file = "psycopg_binary-3.3.2-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:03b7cd73fb8c45d272a34ae7249713e32492891492681e3cf11dff9531cf37e9"}, - {file = "psycopg_binary-3.3.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:43b130e3b6edcb5ee856c7167ccb8561b473308c870ed83978ae478613764f1c"}, - {file = "psycopg_binary-3.3.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7c1feba5a8c617922321aef945865334e468337b8fc5c73074f5e63143013b5a"}, - {file = "psycopg_binary-3.3.2-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cabb2a554d9a0a6bf84037d86ca91782f087dfff2a61298d0b00c19c0bc43f6d"}, - {file = "psycopg_binary-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:74bc306c4b4df35b09bc8cecf806b271e1c5d708f7900145e4e54a2e5dedfed0"}, - {file = "psycopg_binary-3.3.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:d79b0093f0fbf7a962d6a46ae292dc056c65d16a8ee9361f3cfbafd4c197ab14"}, - {file = "psycopg_binary-3.3.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:1586e220be05547c77afc326741dd41cc7fba38a81f9931f616ae98865439678"}, - {file = "psycopg_binary-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:458696a5fa5dad5b6fb5d5862c22454434ce4fe1cf66ca6c0de5f904cbc1ae3e"}, - {file = "psycopg_binary-3.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:04bb2de4ba69d6f8395b446ede795e8884c040ec71d01dd07ac2b2d18d4153d1"}, + 
{file = "psycopg_binary-3.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b3385b58b2fe408a13d084c14b8dcf468cd36cbbe774408250facc128f9fa75c"}, + {file = "psycopg_binary-3.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1bef235a50a80f6aba05147002bc354559657cb6386dbd04d8e1c97d1d7cbe84"}, + {file = "psycopg_binary-3.3.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:97c839717bf8c8df3f6d983a20949c4fb22e2a34ee172e3e427ede363feda27b"}, + {file = "psycopg_binary-3.3.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:48e500cf1c0984dacf1f28ea482c3cdbb4c2288d51c336c04bc64198ab21fc51"}, + {file = "psycopg_binary-3.3.3-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb36a08859b9432d94ea6b26ec41a2f98f83f14868c91321d0c1e11f672eeae7"}, + {file = "psycopg_binary-3.3.3-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0dde92cfde09293fb63b3f547919ba7d73bd2654573c03502b3263dd0218e44e"}, + {file = "psycopg_binary-3.3.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:78c9ce98caaf82ac8484d269791c1b403d7598633e0e4e2fa1097baae244e2f1"}, + {file = "psycopg_binary-3.3.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d593612758d0041cb13cb0003f7f8d3fabb7ad9319e651e78afae49b1cf5860e"}, + {file = "psycopg_binary-3.3.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:f24e8e17035200a465c178e9ea945527ad0738118694184c450f1192a452ff25"}, + {file = "psycopg_binary-3.3.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e7b607f0e14f2a4cf7e78a05ebd13df6144acfba87cb90842e70d3f125d9f53f"}, + {file = "psycopg_binary-3.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b27d3a23c79fa59557d2cc63a7e8bb4c7e022c018558eda36f9d7c4e6b99a6e0"}, + {file = "psycopg_binary-3.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a89bb9ee11177b2995d87186b1d9fa892d8ea725e85eab28c6525e4cc14ee048"}, + {file = "psycopg_binary-3.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash 
= "sha256:9f7d0cf072c6fbac3795b08c98ef9ea013f11db609659dcfc6b1f6cc31f9e181"}, + {file = "psycopg_binary-3.3.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:90eecd93073922f085967f3ed3a98ba8c325cbbc8c1a204e300282abd2369e13"}, + {file = "psycopg_binary-3.3.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dac7ee2f88b4d7bb12837989ca354c38d400eeb21bce3b73dac02622f0a3c8d6"}, + {file = "psycopg_binary-3.3.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b62cf8784eb6d35beaee1056d54caf94ec6ecf2b7552395e305518ab61eb8fd2"}, + {file = "psycopg_binary-3.3.3-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a39f34c9b18e8f6794cca17bfbcd64572ca2482318db644268049f8c738f35a6"}, + {file = "psycopg_binary-3.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:883d68d48ca9ff3cb3d10c5fdebea02c79b48eecacdddbf7cce6e7cdbdc216b8"}, + {file = "psycopg_binary-3.3.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:cab7bc3d288d37a80aa8c0820033250c95e40b1c2b5c57cf59827b19c2a8b69d"}, + {file = "psycopg_binary-3.3.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:56c767007ca959ca32f796b42379fc7e1ae2ed085d29f20b05b3fc394f3715cc"}, + {file = "psycopg_binary-3.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:da2f331a01af232259a21573a01338530c6016dcfad74626c01330535bcd8628"}, + {file = "psycopg_binary-3.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:19f93235ece6dbfc4036b5e4f6d8b13f0b8f2b3eeb8b0bd2936d406991bcdd40"}, + {file = "psycopg_binary-3.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6698dbab5bcef8fdb570fc9d35fd9ac52041771bfcfe6fd0fc5f5c4e36f1e99d"}, + {file = "psycopg_binary-3.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:329ff393441e75f10b673ae99ab45276887993d49e65f141da20d915c05aafd8"}, + {file = "psycopg_binary-3.3.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:eb072949b8ebf4082ae24289a2b0fd724da9adc8f22743409d6fd718ddb379df"}, + {file = "psycopg_binary-3.3.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:263a24f39f26e19ed7fc982d7859a36f17841b05bebad3eb47bb9cd2dd785351"}, + {file = "psycopg_binary-3.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5152d50798c2fa5bd9b68ec68eb68a1b71b95126c1d70adaa1a08cd5eefdc23d"}, + {file = "psycopg_binary-3.3.3-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9d6a1e56dd267848edb824dbeb08cf5bac649e02ee0b03ba883ba3f4f0bd54f2"}, + {file = "psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73eaaf4bb04709f545606c1db2f65f4000e8a04cdbf3e00d165a23004692093e"}, + {file = "psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:162e5675efb4704192411eaf8e00d07f7960b679cd3306e7efb120bb8d9456cc"}, + {file = "psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:fab6b5e37715885c69f5d091f6ff229be71e235f272ebaa35158d5a46fd548a0"}, + {file = "psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a4aab31bd6d1057f287c96c0effca3a25584eb9cc702f282ecb96ded7814e830"}, + {file = "psycopg_binary-3.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:59aa31fe11a0e1d1bcc2ce37ed35fe2ac84cd65bb9036d049b1a1c39064d0f14"}, + {file = "psycopg_binary-3.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05f32239aec25c5fb15f7948cffdc2dc0dac098e48b80a140e4ba32b572a2e7d"}, + {file = "psycopg_binary-3.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c84f9d214f2d1de2fafebc17fa68ac3f6561a59e291553dfc45ad299f4898c1"}, + {file = "psycopg_binary-3.3.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e77957d2ba17cada11be09a5066d93026cdb61ada7c8893101d7fe1c6e1f3925"}, + {file = "psycopg_binary-3.3.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:42961609ac07c232a427da7c87a468d3c82fee6762c220f38e37cfdacb2b178d"}, + {file = "psycopg_binary-3.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae07a3114313dd91fce686cab2f4c44af094398519af0e0f854bc707e1aeedf1"}, + {file = "psycopg_binary-3.3.3-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d257c58d7b36a621dcce1d01476ad8b60f12d80eb1406aee4cf796f88b2ae482"}, + {file = "psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:07c7211f9327d522c9c47560cae00a4ecf6687f4e02d779d035dd3177b41cb12"}, + {file = "psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:8e7e9eca9b363dbedeceeadd8be97149d2499081f3c52d141d7cd1f395a91f83"}, + {file = "psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:cb85b1d5702877c16f28d7b92ba030c1f49ebcc9b87d03d8c10bf45a2f1c7508"}, + {file = "psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4d4606c84d04b80f9138d72f1e28c6c02dc5ae0c7b8f3f8aaf89c681ce1cd1b1"}, + {file = "psycopg_binary-3.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:74eae563166ebf74e8d950ff359be037b85723d99ca83f57d9b244a871d6c13b"}, + {file = "psycopg_binary-3.3.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:497852c5eaf1f0c2d88ab74a64a8097c099deac0c71de1cbcf18659a8a04a4b2"}, + {file = "psycopg_binary-3.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:258d1ea53464d29768bf25930f43291949f4c7becc706f6e220c515a63a24edd"}, + {file = "psycopg_binary-3.3.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:111c59897a452196116db12e7f608da472fbff000693a21040e35fc978b23430"}, + {file = "psycopg_binary-3.3.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:17bb6600e2455993946385249a3c3d0af52cd70c1c1cdbf712e9d696d0b0bf1b"}, + {file = "psycopg_binary-3.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:642050398583d61c9856210568eb09a8e4f2fe8224bf3be21b67a370e677eead"}, + {file = "psycopg_binary-3.3.3-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:533efe6dc3a7cba5e2a84e38970786bb966306863e45f3db152007e9f48638a6"}, + {file = "psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5958dbf28b77ce2033482f6cb9ef04d43f5d8f4b7636e6963d5626f000efb23e"}, + {file = "psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:a6af77b6626ce92b5817bf294b4d45ec1a6161dba80fc2d82cdffdd6814fd023"}, + {file = "psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:47f06fcbe8542b4d96d7392c476a74ada521c5aebdb41c3c0155f6595fc14c8d"}, + {file = "psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e7800e6c6b5dc4b0ca7cc7370f770f53ac83886b76afda0848065a674231e856"}, + {file = "psycopg_binary-3.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:165f22ab5a9513a3d7425ffb7fcc7955ed8ccaeef6d37e369d6cc1dff1582383"}, ] [[package]] @@ -1686,23 +1686,19 @@ testing = ["process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-django" -version = "4.11.1" +version = "4.12.0" description = "A Django plugin for pytest." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" groups = ["dev"] files = [ - {file = "pytest_django-4.11.1-py3-none-any.whl", hash = "sha256:1b63773f648aa3d8541000c26929c1ea63934be1cfa674c76436966d73fe6a10"}, - {file = "pytest_django-4.11.1.tar.gz", hash = "sha256:a949141a1ee103cb0e7a20f1451d355f83f5e4a5d07bdd4dcfdd1fd0ff227991"}, + {file = "pytest_django-4.12.0-py3-none-any.whl", hash = "sha256:3ff300c49f8350ba2953b90297d23bf5f589db69545f56f1ec5f8cff5da83e85"}, + {file = "pytest_django-4.12.0.tar.gz", hash = "sha256:df94ec819a83c8979c8f6de13d9cdfbe76e8c21d39473cfe2b40c9fc9be3c758"}, ] [package.dependencies] pytest = ">=7.0.0" -[package.extras] -docs = ["sphinx", "sphinx_rtd_theme"] -testing = ["Django", "django-configurations (>=2.0)"] - [[package]] name = "pytest-mock" version = "3.15.1" @@ -1927,14 +1923,14 @@ files = [ [[package]] name = "redis" -version = "7.1.1" +version = "7.2.0" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.10" groups = ["main"] files = [ - {file = "redis-7.1.1-py3-none-any.whl", hash = "sha256:f77817f16071c2950492c67d40b771fa493eb3fccc630a424a10976dbb794b7a"}, - {file = "redis-7.1.1.tar.gz", hash = "sha256:a2814b2bda15b39dad11391cc48edac4697214a8a5a4bd10abe936ab4892eb43"}, + {file = "redis-7.2.0-py3-none-any.whl", hash = "sha256:01f591f8598e483f1842d429e8ae3a820804566f1c73dca1b80e23af9fba0497"}, + {file = "redis-7.2.0.tar.gz", hash = "sha256:4dd5bf4bd4ae80510267f14185a15cba2a38666b941aff68cccf0256b51c1f26"}, ] [package.extras] @@ -1942,6 +1938,8 @@ circuit-breaker = ["pybreaker (>=1.4.0)"] hiredis = ["hiredis (>=3.2.0)"] jwt = ["pyjwt (>=2.9.0)"] ocsp = ["cryptography (>=36.0.1)", "pyopenssl (>=20.0.1)", "requests (>=2.31.0)"] +otel = ["opentelemetry-api (>=1.39.1)", "opentelemetry-exporter-otlp-proto-http (>=1.39.1)", "opentelemetry-sdk (>=1.39.1)"] +xxhash = ["xxhash (>=3.6.0,<3.7.0)"] [[package]] name = "requests" @@ -1967,14 +1965,14 @@ 
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "14.3.2" +version = "14.3.3" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" groups = ["dev"] files = [ - {file = "rich-14.3.2-py3-none-any.whl", hash = "sha256:08e67c3e90884651da3239ea668222d19bea7b589149d8014a21c633420dbb69"}, - {file = "rich-14.3.2.tar.gz", hash = "sha256:e712f11c1a562a11843306f5ed999475f09ac31ffb64281f73ab29ffdda8b3b8"}, + {file = "rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d"}, + {file = "rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b"}, ] [package.dependencies] @@ -1986,14 +1984,14 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "sentry-sdk" -version = "2.52.0" +version = "2.53.0" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = ">=3.6" groups = ["main"] files = [ - {file = "sentry_sdk-2.52.0-py2.py3-none-any.whl", hash = "sha256:931c8f86169fc6f2752cb5c4e6480f0d516112e78750c312e081ababecbaf2ed"}, - {file = "sentry_sdk-2.52.0.tar.gz", hash = "sha256:fa0bec872cfec0302970b2996825723d67390cdd5f0229fb9efed93bd5384899"}, + {file = "sentry_sdk-2.53.0-py2.py3-none-any.whl", hash = "sha256:46e1ed8d84355ae54406c924f6b290c3d61f4048625989a723fd622aab838899"}, + {file = "sentry_sdk-2.53.0.tar.gz", hash = "sha256:6520ef2c4acd823f28efc55e43eb6ce2e6d9f954a95a3aa96b6fd14871e92b77"}, ] [package.dependencies] @@ -2197,24 +2195,24 @@ zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] [[package]] name = "virtualenv" -version = "20.36.1" +version = "20.38.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "virtualenv-20.36.1-py3-none-any.whl", hash = 
"sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f"}, - {file = "virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba"}, + {file = "virtualenv-20.38.0-py3-none-any.whl", hash = "sha256:d6e78e5889de3a4742df2d3d44e779366325a90cf356f15621fddace82431794"}, + {file = "virtualenv-20.38.0.tar.gz", hash = "sha256:94f39b1abaea5185bf7ea5a46702b56f1d0c9aa2f41a6c2b8b0af4ddc74c10a7"}, ] [package.dependencies] distlib = ">=0.3.7,<1" -filelock = {version = ">=3.20.1,<4", markers = "python_version >= \"3.10\""} +filelock = {version = ">=3.24.2,<4", markers = "python_version >= \"3.10\""} platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] +docs = ["furo (>=2023.7.26)", "pre-commit-uv (>=4.1.4)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinx-autodoc-typehints (>=3.6.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2025.12.21.14)", "sphinxcontrib-mermaid (>=2)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; 
platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "pytest-xdist (>=3.5)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] [[package]] name = "webencodings" diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 1c0d7742d..e0b9dcb6c 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -34,11 +34,15 @@ services: - DEBUG=True - PYTHONPATH=/usr/src/app/backend - DATABASE_URL=postgresql://postgres:postgres@db:5432/science_projects + - REDIS_URL=redis://redis:6379/1 env_file: - ./backend/.env command: python manage.py runserver 0.0.0.0:8000 depends_on: - - db + db: + condition: service_started + redis: + condition: service_healthy networks: - science-projects-dev @@ -57,8 +61,26 @@ services: networks: - science-projects-dev + # Redis Cache Service + redis: + image: redis:7-alpine + ports: + - "6379:6379" + volumes: + - redis_data:/data + command: redis-server --appendonly yes + networks: + - science-projects-dev + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 5s + timeout: 3s + retries: 5 + start_period: 5s + volumes: postgres_data: # Volume for persistent DB data + redis_data: # Volume for persistent Redis data networks: science-projects-dev: diff --git a/documentation/backend/README.md b/documentation/backend/README.md index 3d8a6e9dd..19ad3c9a8 100644 --- a/documentation/backend/README.md +++ b/documentation/backend/README.md @@ -10,7 +10,8 @@ Welcome to the Science Projects Management System (SPMS) backend documentation. 1. [Getting Started](development/getting-started.md) - Get up and running in under 30 minutes 2. [Local Setup](development/local-setup.md) - Detailed setup instructions -3. 
[Testing Guide](development/testing-guide.md) - Run your first tests +3. [Seeding Guide](development/seeding-guide.md) - Set up with production-like data +4. [Testing Guide](development/testing-guide.md) - Run your first tests ## Documentation Structure @@ -85,6 +86,7 @@ Operational procedures, monitoring, and troubleshooting for the entire system. 1. [Getting Started](development/getting-started.md) 2. [Local Setup](development/local-setup.md) +3. [Seeding Guide](development/seeding-guide.md) ### Adding a Feature diff --git a/documentation/backend/development/README.md b/documentation/backend/development/README.md index afdf85cf2..4c48cdafd 100644 --- a/documentation/backend/development/README.md +++ b/documentation/backend/development/README.md @@ -10,7 +10,8 @@ New to the project? Start here: 1. **[getting-started.md](getting-started.md)** - Quick setup guide (< 30 minutes) 2. **[local-setup.md](local-setup.md)** - Detailed environment setup -3. **[testing-guide.md](testing-guide.md)** - Run your first tests +3. **[seeding-guide.md](seeding-guide.md)** - Set up with production-like data +4. **[testing-guide.md](testing-guide.md)** - Run your first tests ## Development Guides diff --git a/documentation/backend/development/local-setup.md b/documentation/backend/development/local-setup.md index bb61e27b8..a3add9a54 100644 --- a/documentation/backend/development/local-setup.md +++ b/documentation/backend/development/local-setup.md @@ -288,27 +288,21 @@ poetry run pre-commit run --all-files ## Database Seeding -For development with realistic data, you can seed the database with production data. +For development with realistic data, you can seed your local environment with production data. 
-### Obtaining Data Dumps +**Quick Overview:** +- Download seeding data from SharePoint (files.zip and SQL dump) +- Extract media files to `backend/files/` +- Restore database from SQL dump using psql or pgAdmin +- Verify seeding with provided commands -Contact the Ecoinformatics Line Manager to obtain: -1. `spms_prod.sql` - Production database dump -2. `media_files.zip` - Production media files +**See the complete guide:** [Seeding Guide](seeding-guide.md) -### Importing Database Dump - -```powershell -psql -U postgres -d spms -f spms_prod.sql -``` - -### Setting Up Media Files - -```powershell -Expand-Archive -Path media_files.zip -DestinationPath temp_media -Move-Item -Path temp_media\* -Destination files\ -Force -Remove-Item -Path temp_media -Recurse -``` +The seeding guide includes: +- Step-by-step instructions for local and staging environments +- Troubleshooting common issues +- Security and data handling best practices +- OIM ticket template for staging environment seeding ## Creating a Complete Superuser diff --git a/documentation/backend/development/seeding-guide.md b/documentation/backend/development/seeding-guide.md new file mode 100644 index 000000000..566c4d04c --- /dev/null +++ b/documentation/backend/development/seeding-guide.md @@ -0,0 +1,315 @@ +# Backend Seeding Guide + +Goal: Set up your local or staging environment with production-like data for realistic testing and development. 
+ +Related Documentation: [Getting Started](getting-started.md), [Local Setup](local-setup.md), [Change Management](../../general/operations/change-management.md) + +## Table of Contents + +- [Overview](#overview) +- [Prerequisites](#prerequisites) +- [Local Environment Seeding](#local-environment-seeding) + - [Media Seeding](#media-seeding) + - [Database Seeding](#database-seeding) +- [Staging Environment Seeding](#staging-environment-seeding) +- [Troubleshooting](#troubleshooting) +- [Security and Best Practices](#security-and-best-practices) + +## Overview + +Seeding data provides production-like database records and media files for local development and staging environments. This allows you to: + +- Test features with realistic data volumes +- Develop with actual user-uploaded media files +- Verify functionality before production deployment +- Onboard new maintainers quickly with working data + +**When to use seeding data:** +- Setting up a new development environment +- Testing features that require existing data +- Reproducing production issues locally +- Preparing staging for release testing + +**Security considerations:** +- Seeding data contains production data (user information, project details, uploaded files) +- Handle with the same care as production data +- Use only in local and staging environments +- Never commit seeding data to version control (files folder is already gitignored) + +## Prerequisites + +Before seeding your environment, ensure you have: + +### Software Requirements + +- **PostgreSQL 17+** - [Installation Guide](https://www.postgresql.org/download/) +- **Python (Latest)** - [Installation Guide](https://www.python.org/downloads/) +- **Poetry** - [Installation Guide](https://python-poetry.org/docs/#installation) +- **Unzip utility** - Usually pre-installed on Windows/macOS/Linux + +Verify installations: +```bash +psql --version # Should show 17.x or higher +python --version # Should show 3.11.x or higher +poetry --version # Should show 
1.7.x or higher +``` + +### Access Requirements + +- **DBCA Network Access** - VPN or network approval if working remotely +- **SharePoint Permissions** - Access to Ecoinformatics team site +- **Database Credentials** - PostgreSQL username and password + + +### Related Documentation + +Before seeding, complete the initial backend setup: +- [Getting Started](getting-started.md) - Initial setup +- [Local Setup](local-setup.md) - Detailed configuration +- [Testing Guide](testing-guide.md) - Running tests + +## Local Environment Seeding + +### Accessing Seeding Data + +The seeding data is stored on SharePoint at: + +[SharePoint Seeding Data Folder](https://dpaw.sharepoint.com/teams/Ecoinformatics/Shared%20Documents/Projects/S033%20-%20SPMS/Seeding%20Data?csf=1&web=1&e=HfqhBJ) + +You will need to be signed in via SSO and have permission to access the Ecoinformatics SharePoint site. + +Available files: +- `files.zip` - Media files (images, documents, user uploads) +- `spms_dump.sql` - Database snapshot with production-like data + +### Media Seeding + +Media seeding involves downloading and extracting user-uploaded files to your local backend directory. + +#### Step 1: Download files.zip + +1. Navigate to the SharePoint folder (link above) +2. Locate `files.zip` in the Seeding Data folder +3. Download to your local machine (any location) + +#### Step 2: Extract to Backend Root + +Extract the contents to the backend root directory: + +```bash +# Navigate to backend directory +cd monorepo/backend + +# Extract files.zip (adjust path to your download location) +unzip ~/Downloads/files.zip +``` + +**Windows (PowerShell):** +```powershell +# Navigate to backend directory +cd monorepo\backend + +# Extract using Expand-Archive +Expand-Archive -Path "$env:USERPROFILE\Downloads\files.zip" -DestinationPath . 
+``` + +This will create a `files/` directory structure similar to below: +``` +backend/ +├── files/ +│ ├── projects/ +│ ├── annual_reports/ +│ ├── user_avatars/ +│ └── project_documents/ +``` + +#### Step 3: Verify Media Seeding + +Check that files were extracted successfully: + +```bash +# Check files directory exists +ls -la files/ + +# Check file count +find files/ -type f | wc -l +``` + +**Windows (PowerShell):** +```powershell +# Check files directory exists +Get-ChildItem files\ + +# Check file count +(Get-ChildItem -Path files\ -Recurse -File).Count +``` + +**Important:** The `files/` directory is gitignored and must never be committed to version control. + +### Database Seeding + +Database seeding involves restoring a PostgreSQL database from a SQL dump file containing production-like data. + +#### Prerequisites + +- PostgreSQL 17+ installed and running +- Database created (e.g., `spms`) +- Database credentials configured in `.env` + +#### Step 1: Download SQL Dump + +1. Navigate to the SharePoint folder (link above) +2. Locate `spms_dump.sql` in the Seeding Data folder +3. Download to your local machine + +#### Step 2: Create Database and Required Roles + +**Note:** You need postgres properly set up in your system environment variables to use these commands. + +```bash +# Create database +createdb spms + +# Create required PostgreSQL roles +psql spms +``` + +In the psql prompt: +```sql +CREATE ROLE azure_pg_admin; +CREATE ROLE spms_prod; +\q +``` + +**Windows (PowerShell):** +```powershell +# Create database +createdb spms + +# Create required PostgreSQL roles +psql spms +``` + +In the psql prompt: +```sql +CREATE ROLE azure_pg_admin; +CREATE ROLE spms_prod; +\q +``` + +**Note:** If you get "role already exists" errors, that's fine - the roles may have been created previously. 
+
+#### Step 3: Restore Database
+
+```bash
+# Restore database from SQL dump using pg_restore
+pg_restore -U postgres -d spms ~/Downloads/spms_dump.sql
+```
+
+**Windows (PowerShell):**
+```powershell
+# Restore database from SQL dump using pg_restore
+pg_restore -U postgres -d spms "$env:USERPROFILE\Downloads\spms_dump.sql"
+```
+
+**Note:** If `pg_restore` reports that the input file appears to be a text format dump, the dump is plain SQL — restore it with `psql -U postgres -d spms -f <path-to-dump>` instead (`pg_restore` only accepts custom, tar, or directory format archives). If the command itself is not found, ensure PostgreSQL's bin directory is in your PATH.
+
+#### Step 4: Run Migrations
+
+After restoring the database, run migrations to ensure your local code is in sync:
+
+```bash
+poetry run python manage.py migrate
+```
+
+#### Step 5: Verify Database Seeding
+
+Check that tables were created and populated:
+
+```bash
+# Check table count (should be 20-30 tables)
+psql -U postgres -d spms -c "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = 'public';"
+
+# Check user count (should be 100-200 users)
+psql -U postgres -d spms -c "SELECT COUNT(*) FROM users_user;"
+
+# Check project count (should be 50-100 projects)
+psql -U postgres -d spms -c "SELECT COUNT(*) FROM projects_project;"
+```
+
+
+#### Step 6: Update Environment Configuration
+
+Ensure your `.env` file points to the seeded database:
+
+```bash
+DATABASE_URL=postgresql://postgres:password@127.0.0.1/spms
+```
+
+#### Step 7: Verify Application Works
+
+Start the development server and verify everything works:
+
+```bash
+poetry run python manage.py runserver
+```
+
+Access the application at http://127.0.0.1:8000/api/v1/ and verify you can see data.
+
+## Staging Environment Seeding
+
+Staging environment seeding is handled by the Office of Information Management (OIM). Maintainers cannot directly seed staging environments.
+
+### Process
+
+1. **Review Change Process** - See [Change Management Documentation](../../general/operations/change-management.md)
+2. **Submit OIM Ticket** - Use the template below
+3. 
**Wait for Completion** - Typical timeline: 2-5 business days + +### OIM Ticket Template + +**Subject:** +``` +SPMS Staging Environment Seeding Request +``` + +**Details:** +``` +Request Type: Staging Environment Update + +Please perform the following actions on the SPMS staging environment: + +1. Populate staging database with production data + - Take snapshot of production database + - Restore to staging database + - Verify data integrity + +2. Populate staging files folder with production media + - Copy production media files to staging attached volume + - Verify file permissions and accessibility + +Purpose: Fast-forward staging environment to match production for proper testing of upcoming release. + +Timeline: Requested completion by [DATE] + +Contact: [YOUR NAME] - [YOUR EMAIL] +``` + +### Timeline + +- **Ticket submission:** Immediate +- **OIM review:** 1-2 business days +- **Seeding completion:** 2-5 business days total + +### Submission + +Submit tickets through the standard OIM change management process. See [Change Management Documentation](../../general/operations/change-management.md) for submission instructions and contact information. 
+ +## Related Documentation + +- [Getting Started](getting-started.md) - Initial backend setup +- [Local Setup](local-setup.md) - Detailed configuration +- [Testing Guide](testing-guide.md) - Running tests +- [Change Management](../../general/operations/change-management.md) - OIM ticket process +- [Database Optimisation](database-optimisation.md) - Database management +- [Operations Documentation](../../general/operations/) - Troubleshooting and monitoring diff --git a/documentation/backend/development/testing-guide.md b/documentation/backend/development/testing-guide.md index 2a0d22522..9ae40ef68 100644 --- a/documentation/backend/development/testing-guide.md +++ b/documentation/backend/development/testing-guide.md @@ -439,6 +439,3 @@ def test_project_update(): - ADR-005: pytest Testing Framework --- - -**Created**: 2024-02-07 -**Purpose**: Comprehensive testing guide for SPMS backend diff --git a/documentation/frontend/development/README.md b/documentation/frontend/development/README.md index 3553f316e..e1dfd97df 100644 --- a/documentation/frontend/development/README.md +++ b/documentation/frontend/development/README.md @@ -14,6 +14,7 @@ This section contains guides and standards for developing the Science Projects M - [Code Style](./code-style.md) - TypeScript standards and ESLint configuration - [Testing Guide](./testing-guide.md) - Testing philosophy and implementation +- [Accessibility](./accessibility.md) - WCAG 2.2 AA compliance and accessible development - [Feature Development](./feature-development.md) - Feature development workflow - [Pre-commit Hooks](./pre-commit.md) - Pre-commit checks and validation @@ -21,9 +22,10 @@ This section contains guides and standards for developing the Science Projects M 1. **Setup**: Follow the [Getting Started](./getting-started.md) guide 2. **Code Style**: Adhere to [Code Style](./code-style.md) standards -3. **Testing**: Write tests following the [Testing Guide](./testing-guide.md) -4. 
**Feature Development**: Use the [Feature Development](./feature-development.md) workflow -5. **Pre-commit**: Ensure [Pre-commit Hooks](./pre-commit.md) pass before committing +3. **Accessibility**: Ensure [Accessibility](./accessibility.md) compliance (WCAG 2.2 AA) +4. **Testing**: Write tests following the [Testing Guide](./testing-guide.md) +5. **Feature Development**: Use the [Feature Development](./feature-development.md) workflow +6. **Pre-commit**: Ensure [Pre-commit Hooks](./pre-commit.md) pass before committing ## Quick Reference @@ -41,6 +43,12 @@ bun run test bun run test:watch bun run test:coverage +# Run accessibility tests +bun run test --run "a11y" + +# Run accessibility scanner +node scripts/accessibility/scanner.js src/pages/MyPage.tsx + # Lint and format bun run lint bun run format diff --git a/documentation/frontend/development/accessibility.md b/documentation/frontend/development/accessibility.md new file mode 100644 index 000000000..084012730 --- /dev/null +++ b/documentation/frontend/development/accessibility.md @@ -0,0 +1,514 @@ +# Accessibility Development Guide + +## Overview + +This guide provides practical guidance for developing accessible features in the Science Projects Management System. We target WCAG 2.2 Level AA compliance as our minimum standard. + +## Target Compliance + +**WCAG 2.2 Level AA** - This is our minimum accessibility standard. All new features and components must meet this level. + +## Quick Start + +### Before You Code + +1. **Use semantic HTML** - Buttons for actions, links for navigation +2. **Add labels to inputs** - Every form field needs a label +3. **Provide alt text** - Describe images meaningfully +4. **Test with keyboard** - Tab through your feature +5. 
**Check colour contrast** - Use DevTools colour picker + +### Testing Your Work + +```bash +# Run accessibility tests +bun run test --run "a11y" + +# Run scanner on your file +node scripts/accessibility/scanner.js src/pages/MyPage.tsx + +# Run all tests with coverage +bun run test:coverage +``` + +## Core Principles + +### 1. Semantic HTML First + +Use HTML elements that describe what the content IS, not what it looks like. + +**Good**: +```tsx + + +View Projects +``` + +**Bad**: +```tsx +