Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
76 commits
Select commit Hold shift + click to select a range
7797172
refactor(worker): restructure monolithic jobs.py into modular archite…
bencap Jan 7, 2026
340b055
feat: Add comprehensive job traceability system database schema
bencap Jan 7, 2026
fd35ac4
fix(logging): simplify context saving logic to overwrite existing map…
bencap Jan 7, 2026
7ca0c9f
tests: add TransactionSpy class for mocking database transaction meth…
bencap Jan 12, 2026
314a469
feat: add BaseManager class with transaction handling and rollback fe…
bencap Jan 12, 2026
4d6b7ad
feat: Job manager class, supporting utilities, and unit tests
bencap Jan 12, 2026
4372a31
feat: Pipeline manager class, supporting utilities, and unit tests
bencap Jan 14, 2026
c6f72bb
feat: add function to check if job dependencies are reachable
bencap Jan 16, 2026
d77cf68
feat: add markers for test categorization in pytest
bencap Jan 16, 2026
7548bbf
fix: mock job manager returning in fixture rather than yielding
bencap Jan 17, 2026
cd2fab5
fix: enhance error logging for job and pipeline state transitions
bencap Jan 17, 2026
7ee3ce1
fix: re-order imports in job manager test file
bencap Jan 17, 2026
7ec5c40
fix: use conftest_optional import structure in worker test module
bencap Jan 17, 2026
749c512
feat: Add decorators for job and pipeline management
bencap Jan 20, 2026
d28279d
feat: use context for logging in job manager
bencap Jan 20, 2026
0fba014
feat: decorator for job run record guarantees
bencap Jan 21, 2026
603da5b
feat: add test mode support to job and pipeline decorators
bencap Jan 21, 2026
eb6aa64
fix: simplify exc handling in job management decorator
bencap Jan 21, 2026
9a9f77f
feat: allow pipelines to be started by decorated jobs
bencap Jan 22, 2026
a8655ab
tests: unit tests for worker manager utilities
bencap Jan 22, 2026
b9c2ad7
feat: add network test marker and control socket access in pytest
bencap Jan 22, 2026
2e7da03
Refactor test setup by replacing `setup_worker_db` with `with_populat…
bencap Jan 22, 2026
a884b60
wip: refactor jobs to use job management system
bencap Jan 22, 2026
5b227d0
refactor: reduce mocking of database across worker tests
bencap Jan 23, 2026
089e18f
refactor: simplify job definition in job management tests
bencap Jan 23, 2026
08d0c06
refactor: simplify job definition in job management tests
bencap Jan 23, 2026
ba2ff23
refactor: centralize decorator test mode flag fixture
bencap Jan 23, 2026
e24b1dd
feat: enhance pipeline start logic with controllable coordination
bencap Jan 24, 2026
a048272
feat: logic fixups and comprehensive test cases for variant processin…
bencap Jan 24, 2026
bea7c54
feat: add start_pipeline job and related tests for pipeline management
bencap Jan 24, 2026
f33d4e6
feat: gnomAD managed job tests and enhancements
bencap Jan 25, 2026
b671207
feat: uniprot managed job tests and enhancements
bencap Jan 27, 2026
ca61ceb
feat: clingen managed job enhancements
bencap Jan 28, 2026
8131ea8
fixup(variant creation)
bencap Jan 28, 2026
a235a4e
feat: implement job and pipeline factories with definitions and tests
bencap Jan 28, 2026
3d26a7c
feat: integrate PipelineFactory for variant creation and update proce…
bencap Jan 28, 2026
b6e0c83
feat: add context manager for database session management
bencap Jan 28, 2026
92b8c57
feat: use session context manager in worker decorators rather than in…
bencap Jan 28, 2026
344b50f
refactor: streamline context handling in job and pipeline decorators
bencap Jan 28, 2026
36b3915
feat: add new job definitions for score set annotation pipeline
bencap Jan 29, 2026
eca6747
feat: implement AnnotationStatusManager for managing variant annotati…
bencap Jan 29, 2026
fa4c663
feat: add annotation status tracking to jobs
bencap Jan 29, 2026
2aeda22
feat: streamline job results and exception handling in tests
bencap Jan 29, 2026
54043c3
feat: less prescriptive status messages in complete job functions
bencap Jan 29, 2026
ad25a5f
fix: ensure exception info is always present for failed jobs in job m…
bencap Jan 29, 2026
1273b74
fix: move Athena engine fixture to optional conftest for core depende…
bencap Jan 29, 2026
c250dc9
feat: add standalone context creation for worker lifecycle management
bencap Jan 29, 2026
e4c8d7b
feat: add asyncclick dependency and update environment script to use it
bencap Jan 29, 2026
942d2ce
feat: add standalone job definitions and update lifecycle context for…
bencap Jan 29, 2026
072d569
feat: refactor populate_mapped_variant_data to use async and job subm…
bencap Jan 29, 2026
e50a34b
chore: test cleanup
bencap Jan 29, 2026
8efce81
fix: remove ga4gh packages from server group
bencap Jan 23, 2026
7b403ad
docs: minimal developer docs via copilot for worker jobs
bencap Jan 29, 2026
aeb5c08
fix: mypy typing
bencap Jan 29, 2026
20a4e24
fix: test attempting to connect via socket to athena
bencap Jan 29, 2026
29f9c35
feat: add Slack error notifications to job/pipeline decorators
bencap Jan 29, 2026
642a64b
fix: update TODO comments for clarity and specificity in UniProt and …
bencap Jan 29, 2026
9e10bc5
feat: make Redis client optional in managers and add error handling f…
bencap Jan 29, 2026
c3e90db
feat: implement create_job_dependency method in JobFactory with valid…
bencap Jan 29, 2026
1fb23ad
feat: refactor UniProt ID mapping script to use async commands and jo…
bencap Jan 29, 2026
1870eeb
feat: refactor link_gnomad_variants script to use async commands and …
bencap Jan 30, 2026
135f278
feat: refactor clingen_car_submission script to use async commands an…
bencap Jan 30, 2026
d153744
feat: refactor clingen_ldh_submission script to streamline job submis…
bencap Jan 30, 2026
5ee162b
feat: clinvar clinical control refresh job + script
bencap Jan 30, 2026
06f77e7
feat: update annotation type handling to use enum directly and switch…
bencap Feb 4, 2026
bba9e3b
feat: add functions to retrieve associated ClinVar Allele IDs and enh…
bencap Feb 4, 2026
3097942
refactor: remove redundant fixture for setting up sample variants in …
bencap Feb 4, 2026
d37e7e6
chore: add TODO for caching ClinVar control data to improve performance
bencap Feb 4, 2026
d915035
feat: add multiple refresh job definitions for ClinVar controls with …
bencap Feb 4, 2026
33be31f
feat: enhance test workflow to run fast tests on pull requests and fu…
bencap Feb 4, 2026
7614c36
chore: remove deprecated pkg_resources and replace w stdlib. Bump pan…
bencap Feb 13, 2026
93e8519
chore: lock deps
bencap Feb 17, 2026
1198954
feat: add Redis caching for ClinGen API requests to reduce redundant …
bencap Feb 17, 2026
1fb9fdd
feat: add commit option to job progress and status update methods for…
bencap Feb 17, 2026
ae09840
feat: implement stalled job cleanup with unified retry handling
bencap Feb 17, 2026
f120ed5
fix: correct type annotations in cleanup.py
bencap Feb 17, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 26 additions & 5 deletions .github/workflows/run-tests-on-push.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
name: Run Tests (On Push)
name: Run Tests
on:
push:
# Run all tests on main, fast tests on other branches

env:
LOG_CONFIG: test
Expand Down Expand Up @@ -50,7 +51,12 @@ jobs:
- run: pip install --upgrade pip
- run: pip install poetry
- run: poetry install --with dev
- run: poetry run pytest tests/
- name: Run fast tests on non-main branches
if: github.event_name == 'push' && github.ref != 'refs/heads/main'
run: poetry run pytest tests/ -m "not network and not slow"
- name: Run full tests on main
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
run: poetry run pytest tests/

run-tests-3_11:
runs-on: ubuntu-latest
Expand All @@ -66,7 +72,12 @@ jobs:
- run: pip install --upgrade pip
- run: pip install poetry
- run: poetry install --with dev --extras server
- run: poetry run pytest tests/ --show-capture=stdout --cov=src
- name: Run fast tests on non-main branches
if: github.ref != 'refs/heads/main'
run: poetry run pytest tests/ -m "not network and not slow" --show-capture=stdout
- name: Run all tests with coverage on main branch
if: github.ref == 'refs/heads/main'
run: poetry run pytest tests/ --show-capture=stdout --cov=src

run-tests-3_12-core-dependencies:
runs-on: ubuntu-latest
Expand All @@ -80,7 +91,12 @@ jobs:
- run: pip install --upgrade pip
- run: pip install poetry
- run: poetry install --with dev
- run: poetry run pytest tests/
- name: Run fast tests on non-main branches
if: github.ref != 'refs/heads/main'
run: poetry run pytest tests/ -m "not network and not slow"
- name: Run all tests on main branch
if: github.ref == 'refs/heads/main'
run: poetry run pytest tests/

run-tests-3_12:
runs-on: ubuntu-latest
Expand All @@ -96,4 +112,9 @@ jobs:
- run: pip install --upgrade pip
- run: pip install poetry
- run: poetry install --with dev --extras server
- run: poetry run pytest tests/ --show-capture=stdout --cov=src
- name: Run fast tests on non-main branches
if: github.ref != 'refs/heads/main'
run: poetry run pytest tests/ -m "not network and not slow" --show-capture=stdout
- name: Run all tests with coverage on main branch
if: github.ref == 'refs/heads/main'
run: poetry run pytest tests/ --show-capture=stdout --cov=src
222 changes: 222 additions & 0 deletions alembic/versions/8de33cc35cd7_add_pipeline_and_job_tracking_tables.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,222 @@
"""add pipeline and job tracking tables

Revision ID: 8de33cc35cd7
Revises: dcf8572d3a17
Create Date: 2026-01-28 10:08:36.906494

"""

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

from alembic import op

# revision identifiers, used by Alembic.
revision = "8de33cc35cd7"
down_revision = "dcf8572d3a17"
branch_labels = None
depends_on = None


def upgrade():
    """Create the job-traceability schema.

    Adds four tables — ``pipelines``, ``job_runs``, ``job_dependencies`` and
    ``variant_annotation_status`` — plus the indexes the worker uses for
    scheduling (``status`` + ``scheduled_at``) and status lookups.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "pipelines",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        # Human-facing stable identifier; enforced unique below.
        sa.Column("urn", sa.String(length=255), nullable=True),
        sa.Column("name", sa.String(length=500), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("status", sa.String(length=50), nullable=False),
        # Ties a pipeline to the request/log context that spawned it.
        sa.Column("correlation_id", sa.String(length=255), nullable=True),
        sa.Column(
            "metadata",
            postgresql.JSONB(astext_type=sa.Text()),
            server_default="{}",
            nullable=False,
            comment="Flexible metadata storage for pipeline-specific data",
        ),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.Column("started_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("finished_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_by_user_id", sa.Integer(), nullable=True),
        # Records which application version created the row, for debugging.
        sa.Column("mavedb_version", sa.String(length=50), nullable=True),
        sa.CheckConstraint(
            "status IN ('created', 'running', 'succeeded', 'failed', 'cancelled', 'paused', 'partial')",
            name="ck_pipelines_status_valid",
        ),
        # Keep pipeline history even if the creating user is deleted.
        sa.ForeignKeyConstraint(["created_by_user_id"], ["users.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("urn"),
    )
    op.create_index("ix_pipelines_correlation_id", "pipelines", ["correlation_id"], unique=False)
    op.create_index("ix_pipelines_created_at", "pipelines", ["created_at"], unique=False)
    op.create_index("ix_pipelines_created_by_user_id", "pipelines", ["created_by_user_id"], unique=False)
    op.create_index("ix_pipelines_status", "pipelines", ["status"], unique=False)
    op.create_table(
        "job_runs",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("urn", sa.String(length=255), nullable=True),
        sa.Column("job_type", sa.String(length=100), nullable=False),
        # Dotted path / name of the callable executed for this run.
        sa.Column("job_function", sa.String(length=255), nullable=False),
        sa.Column("job_params", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column("status", sa.String(length=50), nullable=False),
        sa.Column("pipeline_id", sa.Integer(), nullable=True),
        sa.Column("priority", sa.Integer(), nullable=False),
        sa.Column("max_retries", sa.Integer(), nullable=False),
        sa.Column("retry_count", sa.Integer(), nullable=False),
        sa.Column("retry_delay_seconds", sa.Integer(), nullable=True),
        sa.Column("scheduled_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.Column("started_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("finished_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        # Failure details captured by the job manager on error.
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("error_traceback", sa.Text(), nullable=True),
        sa.Column("failure_category", sa.String(length=100), nullable=True),
        # Coarse progress reporting (current/total plus a short message).
        sa.Column("progress_current", sa.Integer(), nullable=True),
        sa.Column("progress_total", sa.Integer(), nullable=True),
        sa.Column("progress_message", sa.String(length=500), nullable=True),
        sa.Column("correlation_id", sa.String(length=255), nullable=True),
        sa.Column("metadata", postgresql.JSONB(astext_type=sa.Text()), server_default="{}", nullable=False),
        sa.Column("mavedb_version", sa.String(length=50), nullable=True),
        sa.CheckConstraint(
            "status IN ('pending', 'queued', 'running', 'succeeded', 'failed', 'cancelled', 'skipped')",
            name="ck_job_runs_status_valid",
        ),
        sa.CheckConstraint("max_retries >= 0", name="ck_job_runs_max_retries_positive"),
        sa.CheckConstraint("priority >= 0", name="ck_job_runs_priority_positive"),
        sa.CheckConstraint("retry_count >= 0", name="ck_job_runs_retry_count_positive"),
        # Job history survives pipeline deletion; the link is simply severed.
        sa.ForeignKeyConstraint(["pipeline_id"], ["pipelines.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("urn"),
    )
    op.create_index("ix_job_runs_correlation_id", "job_runs", ["correlation_id"], unique=False)
    op.create_index("ix_job_runs_created_at", "job_runs", ["created_at"], unique=False)
    op.create_index("ix_job_runs_job_type", "job_runs", ["job_type"], unique=False)
    op.create_index("ix_job_runs_pipeline_id", "job_runs", ["pipeline_id"], unique=False)
    op.create_index("ix_job_runs_scheduled_at", "job_runs", ["scheduled_at"], unique=False)
    op.create_index("ix_job_runs_status", "job_runs", ["status"], unique=False)
    # Composite index backing the scheduler's "runnable jobs" query.
    op.create_index("ix_job_runs_status_scheduled", "job_runs", ["status", "scheduled_at"], unique=False)
    op.create_table(
        "job_dependencies",
        # NOTE(review): "id" here is the *dependent* job's id (FK to
        # job_runs.id), not a surrogate key — the PK is the composite
        # (id, depends_on_job_id) edge. Consider renaming to job_id for
        # clarity in a follow-up migration.
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("depends_on_job_id", sa.Integer(), nullable=False),
        sa.Column("dependency_type", sa.String(length=50), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.Column("metadata", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        # NOTE(review): the IS NULL arm is unreachable since dependency_type
        # is NOT NULL; kept as-is to avoid renaming/rewriting the constraint.
        sa.CheckConstraint(
            "dependency_type IS NULL OR dependency_type IN ('success_required', 'completion_required')",
            name="ck_job_dependencies_type_valid",
        ),
        # Dependency edges disappear with either endpoint job.
        sa.ForeignKeyConstraint(["depends_on_job_id"], ["job_runs.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["id"], ["job_runs.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id", "depends_on_job_id"),
    )
    op.create_index("ix_job_dependencies_created_at", "job_dependencies", ["created_at"], unique=False)
    op.create_index("ix_job_dependencies_depends_on_job_id", "job_dependencies", ["depends_on_job_id"], unique=False)
    op.create_table(
        "variant_annotation_status",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("variant_id", sa.Integer(), nullable=False),
        sa.Column(
            "annotation_type",
            sa.String(length=50),
            nullable=False,
            comment="Type of annotation: vrs, clinvar, gnomad, etc.",
        ),
        sa.Column(
            "version",
            sa.String(length=50),
            nullable=True,
            comment="Version of the annotation source used (if applicable)",
        ),
        # FIX: the column comment previously also listed 'pending', but the
        # check constraint ck_variant_annotation_status_valid below only
        # permits success/failed/skipped. Comment aligned with the enforced
        # values — confirm 'pending' was intentionally excluded from the
        # constraint rather than the other way around.
        sa.Column("status", sa.String(length=50), nullable=False, comment="success, failed, skipped"),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("failure_category", sa.String(length=100), nullable=True),
        sa.Column(
            "success_data",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=True,
            comment="Annotation results when successful",
        ),
        sa.Column(
            "current",
            sa.Boolean(),
            server_default="true",
            nullable=False,
            comment="Whether this is the current status for the variant and annotation type",
        ),
        sa.Column("job_run_id", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.CheckConstraint(
            "annotation_type IN ('vrs_mapping', 'clingen_allele_id', 'mapped_hgvs', 'variant_translation', 'gnomad_allele_frequency', 'clinvar_control', 'vep_functional_consequence', 'ldh_submission')",
            name="ck_variant_annotation_type_valid",
        ),
        sa.CheckConstraint("status IN ('success', 'failed', 'skipped')", name="ck_variant_annotation_status_valid"),
        # Annotation rows outlive the job that produced them...
        sa.ForeignKeyConstraint(["job_run_id"], ["job_runs.id"], ondelete="SET NULL"),
        # ...but not the variant they describe.
        sa.ForeignKeyConstraint(["variant_id"], ["variants.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        "ix_variant_annotation_status_annotation_type", "variant_annotation_status", ["annotation_type"], unique=False
    )
    op.create_index(
        "ix_variant_annotation_status_created_at", "variant_annotation_status", ["created_at"], unique=False
    )
    op.create_index("ix_variant_annotation_status_current", "variant_annotation_status", ["current"], unique=False)
    op.create_index(
        "ix_variant_annotation_status_job_run_id", "variant_annotation_status", ["job_run_id"], unique=False
    )
    op.create_index("ix_variant_annotation_status_status", "variant_annotation_status", ["status"], unique=False)
    op.create_index(
        "ix_variant_annotation_status_variant_id", "variant_annotation_status", ["variant_id"], unique=False
    )
    # Covers the "current annotation of this type/version for this variant" lookup.
    op.create_index(
        "ix_variant_annotation_status_variant_type_version_current",
        "variant_annotation_status",
        ["variant_id", "annotation_type", "version", "current"],
        unique=False,
    )
    op.create_index("ix_variant_annotation_status_version", "variant_annotation_status", ["version"], unique=False)
    op.create_index(
        "ix_variant_annotation_type_status", "variant_annotation_status", ["annotation_type", "status"], unique=False
    )
    op.create_index(
        "ix_variant_annotation_variant_type_status",
        "variant_annotation_status",
        ["variant_id", "annotation_type", "status"],
        unique=False,
    )
    # ### end Alembic commands ###


def downgrade():
    """Drop the job-traceability tables created by :func:`upgrade`.

    Tables are removed child-first (annotation status, dependency edges,
    job runs, pipelines) so foreign keys never dangle; each table's indexes
    are dropped immediately before the table itself.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    annotation_status_indexes = (
        "ix_variant_annotation_variant_type_status",
        "ix_variant_annotation_type_status",
        "ix_variant_annotation_status_version",
        "ix_variant_annotation_status_variant_type_version_current",
        "ix_variant_annotation_status_variant_id",
        "ix_variant_annotation_status_status",
        "ix_variant_annotation_status_job_run_id",
        "ix_variant_annotation_status_current",
        "ix_variant_annotation_status_created_at",
        "ix_variant_annotation_status_annotation_type",
    )
    for index_name in annotation_status_indexes:
        op.drop_index(index_name, table_name="variant_annotation_status")
    op.drop_table("variant_annotation_status")

    for index_name in ("ix_job_dependencies_depends_on_job_id", "ix_job_dependencies_created_at"):
        op.drop_index(index_name, table_name="job_dependencies")
    op.drop_table("job_dependencies")

    job_run_indexes = (
        "ix_job_runs_status_scheduled",
        "ix_job_runs_status",
        "ix_job_runs_scheduled_at",
        "ix_job_runs_pipeline_id",
        "ix_job_runs_job_type",
        "ix_job_runs_created_at",
        "ix_job_runs_correlation_id",
    )
    for index_name in job_run_indexes:
        op.drop_index(index_name, table_name="job_runs")
    op.drop_table("job_runs")

    pipeline_indexes = (
        "ix_pipelines_status",
        "ix_pipelines_created_by_user_id",
        "ix_pipelines_created_at",
        "ix_pipelines_correlation_id",
    )
    for index_name in pipeline_indexes:
        op.drop_index(index_name, table_name="pipelines")
    op.drop_table("pipelines")
    # ### end Alembic commands ###
4 changes: 4 additions & 0 deletions bin/localstack-init.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/bin/sh
# LocalStack readiness hook: creates the dev S3 bucket used for score set CSV
# uploads. Mounted into the container at /etc/localstack/init/ready.d (see
# docker-compose-dev.yml), so LocalStack runs it once the S3 service is ready.
# NOTE(review): `s3 mb` errors if the bucket already exists (e.g. on container
# restart with a persisted volume); there is no `set -e`, so the script still
# completes — confirm that best-effort behavior is intended.
echo "Initializing local S3 bucket..."
awslocal s3 mb s3://score-set-csv-uploads-dev
echo "S3 bucket 'score-set-csv-uploads-dev' created."
13 changes: 13 additions & 0 deletions docker-compose-dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,18 @@ services:
volumes:
- mavedb-redis-dev:/data

localstack:
image: localstack/localstack:latest
ports:
- "4566:4566"
env_file:
- settings/.env.dev
environment:
- SERVICES=s3:4566 # We only need S3 for MaveDB
volumes:
- mavedb-localstack-dev:/var/lib/localstack
- "./bin/localstack-init.sh:/etc/localstack/init/ready.d/localstack-init.sh"

seqrepo:
image: biocommons/seqrepo:2024-12-20
volumes:
Expand All @@ -104,3 +116,4 @@ volumes:
mavedb-data-dev:
mavedb-redis-dev:
mavedb-seqrepo-dev:
mavedb-localstack-dev:
53 changes: 53 additions & 0 deletions mypy_stubs/aiocache/__init__.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
"""Type stubs for aiocache library.

Provides type hints for the aiocache caching library functionality used in MaveDB.
"""

from typing import Any, Awaitable, Callable, Optional, Type, TypeVar, Union

from .base import BaseCache

# Type variables for decorator
F = TypeVar("F", bound=Callable[..., Awaitable[Any]])
T = TypeVar("T")

class Cache:
    """Cache factory class for creating cache instances.

    Stub for ``aiocache.Cache``: constructed with a backend class (``REDIS``
    or ``MEMORY``) plus backend-specific keyword options, and exposing the
    async get/set/delete/clear/close operations used by MaveDB.
    """

    # Cache backend constants (backend classes selectable at construction)
    REDIS: Type[BaseCache]
    MEMORY: Type[BaseCache]

    def __init__(
        self,
        cache_class: Type[BaseCache],
        *,
        # Connection options; presumably only meaningful for the Redis
        # backend — confirm against aiocache docs.
        endpoint: Optional[str] = None,
        port: Optional[int] = None,
        ssl: bool = False,
        namespace: Optional[str] = None,
        serializer: Optional[Any] = None,
        plugins: Optional[Any] = None,
        **kwargs: Any,
    ) -> None: ...
    # Returns the cached value for ``key`` (backend-dependent miss value).
    async def get(self, key: str) -> Any: ...
    # Stores ``value`` under ``key``; ``ttl`` is an expiry in seconds.
    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool: ...
    async def delete(self, key: str) -> bool: ...
    async def clear(self, namespace: Optional[str] = None) -> bool: ...
    # Releases backend resources (e.g. connection pool).
    async def close(self) -> None: ...

def cached(
    # Expiry for cached entries, in seconds; None means no expiry.
    ttl: Optional[int] = None,
    # Fixed cache key, or a key_builder callable deriving the key from the
    # decorated function's arguments — presumably mutually exclusive; see
    # aiocache docs.
    key: Optional[str] = None,
    key_builder: Optional[Callable[..., str]] = None,
    cache: Union[Type[BaseCache], BaseCache, None] = None,
    serializer: Optional[Any] = None,
    plugins: Optional[Any] = None,
    alias: Optional[str] = None,
    namespace: Optional[str] = None,
    # When True, ``self`` is excluded from the generated cache key.
    noself: bool = False,
    # Predicate over the result; truthy return skips caching that result.
    skip_cache_func: Optional[Callable[[Any], bool]] = None,
    **kwargs: Any,
) -> Callable[[F], F]: ...
    # Decorator that caches results of an async callable; preserves the
    # decorated function's signature (F -> F).

__all__ = ["Cache", "cached"]
25 changes: 25 additions & 0 deletions mypy_stubs/aiocache/base.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
"""Type stubs for aiocache.base module.

Provides type hints for the base cache class used by aiocache backends.
"""

from typing import Any, Optional

class BaseCache:
    """Base class for cache backends.

    Stub for ``aiocache.base.BaseCache``: the common async interface shared
    by all aiocache backends (Redis, memory, ...). The ``Cache`` factory's
    backend constants are typed as subclasses of this class.
    """

    def __init__(
        self,
        *,
        namespace: Optional[str] = None,
        serializer: Optional[Any] = None,
        plugins: Optional[Any] = None,
        **kwargs: Any,
    ) -> None: ...
    # Async CRUD surface mirrored by Cache in the package stub.
    async def get(self, key: str) -> Any: ...
    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool: ...
    async def delete(self, key: str) -> bool: ...
    async def clear(self, namespace: Optional[str] = None) -> bool: ...
    async def close(self) -> None: ...

__all__ = ["BaseCache"]
Loading