diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 06d692f..4607891 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -29,8 +29,8 @@ jobs: run: uv sync --all-packages --frozen - name: Ruff run: uv run ruff format --check . && uv run ruff check . - - name: Pyright - run: uv run pyright . + - name: Ty + run: uv run ty check tests: name: Run tests diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a3d2bda..ac6fd2c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,11 +1,11 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 + rev: v6.0.0 hooks: - id: check-yaml - id: end-of-file-fixer - repo: https://github.com/tsvikas/sync-with-uv - rev: v0.4.0 + rev: v0.5.0 hooks: - id: sync-with-uv - repo: https://github.com/charliermarsh/ruff-pre-commit @@ -14,7 +14,12 @@ repos: - id: ruff-check args: [--fix, --exit-non-zero-on-fix] - id: ruff-format - - repo: https://github.com/RobertCraigie/pyright-python - rev: v1.1.400 + - repo: local hooks: - - id: pyright + - id: ty-check + name: ty-check + language: python + entry: ty check + pass_filenames: false + args: [--python=.venv/] + additional_dependencies: [ty] diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 5553cd3..d2ea44e 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -1,5 +1,6 @@ { "recommendations": [ + "astral-sh.ty", "ms-python.python", "ms-python.vscode-pylance", "charliermarsh.ruff", diff --git a/.vscode/settings.json b/.vscode/settings.json index 0e990f5..3a37348 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -12,6 +12,7 @@ } }, "files.insertFinalNewline": true, + "python.languageServer": "None", "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, "python.analysis.autoImportCompletions": true, diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1db46a5..ec5195e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ 
-42,7 +42,7 @@ uv run --package tilebox-datasets pytest tilebox-datasets uv run ruff format . && uv run ruff check --fix . # type checking: -uv run pyright . +uv run ty check ``` ### Adding dependencies to one of the packages @@ -54,8 +54,8 @@ uv add --package tilebox-datasets "numpy>=2" ### Used code quality tools - [ruff](https://github.com/astral-sh/ruff) for linting and formatting -- [pyright](https://github.com/microsoft/pyright) for type checking -- [pre-commit](https://pre-commit.com/) for running all of the above automatically on each git commit +- [ty](https://github.com/astral-sh/ty) for type checking +- [prek](https://prek.j178.dev/) for running all of the above automatically on each git commit ## Protobuf usage diff --git a/pyproject.toml b/pyproject.toml index b3d8dff..13b7e62 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,11 +24,10 @@ dev = [ "pyarrow>=17.0.0", # some dev tooling "ruff>=0.11.10", - # pyright 1.1.401 reports many wrong false positives, let's wait until that is fixed before upgrading - "pyright>=1.1.379,<1.1.401", - "pre-commit>=3.8.0", "types-protobuf>=6.30", "junitparser>=3.2.0", + "ty>=0.0.11", + "prek>=0.2.27", ] [project.scripts] @@ -112,33 +111,10 @@ known-first-party = ["tilebox", "_tilebox"] [tool.ruff.lint.per-file-ignores] "*/tests/*" = ["INP001", "SLF001"] -[tool.pyright] +[tool.ty.src] exclude = [ - "**/.ipynb_checkpoints", - "**/__pycache__", - ".venv", - "tilebox-datasets/tests/example_dataset/*", # auto-generated code - "tilebox-workflows/tests/proto/*", # auto-generated code + # auto-generated code + "**/*_pb2.py", + "**/*_pb2.pyi", + "**/*pb2_grpc.py" ] - -# ignore warnings in those files, but still type check them when used as a dependency in other files -ignore = [ - # it's auto generated - "**/datasets/v1", - "**/workflows/v1", - "**/tilebox/v1", - "**/buf/validate", -] - -# pyright needs to have all the dependencies installed to be able to type check -# we can make sure of this by telling it to use the uv venv 
-venvPath = "." -venv = ".venv" -extraPaths = [ - "tilebox-datasets", - "tilebox-grpc", - "tilebox-storage", - "tilebox-workflows", -] - -reportPrivateImportUsage = false diff --git a/tilebox-datasets/tests/data/datapoint.py b/tilebox-datasets/tests/data/datapoint.py index d811e81..be8f3d4 100644 --- a/tilebox-datasets/tests/data/datapoint.py +++ b/tilebox-datasets/tests/data/datapoint.py @@ -62,7 +62,7 @@ def example_datapoints(draw: DrawFn, generated_fields: bool = False, missing_fie some_time=draw(datetime_messages() | maybe_none), some_duration=draw(duration_messages() | maybe_none), some_bytes=draw(binary(min_size=1, max_size=10) | maybe_none), - some_bool=draw(booleans() | maybe_none), # type: ignore[arg-type] + some_bool=draw(booleans() | maybe_none), # well-known types some_identifier=draw(uuid_messages() | maybe_none), some_vec3=draw(vec3_messages() | maybe_none), diff --git a/tilebox-datasets/tilebox/datasets/message_pool.py b/tilebox-datasets/tilebox/datasets/message_pool.py index f740b5f..bfd5ffd 100644 --- a/tilebox-datasets/tilebox/datasets/message_pool.py +++ b/tilebox-datasets/tilebox/datasets/message_pool.py @@ -1,5 +1,6 @@ from google.protobuf import descriptor_pb2, duration_pb2, timestamp_pb2 from google.protobuf.descriptor_pool import Default +from google.protobuf.message import Message from google.protobuf.message_factory import GetMessageClass, GetMessages from tilebox.datasets.data.datasets import AnnotatedType @@ -25,5 +26,5 @@ def register_message_types(descriptor_set: descriptor_pb2.FileDescriptorSet) -> GetMessages(descriptor_set.file, pool=Default()) -def get_message_type(type_url: str) -> type: +def get_message_type(type_url: str) -> type[Message]: return GetMessageClass(Default().FindMessageTypeByName(type_url)) diff --git a/tilebox-datasets/tilebox/datasets/progress.py b/tilebox-datasets/tilebox/datasets/progress.py index 4e45189..35fc864 100644 --- a/tilebox-datasets/tilebox/datasets/progress.py +++ 
b/tilebox-datasets/tilebox/datasets/progress.py @@ -62,7 +62,8 @@ def _calc_progress_seconds(self, time: datetime) -> int: def set_progress(self, time: datetime) -> None: """Set the progress of the progress bar to the given time""" - done = min(self._calc_progress_seconds(time), self._progress_bar.total) + total = self._calc_progress_seconds(self._interval.end) + done = min(self._calc_progress_seconds(time), total) self._progress_bar.update(done - self._progress_bar.n) def set_download_info(self, datapoints: int, byte_size: int, download_time: float) -> None: @@ -79,7 +80,8 @@ def __exit__( ) -> None: try: if traceback is None: - self._progress_bar.update(self._progress_bar.total - self._progress_bar.n) # set to 100% + total = self._calc_progress_seconds(self._interval.end) + self._progress_bar.update(total - self._progress_bar.n) # set to 100% self._progress_bar.close() # mark as completed or failed except AttributeError: diff --git a/tilebox-datasets/tilebox/datasets/protobuf_conversion/field_types.py b/tilebox-datasets/tilebox/datasets/protobuf_conversion/field_types.py index 1e37bc2..052452f 100644 --- a/tilebox-datasets/tilebox/datasets/protobuf_conversion/field_types.py +++ b/tilebox-datasets/tilebox/datasets/protobuf_conversion/field_types.py @@ -1,4 +1,4 @@ -from collections.abc import Sized +from collections.abc import Sequence from datetime import timedelta from typing import Any from uuid import UUID @@ -16,9 +16,10 @@ from tilebox.datasets.datasets.v1.well_known_types_pb2 import UUID as UUIDMessage # noqa: N811 from tilebox.datasets.datasets.v1.well_known_types_pb2 import Geometry, LatLon, LatLonAlt, Quaternion, Vec3 -ProtoFieldValue = Message | float | str | bool | bytes | Sized | None +ScalarProtoFieldValue = Message | float | str | bool | bytes +ProtoFieldValue = ScalarProtoFieldValue | Sequence[ScalarProtoFieldValue] | None -_FILL_VALUES_BY_DTYPE = { +_FILL_VALUES_BY_DTYPE: dict[type[np.dtype[Any]], Any] = { npdtypes.Int8DType: np.int8(0), 
npdtypes.Int16DType: np.int16(0), npdtypes.Int32DType: np.int32(0), diff --git a/tilebox-datasets/tilebox/datasets/protobuf_conversion/protobuf_xarray.py b/tilebox-datasets/tilebox/datasets/protobuf_conversion/protobuf_xarray.py index 72bf361..62c2a13 100644 --- a/tilebox-datasets/tilebox/datasets/protobuf_conversion/protobuf_xarray.py +++ b/tilebox-datasets/tilebox/datasets/protobuf_conversion/protobuf_xarray.py @@ -3,7 +3,7 @@ """ import contextlib -from collections.abc import Sized +from collections.abc import Sequence from typing import Any, TypeVar import numpy as np @@ -231,10 +231,10 @@ def resize(self, buffer_size: int) -> None: elif buffer_size > len(self._data): # resize the data buffer to the new capacity, by just padding it with zeros at the end missing = buffer_size - len(self._data) - self._data = np.pad( + self._data = np.pad( # ty: ignore[no-matching-overload] self._data, ((0, missing), (0, 0)), - constant_values=self._type.fill_value, # type: ignore[arg-type] + constant_values=self._type.fill_value, ) @@ -258,13 +258,13 @@ def __init__( self._array_dim: int | None = None def __call__(self, index: int, value: ProtoFieldValue) -> None: - if not isinstance(value, Sized): + if not isinstance(value, Sequence): raise TypeError(f"Expected array field but got {type(value)}") if self._array_dim is None or len(value) > self._array_dim: self._resize_array_dim(len(value)) - for i, v in enumerate(value): # type: ignore[arg-type] # somehow the isinstance(value, Sized) isn't used here + for i, v in enumerate(value): # somehow the isinstance(value, Sequence) isn't used here the self._data[index, i, :] = self._type.from_proto(v) def finalize( @@ -309,10 +309,10 @@ def _resize(self) -> None: else: # resize the data buffer to the new capacity, by just padding it with zeros at the end missing_capacity = self._capacity - self._data.shape[0] missing_array_dim = self._array_dim - self._data.shape[1] - self._data = np.pad( + self._data = np.pad( # ty: 
ignore[no-matching-overload] self._data, ((0, missing_capacity), (0, missing_array_dim), (0, 0)), - constant_values=self._type.fill_value, # type: ignore[arg-type] + constant_values=self._type.fill_value, ) @@ -374,13 +374,13 @@ def _create_field_converter(field: FieldDescriptor) -> _FieldConverter: """ # special handling for enums: if field.type == FieldDescriptor.TYPE_ENUM: - if field.is_repeated: # type: ignore[attr-defined] + if field.is_repeated: raise NotImplementedError("Repeated enum fields are not supported") return _EnumFieldConverter(field.name, enum_mapping_from_field_descriptor(field)) field_type = infer_field_type(field) - if field.is_repeated: # type: ignore[attr-defined] + if field.is_repeated: return _ArrayFieldConverter(field.name, field_type) return _SimpleFieldConverter(field.name, field_type) diff --git a/tilebox-datasets/tilebox/datasets/protobuf_conversion/to_protobuf.py b/tilebox-datasets/tilebox/datasets/protobuf_conversion/to_protobuf.py index 6405505..4d6b43e 100644 --- a/tilebox-datasets/tilebox/datasets/protobuf_conversion/to_protobuf.py +++ b/tilebox-datasets/tilebox/datasets/protobuf_conversion/to_protobuf.py @@ -20,7 +20,7 @@ def to_messages( # noqa: C901, PLR0912 data: IngestionData, - message_type: type, + message_type: type[Message], required_fields: list[str] | None = None, ignore_fields: list[str] | None = None, ) -> list[Message]: @@ -44,9 +44,9 @@ def to_messages( # noqa: C901, PLR0912 # let's validate our fields, to make sure that they are all known fields for the given protobuf message # and that they are all lists of the same length field_lengths = defaultdict(list) - fields: dict[str, pd.Series | np.ndarray] = {} + fields: dict[str, pd.Series | np.ndarray | list[ProtoFieldValue]] = {} - field_names = list(map(str, data)) + field_names = [str(field) for field in data] if isinstance(data, xr.Dataset): # list(dataset) only returns the variables, not the coords, so for xarray we need to add the coords as well # but not all 
coords, we only care abou time for now @@ -84,7 +84,7 @@ def to_messages( # noqa: C901, PLR0912 else: values = convert_values_to_proto(values, field_type, filter_none=False) - fields[field_name] = values # type: ignore[assignment] + fields[field_name] = values # now convert every datapoint to a protobuf message if len(field_lengths) == 0: # early return, no actual data to convert @@ -103,7 +103,7 @@ def marshal_messages(messages: list[Message]) -> list[bytes]: def columnar_to_row_based( - data: dict[str, pd.Series | np.ndarray], + data: dict[str, pd.Series | np.ndarray | list[ProtoFieldValue]], ) -> Iterator[dict[str, Any]]: if len(data) == 0: return @@ -126,12 +126,12 @@ def convert_values_to_proto( def convert_repeated_values_to_proto( values: np.ndarray | pd.Series | list[np.ndarray], field_type: ProtobufFieldType -) -> Any: +) -> list[ProtoFieldValue]: if isinstance(values, np.ndarray): # it was an xarray, with potentially padded fill values at the end values = trim_trailing_fill_values(values, field_type.fill_value) # since repeated fields can have different lengths between datapoints, we can filter out None values here - return [convert_values_to_proto(repeated_values, field_type, filter_none=True) for repeated_values in values] + return [convert_values_to_proto(repeated_values, field_type, filter_none=True) for repeated_values in values] # ty: ignore[invalid-return-type] def trim_trailing_fill_values(values: np.ndarray, fill_value: Any) -> list[np.ndarray]: diff --git a/tilebox-datasets/tilebox/datasets/query/id_interval.py b/tilebox-datasets/tilebox/datasets/query/id_interval.py index 628b2a3..ff85ac3 100644 --- a/tilebox-datasets/tilebox/datasets/query/id_interval.py +++ b/tilebox-datasets/tilebox/datasets/query/id_interval.py @@ -55,7 +55,8 @@ def parse(cls, arg: IDIntervalLike, start_exclusive: bool = False, end_inclusive case IDInterval(_, _, _, _): return arg case (UUID(), UUID()): - start, end = arg + start: UUID = arg[0] + end: UUID = arg[1] return 
IDInterval( start_id=start, end_id=end, @@ -63,10 +64,13 @@ def parse(cls, arg: IDIntervalLike, start_exclusive: bool = False, end_inclusive end_inclusive=end_inclusive, ) case (str(), str()): - start, end = arg + start: str = arg[0] + end: str = arg[1] return IDInterval( start_id=UUID(start), end_id=UUID(end), start_exclusive=start_exclusive, end_inclusive=end_inclusive, ) + + raise ValueError(f"Failed to convert {arg} ({type(arg)}) to IDInterval") diff --git a/tilebox-datasets/tilebox/datasets/service.py b/tilebox-datasets/tilebox/datasets/service.py index 2447a6d..e9ca4dd 100644 --- a/tilebox-datasets/tilebox/datasets/service.py +++ b/tilebox-datasets/tilebox/datasets/service.py @@ -271,7 +271,7 @@ def _client_info() -> ClientInfo: def _environment_info() -> str: python_version = sys.version.split(" ")[0] try: - shell = str(get_ipython()) # type: ignore[name-defined] + shell = str(get_ipython()) # ty: ignore[unresolved-reference] except NameError: return f"Python {python_version}" # Probably standard Python interpreter diff --git a/tilebox-grpc/_tilebox/grpc/error.py b/tilebox-grpc/_tilebox/grpc/error.py index dcc97e0..45e71f0 100644 --- a/tilebox-grpc/_tilebox/grpc/error.py +++ b/tilebox-grpc/_tilebox/grpc/error.py @@ -57,7 +57,7 @@ def with_pythonic_errors(stub: Stub, async_funcs: bool = False) -> Stub: wrap_func = _wrap_rpc if not async_funcs else _async_wrap_rpc for name, rpc in stub.__dict__.items(): if callable(rpc): - setattr(stub, name, wrap_func(rpc)) # type: ignore[assignment] + setattr(stub, name, wrap_func(rpc)) return stub diff --git a/tilebox-grpc/_tilebox/grpc/replay.py b/tilebox-grpc/_tilebox/grpc/replay.py index c79bebd..4be13c8 100644 --- a/tilebox-grpc/_tilebox/grpc/replay.py +++ b/tilebox-grpc/_tilebox/grpc/replay.py @@ -40,7 +40,7 @@ def open_recording_channel(url: str, auth_token: str | None, recording: str | Pa def open_replay_channel(recording: str | Path, assert_request_matches: bool = True) -> Channel: - return _ReplayChannel(recording, 
assert_request_matches) # type: ignore[return-value] + return _ReplayChannel(recording, assert_request_matches) # ty: ignore[invalid-return-type] # not a subclass, but same interface so works class _ConcreteValue(Future): @@ -87,7 +87,7 @@ def intercept_unary_unary( client_call_details: ClientCallDetails, request: RequestType, ) -> Future: - request_data = base64.b64encode(request.SerializeToString()) # type: ignore[attr-defined] + request_data = base64.b64encode(request.SerializeToString()) # ty: ignore[unresolved-attribute] with self.recording.open("ab") as file: method = client_call_details.method if isinstance(method, str): @@ -162,7 +162,7 @@ def unary_unary_call( if recorded_status != StatusCode.OK.value[0]: # the recorded call was an error, so raise it again code = _STATUS_CODES[recorded_status] - error = AioRpcError(code, None, None, recorded_response.decode()) # type: ignore[arg-type] + error = AioRpcError(code, None, None, recorded_response.decode()) # ty: ignore[invalid-argument-type] raise error return response_deserializer(base64.b64decode(recorded_response)) diff --git a/tilebox-storage/tilebox/storage/aio.py b/tilebox-storage/tilebox/storage/aio.py index 4fa7598..1ad4a9d 100644 --- a/tilebox-storage/tilebox/storage/aio.py +++ b/tilebox-storage/tilebox/storage/aio.py @@ -30,7 +30,7 @@ from tilebox.storage.providers import login try: - from IPython.display import HTML, Image, display # type: ignore[assignment] + from IPython.display import HTML, Image, display except ImportError: # IPython is not available, so we can't display the quicklook image # but let's define stubs for the type checker diff --git a/tilebox-workflows/tests/runner/test_runner.py b/tilebox-workflows/tests/runner/test_runner.py index df915e0..2e28100 100644 --- a/tilebox-workflows/tests/runner/test_runner.py +++ b/tilebox-workflows/tests/runner/test_runner.py @@ -25,7 +25,7 @@ class FibonacciTask(Task): n: int def execute(self, context: ExecutionContext) -> None: - cache: JobCache = 
context.job_cache # type: ignore[attr-defined] + cache: JobCache = context.job_cache # ty: ignore[unresolved-attribute] key = f"fib_{self.n}" if f"fib_{self.n}" in cache: # If the result is already in the cache, we can skip the calculation @@ -46,7 +46,7 @@ class SumResultTask(Task): n: int def execute(self, context: ExecutionContext) -> None: - cache: JobCache = context.job_cache # type: ignore[attr-defined] + cache: JobCache = context.job_cache # ty: ignore[unresolved-attribute] fib_n_1 = bytes_to_int(cache[f"fib_{self.n - 1}"]) fib_n_2 = bytes_to_int(cache[f"fib_{self.n - 2}"]) @@ -74,7 +74,7 @@ def test_runner_with_fibonacci_workflow() -> None: class FlakyTask(Task): def execute(self, context: ExecutionContext) -> None: - cache: JobCache = context.job_cache # type: ignore[attr-defined] + cache: JobCache = context.job_cache # ty: ignore[unresolved-attribute] if "succeed" in cache: return # finally succeed diff --git a/tilebox-workflows/tests/test_task.py b/tilebox-workflows/tests/test_task.py index 4afc9b9..dc3f873 100644 --- a/tilebox-workflows/tests/test_task.py +++ b/tilebox-workflows/tests/test_task.py @@ -54,7 +54,7 @@ class TaskWithInvalidExecuteSignature(Task): def identifier() -> tuple[str, str]: return "tilebox.tests.TaskWithInvalidExecuteSignature", "v0.1" - def execute(self) -> None: # type: ignore[override] + def execute(self) -> None: # ty: ignore[invalid-method-override] pass @@ -66,7 +66,7 @@ class TaskWithInvalidExecuteSignature(Task): def identifier() -> tuple[str, str]: return "tilebox.tests.TaskWithInvalidExecuteSignature", "v0.1" - def execute(self, context: ExecutionContext, invalid: int) -> None: # type: ignore[override] + def execute(self, context: ExecutionContext, invalid: int) -> None: # ty: ignore[invalid-method-override] pass @@ -78,7 +78,7 @@ class TaskWithInvalidExecuteReturnType(Task): def identifier() -> tuple[str, str]: return "tilebox.tests.TaskWithInvalidExecuteReturnType", "v0.1" - def execute(self, context: ExecutionContext) 
-> int: # type: ignore[override] + def execute(self, context: ExecutionContext) -> int: # ty: ignore[invalid-method-override] _ = context return 5 @@ -395,15 +395,19 @@ class FieldTypesTest(Task): def test_get_deserialization_field_type() -> None: fields = FieldTypesTest.__dataclass_fields__ - assert _get_deserialization_field_type(fields["field1"].type) is str - assert _get_deserialization_field_type(fields["field2"].type) is str - assert _get_deserialization_field_type(fields["field3"].type) is NestedJson - assert _get_deserialization_field_type(fields["field4"].type) is NestedJson - assert _get_deserialization_field_type(fields["field5"].type) is NestedJson - assert _get_deserialization_field_type(fields["field6"].type) is NestedJson - assert _get_deserialization_field_type(fields["field7"].type) is NestedJson - assert _get_deserialization_field_type(fields["field8"].type) is NestedJson - assert _get_deserialization_field_type(fields["field9"].type) == list[NestedJson] + + def _get_field_type(field_name: str) -> type: + return _get_deserialization_field_type(fields[field_name].type) # ty: ignore[invalid-argument-type] + + assert _get_field_type("field1") is str + assert _get_field_type("field2") is str + assert _get_field_type("field3") is NestedJson + assert _get_field_type("field4") is NestedJson + assert _get_field_type("field5") is NestedJson + assert _get_field_type("field6") is NestedJson + assert _get_field_type("field7") is NestedJson + assert _get_field_type("field8") is NestedJson + assert _get_field_type("field9") == list[NestedJson] class TaskA(Task): @@ -422,7 +426,7 @@ def execute(self, context: ExecutionContext) -> None: def test_merge_future_tasks_to_submissions() -> None: - context = RunnerExecutionContext(None, None, job_cache=InMemoryCache()) # type: ignore[arg-type] + context = RunnerExecutionContext(None, None, job_cache=InMemoryCache()) # ty: ignore[invalid-argument-type] tasks_1 = context.submit_subtasks([TaskA(3, "three"), TaskA(4, 
"four"), TaskA(5, "five")]) tasks_2 = context.submit_subtasks([TaskB(3.2), TaskB(3.44), TaskB(3.55)], max_retries=1) tasks_3 = context.submit_subtasks([TaskA(6, "six"), TaskB(8.12)], cluster="other") @@ -453,7 +457,7 @@ def test_merge_future_tasks_to_submissions() -> None: def test_merge_future_tasks_to_submissions_dependencies() -> None: - context = RunnerExecutionContext(None, None, job_cache=InMemoryCache()) # type: ignore[arg-type] + context = RunnerExecutionContext(None, None, job_cache=InMemoryCache()) # ty: ignore[invalid-argument-type] tasks_1 = context.submit_subtasks([TaskA(2, "two"), TaskA(3, "three")]) tasks_2 = context.submit_subtasks([TaskA(4, "four"), TaskA(5, "five")]) tasks_3 = context.submit_subtasks([TaskB(3.2)], depends_on=tasks_1) @@ -481,7 +485,7 @@ def test_merge_future_tasks_to_submissions_dependencies() -> None: def test_merge_future_tasks_to_submissions_many_tasks() -> None: - context = RunnerExecutionContext(None, None, job_cache=InMemoryCache()) # type: ignore[arg-type] + context = RunnerExecutionContext(None, None, job_cache=InMemoryCache()) # ty: ignore[invalid-argument-type] n = 100 tasks_1 = context.submit_subtasks([TaskA(i, f"Task {i}") for i in range(n)]) tasks_2 = context.submit_subtasks([TaskB(i / 3) for i in range(n)], depends_on=tasks_1) @@ -496,7 +500,7 @@ def test_merge_future_tasks_to_submissions_many_tasks() -> None: def test_merge_future_tasks_to_submissions_many_non_mergeable_dependency_groups() -> None: - context = RunnerExecutionContext(None, None, job_cache=InMemoryCache()) # type: ignore[arg-type] + context = RunnerExecutionContext(None, None, job_cache=InMemoryCache()) # ty: ignore[invalid-argument-type] n = 100 for i in range(n): task_1 = context.submit_subtasks([TaskA(i, f"Task {i}")]) @@ -508,7 +512,7 @@ def test_merge_future_tasks_to_submissions_many_non_mergeable_dependency_groups( def test_merge_future_tasks_two_separate_branches() -> None: - context = RunnerExecutionContext(None, None, 
job_cache=InMemoryCache()) # type: ignore[arg-type] + context = RunnerExecutionContext(None, None, job_cache=InMemoryCache()) # ty: ignore[invalid-argument-type] task_a = context.submit_subtasks([TaskA(0, "Task 0")]) # left branch task_b_left = context.submit_subtasks([TaskB(0.0)], depends_on=task_a) diff --git a/tilebox-workflows/tilebox/workflows/automations/cron.py b/tilebox-workflows/tilebox/workflows/automations/cron.py index 8446c95..c466bba 100644 --- a/tilebox-workflows/tilebox/workflows/automations/cron.py +++ b/tilebox-workflows/tilebox/workflows/automations/cron.py @@ -1,9 +1,8 @@ from dataclasses import replace from datetime import datetime, timezone -from typing import cast try: - from typing import Self + from typing import Self # ty: ignore[unresolved-import] except ImportError: # Self is only available in Python 3.11+ from typing_extensions import Self @@ -35,11 +34,11 @@ def _serialize(self) -> bytes: return message.SerializeToString() @classmethod - def _deserialize(cls, task_input: bytes, context: RunnerContext | None = None) -> Self: # noqa: ARG003 + def _deserialize(cls: "type[CronTask]", task_input: bytes, context: RunnerContext | None = None) -> Self: # noqa: ARG003 message = AutomationMessage() message.ParseFromString(task_input) - task = cast(Self, deserialize_task(cls, message.args)) + task = deserialize_task(cls, message.args) event_message = TriggeredCronEventMessage() event_message.ParseFromString(message.trigger_event) diff --git a/tilebox-workflows/tilebox/workflows/automations/storage_event.py b/tilebox-workflows/tilebox/workflows/automations/storage_event.py index 6656d56..b219fc6 100644 --- a/tilebox-workflows/tilebox/workflows/automations/storage_event.py +++ b/tilebox-workflows/tilebox/workflows/automations/storage_event.py @@ -1,9 +1,8 @@ from dataclasses import replace -from typing import cast from uuid import UUID try: - from typing import Self + from typing import Self # ty: ignore[unresolved-import] except ImportError: # 
Self is only available in Python 3.11+ from typing_extensions import Self @@ -43,11 +42,11 @@ def _serialize(self) -> bytes: return message.SerializeToString() @classmethod - def _deserialize(cls, task_input: bytes, context: RunnerContext | None = None) -> Self: + def _deserialize(cls: "type[StorageEventTask]", task_input: bytes, context: RunnerContext | None = None) -> Self: message = AutomationMessage() message.ParseFromString(task_input) - task = cast(cls, deserialize_task(cls, message.args)) # type: ignore[invalid-type-form] + task = deserialize_task(cls, message.args) event_message = TriggeredStorageEventMessage() event_message.ParseFromString(message.trigger_event) diff --git a/tilebox-workflows/tilebox/workflows/cache.py b/tilebox-workflows/tilebox/workflows/cache.py index 346176d..710ffe4 100644 --- a/tilebox-workflows/tilebox/workflows/cache.py +++ b/tilebox-workflows/tilebox/workflows/cache.py @@ -199,16 +199,16 @@ def _blob(self, key: str) -> Blob: def __contains__(self, key: str) -> bool: # GCS library has some weird typing issues, so let's ignore them for now - return self._blob(key).exists() # type: ignore[arg-type] + return self._blob(key).exists() def __setitem__(self, key: str, value: bytes) -> None: # GCS library has some weird typing issues, so let's ignore them for now - self._blob(key).upload_from_file(BytesIO(value)) # type: ignore[arg-type] + self._blob(key).upload_from_file(BytesIO(value)) def __getitem__(self, key: str) -> bytes: try: # GCS library has some weird typing issues, so let's ignore them for now - return self._blob(key).download_as_bytes() # type: ignore[arg-type] + return self._blob(key).download_as_bytes() except NotFound: raise KeyError(f"{key} is not cached!") from None @@ -224,7 +224,7 @@ def __iter__(self) -> Iterator[str]: # in the "folder", and not the ones in subfolders # GCS library has some weird typing issues, so let's ignore them for now - blobs = self.bucket.list_blobs(prefix=prefix, delimiter="/") # type: 
ignore[arg-type] + blobs = self.bucket.list_blobs(prefix=prefix, delimiter="/") # make the names relative to the cache prefix (but including the key in the name) for blob in blobs: diff --git a/tilebox-workflows/tilebox/workflows/client.py b/tilebox-workflows/tilebox/workflows/client.py index 4064b01..0eb7f5f 100644 --- a/tilebox-workflows/tilebox/workflows/client.py +++ b/tilebox-workflows/tilebox/workflows/client.py @@ -29,7 +29,7 @@ def __init__(self, *, url: str = "https://api.tilebox.com", token: str | None = token: The API Key to authenticate with. If not set the `TILEBOX_API_KEY` environment variable will be used. """ token = _token_from_env(url, token) - self._auth = {"token": token, "url": url} + self._auth: dict[str, str] = {"token": token, "url": url} self._channel = open_channel(url, token) self._tracer: WorkflowTracer | None = None diff --git a/tilebox-workflows/tilebox/workflows/data.py b/tilebox-workflows/tilebox/workflows/data.py index 5035271..debd862 100644 --- a/tilebox-workflows/tilebox/workflows/data.py +++ b/tilebox-workflows/tilebox/workflows/data.py @@ -200,7 +200,7 @@ class JobState(Enum): _JOB_STATES = {state.value: state for state in JobState} # JobState.QUEUED is deprecated and has been renamed to SUBMITTED, but we keep it around for backwards compatibility -JobState.QUEUED = JobState.SUBMITTED # type: ignore[assignment] +JobState.QUEUED = JobState.SUBMITTED # ty: ignore[unresolved-attribute] @dataclass(order=True, frozen=True) @@ -529,8 +529,8 @@ def read(self, path: str) -> bytes: span.set_attribute("bucket", self.location) span.set_attribute("path", path) # GCS library has some weird typing issues, so let's ignore them for now - blob = runner_context.gcs_client(self.location).blob(path) # type: ignore[arg-type] - return blob.download_as_bytes() # type: ignore[arg-type] + blob = runner_context.gcs_client(self.location).blob(path) + return blob.download_as_bytes() case StorageType.S3: with 
runner_context.tracer.start_as_current_span("s3.read") as span: span.set_attribute("bucket", self.location) diff --git a/tilebox-workflows/tilebox/workflows/formatting/job.py b/tilebox-workflows/tilebox/workflows/formatting/job.py index 2c95027..b1e18d1 100644 --- a/tilebox-workflows/tilebox/workflows/formatting/job.py +++ b/tilebox-workflows/tilebox/workflows/formatting/job.py @@ -341,8 +341,7 @@ def _progress_indicator_bar(label: str, done: int, total: int, state: JobState) f"{percentage:.0%} " f"({done} / {total})" ) - label = HTML(label_html) - return HBox([progress, label]) + return HBox([progress, HTML(label_html)]) _eye_icon = """ diff --git a/tilebox-workflows/tilebox/workflows/jobs/client.py b/tilebox-workflows/tilebox/workflows/jobs/client.py index b311ec6..6271ab6 100644 --- a/tilebox-workflows/tilebox/workflows/jobs/client.py +++ b/tilebox-workflows/tilebox/workflows/jobs/client.py @@ -19,7 +19,7 @@ from tilebox.workflows.task import Task as TaskInstance try: - from IPython.display import HTML, display # type: ignore[assignment] + from IPython.display import HTML, display except ImportError: class HTML: @@ -192,11 +192,13 @@ def query( id_interval: IDInterval | None = None match temporal_extent: case (str(), str()): + # ty doesn't narrow types on match statements yet, once it does we can remove this cast + str_temporal_extent: tuple[str, str] = temporal_extent # ty: ignore[invalid-assignment] # this is either a tuple of datetimes or a tuple of UUIDs try: - id_interval = IDInterval.parse(temporal_extent) + id_interval = IDInterval.parse(str_temporal_extent) except ValueError: - dataset_time_interval = TimeInterval.parse(temporal_extent) + dataset_time_interval = TimeInterval.parse(str_temporal_extent) time_interval = TimeInterval( start=dataset_time_interval.start, end=dataset_time_interval.end, @@ -206,7 +208,10 @@ def query( case IDInterval(_, _, _, _) | (UUID(), UUID()): id_interval = IDInterval.parse(temporal_extent) case _: - dataset_time_interval = 
TimeInterval.parse(temporal_extent) + # ty doesn't narrow types on match statements yet, once it does we can remove this cast + # because due to the match statement above we know that temporal_extent is a TimeIntervalLike + time_interval_like: TimeIntervalLike = temporal_extent # ty: ignore[invalid-assignment] + dataset_time_interval = TimeInterval.parse(time_interval_like) time_interval = TimeInterval( start=dataset_time_interval.start, end=dataset_time_interval.end, diff --git a/tilebox-workflows/tilebox/workflows/observability/logging.py b/tilebox-workflows/tilebox/workflows/observability/logging.py index eb02ad8..8c57a4a 100644 --- a/tilebox-workflows/tilebox/workflows/observability/logging.py +++ b/tilebox-workflows/tilebox/workflows/observability/logging.py @@ -110,7 +110,7 @@ def _otel_log_exporter( headers=headers, ) schedule_delay = int(export_interval.total_seconds() * 1000) if export_interval is not None else None - return BatchLogRecordProcessor(exporter, schedule_delay_millis=schedule_delay) # type: ignore[arg-type] + return BatchLogRecordProcessor(exporter, schedule_delay_millis=schedule_delay) def configure_otel_logging( @@ -324,7 +324,7 @@ def get_logger(name: str | None = None, level: int = logging.NOTSET) -> logging. 
handler.setFormatter(ColorfulConsoleFormatter()) # we set a special attribute, which allows as to remove this handler again as soon # as we configure an actual logging handler - handler._is_default = True # type: ignore[attr-defined] # noqa: SLF001 + handler._is_default = True # ty: ignore[unresolved-attribute] # noqa: SLF001 root_logger.addHandler(handler) logger = logging.getLogger(f"{_LOGGING_NAMESPACE}.{name}") diff --git a/tilebox-workflows/tilebox/workflows/observability/tracing.py b/tilebox-workflows/tilebox/workflows/observability/tracing.py index f8c48ae..5b00694 100644 --- a/tilebox-workflows/tilebox/workflows/observability/tracing.py +++ b/tilebox-workflows/tilebox/workflows/observability/tracing.py @@ -116,7 +116,7 @@ def _otel_span_exporter( headers=headers, ) schedule_delay = int(export_interval.total_seconds() * 1000) if export_interval is not None else None - return BatchSpanProcessor(exporter, schedule_delay_millis=schedule_delay) # type: ignore[arg-type] + return BatchSpanProcessor(exporter, schedule_delay_millis=schedule_delay) class SpanEventLoggingHandler(logging.Handler): diff --git a/tilebox-workflows/tilebox/workflows/runner/task_runner.py b/tilebox-workflows/tilebox/workflows/runner/task_runner.py index aebdaaf..0a6df28 100644 --- a/tilebox-workflows/tilebox/workflows/runner/task_runner.py +++ b/tilebox-workflows/tilebox/workflows/runner/task_runner.py @@ -77,7 +77,7 @@ def _retry_backoff(func: Callable[..., WrappedFnReturnT], stop: stop_base) -> Ca Returns: The wrapped function """ - return retry( # type: ignore[no-any-return] + return retry( retry=retry_if_exception_type(InternalServerError), stop=stop, wait=wait_random_exponential( @@ -159,8 +159,8 @@ def __init__(self, url: str, token: str | None) -> None: # we don't want to fork the current process, but instead spawn a new one # therefore we need to use the spawn context to create the queues ctx = get_context("spawn") - self._new_leases: Queue[tuple[UUID, TaskLease]] = ctx.Queue() # 
type: ignore[assignment] - self._done_tasks: Queue[UUID] = ctx.Queue() # type: ignore[assignment] + self._new_leases: Queue[tuple[UUID, TaskLease]] = ctx.Queue() + self._done_tasks: Queue[UUID] = ctx.Queue() def run(self) -> None: lease_renewer(self._url, self._token, self._new_leases, self._done_tasks) @@ -467,7 +467,7 @@ def _try_execute( span.update_name(f"task/{task_class.__name__}") try: - task_instance = task_class._deserialize(task.input, self._context) # noqa: SLF001 + task_instance = task_class._deserialize(task.input, self._context) # ty: ignore[possibly-missing-attribute] # noqa: SLF001 except json.JSONDecodeError: self.logger.exception(f"Failed to deserialize input for task execution {task.id}") raise ValueError(f"Failed to deserialize input for task execution {task.id}") from None diff --git a/tilebox-workflows/tilebox/workflows/task.py b/tilebox-workflows/tilebox/workflows/task.py index 0c797f6..e704081 100644 --- a/tilebox-workflows/tilebox/workflows/task.py +++ b/tilebox-workflows/tilebox/workflows/task.py @@ -50,7 +50,7 @@ def __new__(cls, name: str, bases: tuple[type], attrs: dict[str, Any]) -> type: return task_class # Convert the class to a dataclass - task_class = dataclass(task_class) # type: ignore[arg-type] + task_class = dataclass(task_class) # we allow overriding the execute method, but we still want to validate it # so we search for the closest base class that has an execute method and use @@ -118,7 +118,7 @@ def _serialize(self) -> bytes: @classmethod def _deserialize(cls, task_input: bytes, context: RunnerContext | None = None) -> "Task": # noqa: ARG003 - return cast(Task, deserialize_task(cls, task_input)) + return deserialize_task(cls, task_input) def _validate_execute_method( @@ -201,7 +201,7 @@ def identifier() -> tuple[str, str]: class_name = task_class.__name__ if hasattr(task_class, "identifier"): # if the task class has an identifier method, we use that try: - name, version = task_class.identifier() + name, version = 
task_class.identifier() # ty: ignore[call-non-callable] except TypeError as err: raise ValueError( f"Failed to invoke {class_name}.identifier(). Is it a staticmethod or classmethod without parameters?" @@ -422,12 +422,12 @@ def serialize_task(task: Task) -> bytes: field = json.dumps(field).encode() return field - return json.dumps(_serialize_as_dict(task)).encode() # type: ignore[arg-type] + return json.dumps(_serialize_as_dict(task)).encode() def _serialize_as_dict(task: Task) -> dict[str, Any]: as_dict: dict[str, Any] = {} - for dataclass_field in fields(task): # type: ignore[union-attr] + for dataclass_field in fields(task): # ty: ignore[invalid-argument-type] skip = dataclass_field.metadata.get("skip_serialization", False) if skip: continue @@ -452,11 +452,14 @@ def _serialize_value(value: Any, base64_encode_protobuf: bool) -> Any: # noqa: return b64encode(value.SerializeToString()).decode("ascii") return value.SerializeToString() if is_dataclass(value): - return _serialize_as_dict(value) # type: ignore[arg-type] + return _serialize_as_dict(value) return value -def deserialize_task(task_cls: type, task_input: bytes) -> Task: +_T = TypeVar("_T", bound=Task) + + +def deserialize_task(task_cls: type[_T], task_input: bytes) -> _T: """Deserialize the input of a task from a buffer of bytes. The task_cls is expected to be a dataclass, containing an arbitrary number of fields. 
@@ -468,22 +471,22 @@ def deserialize_task(task_cls: type, task_input: bytes) -> Task: return task_cls() # empty task if len(task_fields) == 1: # if there is only one field, we deserialize it directly - field_type = _get_deserialization_field_type(task_fields[0].type) # type: ignore[arg-type] + field_type = _get_deserialization_field_type(task_fields[0].type) # ty: ignore[invalid-argument-type] if hasattr(field_type, "FromString"): # protobuf message - value = field_type.FromString(task_input) # type: ignore[arg-type] + value = field_type.FromString(task_input) # ty: ignore[call-non-callable] else: - value = _deserialize_value(field_type, json.loads(task_input.decode())) # type: ignore[arg-type] + value = _deserialize_value(field_type, json.loads(task_input.decode())) return task_cls(**{task_fields[0].name: value}) return _deserialize_dataclass(task_cls, json.loads(task_input.decode())) -def _deserialize_dataclass(cls: type, params: dict[str, Any]) -> Task: +def _deserialize_dataclass(cls: type[_T], params: dict[str, Any]) -> _T: """Deserialize a dataclass, while allowing recursively nested dataclasses or protobuf messages.""" for param in list(params): # recursively deserialize nested dataclasses - field = cls.__dataclass_fields__[param] + field = cls.__dataclass_fields__[param] # ty: ignore[unresolved-attribute] params[field.name] = _deserialize_value(field.type, params[field.name]) return cls(**params) @@ -495,7 +498,7 @@ def _deserialize_value(field_type: type, value: Any) -> Any: # noqa: PLR0911 field_type = _get_deserialization_field_type(field_type) if hasattr(field_type, "FromString"): - return field_type.FromString(b64decode(value)) + return field_type.FromString(b64decode(value)) # ty: ignore[call-non-callable] if is_dataclass(field_type) and isinstance(value, dict): return _deserialize_dataclass(field_type, value) diff --git a/tilebox-workflows/tilebox/workflows/timeseries.py b/tilebox-workflows/tilebox/workflows/timeseries.py index 3874ceb..f09f748 
100644 --- a/tilebox-workflows/tilebox/workflows/timeseries.py +++ b/tilebox-workflows/tilebox/workflows/timeseries.py @@ -26,11 +26,11 @@ def _timeseries_dataset_chunk(task: Task, call_next: ForwardExecution, context: if not isinstance(task, TimeseriesTask): raise TypeError("Task is not a timeseries task. Inherit from TimeseriesTask to mark it as such.") - chunk: TimeseriesDatasetChunk = task.timeseries_data # type: ignore[attr-defined] + chunk: TimeseriesDatasetChunk = task.timeseries_data # let's get a collection client datasets_client = context.runner_context.datasets_client - dataset = datasets_client._dataset_by_id(str(chunk.dataset_id)) # type: ignore[attr-defined] # noqa: SLF001 + dataset = datasets_client._dataset_by_id(str(chunk.dataset_id)) # ty: ignore[possibly-missing-attribute] # noqa: SLF001 # we already know the collection id, so we can skip the lookup (we don't know the name, but don't need it) collection_info = CollectionInfo(Collection(chunk.collection_id, "unknown"), None, None) collection = CollectionClient(dataset, collection_info) @@ -50,7 +50,7 @@ def _timeseries_dataset_chunk(task: Task, call_next: ForwardExecution, context: for i in range(datapoints.sizes["time"]): datapoint = datapoints.isel(time=i) - call_next(context, datapoint) # type: ignore[call-arg] + call_next(context, datapoint) # ty: ignore[too-many-positional-arguments] return # we are done @@ -90,7 +90,7 @@ def _timeseries_dataset_chunk(task: Task, call_next: ForwardExecution, context: for sub_chunk_start, sub_chunk_end in pairwise(chunks): sub_chunks.append(replace(chunk, time_interval=TimeInterval(sub_chunk_start, sub_chunk_end))) - subtasks = [replace(task, timeseries_data=sub_chunk) for sub_chunk in sub_chunks] # type: ignore[misc] + subtasks = [replace(task, timeseries_data=sub_chunk) for sub_chunk in sub_chunks] if len(subtasks) > 0: context.submit_subtasks(subtasks) @@ -103,7 +103,7 @@ class TimeseriesTask(Task): timeseries_data: TimeseriesDatasetChunk @override - def 
execute(self, context: ExecutionContext, datapoint: xr.Dataset) -> None: # type: ignore[override] + def execute(self, context: ExecutionContext, datapoint: xr.Dataset) -> None: # ty: ignore[invalid-method-override] pass @@ -136,14 +136,14 @@ def _time_interval_chunk(task: Task, call_next: ForwardExecution, context: Execu if not isinstance(task, TimeIntervalTask): raise TypeError("Task is not a time interval task. Inherit from TimeIntervalTask to mark it as such.") - chunk: TimeChunk = task.interval # type: ignore[attr-defined] + chunk: TimeChunk = task.interval start = _make_multiple(chunk.time_interval.start, chunk.chunk_size, before=True) end = _make_multiple(chunk.time_interval.end, chunk.chunk_size, before=False) n = (end - start) // chunk.chunk_size if n <= 1: # we are already a leaf task - return call_next(context, TimeInterval(start, end)) # type: ignore[call-arg] + return call_next(context, TimeInterval(start, end)) # ty: ignore[too-many-positional-arguments] chunks: list[datetime] = [] if n < 4: # we are a branch task with less than 4 sub chunks, so a further split is not worth it @@ -158,9 +158,7 @@ def _time_interval_chunk(task: Task, call_next: ForwardExecution, context: Execu TimeChunk(TimeInterval(chunk_start, chunk_end), chunk.chunk_size) for chunk_start, chunk_end in pairwise(chunks) ] - context.submit_subtasks( - [replace(task, interval=time_chunk) for time_chunk in time_chunks] # type: ignore[misc] - ) + context.submit_subtasks([replace(task, interval=time_chunk) for time_chunk in time_chunks]) return None @@ -170,12 +168,12 @@ class TimeIntervalTask(Task): interval: TimeChunk @override - def execute(self, context: ExecutionContext, time_interval: TimeInterval) -> None: # type: ignore[override] + def execute(self, context: ExecutionContext, time_interval: TimeInterval) -> None: # ty: ignore[invalid-method-override] pass def batch_process_time_interval(interval: TimeIntervalLike, chunk_size: timedelta) -> TimeChunk: - return 
TimeChunk(time_interval=TimeInterval.parse(interval).to_half_open(), chunk_size=chunk_size) # type: ignore[arg-type] + return TimeChunk(time_interval=TimeInterval.parse(interval).to_half_open(), chunk_size=chunk_size) def _make_multiple(time: datetime, duration: timedelta, start: datetime = _EPOCH, before: bool = True) -> datetime: diff --git a/uv.lock b/uv.lock index 035f350..46c5294 100644 --- a/uv.lock +++ b/uv.lock @@ -63,30 +63,30 @@ wheels = [ [[package]] name = "boto3" -version = "1.42.25" +version = "1.42.26" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, { name = "jmespath" }, { name = "s3transfer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/29/30/755a6c4b27ad4effefa9e407f84c6f0a69f75a21c0090beb25022dfcfd3f/boto3-1.42.25.tar.gz", hash = "sha256:ccb5e757dd62698d25766cc54cf5c47bea43287efa59c93cf1df8c8fbc26eeda", size = 112811, upload-time = "2026-01-09T20:27:44.73Z" } +sdist = { url = "https://files.pythonhosted.org/packages/da/ad/06f48f2d0e9ec91d136602c7009f5f68c84be3655cc6e7e2b59aff82ead4/boto3-1.42.26.tar.gz", hash = "sha256:0fbcf1922e62d180f3644bc1139425821b38d93c1e6ec27409325d2ae86131aa", size = 112877, upload-time = "2026-01-12T20:36:39.6Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/14/79/012734f4e510b0a6beec2a3d5f437b3e8ef52174b1d38b1d5fdc542316d7/boto3-1.42.25-py3-none-any.whl", hash = "sha256:8128bde4f9d5ffce129c76d1a2efe220e3af967a2ad30bc305ba088bbc96343d", size = 140575, upload-time = "2026-01-09T20:27:42.788Z" }, + { url = "https://files.pythonhosted.org/packages/fd/0c/094a63b0ab893995b1f2e7ddb5425e11f97403feb90cea0eb770c8905487/boto3-1.42.26-py3-none-any.whl", hash = "sha256:f116cfbe7408e0a9153da363f134d2f1b5008f17ee86af104f0ce59a62be1833", size = 140576, upload-time = "2026-01-12T20:36:38.244Z" }, ] [[package]] name = "boto3-stubs" -version = "1.42.25" +version = "1.42.26" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = 
"botocore-stubs" }, { name = "types-s3transfer" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/d4/b36e0c588664a126f7a1d3e9885c606d53f0432e27c3640321f659aa8c44/boto3_stubs-1.42.25.tar.gz", hash = "sha256:fd40c758991ae1bcbd1adbb153d513a028bf525642f193f9a77f71220c493cf6", size = 100896, upload-time = "2026-01-09T20:44:53.587Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/46/35c6b651356f79cce5010dea231806ba4cd866aea2c975cdf26577c4fb5c/boto3_stubs-1.42.26.tar.gz", hash = "sha256:537b38828ae036a40ac103fc2bcc520e933759816da9cabfbfece9ed175d7c7e", size = 100877, upload-time = "2026-01-12T20:40:12.587Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/3e/5279b24cbd0f92dd52a0ad1d81cb7d59c889ab60d19aaaaad6d64c8ad2e7/boto3_stubs-1.42.25-py3-none-any.whl", hash = "sha256:a61a4caaf2199d11510bb18d044254fa0fd1929a6b07817f00faa8e23437adc5", size = 69782, upload-time = "2026-01-09T20:44:47.033Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e2/12fce9d52b3dce78b02e493fb93a655857a096a05c21aefa7bfe62caa9cb/boto3_stubs-1.42.26-py3-none-any.whl", hash = "sha256:009e6763a3fe4013293abb64b8bc92593361f8deb1e961b844ba645b2d6f70f2", size = 69782, upload-time = "2026-01-12T20:40:03.368Z" }, ] [package.optional-dependencies] @@ -102,28 +102,28 @@ essential = [ [[package]] name = "botocore" -version = "1.42.25" +version = "1.42.26" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jmespath" }, { name = "python-dateutil" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2c/b5/8f961c65898deb5417c9e9e908ea6c4d2fe8bb52ff04e552f679c88ed2ce/botocore-1.42.25.tar.gz", hash = "sha256:7ae79d1f77d3771e83e4dd46bce43166a1ba85d58a49cffe4c4a721418616054", size = 14879737, upload-time = "2026-01-09T20:27:34.676Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/67/c9/6ce745d4233aeb3abdb18205739b394f7955087f7603cb324a797adbf8d2/botocore-1.42.26.tar.gz", hash = "sha256:1c8855e3e811f015d930ccfe8751d4be295aae0562133d14b6f0b247cd6fd8d3", size = 14882582, upload-time = "2026-01-12T20:36:29.382Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/b0/61e3e61d437c8c73f0821ce8a8e2594edfc1f423e354c38fa56396a4e4ca/botocore-1.42.25-py3-none-any.whl", hash = "sha256:470261966aab1d09a1cd4ba56810098834443602846559ba9504f6613dfa52dc", size = 14553881, upload-time = "2026-01-09T20:27:30.487Z" }, + { url = "https://files.pythonhosted.org/packages/61/43/5993eab2114c0de7bbc21985b745aafe3b912f98fc63726c2a54680bb69d/botocore-1.42.26-py3-none-any.whl", hash = "sha256:71171c2d09ac07739f4efce398b15a4a8bc8769c17fb3bc99625e43ed11ad8b7", size = 14554661, upload-time = "2026-01-12T20:36:26.891Z" }, ] [[package]] name = "botocore-stubs" -version = "1.42.25" +version = "1.42.26" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-awscrt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/44/23/1f30c552bd0af9523abe49d50e849555298ed836b18a8039093ba786c2ef/botocore_stubs-1.42.25.tar.gz", hash = "sha256:70a8a53ba2684ff462c44d5996acd85fc5c7eb969e2cf3c25274441269524298", size = 42415, upload-time = "2026-01-09T20:32:21.78Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/66/f2308aa4b6e7f24ddc788bec0c26c85f03540ae4cbc07299e915f3e47da4/botocore_stubs-1.42.26.tar.gz", hash = "sha256:5b0946681d46ce8acb0a3b8494bdf76d34bc26276f0b7baedcf88a6cf1dd798b", size = 42398, upload-time = "2026-01-12T21:28:07.204Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/15/c5/4c66c8ade8fb180d417e164de54ab75fc26aa0e5543f6e33c8465722feb9/botocore_stubs-1.42.25-py3-none-any.whl", hash = "sha256:49d15529002bd1099a9a099a77d70b7b52859153783440e96eb55791e8147d1b", size = 66761, upload-time = "2026-01-09T20:32:20.512Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/47/bee86341e7294c6c977ea1da6cf2ec86dc6129adcf87af30ed1cac8d02de/botocore_stubs-1.42.26-py3-none-any.whl", hash = "sha256:548380a16d31234255c00a4a4a15a5c8cdad360ba1af6dac5202111ec258155c", size = 66762, upload-time = "2026-01-12T21:28:05.555Z" }, ] [[package]] @@ -264,15 +264,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, ] -[[package]] -name = "cfgv" -version = "3.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4e/b5/721b8799b04bf9afe054a3899c6cf4e880fcf8563cc71c15610242490a0c/cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132", size = 7334, upload-time = "2025-11-19T20:55:51.612Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" }, -] - [[package]] name = "cftime" version = "1.6.5" @@ -648,15 +639,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, ] -[[package]] -name = "distlib" -version = "0.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = 
"sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, -] - [[package]] name = "exceptiongroup" version = "1.3.1" @@ -678,15 +660,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" }, ] -[[package]] -name = "filelock" -version = "3.20.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, -] - [[package]] name = "folium" version = "0.20.0" @@ -947,15 +920,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/22/18/f43815244cd99b54d8ac9f44f9799bb7c0115e48e29bc7a1899c0589ee48/hypothesis-6.150.1-py3-none-any.whl", hash = "sha256:7badb28a0da323d6afaf25eae1c93932cb8ac06193355f5e080d6e6465a51da5", size = 542374, upload-time = "2026-01-12T08:45:41.854Z" }, ] -[[package]] -name = "identify" -version = "2.6.15" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" }, -] - [[package]] name = "idna" version = "3.11" @@ -1397,15 +1361,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" }, ] -[[package]] -name = "nodeenv" -version = "1.10.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" }, -] - [[package]] name = "numpy" version = "2.2.6" @@ -1825,15 +1780,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/44/3c/d717024885424591d5376220b5e836c2d5293ce2011523c9de23ff7bf068/pip-25.3-py3-none-any.whl", hash = 
"sha256:9655943313a94722b7774661c21049070f6bbb0a1516bf02f7c8d5d9201514cd", size = 1778622, upload-time = "2025-10-25T00:55:39.247Z" }, ] -[[package]] -name = "platformdirs" -version = "4.5.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, -] - [[package]] name = "pluggy" version = "1.6.0" @@ -1844,19 +1790,27 @@ wheels = [ ] [[package]] -name = "pre-commit" -version = "4.5.1" +name = "prek" +version = "0.2.27" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cfgv" }, - { name = "identify" }, - { name = "nodeenv" }, - { name = "pyyaml" }, - { name = "virtualenv" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/40/f1/6d86a29246dfd2e9b6237f0b5823717f60cad94d47ddc26afa916d21f525/pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61", size = 198232, upload-time = "2025-12-16T21:14:33.552Z" } +sdist = { url = "https://files.pythonhosted.org/packages/87/0b/2a0509d2d8881811e4505227df9ca31b3a4482497689b5c2b7f38faab1e5/prek-0.2.27.tar.gz", hash = "sha256:dfd2a1b040f55402c2449ae36ea28e8c1bb05ca900490d5c0996b1b72297cc0e", size = 283076, upload-time = "2026-01-07T14:23:17.123Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = 
"sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, upload-time = "2025-12-16T21:14:32.409Z" }, + { url = "https://files.pythonhosted.org/packages/d8/03/01dd50c89aa38bc194bb14073468bcbd1fec1621150967b7d424d2f043a7/prek-0.2.27-py3-none-linux_armv6l.whl", hash = "sha256:3c7ce590289e4fc0119524d0f0f187133a883d6784279b6a3a4080f5851f1612", size = 4799872, upload-time = "2026-01-07T14:23:15.5Z" }, + { url = "https://files.pythonhosted.org/packages/51/86/807267659e4775c384e755274a214a45461266d6a1117ec059fbd245731b/prek-0.2.27-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:df35dee5dcf09a9613c8b9c6f3d79a3ec894eb13172f569773d529a5458887f8", size = 4903805, upload-time = "2026-01-07T14:23:35.199Z" }, + { url = "https://files.pythonhosted.org/packages/1b/5b/cc3c13ed43e7523f27a2f9b14d18c9b557fb1090e7a74689f934cb24d721/prek-0.2.27-py3-none-macosx_11_0_arm64.whl", hash = "sha256:772d84ebe19b70eba1da0f347d7d486b9b03c0a33fe19c2d1bf008e72faa13b3", size = 4629083, upload-time = "2026-01-07T14:23:12.204Z" }, + { url = "https://files.pythonhosted.org/packages/34/d9/86eafc1d7bddf9236263d4428acca76b7bfc7564ccc2dc5e539d1be22b5e/prek-0.2.27-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:571aab2e9c0eace30a51b0667533862f4bdc0a81334d342f6f516796a63fd1e4", size = 4825005, upload-time = "2026-01-07T14:23:28.438Z" }, + { url = "https://files.pythonhosted.org/packages/44/cf/83004be0a9e8ac3c8c927afab5948d9e31760e15442a0fff273f158cae51/prek-0.2.27-py3-none-manylinux_2_24_armv7l.whl", hash = "sha256:cc7a47f40f36c503e77eb6209f7ad5979772f9c7c5e88ba95cf20f0d24ece926", size = 4724850, upload-time = "2026-01-07T14:23:18.276Z" }, + { url = "https://files.pythonhosted.org/packages/73/8c/5c754f4787fc07e7fa6d2c25ac90931cd3692b51f03c45259aca2ea6fd3f/prek-0.2.27-py3-none-manylinux_2_24_i686.whl", hash = "sha256:cd87b034e56f610f9cafd3b7d554dca69f1269a511ad330544d696f08c656eb3", size = 5042584, upload-time = 
"2026-01-07T14:23:37.892Z" }, + { url = "https://files.pythonhosted.org/packages/4d/80/762283280ae3d2aa35385ed2db76c39518ed789fbaa0b6fb52352764d41c/prek-0.2.27-py3-none-manylinux_2_24_s390x.whl", hash = "sha256:638b4e942dd1cea6fc0ddf4ce5b877e5aa97c6c142b7bf28e9ce6db8f0d06a4a", size = 5511089, upload-time = "2026-01-07T14:23:23.121Z" }, + { url = "https://files.pythonhosted.org/packages/e0/78/1b53b604c188f4054346b237ec1652489718fedc0d465baadecf7907dc42/prek-0.2.27-py3-none-manylinux_2_24_x86_64.whl", hash = "sha256:769b13d7bd11fbb4a5fc5fffd2158aea728518ec9aca7b36723b10ad8b189810", size = 5100175, upload-time = "2026-01-07T14:23:19.643Z" }, + { url = "https://files.pythonhosted.org/packages/86/fc/a9dc29598e664e6e663da316338e1e980e885072107876a3ca8d697f4d65/prek-0.2.27-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:6c0bc38806caf14d47d44980d936ee0cb153bccea703fb141c16bb9be49fb778", size = 4833004, upload-time = "2026-01-07T14:23:36.467Z" }, + { url = "https://files.pythonhosted.org/packages/04/b7/56ca9226f20375519d84a2728a985cc491536f0b872f10cb62bcc55ccea0/prek-0.2.27-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:77c8ac95a0bb1156159edcb3c52b5f852910a7d2ed53d6136ecc1d9d6dc39fe1", size = 4842559, upload-time = "2026-01-07T14:23:31.691Z" }, + { url = "https://files.pythonhosted.org/packages/87/20/71ef2c558daabbe2a4cfe6567597f7942dbbad1a3caca0d786b4ec1304cb/prek-0.2.27-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:5e8d56b386660266c2a31e12af8b52a0901fe21fb71ab05768fdd41b405794ac", size = 4709053, upload-time = "2026-01-07T14:23:26.602Z" }, + { url = "https://files.pythonhosted.org/packages/e8/14/7376117d0e91e35ce0f6581d4427280f634b9564c86615f74b79f242fa79/prek-0.2.27-py3-none-musllinux_1_1_i686.whl", hash = "sha256:3fdeaa1b9f97e21d870ba091914bc7ccf85106a9ef74d81f362a92cdbfe33569", size = 4927803, upload-time = "2026-01-07T14:23:30Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/81/87f36898ec2ac1439468b20e9e7061b4956ce0cf518c7cc15ac0457f2971/prek-0.2.27-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:20dd04fe33b9fcfbc2069f4e523ec8d9b4813c1ca4ac9784fe2154dcab42dacb", size = 5210701, upload-time = "2026-01-07T14:23:24.87Z" }, + { url = "https://files.pythonhosted.org/packages/50/5a/53f7828543c09cb70ed35291818ec145a42ef04246fa4f82c128b26abd4f/prek-0.2.27-py3-none-win32.whl", hash = "sha256:15948cacbbccd935f57ca164b36c4c5d7b03c58cd5a335a6113cdbd149b6e50d", size = 4623511, upload-time = "2026-01-07T14:23:33.472Z" }, + { url = "https://files.pythonhosted.org/packages/73/21/3a079075a4d4db58f909eedfd7a79517ba90bb12f7b61f6e84c3c29d4d61/prek-0.2.27-py3-none-win_amd64.whl", hash = "sha256:8225dc8523e7a0e95767b3d3e8cfb3bc160fe6af0ee5115fc16c68428c4e0779", size = 5312713, upload-time = "2026-01-07T14:23:21.116Z" }, + { url = "https://files.pythonhosted.org/packages/39/79/d1c3d96ed4f7dff37ed11101d8336131e8108315c3078246007534dcdd27/prek-0.2.27-py3-none-win_arm64.whl", hash = "sha256:f9192bfb6710db2be10f0e28ff31706a2648c1eb8a450b20b2f55f70ba05e769", size = 4978272, upload-time = "2026-01-07T14:23:13.681Z" }, ] [[package]] @@ -1894,17 +1848,17 @@ wheels = [ [[package]] name = "protobuf" -version = "6.33.3" +version = "6.33.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/5c/f912bdebdd4af4160da6a2c2b1b3aaa1b8c578d0243ba8f694f93c7095f0/protobuf-6.33.3.tar.gz", hash = "sha256:c8794debeb402963fddff41a595e1f649bcd76616ba56c835645cab4539e810e", size = 444318, upload-time = "2026-01-09T23:05:02.79Z" } +sdist = { url = "https://files.pythonhosted.org/packages/53/b8/cda15d9d46d03d4aa3a67cb6bffe05173440ccf86a9541afaf7ac59a1b6b/protobuf-6.33.4.tar.gz", hash = "sha256:dc2e61bca3b10470c1912d166fe0af67bfc20eb55971dcef8dfa48ce14f0ed91", size = 444346, upload-time = "2026-01-12T18:33:40.109Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2a/56/2a41b9dcc3b92fa672bb89610608f4fd4f71bec075d314956710503b29f5/protobuf-6.33.3-cp310-abi3-win32.whl", hash = "sha256:b4046f9f2ede57ad5b1d9917baafcbcad42f8151a73c755a1e2ec9557b0a764f", size = 425597, upload-time = "2026-01-09T23:04:50.11Z" }, - { url = "https://files.pythonhosted.org/packages/23/07/1f1300fe7d204fd7aaabd9a0aafd54e6358de833b783f5bd161614e8e1e4/protobuf-6.33.3-cp310-abi3-win_amd64.whl", hash = "sha256:1fd18f030ae9df97712fbbb0849b6e54c63e3edd9b88d8c3bb4771f84d8db7a4", size = 436945, upload-time = "2026-01-09T23:04:51.921Z" }, - { url = "https://files.pythonhosted.org/packages/ec/5d/0ef28dded98973a26443a6a7bc49bff6206be8c57dc1d1e28e6c1147b879/protobuf-6.33.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:648b7b0144222eb06cf529a3d7b01333c5f30b4196773b682d388f04db373759", size = 427594, upload-time = "2026-01-09T23:04:53.358Z" }, - { url = "https://files.pythonhosted.org/packages/c5/46/551c69b6ff1957bd703654342bfb776bb97db400bc80afc56fbb64e7c11d/protobuf-6.33.3-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:08a6ca12f60ba99097dd3625ef4275280f99c9037990e47ce9368826b159b890", size = 324469, upload-time = "2026-01-09T23:04:54.332Z" }, - { url = "https://files.pythonhosted.org/packages/ca/6d/ade1cca06c64a421ee9745e082671465ead28164c809efaf2c15bc93f9a0/protobuf-6.33.3-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:642fce7187526c98683c79a3ad68e5d646a5ef5eb004582fe123fc9a33a9456b", size = 339242, upload-time = "2026-01-09T23:04:55.347Z" }, - { url = "https://files.pythonhosted.org/packages/38/8c/6522b8e543ece46f645911c3cebe361d8460134c0fee02ddcf70ebf32999/protobuf-6.33.3-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:6fa9b5f4baa12257542273e5e6f3c3d3867b30bc2770c14ad9ac8315264bf986", size = 323298, upload-time = "2026-01-09T23:04:56.866Z" }, - { url = "https://files.pythonhosted.org/packages/a6/b9/067b8a843569d5605ba6f7c039b9319720a974f82216cd623e13186d3078/protobuf-6.33.3-py3-none-any.whl", hash = 
"sha256:c2bf221076b0d463551efa2e1319f08d4cffcc5f0d864614ccd3d0e77a637794", size = 170518, upload-time = "2026-01-09T23:05:01.227Z" }, + { url = "https://files.pythonhosted.org/packages/e0/be/24ef9f3095bacdf95b458543334d0c4908ccdaee5130420bf064492c325f/protobuf-6.33.4-cp310-abi3-win32.whl", hash = "sha256:918966612c8232fc6c24c78e1cd89784307f5814ad7506c308ee3cf86662850d", size = 425612, upload-time = "2026-01-12T18:33:29.656Z" }, + { url = "https://files.pythonhosted.org/packages/31/ad/e5693e1974a28869e7cd244302911955c1cebc0161eb32dfa2b25b6e96f0/protobuf-6.33.4-cp310-abi3-win_amd64.whl", hash = "sha256:8f11ffae31ec67fc2554c2ef891dcb561dae9a2a3ed941f9e134c2db06657dbc", size = 436962, upload-time = "2026-01-12T18:33:31.345Z" }, + { url = "https://files.pythonhosted.org/packages/66/15/6ee23553b6bfd82670207ead921f4d8ef14c107e5e11443b04caeb5ab5ec/protobuf-6.33.4-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2fe67f6c014c84f655ee06f6f66213f9254b3a8b6bda6cda0ccd4232c73c06f0", size = 427612, upload-time = "2026-01-12T18:33:32.646Z" }, + { url = "https://files.pythonhosted.org/packages/2b/48/d301907ce6d0db75f959ca74f44b475a9caa8fcba102d098d3c3dd0f2d3f/protobuf-6.33.4-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:757c978f82e74d75cba88eddec479df9b99a42b31193313b75e492c06a51764e", size = 324484, upload-time = "2026-01-12T18:33:33.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/1c/e53078d3f7fe710572ab2dcffd993e1e3b438ae71cfc031b71bae44fcb2d/protobuf-6.33.4-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:c7c64f259c618f0bef7bee042075e390debbf9682334be2b67408ec7c1c09ee6", size = 339256, upload-time = "2026-01-12T18:33:35.231Z" }, + { url = "https://files.pythonhosted.org/packages/e8/8e/971c0edd084914f7ee7c23aa70ba89e8903918adca179319ee94403701d5/protobuf-6.33.4-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:3df850c2f8db9934de4cf8f9152f8dc2558f49f298f37f90c517e8e5c84c30e9", size = 323311, upload-time = "2026-01-12T18:33:36.305Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/b1/1dc83c2c661b4c62d56cc081706ee33a4fc2835bd90f965baa2663ef7676/protobuf-6.33.4-py3-none-any.whl", hash = "sha256:1fe3730068fcf2e595816a6c34fe66eeedd37d51d0400b72fabc848811fdc1bc", size = 170532, upload-time = "2026-01-12T18:33:39.199Z" }, ] [[package]] @@ -2030,19 +1984,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bd/24/12818598c362d7f300f18e74db45963dbcb85150324092410c8b49405e42/pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913", size = 10216, upload-time = "2024-09-29T09:24:11.978Z" }, ] -[[package]] -name = "pyright" -version = "1.1.400" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "nodeenv" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6c/cb/c306618a02d0ee8aed5fb8d0fe0ecfed0dbf075f71468f03a30b5f4e1fe0/pyright-1.1.400.tar.gz", hash = "sha256:b8a3ba40481aa47ba08ffb3228e821d22f7d391f83609211335858bf05686bdb", size = 3846546, upload-time = "2025-04-24T12:55:18.907Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/a5/5d285e4932cf149c90e3c425610c5efaea005475d5f96f1bfdb452956c62/pyright-1.1.400-py3-none-any.whl", hash = "sha256:c80d04f98b5a4358ad3a35e241dbf2a408eee33a40779df365644f8054d2517e", size = 5563460, upload-time = "2025-04-24T12:55:17.002Z" }, -] - [[package]] name = "pytest" version = "9.0.2" @@ -2473,10 +2414,10 @@ dev = [ { name = "cython" }, { name = "junitparser" }, { name = "pip" }, - { name = "pre-commit" }, + { name = "prek" }, { name = "pyarrow" }, - { name = "pyright" }, { name = "ruff" }, + { name = "ty" }, { name = "types-protobuf" }, ] @@ -2494,10 +2435,10 @@ dev = [ { name = "cython", specifier = ">=3.0.11" }, { name = "junitparser", specifier = ">=3.2.0" }, { name = "pip", specifier = ">=24.2" }, - { name = "pre-commit", specifier = ">=3.8.0" }, + { name = "prek", specifier = ">=0.2.27" }, { name = 
"pyarrow", specifier = ">=17.0.0" }, - { name = "pyright", specifier = ">=1.1.379,<1.1.401" }, { name = "ruff", specifier = ">=0.11.10" }, + { name = "ty", specifier = ">=0.0.11" }, { name = "types-protobuf", specifier = ">=6.30" }, ] @@ -2664,6 +2605,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" }, ] +[[package]] +name = "ty" +version = "0.0.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/45/5ae578480168d4b3c08cf8e5eac3caf8eb7acdb1a06a9bed7519564bd9b4/ty-0.0.11.tar.gz", hash = "sha256:ebcbc7d646847cb6610de1da4ffc849d8b800e29fd1e9ebb81ba8f3fbac88c25", size = 4920340, upload-time = "2026-01-09T21:06:01.592Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/34/b1d05cdcd01589a8d2e63011e0a1e24dcefdc2a09d024fee3e27755963f6/ty-0.0.11-py3-none-linux_armv6l.whl", hash = "sha256:68f0b8d07b0a2ea7ec63a08ba2624f853e4f9fa1a06fce47fb453fa279dead5a", size = 9521748, upload-time = "2026-01-09T21:06:13.221Z" }, + { url = "https://files.pythonhosted.org/packages/43/21/f52d93f4b3784b91bfbcabd01b84dc82128f3a9de178536bcf82968f3367/ty-0.0.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cbf82d7ef0618e9ae3cc3c37c33abcfa302c9b3e3b8ff11d71076f98481cb1a8", size = 9454903, upload-time = "2026-01-09T21:06:42.363Z" }, + { url = "https://files.pythonhosted.org/packages/ad/01/3a563dba8b1255e474c35e1c3810b7589e81ae8c41df401b6a37c8e2cde9/ty-0.0.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:121987c906e02264c3b511b95cb9f8a3cdd66f3283b8bbab678ca3525652e304", size = 8823417, upload-time = "2026-01-09T21:06:26.315Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/b1/99b87222c05d3a28fb7bbfb85df4efdde8cb6764a24c1b138f3a615283dd/ty-0.0.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:999390b6cc045fe5e1b3da1c2c9ae8e8c0def23b69455e7c9191ba9ffd747023", size = 9290785, upload-time = "2026-01-09T21:05:59.028Z" }, + { url = "https://files.pythonhosted.org/packages/3d/9f/598809a8fff2194f907ba6de07ac3d7b7788342592d8f8b98b1b50c2fb49/ty-0.0.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed504d78eb613c49be3c848f236b345b6c13dc6bcfc4b202790a60a97e1d8f35", size = 9359392, upload-time = "2026-01-09T21:06:37.459Z" }, + { url = "https://files.pythonhosted.org/packages/71/3e/aeea2a97b38f3dcd9f8224bf83609848efa4bc2f484085508165567daa7b/ty-0.0.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7fedc8b43cc8a9991e0034dd205f957a8380dd29bfce36f2a35b5d321636dfd9", size = 9852973, upload-time = "2026-01-09T21:06:21.245Z" }, + { url = "https://files.pythonhosted.org/packages/72/40/86173116995e38f954811a86339ac4c00a2d8058cc245d3e4903bc4a132c/ty-0.0.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:0808bdfb7efe09881bf70249b85b0498fb8b75fbb036ce251c496c20adb10075", size = 10796113, upload-time = "2026-01-09T21:06:16.034Z" }, + { url = "https://files.pythonhosted.org/packages/69/71/97c92c401dacae9baa3696163ebe8371635ebf34ba9fda781110d0124857/ty-0.0.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:07185b3e38b18c562056dfbc35fb51d866f872977ea1ebcd64ca24a001b5b4f1", size = 10432137, upload-time = "2026-01-09T21:06:07.498Z" }, + { url = "https://files.pythonhosted.org/packages/18/10/9ab43f3cfc5f7792f6bc97620f54d0a0a81ef700be84ea7f6be330936a99/ty-0.0.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5c72f1ada8eb5be984502a600f71d1a3099e12fb6f3c0607aaba2f86f0e9d80", size = 10240520, upload-time = "2026-01-09T21:06:34.823Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/18/8dd4fe6df1fd66f3e83b4798eddb1d8482d9d9b105f25099b76703402ebb/ty-0.0.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25f88e8789072830348cb59b761d5ced70642ed5600673b4bf6a849af71eca8b", size = 9973340, upload-time = "2026-01-09T21:06:39.657Z" }, + { url = "https://files.pythonhosted.org/packages/e4/0b/fb2301450cf8f2d7164944d6e1e659cac9ec7021556cc173d54947cf8ef4/ty-0.0.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f370e1047a62dcedcd06e2b27e1f0b16c7f8ea2361d9070fcbf0d0d69baaa192", size = 9262101, upload-time = "2026-01-09T21:06:28.989Z" }, + { url = "https://files.pythonhosted.org/packages/f7/8c/d6374af023541072dee1c8bcfe8242669363a670b7619e6fffcc7415a995/ty-0.0.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:52be34047ed6177bfcef9247459a767ec03d775714855e262bca1fb015895e8a", size = 9382756, upload-time = "2026-01-09T21:06:24.097Z" }, + { url = "https://files.pythonhosted.org/packages/0d/44/edd1e63ffa8d49d720c475c2c1c779084e5efe50493afdc261938705d10a/ty-0.0.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b9e5762ccb3778779378020b8d78f936b3f52ea83f18785319cceba3ae85d8e6", size = 9553944, upload-time = "2026-01-09T21:06:18.426Z" }, + { url = "https://files.pythonhosted.org/packages/35/cd/4afdb0d182d23d07ff287740c4954cc6dde5c3aed150ec3f2a1d72b00f71/ty-0.0.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e9334646ee3095e778e3dbc45fdb2bddfc16acc7804283830ad84991ece16dd7", size = 10060365, upload-time = "2026-01-09T21:06:45.083Z" }, + { url = "https://files.pythonhosted.org/packages/d1/94/a009ad9d8b359933cfea8721c689c0331189be28650d74dcc6add4d5bb09/ty-0.0.11-py3-none-win32.whl", hash = "sha256:44cfb7bb2d6784bd7ffe7b5d9ea90851d9c4723729c50b5f0732d4b9a2013cfc", size = 9040448, upload-time = "2026-01-09T21:06:32.241Z" }, + { url = "https://files.pythonhosted.org/packages/df/04/5a5dfd0aec0ea99ead1e824ee6e347fb623c464da7886aa1e3660fb0f36c/ty-0.0.11-py3-none-win_amd64.whl", hash = 
"sha256:1bb205db92715d4a13343bfd5b0c59ce8c0ca0daa34fb220ec9120fc66ccbda7", size = 9780112, upload-time = "2026-01-09T21:06:04.69Z" }, + { url = "https://files.pythonhosted.org/packages/ad/07/47d4fccd7bcf5eea1c634d518d6cb233f535a85d0b63fcd66815759e2fa0/ty-0.0.11-py3-none-win_arm64.whl", hash = "sha256:4688bd87b2dc5c85da277bda78daba14af2e66f3dda4d98f3604e3de75519eba", size = 9194038, upload-time = "2026-01-09T21:06:10.152Z" }, +] + [[package]] name = "types-awscrt" version = "0.31.0" @@ -2718,21 +2684,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] -[[package]] -name = "virtualenv" -version = "20.36.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "distlib" }, - { name = "filelock" }, - { name = "platformdirs" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" }, -] - [[package]] name = "wcwidth" version = "0.2.14"