From 9ca983071f62c51c34c5f58c3e32b26ef6618145 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Thu, 23 Oct 2025 19:38:27 -0300
Subject: [PATCH 01/46] Add base class to run generic commands with specific settings

---
 leverage/modules/runner.py        | 124 ++++++++
 tests/test_modules/test_runner.py | 478 ++++++++++++++++++++++++++++++
 2 files changed, 602 insertions(+)
 create mode 100644 leverage/modules/runner.py
 create mode 100644 tests/test_modules/test_runner.py

diff --git a/leverage/modules/runner.py b/leverage/modules/runner.py
new file mode 100644
index 0000000..ce68139
--- /dev/null
+++ b/leverage/modules/runner.py
@@ -0,0 +1,124 @@
+import os
+import shutil
+import subprocess
+from pathlib import Path
+from typing import Dict, Optional, Tuple, Union
+from leverage import logger
+
+
+class Runner:
+    """Generic command runner for executing system binaries with environment preservation"""
+
+    def __init__(
+        self, binary: Union[str, Path], error_message: Optional[str] = None, env_vars: Optional[Dict[str, str]] = None
+    ):
+        """
+        Initialize Runner with a binary name or path.
+
+        Args:
+            binary: Name of the binary (searched in PATH) or full path to binary
+            error_message: Custom error message when binary is not found
+            env_vars: Environment variables to set for all executions
+        """
+        self.binary_input = binary
+        self.binary_path = None
+        self.error_message = error_message
+        self.instance_env_vars = env_vars or {}
+
+        self._validate_binary()
+        self._validate_version()
+
+    def _validate_binary(self):
+        """Check if the required binary exists on the system"""
+        binary_path = Path(self.binary_input)
+
+        if binary_path.is_absolute() and binary_path.is_file():
+            # Absolute path provided and file exists
+            self.binary_path = binary_path.resolve().as_posix()
+        else:
+            # Try to find binary in PATH
+            self.binary_path = shutil.which(str(self.binary_input))
+
+        if not self.binary_path:
+            if self.error_message:
+                error_msg = self.error_message
+            else:
+                error_msg = (
+                    f"Binary '{self.binary_input}' not found on system. "
+                    f"Please install {self.binary_input} and ensure it's in your PATH."
+                )
+
+            logger.error(error_msg)
+            raise RuntimeError(error_msg)
+
+    def _validate_version(self):
+        """
+        Validate the binary version. Override in child classes for specific requirements.
+        Base implementation does nothing - validation is optional.
+        """
+        pass
+
+    def run(
+        self,
+        *args: str,
+        env_vars: Optional[Dict[str, str]] = None,
+        working_dir: Optional[Path] = None,
+        interactive: bool = True,
+    ) -> Union[int, Tuple[int, str, str]]:
+        """
+        Execute command with the binary.
+
+        Args:
+            *args: Command arguments to pass to the binary
+            env_vars: Environment variables to set during execution (overrides instance env_vars)
+            working_dir: Working directory for command execution
+            interactive: If True, run interactively. If False, capture output
+
+        Returns:
+            If interactive=True: Exit code (int)
+            If interactive=False: Tuple of (exit_code, stdout, stderr)
+        """
+        command = [self.binary_path, *args]
+
+        # Merge environment variables: instance vars first, then run-time vars (run-time takes precedence)
+        merged_env_vars = {**self.instance_env_vars}
+        if env_vars:
+            merged_env_vars.update(env_vars)
+
+        # Create environment copy with additional variables
+        env = os.environ.copy()
+        env.update({k: str(v) for k, v in merged_env_vars.items()})
+
+        logger.debug(f"[bold cyan]Running command:[/bold cyan] {' '.join(command)}")
+        logger.debug(f"Working directory: {working_dir or Path.cwd()}")
+        logger.debug(f"Additional environment variables: {merged_env_vars}")
+
+        if interactive:
+            # Interactive execution
+            process = subprocess.run(command, env=env, cwd=working_dir)
+            return process.returncode
+        else:
+            # Silent execution with output capture
+            process = subprocess.run(command, env=env, cwd=working_dir, capture_output=True, text=True)
+            return process.returncode, process.stdout.strip(), process.stderr.strip()
+
+    def exec(
+        self, *args: str, env_vars: Optional[Dict[str, str]] = None, working_dir: Optional[Path] = None
+    ) -> Tuple[int, str, str]:
+        """
+        Execute command with the binary in non-interactive mode (captures output).
+
+        This is a convenience method that calls run() with interactive=False.
+
+        Args:
+            *args: Command arguments to pass to the binary
+            env_vars: Environment variables to set during execution (overrides instance env_vars)
+            working_dir: Working directory for command execution
+
+        Returns:
+            Tuple of (exit_code, stdout, stderr)
+        """
+        return self.run(*args, env_vars=env_vars, working_dir=working_dir, interactive=False)
+
+    def __repr__(self):
+        return f"Runner(binary_input='{self.binary_input}', binary_path='{self.binary_path}')"
diff --git a/tests/test_modules/test_runner.py b/tests/test_modules/test_runner.py
new file mode 100644
index 0000000..1e8499b
--- /dev/null
+++ b/tests/test_modules/test_runner.py
@@ -0,0 +1,478 @@
+import os
+import shutil
+from pathlib import Path
+
+import pytest
+
+from leverage.modules.runner import Runner
+
+
+def test_init_with_valid_binary_in_path(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/python3")
+    runner = Runner("python3")
+    assert runner.binary_input == "python3"
+    assert runner.binary_path == "/usr/bin/python3"
+    assert runner.error_message is None
+
+
+def test_init_with_absolute_path_existing_file(tmp_path):
+    binary_file = tmp_path / "test_binary"
+    binary_file.touch()
+    binary_file.chmod(0o755)
+
+    runner = Runner(binary_file)
+    assert runner.binary_input == binary_file
+    assert runner.binary_path == str(binary_file)
+
+
+def test_init_with_absolute_path_non_existing_file(tmp_path):
+    binary_file = tmp_path / "non_existing_binary"
+
+    with pytest.raises(RuntimeError, match="Binary .* not found on system"):
+        Runner(binary_file)
+
+
+def test_init_with_binary_not_in_path(mocker):
+    mocker.patch("shutil.which", return_value=None)
+    with pytest.raises(RuntimeError, match="Binary 'nonexistent' not found on system"):
+        Runner("nonexistent")
+
+
+def test_init_with_custom_error_message(mocker):
+    custom_error = "Custom error message for missing binary"
+    mocker.patch("shutil.which", return_value=None)
+    with pytest.raises(RuntimeError, match=custom_error):
+        Runner("nonexistent", error_message=custom_error)
+
+
+def test_init_logs_error_on_missing_binary(mocker):
+    mock_logger = mocker.patch("leverage.modules.runner.logger")
+    mocker.patch("shutil.which", return_value=None)
+    with pytest.raises(RuntimeError):
+        Runner("nonexistent")
+
+    mock_logger.error.assert_called_once()
+
+
+def test_validate_version_base_implementation_does_nothing(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/python3")
+    runner = Runner("python3")
+    # Base implementation should not raise any exceptions
+    runner._validate_version()
+
+
+@pytest.fixture
+def mock_runner(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/test_binary")
+    return Runner("test_binary")
+
+
+def test_run_interactive_success(mock_runner, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    result = mock_runner.run("arg1", "arg2", interactive=True)
+
+    assert result == 0
+    mock_subprocess.assert_called_once_with(["/usr/bin/test_binary", "arg1", "arg2"], env=os.environ.copy(), cwd=None)
+
+
+def test_run_interactive_failure(mock_runner, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 1
+
+    result = mock_runner.run("arg1", interactive=True)
+
+    assert result == 1
+
+
+def test_run_non_interactive_success(mock_runner, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+    mock_subprocess.return_value.stdout = "output"
+    mock_subprocess.return_value.stderr = "error"
+
+    result = mock_runner.run("arg1", interactive=False)
+
+    assert result == (0, "output", "error")
+    mock_subprocess.assert_called_once_with(
+        ["/usr/bin/test_binary", "arg1"], env=os.environ.copy(), cwd=None, capture_output=True, text=True
+    )
+
+
+def test_run_with_env_vars(mock_runner, mocker):
+    env_vars = {"TEST_VAR": "test_value", "ANOTHER_VAR": 123}
+    expected_env = os.environ.copy()
+    expected_env.update({"TEST_VAR": "test_value", "ANOTHER_VAR": "123"})
+
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    mock_runner.run("arg1", env_vars=env_vars, interactive=True)
+
+    mock_subprocess.assert_called_once_with(["/usr/bin/test_binary", "arg1"], env=expected_env, cwd=None)
+
+
+def test_run_with_working_directory(mock_runner, tmp_path, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    mock_runner.run("arg1", working_dir=tmp_path, interactive=True)
+
+    mock_subprocess.assert_called_once_with(["/usr/bin/test_binary", "arg1"], env=os.environ.copy(), cwd=tmp_path)
+
+
+def test_run_with_no_args_defaults_to_empty_list(mock_runner, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    mock_runner.run()
+
+    mock_subprocess.assert_called_once_with(["/usr/bin/test_binary"], env=os.environ.copy(), cwd=None)
+
+
+def test_run_with_none_args_defaults_to_empty_list(mock_runner, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    mock_runner.run()
+
+    mock_subprocess.assert_called_once_with(["/usr/bin/test_binary"], env=os.environ.copy(), cwd=None)
+
+
+def test_run_with_none_env_vars_defaults_to_empty_dict(mock_runner, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    mock_runner.run(env_vars=None)
+
+    mock_subprocess.assert_called_once_with(["/usr/bin/test_binary"], env=os.environ.copy(), cwd=None)
+
+
+def test_run_logs_debug_information(mock_runner, tmp_path, mocker):
+    mock_logger = mocker.patch("leverage.modules.runner.logger")
+    env_vars = {"TEST_VAR": "value"}
+    merged_env_vars = {**mock_runner.instance_env_vars, **env_vars}
+
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    mock_runner.run("arg1", env_vars=env_vars, working_dir=tmp_path)
+
+    expected_calls = [
+        mocker.call("[bold cyan]Running command:[/bold cyan] /usr/bin/test_binary arg1"),
+        mocker.call(f"Working directory: {tmp_path}"),
+        mocker.call(f"Additional environment variables: {merged_env_vars}"),
+    ]
+    mock_logger.debug.assert_has_calls(expected_calls)
+
+
+def test_run_logs_current_directory_when_no_working_dir(mock_runner, mocker):
+    mock_logger = mocker.patch("leverage.modules.runner.logger")
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+    mocker.patch("pathlib.Path.cwd", return_value=Path("/current/path"))
+
+    mock_runner.run("arg1")
+
+    mock_logger.debug.assert_any_call("Working directory: /current/path")
+
+
+def test_repr(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/python3")
+    runner = Runner("python3")
+    expected = "Runner(binary_input='python3', binary_path='/usr/bin/python3')"
+    assert repr(runner) == expected
+
+
+def test_repr_with_path_object(tmp_path):
+    binary_file = tmp_path / "test_binary"
+    binary_file.touch()
+
+    runner = Runner(binary_file)
+    expected = f"Runner(binary_input='{binary_file}', binary_path='{binary_file}')"
+    assert repr(runner) == expected
+
+
+@pytest.mark.skipif(shutil.which("echo") is None, reason="echo binary not available")
+def test_integration_with_echo_interactive():
+    runner = Runner("echo")
+    result = runner.run("hello", "world", interactive=True)
+    assert result == 0
+
+
+@pytest.mark.skipif(shutil.which("echo") is None, reason="echo binary not available")
+def test_integration_with_echo_non_interactive():
+    runner = Runner("echo")
+    exit_code, stdout, stderr = runner.run("hello", "world", interactive=False)
+    assert exit_code == 0
+    assert stdout.strip() == "hello world"
+    assert stderr == ""
+
+
+@pytest.mark.skipif(shutil.which("false") is None, reason="false binary not available")
+def test_integration_with_failing_command():
+    runner = Runner("false")
+    result = runner.run(interactive=True)
+    assert result == 1
+
+
+@pytest.mark.skipif(shutil.which("env") is None, reason="env binary not available")
+def test_integration_with_environment_variables():
+    runner = Runner("env")
+    env_vars = {"TEST_RUNNER_VAR": "test_value"}
+    exit_code, stdout, stderr = runner.run(env_vars=env_vars, interactive=False)
+
+    assert exit_code == 0
+    assert "TEST_RUNNER_VAR=test_value" in stdout
+
+
+def test_binary_input_as_path_object(tmp_path):
+    binary_file = tmp_path / "test_binary"
+    binary_file.touch()
+
+    runner = Runner(binary_file)
+    assert isinstance(runner.binary_input, Path)
+    assert runner.binary_path == str(binary_file)
+
+
+def test_binary_input_as_string(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/test")
+    runner = Runner("test")
+    assert isinstance(runner.binary_input, str)
+    assert runner.binary_path == "/usr/bin/test"
+
+
+def test_env_vars_converted_to_strings(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/test")
+    runner = Runner("test")
+
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    runner.run(env_vars={"INT_VAR": 42, "FLOAT_VAR": 3.14, "BOOL_VAR": True})
+
+    called_env = mock_subprocess.call_args[1]["env"]
+    assert called_env["INT_VAR"] == "42"
+    assert called_env["FLOAT_VAR"] == "3.14"
+    assert called_env["BOOL_VAR"] == "True"
+
+
+def test_init_with_instance_env_vars(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/test")
+    env_vars = {"TEST_VAR": "test_value", "ANOTHER_VAR": "another_value"}
+    runner = Runner("test", env_vars=env_vars)
+    assert runner.instance_env_vars == env_vars
+
+
+def test_init_with_none_instance_env_vars(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/test")
+    runner = Runner("test", env_vars=None)
+    assert runner.instance_env_vars == {}
+
+
+def test_run_with_instance_env_vars_only(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/test")
+    instance_env = {"INST_VAR": "inst_value"}
+    expected_env = os.environ.copy()
+    expected_env.update(instance_env)
+
+    runner = Runner("test", env_vars=instance_env)
+
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    runner.run("arg1")
+
+    mock_subprocess.assert_called_once_with(["/usr/bin/test", "arg1"], env=expected_env, cwd=None)
+
+
+def test_run_merges_instance_and_run_env_vars(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/test")
+    instance_env = {"INST_VAR": "inst_value", "COMMON_VAR": "instance"}
+    run_env = {"RUN_VAR": "run_value", "COMMON_VAR": "runtime"}
+
+    expected_env = os.environ.copy()
+    expected_env.update({"INST_VAR": "inst_value", "RUN_VAR": "run_value", "COMMON_VAR": "runtime"})
+
+    runner = Runner("test", env_vars=instance_env)
+
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    runner.run("arg1", env_vars=run_env)
+
+    mock_subprocess.assert_called_once_with(["/usr/bin/test", "arg1"], env=expected_env, cwd=None)
+
+
+def test_run_env_vars_override_instance_env_vars(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/test")
+    instance_env = {"VAR": "instance_value"}
+    run_env = {"VAR": "runtime_value"}
+
+    expected_env = os.environ.copy()
+    expected_env.update({"VAR": "runtime_value"})
+
+    runner = Runner("test", env_vars=instance_env)
+
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    runner.run("arg1", env_vars=run_env)
+
+    mock_subprocess.assert_called_once_with(["/usr/bin/test", "arg1"], env=expected_env, cwd=None)
+
+
+def test_instance_env_vars_preserved_across_multiple_runs(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/test")
+    instance_env = {"INST_VAR": "inst_value"}
+    expected_env = os.environ.copy()
+    expected_env.update(instance_env)
+
+    runner = Runner("test", env_vars=instance_env)
+
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    # First run
+    runner.run("arg1")
+    mock_subprocess.assert_called_with(["/usr/bin/test", "arg1"], env=expected_env, cwd=None)
+
+    # Second run - instance env vars should still be present
+    runner.run("arg2")
+    assert mock_subprocess.call_count == 2
+    mock_subprocess.assert_called_with(["/usr/bin/test", "arg2"], env=expected_env, cwd=None)
+
+
+def test_instance_env_vars_not_modified_by_run(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/test")
+    instance_env = {"INST_VAR": "inst_value"}
+    run_env = {"RUN_VAR": "run_value"}
+
+    runner = Runner("test", env_vars=instance_env)
+
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    runner.run("arg1", env_vars=run_env)
+
+    # Instance env vars should remain unchanged
+    assert runner.instance_env_vars == {"INST_VAR": "inst_value"}
+
+
+def test_instance_env_vars_converted_to_strings(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/test")
+    instance_env = {"INT_VAR": 42, "FLOAT_VAR": 3.14}
+
+    runner = Runner("test", env_vars=instance_env)
+
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    runner.run("arg1")
+
+    called_env = mock_subprocess.call_args[1]["env"]
+    assert called_env["INT_VAR"] == "42"
+    assert called_env["FLOAT_VAR"] == "3.14"
+
+
+def test_exec_calls_run_with_interactive_false(mock_runner, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+    mock_subprocess.return_value.stdout = "test output"
+    mock_subprocess.return_value.stderr = "test error"
+
+    exit_code, stdout, stderr = mock_runner.exec("arg1", "arg2")
+
+    assert exit_code == 0
+    assert stdout == "test output"
+    assert stderr == "test error"
+    mock_subprocess.assert_called_once_with(
+        ["/usr/bin/test_binary", "arg1", "arg2"], env=os.environ.copy(), cwd=None, capture_output=True, text=True
+    )
+
+
+def test_exec_with_env_vars(mock_runner, mocker):
+    env_vars = {"TEST_VAR": "test_value"}
+    expected_env = os.environ.copy()
+    expected_env.update(env_vars)
+
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+    mock_subprocess.return_value.stdout = "output"
+    mock_subprocess.return_value.stderr = ""
+
+    exit_code, stdout, stderr = mock_runner.exec("arg1", env_vars=env_vars)
+
+    assert exit_code == 0
+    assert stdout == "output"
+    assert stderr == ""
+    mock_subprocess.assert_called_once_with(
+        ["/usr/bin/test_binary", "arg1"], env=expected_env, cwd=None, capture_output=True, text=True
+    )
+
+
+def test_exec_with_working_directory(mock_runner, tmp_path, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+    mock_subprocess.return_value.stdout = ""
+    mock_subprocess.return_value.stderr = ""
+
+    mock_runner.exec("arg1", working_dir=tmp_path)
+
+    mock_subprocess.assert_called_once_with(
+        ["/usr/bin/test_binary", "arg1"], env=os.environ.copy(), cwd=tmp_path, capture_output=True, text=True
+    )
+
+
+def test_exec_with_instance_env_vars(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/test")
+    instance_env = {"INST_VAR": "inst_value"}
+    expected_env = os.environ.copy()
+    expected_env.update(instance_env)
+
+    runner = Runner("test", env_vars=instance_env)
+
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+    mock_subprocess.return_value.stdout = "output"
+    mock_subprocess.return_value.stderr = ""
+
+    exit_code, stdout, stderr = runner.exec("arg1")
+
+    assert exit_code == 0
+    mock_subprocess.assert_called_once_with(
+        ["/usr/bin/test", "arg1"], env=expected_env, cwd=None, capture_output=True, text=True
+    )
+
+
+def test_exec_merges_instance_and_run_env_vars(mocker):
+    mocker.patch("shutil.which", return_value="/usr/bin/test")
+    instance_env = {"INST_VAR": "inst_value"}
+    run_env = {"RUN_VAR": "run_value"}
+
+    expected_env = os.environ.copy()
+    expected_env.update(instance_env)
+    expected_env.update(run_env)
+
+    runner = Runner("test", env_vars=instance_env)
+
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+    mock_subprocess.return_value.stdout = ""
+    mock_subprocess.return_value.stderr = ""
+
+    runner.exec("arg1", env_vars=run_env)
+
+    mock_subprocess.assert_called_once_with(
+        ["/usr/bin/test", "arg1"], env=expected_env, cwd=None, capture_output=True, text=True
+    )
+
+
+@pytest.mark.skipif(shutil.which("echo") is None, reason="echo binary not available")
+def test_integration_exec_with_echo():
+    runner = Runner("echo")
+    exit_code, stdout, stderr = runner.exec("hello", "world")
+    assert exit_code == 0
+    assert stdout.strip() == "hello world"
+    assert stderr == ""

From 6de9d4d97b02991f5dbeab00dc652da2ee9c13e1 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Thu, 23 Oct 2025 19:38:48 -0300
Subject: [PATCH 02/46] Specialize runner to TF

---
 leverage/modules/tfrunner.py        |  74 +++++++++
 tests/test_modules/test_tfrunner.py | 240 ++++++++++++++++++++++++++++
 2 files changed, 314 insertions(+)
 create mode 100644 leverage/modules/tfrunner.py
 create mode 100644 tests/test_modules/test_tfrunner.py

diff --git a/leverage/modules/tfrunner.py b/leverage/modules/tfrunner.py
new file mode 100644
index 0000000..b54cc73
--- /dev/null
+++ b/leverage/modules/tfrunner.py
@@ -0,0 +1,74 @@
+from pathlib import Path
+from typing import Dict, Optional
+
+from leverage.modules.runner import Runner
+
+
+class TFRunner(Runner):
+    """Terraform/OpenTofu runner with appropriate installation guidance"""
+
+    TERRAFORM_INSTALL_URL = "https://developer.hashicorp.com/terraform/install"
+    OPENTOFU_INSTALL_URL = "https://opentofu.org/docs/intro/install/"
+
+    def __init__(self, terraform: bool = False, env_vars: Optional[Dict[str, str]] = None):
+        """
+        Initialize TFRunner for either Terraform or OpenTofu.
+
+        Args:
+            terraform: If True, use Terraform. If False, use OpenTofu (default).
+            env_vars: Environment variables to set for all executions
+        """
+        if terraform:
+            binary = "terraform"
+            error_message = (
+                f"Terraform binary not found on system. "
+                f"Please install Terraform following the instructions at: {self.TERRAFORM_INSTALL_URL}"
+            )
+        else:
+            binary = "tofu"
+            error_message = (
+                f"OpenTofu binary not found on system. "
+                f"Please install OpenTofu following the instructions at: {self.OPENTOFU_INSTALL_URL}"
+            )
+
+        super().__init__(binary=binary, error_message=error_message, env_vars=env_vars)
+
+    def run(
+        self,
+        *args: str,
+        env_vars: Optional[Dict[str, str]] = None,
+        working_dir: Optional[Path] = None,
+        interactive: bool = True,
+    ):
+        """
+        Run the Terraform/OpenTofu binary with the given arguments.
+
+        Args:
+            *args: Command and arguments to pass (e.g., 'plan', '-out=plan.tfplan')
+            env_vars: Environment variables for this specific execution
+            working_dir: Working directory for command execution
+            interactive: If True, run interactively. If False, capture output
+
+        Returns:
+            If interactive=True: Exit code (int)
+            If interactive=False: Tuple of (exit_code, stdout, stderr)
+        """
+        return super().run(*args, env_vars=env_vars, working_dir=working_dir, interactive=interactive)
+
+    def exec(self, *args: str, env_vars: Optional[Dict[str, str]] = None, working_dir: Optional[Path] = None):
+        """
+        Execute the Terraform/OpenTofu binary in non-interactive mode (captures output).
+
+        This is a convenience method that calls run() with interactive=False.
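+        Useful when the caller needs to capture and parse the command's output, e.g. 'output -json'.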
+
+        Args:
+            *args: Command and arguments to pass (e.g., 'plan', '-out=plan.tfplan')
+            env_vars: Environment variables for this specific execution
+            working_dir: Working directory for command execution
+
+        Returns:
+            Tuple of (exit_code, stdout, stderr)
+        """
+        return self.run(*args, env_vars=env_vars, working_dir=working_dir, interactive=False)
diff --git a/tests/test_modules/test_tfrunner.py b/tests/test_modules/test_tfrunner.py
new file mode 100644
index 0000000..1f0ed74
--- /dev/null
+++ b/tests/test_modules/test_tfrunner.py
@@ -0,0 +1,240 @@
+import os
+
+import pytest
+
+from leverage.modules.tfrunner import TFRunner
+
+
+@pytest.fixture
+def mock_tofu_binary(mocker):
+    """Mock tofu binary availability"""
+    mocker.patch("shutil.which", return_value="/usr/bin/tofu")
+    return "/usr/bin/tofu"
+
+
+@pytest.fixture
+def mock_terraform_binary(mocker):
+    """Mock terraform binary availability"""
+    mocker.patch("shutil.which", return_value="/usr/bin/terraform")
+    return "/usr/bin/terraform"
+
+
+def test_init_defaults_to_opentofu(mock_tofu_binary):
+    runner = TFRunner()
+    assert runner.binary_input == "tofu"
+    assert runner.binary_path == mock_tofu_binary
+    assert runner.instance_env_vars == {}
+
+
+def test_init_with_terraform_flag(mock_terraform_binary):
+    runner = TFRunner(terraform=True)
+    assert runner.binary_input == "terraform"
+    assert runner.binary_path == mock_terraform_binary
+
+
+def test_init_with_env_vars(mock_tofu_binary):
+    env_vars = {"TF_VAR_region": "us-east-1", "TF_LOG": "DEBUG"}
+    runner = TFRunner(env_vars=env_vars)
+    assert runner.instance_env_vars == env_vars
+
+
+def test_init_with_terraform_and_env_vars(mock_terraform_binary):
+    env_vars = {"TF_VAR_region": "us-west-2"}
+    runner = TFRunner(terraform=True, env_vars=env_vars)
+    assert runner.binary_input == "terraform"
+    assert runner.instance_env_vars == env_vars
+
+
+def test_init_with_none_env_vars(mock_tofu_binary):
+    runner = TFRunner(env_vars=None)
+    assert runner.instance_env_vars == {}
+
+
+def test_opentofu_not_found_error_message(mocker):
+    mocker.patch("shutil.which", return_value=None)
+    with pytest.raises(RuntimeError) as exc_info:
+        TFRunner()
+
+    assert "OpenTofu binary not found" in str(exc_info.value)
+    assert TFRunner.OPENTOFU_INSTALL_URL in str(exc_info.value)
+
+
+def test_terraform_not_found_error_message(mocker):
+    mocker.patch("shutil.which", return_value=None)
+    with pytest.raises(RuntimeError) as exc_info:
+        TFRunner(terraform=True)
+
+    assert "Terraform binary not found" in str(exc_info.value)
+    assert TFRunner.TERRAFORM_INSTALL_URL in str(exc_info.value)
+
+
+def test_run_without_env_vars(mock_tofu_binary, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    runner = TFRunner()
+    result = runner.run("plan", "-out=plan.tfplan")
+
+    assert result == 0
+    mock_subprocess.assert_called_once_with(
+        [mock_tofu_binary, "plan", "-out=plan.tfplan"], env=os.environ.copy(), cwd=None
+    )
+
+
+def test_run_with_instance_env_vars_only(mock_tofu_binary, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    instance_env = {"TF_VAR_region": "us-east-1", "TF_LOG": "DEBUG"}
+    expected_env = os.environ.copy()
+    expected_env.update(instance_env)
+
+    runner = TFRunner(env_vars=instance_env)
+    result = runner.run("apply", "-auto-approve")
+
+    assert result == 0
+    mock_subprocess.assert_called_once_with([mock_tofu_binary, "apply", "-auto-approve"], env=expected_env, cwd=None)
+
+
+def test_run_with_run_env_vars_only(mock_tofu_binary, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    run_env = {"TF_VAR_environment": "production"}
+    expected_env = os.environ.copy()
+    expected_env.update(run_env)
+
+    runner = TFRunner()
+    result = runner.run("plan", env_vars=run_env)
+
+    assert result == 0
+    mock_subprocess.assert_called_once_with([mock_tofu_binary, "plan"], env=expected_env, cwd=None)
+
+
+def test_run_merges_instance_and_run_env_vars(mock_tofu_binary, mocker):
+    """Test that TFRunner properly merges env_vars through the parent class"""
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    instance_env = {"TF_VAR_region": "us-east-1", "TF_LOG": "DEBUG"}
+    run_env = {"TF_VAR_environment": "production", "TF_VAR_instance_type": "t3.micro"}
+
+    expected_env = os.environ.copy()
+    expected_env.update(instance_env)
+    expected_env.update(run_env)
+
+    runner = TFRunner(env_vars=instance_env)
+    result = runner.run("apply", env_vars=run_env)
+
+    assert result == 0
+    mock_subprocess.assert_called_once_with([mock_tofu_binary, "apply"], env=expected_env, cwd=None)
+
+
+def test_run_env_vars_override_instance_env_vars(mock_tofu_binary, mocker):
+    """Test that run-time env_vars override instance env_vars through parent class"""
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    instance_env = {"TF_VAR_region": "us-east-1", "TF_LOG": "DEBUG"}
+    run_env = {"TF_VAR_region": "us-west-2"}  # Override region
+
+    expected_env = os.environ.copy()
+    expected_env.update({"TF_VAR_region": "us-west-2", "TF_LOG": "DEBUG"})
+
+    runner = TFRunner(env_vars=instance_env)
+    result = runner.run("plan", env_vars=run_env)
+
+    assert result == 0
+    mock_subprocess.assert_called_once_with([mock_tofu_binary, "plan"], env=expected_env, cwd=None)
+
+
+def test_run_interactive_false(mock_tofu_binary, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+    mock_subprocess.return_value.stdout = "terraform output"
+    mock_subprocess.return_value.stderr = ""
+
+    runner = TFRunner()
+    exit_code, stdout, stderr = runner.run("output", "-json", interactive=False)
+
+    assert exit_code == 0
+    assert stdout == "terraform output"
+    assert stderr == ""
+    mock_subprocess.assert_called_once_with(
+        [mock_tofu_binary, "output", "-json"], env=os.environ.copy(), cwd=None, capture_output=True, text=True
+    )
+
+
+def test_run_with_multiple_args(mock_terraform_binary, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    runner = TFRunner(terraform=True)
+    result = runner.run("plan", "-var", "region=us-east-1", "-out=plan.tfplan")
+
+    assert result == 0
+    mock_subprocess.assert_called_once_with(
+        [mock_terraform_binary, "plan", "-var", "region=us-east-1", "-out=plan.tfplan"],
+        env=os.environ.copy(),
+        cwd=None,
+    )
+
+
+def test_run_preserves_instance_env_vars_across_multiple_calls(mock_tofu_binary, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    instance_env = {"TF_VAR_region": "us-east-1"}
+    expected_env = os.environ.copy()
+    expected_env.update(instance_env)
+
+    runner = TFRunner(env_vars=instance_env)
+
+    # First call
+    runner.run("init")
+    mock_subprocess.assert_called_with([mock_tofu_binary, "init"], env=expected_env, cwd=None)
+
+    # Second call - instance env vars should still be present
+    runner.run("plan")
+    assert mock_subprocess.call_count == 2
+    mock_subprocess.assert_called_with([mock_tofu_binary, "plan"], env=expected_env, cwd=None)
+
+
+def test_run_does_not_modify_instance_env_vars(mock_tofu_binary, mocker):
+    """Test that instance_env_vars are preserved (handled by parent class)"""
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    instance_env = {"TF_VAR_region": "us-east-1"}
+    run_env = {"TF_VAR_environment": "production"}
+
+    runner = TFRunner(env_vars=instance_env)
+    runner.run("plan", env_vars=run_env)
+
+    # Instance env vars should remain unchanged (verified in parent class)
+    assert runner.instance_env_vars == {"TF_VAR_region": "us-east-1"}
+
+
+def test_empty_dict_for_none_env_vars_on_run(mock_tofu_binary, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    runner = TFRunner()
+    runner.run("plan", env_vars=None)
+
+    # Should not raise an error and should pass empty dict
+    mock_subprocess.assert_called_once()
+
+
+def test_env_vars_converted_to_strings_in_run(mock_tofu_binary, mocker):
+    mock_subprocess = mocker.patch("subprocess.run")
+    mock_subprocess.return_value.returncode = 0
+
+    instance_env = {"TF_VAR_count": 5, "TF_VAR_enabled": True}
+    run_env = {"TF_VAR_timeout": 3.14}
+
+    runner = TFRunner(env_vars=instance_env)
+    runner.run("plan", env_vars=run_env)
+
+    called_env = mock_subprocess.call_args[1]["env"]
+    assert called_env["TF_VAR_count"] == "5"
+    assert called_env["TF_VAR_enabled"] == "True"
+    assert called_env["TF_VAR_timeout"] == "3.14"

From ba9d2b86d7562aaca642e9dba1a25d23f6c71751 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Thu, 23 Oct 2025 19:39:29 -0300
Subject: [PATCH 03/46] Create utilities to get and set tf s3 backend key

---
 leverage/_backend_config.py  | 173 +++++++++++++++++++++++++++
 tests/test_backend_config.py | 184 +++++++++++++++++++++++++++++
 2 files changed, 357 insertions(+)
 create mode 100644 leverage/_backend_config.py
 create mode 100644 tests/test_backend_config.py

diff --git a/leverage/_backend_config.py b/leverage/_backend_config.py
new file mode 100644
index 0000000..4f867c7
--- /dev/null
+++ b/leverage/_backend_config.py
@@ -0,0 +1,173 @@
+"""Utilities for modifying Terraform backend configuration."""
+
+import re
+from pathlib import Path
+from typing import Union, Optional
+
+import hcl2
+import lark
+
+from leverage._utils import ExitError
+
+
+def set_backend_key(config_file_path: Union[str, Path], key: str) -> None:
+    """
+    Set or update the backend key in a Terraform config.tf file.
+
+    This function modifies the Terraform backend configuration to set the S3 state key.
+    It preserves all comments, formatting, and other HCL code in the file by using
+    string manipulation to surgically modify only the key attribute.
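+
+    Example:
+        >>> set_backend_key("config.tf", "apps-devstg/notifications/terraform.tfstate")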
+
+    Args:
+        config_file_path: Path to the config.tf file
+        key: The backend key value to set (e.g., "apps-devstg/notifications/terraform.tfstate")
+
+    Raises:
+        ExitError: If the file cannot be found, cannot be parsed, or does not contain a
+            terraform block with an S3 backend
+    """
+    config_file_path = Path(config_file_path)
+
+    # Validate file exists
+    if not config_file_path.exists():
+        raise ExitError(1, f"Config file not found: {config_file_path}")
+
+    # Read the file content
+    content = config_file_path.read_text()
+
+    # Parse the config file to validate structure
+    try:
+        config_tf = hcl2.loads(content)
+    except Exception as e:
+        raise ExitError(1, f"Failed to parse config.tf: {e}")
+
+    # Validate that the file contains a terraform backend block with S3
+    if not (
+        "terraform" in config_tf
+        and config_tf["terraform"]
+        and isinstance(config_tf["terraform"], list)
+        and "backend" in config_tf["terraform"][0]
+        and config_tf["terraform"][0]["backend"]
+        and isinstance(config_tf["terraform"][0]["backend"], list)
+        and "s3" in config_tf["terraform"][0]["backend"][0]
+    ):
+        raise ExitError(
+            1,
+            "Malformed config.tf: File must contain a terraform block with an S3 backend. "
+            "Expected structure:\n"
+            "terraform {\n"
+            '  backend "s3" {\n'
+            "    # configuration\n"
+            "  }\n"
+            "}",
+        )
+
+    # Check if key already exists
+    backend_config = config_tf["terraform"][0]["backend"][0]["s3"]
+    key_exists = "key" in backend_config
+
+    # Modify the file content
+    modified_content = _modify_backend_key(content, key, key_exists)
+
+    # Write back to file
+    config_file_path.write_text(modified_content)
+
+
+def _modify_backend_key(content: str, key: str, key_exists: bool) -> str:
+    """
+    Modify the backend key in the HCL content.
+
+    Args:
+        content: The original file content
+        key: The new key value
+        key_exists: Whether the key attribute already exists
+
+    Returns:
+        Modified content with the key set
+    """
+    # Pattern to find the backend "s3" block
+    # This matches: backend "s3" { ... }
+    backend_pattern = r'(backend\s+"s3"\s*\{)'
+
+    if key_exists:
+        # Update existing key
+        # Match: key = "anything" with various whitespace/quotes
+        key_pattern = r'(\s*key\s*=\s*)"[^"]*"'
+        replacement = r'\1"' + key + '"'
+
+        modified = re.sub(key_pattern, replacement, content)
+
+        return modified
+    else:
+        # Add new key attribute
+        # Find the backend "s3" block and add the key after the opening brace
+        def add_key(match):
+            # Get the matched backend opening
+            backend_opening = match.group(1)
+
+            # Find the indentation by looking at the next line
+            remaining = content[match.end() :]
+            next_line_match = re.search(r"\n(\s*)", remaining)
+            if next_line_match:
+                indent = next_line_match.group(1)
+            else:
+                indent = "    "  # Default to 4 spaces
+
+            # Add the key attribute with proper indentation
+            return f'{backend_opening}\n{indent}key = "{key}"'
+
+        modified = re.sub(backend_pattern, add_key, content, count=1)
+
+        return modified
+
+
+def get_backend_key(config_file: Union[str, Path]) -> Optional[str]:
+    """
+    Get the current backend key from a Terraform config.tf file.
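+    Raises ExitError when the file is missing, cannot be parsed, or lacks an S3 backend block.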
+
+    Args:
+        config_file: Path to the config.tf file
+
+    Returns:
+        The backend key if it exists, None otherwise
+
+    Example:
+        >>> get_backend_key("/path/to/config.tf")
+        'apps-devstg/layer/terraform.tfstate'
+    """
+    config_file = Path(config_file)
+
+    if not config_file.exists():
+        raise ExitError(1, f"Config file not found: {config_file}")
+
+    try:
+        config_content = config_file.read_text()
+        config_tf = hcl2.loads(config_content)
+
+        if (
+            "terraform" in config_tf
+            and config_tf["terraform"]
+            and isinstance(config_tf["terraform"], list)
+            and "backend" in config_tf["terraform"][0]
+            and config_tf["terraform"][0]["backend"]
+            and isinstance(config_tf["terraform"][0]["backend"], list)
+            and "s3" in config_tf["terraform"][0]["backend"][0]
+        ):
+            return config_tf["terraform"][0]["backend"][0]["s3"].get("key")
+        else:
+            raise ExitError(
+                1,
+                "Malformed [bold]config.tf[/bold] file. Missing backend block.\n"
+                "In some cases you may want to skip this check by using the --skip-validation flag, "
+                "e.g. the first time you initialize a tf-backend layer.",
+            )
+
+    except ExitError:
+        # Re-raise the explicit error above instead of masking it as a generic parse failure
+        raise
+    except lark.exceptions.UnexpectedInput as error:
+        raise ExitError(
+            1,
+            f"Possible invalid expression in [bold]config.tf[/bold] near line {error.line}, column {error.column}\n"
+            f"{error.get_context(config_content)}",
+        )
+    except Exception:
+        raise ExitError(1, "Malformed [bold]config.tf[/bold] file. Unable to parse.")
diff --git a/tests/test_backend_config.py b/tests/test_backend_config.py
new file mode 100644
index 0000000..a1ca13c
--- /dev/null
+++ b/tests/test_backend_config.py
@@ -0,0 +1,184 @@
+"""Tests for backend configuration utilities."""
+
+import pytest
+from pathlib import Path
+from leverage._backend_config import set_backend_key, get_backend_key
+from leverage._utils import ExitError
+
+
+@pytest.fixture
+def config_without_key(tmp_path):
+    """Create a config.tf file without a backend key."""
+    config_file = tmp_path / "config.tf"
+    content = """# This is a comment
+terraform {
+    required_version = ">= 1.0"
+
+    # Backend configuration
+    backend "s3" {
+        bucket = "my-terraform-state"
+        region = "us-east-1"
+        # More config here
+    }
+}
+
+# Another comment
+resource "aws_instance" "example" {
+    ami = "ami-12345"
+}
+"""
+    config_file.write_text(content)
+    return config_file
+
+
+@pytest.fixture
+def config_with_key(tmp_path):
+    """Create a config.tf file with an existing backend key."""
+    config_file = tmp_path / "config.tf"
+    content = """# This is a comment
+terraform {
+    required_version = ">= 1.0"
+
+    # Backend configuration
+    backend "s3" {
+        bucket = "my-terraform-state"
+        key = "old/path/terraform.tfstate"
+        region = "us-east-1"
+    }
+}
+
+# Another comment
+resource "aws_instance" "example" {
+    ami = "ami-12345"
+}
+"""
+    config_file.write_text(content)
+    return config_file
+
+
+@pytest.fixture
+def config_without_backend(tmp_path):
+    """Create a config.tf file without a backend block."""
+    config_file = tmp_path / "config.tf"
+    content = """terraform {
+    required_version = ">= 1.0"
+}
+"""
+    config_file.write_text(content)
+    return config_file
+
+
+def test_set_backend_key_adds_key_when_missing(config_without_key):
+    """Test that set_backend_key adds a key when it doesn't exist."""
+    new_key = "apps-devstg/notifications/terraform.tfstate"
+
+    set_backend_key(config_without_key, new_key)
+
+    # Verify the key was added
+    assert get_backend_key(config_without_key) == new_key
+
+    # Verify comments and other content are preserved
+    content = config_without_key.read_text()
+    assert "# This is a comment" in content
+    assert "# Backend configuration" in content
+    assert "# Another comment" in content
+    assert 'resource "aws_instance" "example"' in content
+    assert 'bucket = "my-terraform-state"' in content
+
+
+def test_set_backend_key_updates_existing_key(config_with_key):
+    """Test that set_backend_key updates an existing key."""
+    new_key = "apps-prod/notifications/terraform.tfstate"
+
+    # Verify old key exists
+    assert get_backend_key(config_with_key) == "old/path/terraform.tfstate"
+
+    set_backend_key(config_with_key, new_key)
+
+    # Verify the key was updated
+    assert get_backend_key(config_with_key) == new_key
+
+    # Verify comments and other content are preserved
+    content = config_with_key.read_text()
+    assert "# This is a comment" in content
+    assert "# Backend configuration" in content
+    assert "# Another comment" in content
+    assert 'resource "aws_instance" "example"' in content
+
+    # Verify old key is not present
+    assert "old/path/terraform.tfstate" not in content
+
+
+def test_set_backend_key_preserves_formatting(config_without_key):
+    """Test that formatting is preserved when adding a key."""
+    original_content = config_without_key.read_text()
+    original_lines = original_content.split("\n")
+
+    set_backend_key(config_without_key, "test/key/terraform.tfstate")
+
+    new_content = config_without_key.read_text()
+    new_lines = new_content.split("\n")
+
+    # When adding a key, we expect exactly one new line to be inserted
+    # Count how many original lines are still present in the new content
+    unchanged_lines = sum(1 for line in original_lines if line in new_lines)
+
+    # Most lines should be unchanged (allow for the one added line)
+    assert unchanged_lines / len(original_lines) > 0.8
+
+
+def test_get_backend_key_returns_none_for_missing_key(config_without_key):
+    """Test that get_backend_key returns None when key doesn't exist."""
+    assert get_backend_key(config_without_key) is None
+
+
+def test_get_backend_key_raises_for_missing_file(tmp_path):
+    """Test that get_backend_key raises ExitError for non-existent file."""
+    with pytest.raises(ExitError, match="Config file not found"):
+        get_backend_key(tmp_path / "nonexistent.tf")
+
+
+def test_set_backend_key_raises_for_missing_file(tmp_path):
+    """Test that set_backend_key raises ExitError for missing file."""
+    with pytest.raises(ExitError, match="Config file not found"):
+        set_backend_key(tmp_path / "nonexistent.tf", "some/key")
+
+
+def test_set_backend_key_raises_for_missing_backend(config_without_backend):
+    """Test that set_backend_key raises ExitError when backend block is missing."""
+    with pytest.raises(ExitError, match="Malformed config.tf"):
+        set_backend_key(config_without_backend, "some/key")
+
+
+def test_get_backend_key_raises_for_missing_backend(config_without_backend):
+    """Test that get_backend_key raises ExitError when backend block is missing."""
+    with pytest.raises(ExitError, match="Malformed"):
+        get_backend_key(config_without_backend)
+
+
+def test_set_backend_key_with_complex_formatting(tmp_path):
+    """Test with various formatting styles."""
+    config_file = tmp_path / "config.tf"
+    content = """terraform {
+    backend "s3" {
+        # Comments inside backend
+        bucket = "my-bucket"  # inline comment
+        region = "us-east-1"
+        encrypt = true
+        dynamodb_table = "terraform-locks"
+    }
+}
+"""
+    config_file.write_text(content)
+
+    set_backend_key(config_file, "test/terraform.tfstate")
+
+    # Verify key was added
+    assert get_backend_key(config_file) == "test/terraform.tfstate"
+
+    # Verify other attributes and comments are preserved
+    new_content = config_file.read_text()
+    assert "# Comments inside backend" in new_content
+    assert "# inline comment" in new_content
+    assert "bucket" in new_content
+    assert "dynamodb_table" in new_content

From 65aefac84f261c6c985b53698783eddadb6cb857 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Thu, 23 Oct 2025 19:45:50 -0300
Subject: [PATCH 04/46] Remove docker image version logic

---
 leverage/__init__.py            |  6 ------
 leverage/leverage.py            | 27 +--------------------------
 leverage/modules/credentials.py |  3 ---
 3 files changed, 1 insertion(+), 35 deletions(-)

diff --git a/leverage/__init__.py b/leverage/__init__.py
index 38db623..ca16c0e 100644
--- a/leverage/__init__.py
+++ b/leverage/__init__.py
@@ -5,12 +5,6 @@
 # pylint: disable=wrong-import-position
 
 __version__ = "0.0.0"
-__toolbox_version__ = "1.3.5-0.2.0"
-
-MINIMUM_VERSIONS = {
-    "TERRAFORM": "1.3.5",
-    "TOOLBOX": "0.2.4",
-}
 
 import sys
 from shutil import which
diff --git a/leverage/leverage.py b/leverage/leverage.py
index 4810bb9..d117b2a 100644
--- a/leverage/leverage.py
+++ b/leverage/leverage.py
@@ -7,7 +7,7 @@
 
 import click
 
-from leverage import __version__, conf, MINIMUM_VERSIONS
+from leverage import __version__, conf
 from leverage._internals import pass_state
 from leverage.modules.aws import aws
 from leverage.modules.credentials import credentials
@@ -28,31 +28,6 @@ def leverage(context, state, verbose):
         # leverage called with no subcommand
         click.echo(context.get_help())
 
-
-    # if there is a version restriction set, make sure we satisfy it
-    try:
-        config = conf.load()
-    except NotARepositoryError:
-        # restrictions are only verified within a leverage project
-        return
-
-    # check if the current versions are lower than the minimum required
-    if not (image_tag := config.get("TF_IMAGE_TAG", config.get("TERRAFORM_IMAGE_TAG"))):
-        # at some points of the project (the init), the config file is not created yet
-        return
-
-    # validate both TOOLBOX and TF versions
-    image_versions = image_tag.split("-")
-    if "tofu" not in image_versions:
-        versions = zip(MINIMUM_VERSIONS, image_versions)
-    else:
-        versions = {"TOOLBOX": image_versions[-1]}.items()
-
-    for key, current in versions:
-        if Version(current) < Version(MINIMUM_VERSIONS[key]):
-            rich.print(
-                f"[red]WARNING[/red]\tYour current {key} version ({current}) is lower than the required minimum ({MINIMUM_VERSIONS[key]})."
-            )
 
 # Add modules to leverage
 leverage.add_command(run)
diff --git a/leverage/modules/credentials.py b/leverage/modules/credentials.py
index e4a0e09..9f55617 100644
--- a/leverage/modules/credentials.py
+++ b/leverage/modules/credentials.py
@@ -14,7 +14,6 @@
 from questionary import Choice
 from ruamel.yaml import YAML
 
-from leverage import __toolbox_version__
 from leverage import logger
 from leverage._internals import pass_state
 from leverage._utils import ExitError
@@ -265,8 +264,6 @@ def credentials(state):
         if short_name is None or not re.match("^[a-z]{2,4}$", short_name):
             logger.error("Invalid or missing project short name in project.yaml file.")
             raise Exit(1)
-        if not build_env.exists():
-            build_env.write_text(f"PROJECT={short_name}\nTF_IMAGE_TAG={__toolbox_version__}")
 
     elif not build_env.exists():
         # project_config is not empty
        # and build.env does not exist

From e8f281f04b99c2cfe9639304f2651cacd36531f9 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Thu, 23 Oct 2025 19:48:58 -0300
Subject: [PATCH 05/46] Make cli internal state hold a path handler and a config reference

---
 leverage/_internals.py | 27 ++++++++++++++++++++++++++-
 leverage/leverage.py   | 10 ++++++----
 2 files changed, 32 insertions(+), 5 deletions(-)

diff --git a/leverage/_internals.py b/leverage/_internals.py
index b325095..b6e0700 100644
--- a/leverage/_internals.py
+++ b/leverage/_internals.py
@@ -27,7 +27,9 @@ class State:
     def __init__(self):
         self._verbosity = None
         self.module = Module()
-        self.container = None
+        self.config = None
+        self.paths = None
+        self.runner = None
 
     @property
     def verbosity(self):
@@ -51,3 +53,26 @@ def new_command(*args, **kwargs):
         return command(ctx.obj.container, *args, **kwargs)
 
     return new_command
+
+def pass_runner(command):
+    """Decorator to pass the current runner (Terraform/OpenTofu runner) to the command."""
+
+    @wraps(command)
+    def new_command(*args, **kwargs):
+        ctx = click.get_current_context()
+
+        return command(ctx.obj.runner, *args, **kwargs)
+
+    return new_command
+
+
+def pass_paths(command):
+    """Decorator to pass the current project paths to the command."""
+
+    @wraps(command)
+    def new_command(*args, **kwargs):
+        ctx = click.get_current_context()
+
+        return command(ctx.obj.paths, *args, **kwargs)
+
+    return new_command
diff --git a/leverage/leverage.py b/leverage/leverage.py
index d117b2a..f14137c 100644
--- a/leverage/leverage.py
+++ b/leverage/leverage.py
@@ -9,10 +9,7 @@
 
 from leverage import __version__, conf
 from leverage._internals import pass_state
-from leverage.modules.aws import aws
-from leverage.modules.credentials import credentials
-from leverage.modules import run, project, tofu, terraform, tfautomv, kubectl, shell
-from leverage.path import NotARepositoryError
+from leverage.path import NotARepositoryError, PathsHandler
 
 
 @click.group(invoke_without_command=True)
@@ -28,6 +25,11 @@ def leverage(context, state, verbose):
         # leverage called with no subcommand
         click.echo(context.get_help())
 
+    try:
+        state.config = conf.load()
+    except NotARepositoryError:
+        return
+    state.paths = PathsHandler(state.config)
 
 # Add modules to leverage
 leverage.add_command(run)

From b13f0cc65c3b50542474c57ac54802280f3f6b13 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Thu, 23 Oct 2025 19:52:45 -0300
Subject: [PATCH 06/46] Temporarily reduce the commands in the cli

---
 leverage/leverage.py         | 15 +++++++++------
 leverage/modules/__init__.py | 11 +++++++----
 2 files changed, 16 insertions(+), 10 deletions(-)

diff --git a/leverage/leverage.py b/leverage/leverage.py
index f14137c..74d5cdf 100644
--- a/leverage/leverage.py
+++ b/leverage/leverage.py
@@ -9,6 +9,9 @@
 
 from leverage import __version__, conf
 from leverage._internals import pass_state
+# from leverage.modules.credentials import credentials
+from leverage.modules import aws, run, tofu, terraform
+# from leverage.modules import run, project, tofu, terraform, tfautomv, kubectl, shell
 from leverage.path import NotARepositoryError, PathsHandler
 
 
@@ -33,13 +36,13 @@ def leverage(context, state, verbose):
 
 # Add modules to leverage
 leverage.add_command(run)
-leverage.add_command(project)
+# leverage.add_command(project)
 leverage.add_command(tofu)
 leverage.add_command(tofu, name="tf")
 leverage.add_command(terraform)
-leverage.add_command(credentials)
+# leverage.add_command(credentials)
 leverage.add_command(aws)
-leverage.add_command(tfautomv)
-leverage.add_command(kubectl)
-leverage.add_command(kubectl, name="kc")
-leverage.add_command(shell)
+# leverage.add_command(tfautomv)
+# leverage.add_command(kubectl)
+# leverage.add_command(kubectl, name="kc")
+# leverage.add_command(shell)
\ No newline at end of file
diff --git a/leverage/modules/__init__.py b/leverage/modules/__init__.py
index 49aa720..d925535 100644
--- a/leverage/modules/__init__.py
+++ b/leverage/modules/__init__.py
@@ -1,6 +1,9 @@
 from .run import run
-from .project import project
+
+# from .project import project
 from .tf import tofu, terraform
-from .tfautomv import tfautomv
-from .kubectl import kubectl
-from .shell import shell
+from .aws import aws
+
+# from .tfautomv import tfautomv
+# from .kubectl import kubectl
+# from .shell import shell

From ac7d2cd8eb4c8a9ff0982229dd8777086f52af15 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Thu, 23 Oct 2025 19:54:01 -0300
Subject: [PATCH 07/46] Drop shell command from cli

---
 leverage/leverage.py      |  3 +--
 leverage/modules/shell.py | 44 ---------------------------------------
 leverage/modules/utils.py |  8 -------
 3 files changed, 1 insertion(+), 54 deletions(-)
 delete mode 100644 leverage/modules/shell.py

diff --git a/leverage/leverage.py b/leverage/leverage.py
index 74d5cdf..eaf0242 100644
--- a/leverage/leverage.py
+++ b/leverage/leverage.py
@@ -44,5 +44,4 @@ def leverage(context, state, verbose):
 leverage.add_command(aws)
 # leverage.add_command(tfautomv)
 # leverage.add_command(kubectl)
-# leverage.add_command(kubectl, name="kc")
-# leverage.add_command(shell)
\ No newline at end of file
+# leverage.add_command(kubectl, name="kc")
\ No newline at end of file
diff --git a/leverage/modules/shell.py b/leverage/modules/shell.py
deleted file mode 100644
index bd0fadf..0000000
--- a/leverage/modules/shell.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import click
-
-from leverage._utils import CustomEntryPoint
-from leverage.container import get_docker_client, TFContainer
-from leverage.modules.utils import env_var_option, mount_option, auth_sso, auth_mfa
-
-
-@click.command()
-@mount_option
-@env_var_option
-@auth_mfa
-@auth_sso
-def shell(mount, env_var, mfa, sso):
-    """
-    Run a shell in a generic container. It supports mounting local paths and injecting arbitrary environment variables.
-    It also supports AWS credentials injection via mfa/sso.
-
-    Syntax:
-    leverage shell --mount <local_path> <guest_path> --env-var <name> <value>
-
-    Example:
-    leverage shell --mount /home/user/bin/ /usr/bin/ --env-var env dev
-
-    Both mount and env-var parameters can be provided multiple times.
-
-    Example:
-    leverage shell --mount /home/user/bin/ /usr/bin/ --mount /etc/config.ini /etc/config.ini --env-var init 5 --env-var env dev
-    """
-    if env_var:
-        env_var = dict(env_var)
-    # TODO: TFContainer is the only class supporting sso/mfa auth automagically
-    # Move this capacity into a mixin later
-    container = TFContainer(get_docker_client(), mounts=mount, env_vars=env_var)
-    container.ensure_image()
-
-    # auth
-    container.disable_authentication()
-    if sso:
-        container.enable_sso()
-    if mfa:
-        container.enable_mfa()
-
-    with CustomEntryPoint(container, entrypoint=""):
-        container._start(container.SHELL)
diff --git a/leverage/modules/utils.py b/leverage/modules/utils.py
index 10ce084..4858c58 100644
--- a/leverage/modules/utils.py
+++ b/leverage/modules/utils.py
@@ -33,11 +33,3 @@ def _handle_subcommand(context, cli_container, args, caller_name=None):
         context.invoke(subcommand)
     else:
         context.forward(subcommand)
-
-
-mount_option = click.option("--mount", multiple=True, type=click.Tuple([str, str]))
-env_var_option = click.option("--env-var", multiple=True, type=click.Tuple([str, str]))
-auth_mfa = click.option(
-    "--mfa", is_flag=True, default=False, help="Enable Multi Factor Authentication upon launching shell."
-)
-auth_sso = click.option("--sso", is_flag=True, default=False, help="Enable SSO Authentication upon launching shell.")

From 4679a888ec0758e5ade667a1d7f16e158237d30a Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Fri, 24 Oct 2025 11:30:11 -0300
Subject: [PATCH 08/46] Typing and small importing improvement

---
 leverage/_utils.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/leverage/_utils.py b/leverage/_utils.py
index 9817e92..4dd6b1e 100644
--- a/leverage/_utils.py
+++ b/leverage/_utils.py
@@ -3,8 +3,8 @@
 """
 
 from pathlib import Path
-from subprocess import run
-from subprocess import PIPE
+from subprocess import PIPE, run
+from typing import List, Optional
 
 import hcl2
 import lark
@@ -153,12 +153,12 @@ def __exit__(self, exc_type, exc_value, exc_tb):
         self.docker_client.api.remove_container(self.container_data)
 
 
-def key_finder(d: dict, target: str, avoid: str = None):
+def key_finder(d: dict, target: str, avoid: Optional[str] = None) -> List[str]:
     """
    Iterate over a dict of dicts and/or lists of dicts, looking for a key with value "target".
    Collect and return all the values that matches "target" as key.
    """
-    values = []
+    values: List[str] = []
 
     for key, value in d.items():
         if isinstance(value, dict):

From e725796ed18726851d4a6acf5c44d7c0885949c1 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Fri, 24 Oct 2025 11:32:21 -0300
Subject: [PATCH 09/46] Drop support for containers on PathHandler objects

---
 leverage/path.py | 73 ++++++++++--------------------------------------
 1 file changed, 14 insertions(+), 59 deletions(-)

diff --git a/leverage/path.py b/leverage/path.py
index e01aabf..8799cbe 100644
--- a/leverage/path.py
+++ b/leverage/path.py
@@ -136,8 +136,7 @@ class PathsHandler:
     ACCOUNT_TF_VARS = "account.tfvars"
     BACKEND_TF_VARS = "backend.tfvars"
 
-    def __init__(self, env_conf: dict, container_user: str):
-        self.container_user = container_user
+    def __init__(self, env_conf: dict):
         self.home = Path.home()
         self.cwd = Path.cwd()
         try:
@@ -160,16 +159,13 @@ def __init__(self, env_conf: dict, container_user: str):
         self.project = self.common_conf.get("project", env_conf.get("PROJECT", False))
         if not self.project:
             raise ExitError(1, "Project name has not been set. Exiting.")
-
-        # Project mount location
-        self.project_long = self.common_conf.get("project_long", "project")
-        self.guest_base_path = f"/{self.project_long}"
+        self.project_long = self.common_conf.get("project_long", env_conf.get("PROJECT_LONG", False))
 
         # Ensure credentials directory
-        self.host_aws_credentials_dir = self.home / ".aws" / self.project
-        if not self.host_aws_credentials_dir.exists():
-            self.host_aws_credentials_dir.mkdir(parents=True)
-        self.sso_cache = self.host_aws_credentials_dir / "sso" / "cache"
+        self.aws_credentials_dir = self.home / ".aws" / self.project
+        if not self.aws_credentials_dir.exists():
+            self.aws_credentials_dir.mkdir(parents=True)
+        self.sso_cache = self.aws_credentials_dir / "sso" / "cache"
 
     def update_cwd(self, new_cwd):
         self.cwd = new_cwd
@@ -179,45 +175,29 @@ def update_cwd(self, new_cwd):
         account_config_path = self.account_config_dir / self.ACCOUNT_TF_VARS
         self.account_conf = hcl2.loads(account_config_path.read_text())
 
-    @property
-    def guest_account_base_path(self):
-        return f"{self.guest_base_path}/{self.account_dir.relative_to(self.root_dir).as_posix()}"
-
     @property
     def common_tfvars(self):
-        return f"{self.guest_base_path}/config/{self.COMMON_TF_VARS}"
+        return f"{self.root_dir}/config/{self.COMMON_TF_VARS}"
 
     @property
     def account_tfvars(self):
-        return f"{self.guest_account_base_path}/config/{self.ACCOUNT_TF_VARS}"
+        return f"{self.account_dir}/config/{self.ACCOUNT_TF_VARS}"
 
     @property
     def backend_tfvars(self):
-        return f"{self.guest_account_base_path}/config/{self.BACKEND_TF_VARS}"
-
-    @property
-    def guest_aws_credentials_dir(self):
-        return str(f"/home/{self.container_user}/tmp" / Path(self.project))
-
-    @property
-    def host_aws_profiles_file(self):
-        return f"{self.host_aws_credentials_dir}/config"
-
-    @property
-    def host_aws_credentials_file(self):
-        return self.host_aws_credentials_dir / "credentials"
+        return self.account_config_dir / self.BACKEND_TF_VARS
 
     @property
-    def host_git_config_file(self):
-        return self.home / ".gitconfig"
+    def aws_config_file(self):
+        return self.aws_credentials_dir / "config"
 
     @property
-    def local_backend_tfvars(self):
-        return self.account_config_dir / self.BACKEND_TF_VARS
+    def aws_credentials_file(self):
+        return self.aws_credentials_dir / "credentials"
 
     @property
     def sso_token_file(self):
-        return f"{self.sso_cache}/token"
+        return self.sso_cache / "token"
 
     def get_location_type(self):
         """
@@ -246,31 +226,6 @@ def assert_running_leverage_project(self):
         if self.root_dir == self.account_dir == self.common_config_dir == self.account_config_dir == self.cwd:
             raise ExitError(1, "Not running in a Leverage project. Exiting.")
 
-    def guest_config_file(self, file):
-        """Map config file in host to location in guest.
-
-        Args:
-            file (pathlib.Path): File in host to map
-
-        Raises:
-            Exit: If file is not contained in any valid config directory
-
-        Returns:
-            str: Path in guest to config file
-        """
-        file_name = file.name
-
-        if file.parent == self.account_config_dir:
-            return f"{self.guest_account_base_path}/config/{file_name}"
-        if file.parent == self.common_config_dir:
-            return f"{self.guest_base_path}/config/{file_name}"
-
-        raise ExitError(1, "File is not part of any config directory.")
-
-    @property
-    def tf_cache_dir(self):
-        return os.getenv("TF_PLUGIN_CACHE_DIR")
-
     def check_for_layer_location(self, path: Path = None):
         """Make sure the command is being run at layer level. If not, bail."""
         path = path or self.cwd

From 077748733f256e5161b0b26a5582803573b88fbd Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Fri, 24 Oct 2025 11:32:59 -0300
Subject: [PATCH 10/46] Make _handle_subcommand work on Runners and not on containers

---
 leverage/modules/utils.py | 19 ++++++++++++-------
 1 file changed, 12 insertions(+), 7 deletions(-)

diff --git a/leverage/modules/utils.py b/leverage/modules/utils.py
index 4858c58..ded87cd 100644
--- a/leverage/modules/utils.py
+++ b/leverage/modules/utils.py
@@ -1,18 +1,24 @@
-import click
+from typing import Optional, Tuple
+
 from click.exceptions import Exit
+from click.core import Context
+
+from leverage.modules.runner import Runner
 
 
-def _handle_subcommand(context, cli_container, args, caller_name=None):
+def _handle_subcommand(
+    context: Context, runner: Runner, args: Tuple[str, ...], caller_name: Optional[str] = None
+) -> None:
     """Decide if command corresponds to a wrapped one or not and run accordingly.
 
    Args:
        context (click.context): Current context
-        cli_container (LeverageContainer): Container where commands will be executed
+        runner (Runner): Runner where commands will be executed
        args (tuple(str)): Arguments received by Leverage
        caller_name (str, optional): Calling command. Defaults to None.
 
    Raises:
-        Exit: Whenever container execution returns a non-zero exit code
+        Exit: Whenever runner execution returns a non-zero exit code
    """
     caller_pos = args.index(caller_name) if caller_name is not None else 0
 
@@ -21,9 +27,8 @@ def _handle_subcommand(context, cli_container, args, caller_name=None):
     subcommand = next((arg for arg in args[caller_pos:] if arg in wrapped_subcommands), None)
 
     if subcommand is None:
-        # Pass command to the container directly
-        exit_code = cli_container.start(" ".join(args))
-        if not exit_code:
+        # Run the command directly, unpacking args so each one is passed as a separate argument
+        if exit_code := runner.run(*args):
             raise Exit(exit_code)
 
     else:

From a893e55b74ae199e4f6909fe71c9139aa9fee5ad Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Fri, 24 Oct 2025 11:33:54 -0300
Subject: [PATCH 11/46] Add new dependencies

---
 poetry.lock    | 20 +++++++++++++++++++-
 pyproject.toml |  2 ++
 2 files changed, 21 insertions(+), 1 deletion(-)

diff --git a/poetry.lock b/poetry.lock
index 7dfb53a..eb38254 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1285,6 +1285,24 @@ pytest = ">=4.6"
 [package.extras]
 testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
 
+[[package]]
+name = "pytest-mock"
+version = "3.15.1"
+description = "Thin-wrapper around the mock package for easier use with pytest"
+category = "dev"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d"},
+    {file = "pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f"},
+]
+
+[package.dependencies]
+pytest = ">=6.2.5"
+
+[package.extras]
+dev = ["pre-commit", "pytest-asyncio", "tox"]
+
 [[package]]
 name = "python-dateutil"
 version = "2.9.0.post0"
@@ -1980,4 +1998,4 @@ type = ["pytest-mypy"]
 [metadata]
 lock-version = "2.0"
 python-versions = "~3.9 || ~3.10 || ~3.11 || ~3.12 || ~3.13"
-content-hash = "48b29a9d6572a5119a7af19b8c5b7d02e4eb715018fb5b2c8786f995b52ac276"
+content-hash = "1f5899f1c878231d764da654eeed24f372eeb90a4eee0efbf84e9579e9af7d1c"
diff --git a/pyproject.toml b/pyproject.toml
index 8c01293..d16252b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -42,6 +42,7 @@ docutils = "0.17.1"
 rich = "14.1.0"
requests = "2.31" simple-term-menu = "1.6.4" +python-dateutil = "^2.9.0.post0" [tool.poetry.group.dev.dependencies] pylint = "2.8.3" @@ -50,6 +51,7 @@ pytest-cov = "6.0.0" twine = "6.1.0" black = "23.3.0" pre-commit = "3.5.0" +pytest-mock = "^3.15.1" [build-system] From 0c57669cc6313db32542eb348cd98058f369f08d Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Fri, 24 Oct 2025 11:39:41 -0300 Subject: [PATCH 12/46] Adapt auth utilities to new path handling --- leverage/modules/auth.py | 77 ++++++++++++++++++++++++++++++++-------- 1 file changed, 62 insertions(+), 15 deletions(-) diff --git a/leverage/modules/auth.py b/leverage/modules/auth.py index 9f553bd..d9eefb8 100644 --- a/leverage/modules/auth.py +++ b/leverage/modules/auth.py @@ -1,5 +1,8 @@ import time +import json from pathlib import Path +from datetime import datetime +from dateutil.tz import tzutc from configparser import NoSectionError, NoOptionError import boto3 @@ -7,6 +10,7 @@ from configupdater import ConfigUpdater from leverage import logger +from leverage.path import PathsHandler from leverage._utils import key_finder, ExitError, get_or_create_section, parse_tf_file @@ -21,10 +25,14 @@ def get_layer_profile(raw_profile: str, config_updater: ConfigUpdater, tf_profil raise SkipProfile # if it is exactly that variable, we already know the layer profile is tf_profile - layer_profile = tf_profile if raw_profile == "${var.profile}" else None + layer_profile = tf_profile if raw_profile in ("${var.profile}", "each.value.profile") else None # replace variables with their corresponding values - raw = raw_profile.replace("${var.profile}", tf_profile).replace("${var.project}", project) + raw = ( + raw_profile.replace("${var.profile}", tf_profile) + .replace("${var.project}", project) + .replace("each.value.profile", tf_profile) + ) # the project and the role are at the beginning and end of the string _, *account_name, _ = raw.split("-") @@ -57,15 +65,15 @@ def update_config_section(updater: ConfigUpdater, layer_profile: str, data: dict updater.update_file() -def get_profiles(cli): +def get_profiles(paths: PathsHandler): """ Get the AWS profiles present on the layer by parsing some tf files. """ raw_profiles = set() # these are files from the layer we are currently on - for name in ("config.tf", "locals.tf"): + for name in ("config.tf", "locals.tf", "runtime.tf"): try: - tf_config = parse_tf_file(Path(cli.paths.cwd / name)) + tf_config = parse_tf_file(Path(paths.cwd / name)) except FileNotFoundError: continue @@ -74,25 +82,65 @@ def get_profiles(cli): raw_profiles.update(set(key_finder(tf_config, "profile", "lookup"))) # the profile value from /config/backend.tfvars - backend_config = parse_tf_file(cli.paths.local_backend_tfvars) + backend_config = parse_tf_file(paths.backend_tfvars) tf_profile = backend_config["profile"] return tf_profile, raw_profiles -def refresh_layer_credentials(cli): - tf_profile, raw_profiles = get_profiles(cli) +def get_sso_access_token(sso_token_file: Path) -> str: + """ + Get the SSO access token from the token file. 
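+
+    For illustration only, the token file is the JSON document cached by the
+    `leverage aws sso login` flow, shaped like:
+    {"accessToken": "...", "expiresAt": "2026-01-01T00:00:00Z", ...}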
+ """ + return json.loads(sso_token_file.read_text())["accessToken"] + + +def check_sso_token(paths: PathsHandler): + """Check for the existence and validity of the SSO token to be used to get credentials.""" + + # Adding `token` file name to this function in order to + # meet the requirement regarding to have just one + # token file in the sso/cache + sso_role = paths.account_conf.get("sso_role") + token_file = paths.sso_cache / sso_role + + token_files = list(paths.sso_cache.glob("*")) + if not token_files: + raise ExitError(1, "No AWS SSO token found. Please log in or configure SSO.") + + if token_file not in token_files and paths.sso_token_file not in token_files: + raise ExitError( + 1, + "No valid AWS SSO token found for current account.\n" + "Please log out and reconfigure SSO before proceeding.", + ) + + token = json.loads(paths.sso_token_file.read_text()) + expiry = datetime.strptime(token.get("expiresAt"), "%Y-%m-%dT%H:%M:%SZ") + renewal = datetime.now() + + if expiry < renewal: + raise ExitError( + 1, + "AWS SSO token has expired, please log back in by running [bold]leverage aws sso login[/bold]" + " to refresh your credentials before re-running the last command.", + ) + + +def refresh_layer_credentials(paths: PathsHandler): + tf_profile, raw_profiles = get_profiles(paths) config_updater = ConfigUpdater() - config_updater.read(cli.paths.host_aws_profiles_file) + config_updater.read(paths.aws_config_file) - client = boto3.client("sso", region_name=cli.sso_region_from_main_profile) + region = config_updater.get(f"profile {paths.project}-sso", "sso_region").value + client = boto3.client("sso", region_name=region) for raw in raw_profiles: try: account_id, account_name, sso_role, layer_profile = get_layer_profile( raw, config_updater, tf_profile, - cli.project, + paths.project, ) except SkipProfile: continue @@ -119,7 +167,7 @@ def refresh_layer_credentials(cli): credentials = client.get_role_credentials( roleName=sso_role, accountId=account_id, - accessToken=cli.get_sso_access_token(), + accessToken=get_sso_access_token(paths.sso_token_file), )["roleCredentials"] except ClientError as error: if error.response["Error"]["Code"] in ("AccessDeniedException", "ForbiddenException"): @@ -140,10 +188,9 @@ def refresh_layer_credentials(cli): }, ) # write credentials on aws//credentials (create the file if it doesn't exist first) - creds_path = Path(cli.paths.host_aws_credentials_file) - creds_path.touch(exist_ok=True) + paths.aws_credentials_file.touch(exist_ok=True) credentials_updater = ConfigUpdater() - credentials_updater.read(cli.paths.host_aws_credentials_file) + credentials_updater.read(paths.aws_credentials_file) update_config_section( credentials_updater, From fa918e681da6f89814a4011132a515a90aa3f98e Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Fri, 24 Oct 2025 11:40:36 -0300 Subject: [PATCH 13/46] Implement sso login and logout logic using boto3 --- leverage/modules/aws.py | 259 +++++++++++++++++++++++++++++----------- 1 file changed, 186 insertions(+), 73 deletions(-) diff --git a/leverage/modules/aws.py b/leverage/modules/aws.py index 8604bee..2fc40ad 100644 --- a/leverage/modules/aws.py +++ b/leverage/modules/aws.py @@ -1,20 +1,30 @@ +import time +import json +import datetime +import webbrowser +from typing import Any, Dict, Tuple + import boto3 import click -from click.exceptions import Exit +from dateutil.tz import tzutc from configupdater import ConfigUpdater from leverage import logger -from leverage._internals import pass_state -from leverage._internals import 
pass_container -from leverage._utils import get_or_create_section -from leverage.container import get_docker_client, SSOContainer -from leverage.container import AWSCLIContainer +from leverage.path import PathsHandler +from leverage.modules.runner import Runner from leverage.modules.utils import _handle_subcommand +from leverage.modules.auth import get_sso_access_token +from leverage._utils import get_or_create_section, ExitError +from leverage._internals import pass_state, pass_runner, pass_paths + CONTEXT_SETTINGS = {"ignore_unknown_options": True} -def get_account_roles(sso_client, access_token: str) -> dict: +AWS_SSO_LOGIN_URL = "{sso_url}/#/device?user_code={user_code}" + + +def get_account_roles(sso_client: Any, access_token: str) -> Dict[str, Dict[str, str]]: """ Fetch the accounts and roles from the user. """ @@ -38,7 +48,7 @@ def get_account_roles(sso_client, access_token: str) -> dict: def add_sso_profile( config_updater: ConfigUpdater, section_name: str, role_name: str, account_id: str, region: str, start_url: str -): +) -> None: """ Add a profile to the config file. """ @@ -55,28 +65,29 @@ def add_sso_profile( section[k] = v -def configure_sso_profiles(cli: SSOContainer): +def configure_sso_profiles(paths: PathsHandler) -> None: """ Populate the ~./aws//config file with the sso profiles from the accounts. """ updater = ConfigUpdater() - updater.read(cli.paths.host_aws_profiles_file) + updater.read(paths.aws_config_file) # get values from the default profile first - default_sso_profile_name = f"profile {cli.project}-sso" + default_sso_profile_name = f"profile {paths.project}-sso" default_profile = updater[default_sso_profile_name] region = default_profile["sso_region"].value start_url = default_profile["sso_start_url"].value # then set a profile for each account - access_token = cli.get_sso_access_token() + access_token = get_sso_access_token(paths.sso_token_file) + logger.info(f"Fetching accounts and roles...") client = boto3.client("sso", region_name=region) account_roles = get_account_roles(client, access_token) for acc_name, values in account_roles.items(): # account names comes in the form of: {long project name}-{account name} - short_acc_name = acc_name.replace(cli.paths.project_long + "-", "") - section_name = f"profile {cli.project}-sso-{short_acc_name}" + short_acc_name = acc_name.replace(paths.project_long + "-", "") + section_name = f"profile {paths.project}-sso-{short_acc_name}" logger.info(f"Adding {section_name}") add_sso_profile(updater, section_name, values["role_name"], values["account_id"], region, start_url) @@ -88,74 +99,82 @@ def configure_sso_profiles(cli: SSOContainer): @click.argument("args", nargs=-1, type=click.UNPROCESSED) @pass_state @click.pass_context -def aws(context, state, args): +def aws(context: click.Context, state: Any, args: Tuple[str, ...]) -> None: """Run AWS CLI commands in a custom containerized environment.""" - cli = AWSCLIContainer(get_docker_client()) - state.container = cli - state.container.ensure_image() - _handle_subcommand(context=context, cli_container=cli, args=args) + credentials_env_vars = { + "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file), + "AWS_CONFIG_FILE": str(state.paths.aws_config_file), + } + state.runner = Runner( + binary="aws", + error_message=( + f"AWS CLI not found on system. 
" + f"Please install it following the instructions at: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html" + ), + env_vars=credentials_env_vars, + ) + + _handle_subcommand(context=context, runner=state.runner, args=args) @aws.group(invoke_without_command=True, add_help_option=False, context_settings=CONTEXT_SETTINGS) @click.argument("args", nargs=-1, type=click.UNPROCESSED) -@pass_container +@pass_runner @click.pass_context -def configure(context, cli, args): +def configure(context: click.Context, awscli: Runner, args: Tuple[str, ...]) -> None: """configure""" - _handle_subcommand(context=context, cli_container=cli, args=args, caller_name="configure") + _handle_subcommand(context=context, runner=awscli, args=args, caller_name="configure") @configure.command("sso") -@pass_container +@pass_paths +@pass_runner @click.pass_context -def _sso(context, cli): +def _sso(context: click.Context, awscli: Runner, paths: PathsHandler) -> None: """configure sso""" - cli.paths.check_for_layer_location() + paths.check_for_layer_location() - # region_primary was added in refarch v1 + # region_primary was added in ref-arch v1 # for v2 it was replaced by region at project level region_primary = "region_primary" - if "region_primary" not in cli.paths.common_conf: + if "region_primary" not in paths.common_conf: region_primary = "region" - default_region = cli.paths.common_conf.get(region_primary, cli.paths.common_conf.get("sso_region")) + default_region = paths.common_conf.get(region_primary, paths.common_conf.get("sso_region")) if default_region is None: - logger.error("No primary region configured in global config file.") - raise Exit(1) + raise ExitError(1, "No primary region configured in global config file.") logger.info("Configuring default profile.") default_profile = {"region": default_region, "output": "json"} for key, value in default_profile.items(): - cli.exec(f"configure set {key} {value}", profile="default") + awscli.exec("configure", "set", key, value, "--profile", "default") - if not all(sso_key in cli.paths.common_conf for sso_key in ("sso_start_url", "sso_region")): - logger.error("Missing configuration values for SSO in global config file.") - raise Exit(1) + if not all(sso_key in paths.common_conf for sso_key in ("sso_start_url", "sso_region")): + raise ExitError(1, "Missing configuration values for SSO in global config file.") - sso_role = cli.paths.account_conf.get("sso_role") + sso_role = paths.account_conf.get("sso_role") if not sso_role: - logger.error("Missing SSO role in account config file.") - raise Exit(1) + raise ExitError(1, "Missing SSO role in account config file.") - current_account = cli.paths.account_conf.get("environment") + current_account = paths.account_conf.get("environment") try: - # this is for refarch v1 - account_id = cli.paths.common_conf.get("accounts").get(current_account).get("id") + # this is for ref-arch v1 + account_id = paths.common_conf.get("accounts").get(current_account).get("id") except AttributeError: - # this is for refarch v2 + # this is for ref-arch v2 try: # this is for accounts with no org unit on top of it - account_id = cli.paths.common_conf.get("organization").get("accounts").get(current_account).get("id") + account_id = paths.common_conf.get("organization").get("accounts").get(current_account).get("id") except AttributeError: try: # this is for accounts with no org unit on top of it found = False - for ou in cli.paths.common_conf.get("organization").get("organizational_units"): - if current_account in 
cli.paths.common_conf.get("organization").get("organizational_units").get( - ou - ).get("accounts"): + for ou in paths.common_conf.get("organization").get("organizational_units"): + if current_account in paths.common_conf.get("organization").get("organizational_units").get(ou).get( + "accounts" + ): account_id = ( - cli.paths.common_conf.get("organization") + paths.common_conf.get("organization") .get("organizational_units") .get(ou) .get("accounts") @@ -167,59 +186,153 @@ def _sso(context, cli): if not found: raise AttributeError except AttributeError: - logger.error(f"Missing account configuration for [bold]{current_account}[/bold] in global config file.") - raise Exit(1) + raise ExitError( + 1, f"Missing account configuration for [bold]{current_account}[/bold] in global config file." + ) if not account_id: - logger.error(f"Missing id for account [bold]{current_account}[/bold].") - raise Exit(1) + raise ExitError(1, f"Missing id for account [bold]{current_account}[/bold].") - logger.info(f"Configuring [bold]{cli.project}-sso[/bold] profile.") + logger.info(f"Configuring [bold]{paths.project}-sso[/bold] profile.") sso_profile = { - "sso_start_url": cli.paths.common_conf.get("sso_start_url"), - "sso_region": cli.paths.common_conf.get("sso_region", cli.paths.common_conf.get(region_primary)), + "sso_start_url": paths.common_conf.get("sso_start_url"), + "sso_region": paths.common_conf.get("sso_region", paths.common_conf.get(region_primary)), "sso_account_id": account_id, "sso_role_name": sso_role, } for key, value in sso_profile.items(): - cli.exec(f"configure set {key} {value}", profile=f"{cli.project}-sso") + awscli.exec("configure", "set", key, value, "--profile", f"{paths.project}-sso") context.invoke(login) logger.info("Storing account information.") - configure_sso_profiles(cli) + configure_sso_profiles(paths) + + logger.info("SSO profiles configured successfully.") @aws.group(invoke_without_command=True, add_help_option=False, context_settings=CONTEXT_SETTINGS) @click.argument("args", nargs=-1, type=click.UNPROCESSED) -@pass_container +@pass_runner @click.pass_context -def sso(context, cli, args): +def sso(context: click.Context, awscli: Runner, args: Tuple[str, ...]) -> None: """sso""" - _handle_subcommand(context=context, cli_container=cli, args=args, caller_name="sso") + _handle_subcommand(context=context, runner=awscli, args=args, caller_name="sso") @sso.command() -@pass_container -def login(cli): +@pass_paths +@pass_runner +def login(awscli: Runner, paths: PathsHandler) -> None: """Login""" - exit_code, region = cli.exec(f"configure get sso_region --profile {cli.project}-sso") + exit_code, region, _ = awscli.exec("configure", "get", "sso_region", "--profile", f"{paths.project}-sso") if exit_code: - logger.error(f"Region configuration for [bold]{cli.project}-sso[/bold] profile not found.") - raise Exit(1) + raise ExitError( + exit_code, + f"Region configuration for [bold]{paths.project}-sso[/bold] profile not found. 
\n" + f"Please run [bold]leverage configure sso[/bold] to configure the SSO profile.", + ) + + paths.sso_cache.mkdir(parents=True, exist_ok=True) + + logger.info(f"Logging in...") + sso_oidc_client = boto3.client("sso-oidc", region_name=region) + + logger.debug(f"Registering client...") + sso_oidc_client_creds = sso_oidc_client.register_client( + clientName=f"leverage-{datetime.datetime.now().timestamp()}", + clientType="public", + ) + device_authorization = sso_oidc_client.start_device_authorization( + clientId=sso_oidc_client_creds["clientId"], + clientSecret=sso_oidc_client_creds["clientSecret"], + startUrl=paths.common_conf.get("sso_start_url"), + ) - if exit_code := cli.sso_login(): - raise Exit(exit_code) + logger.info( + f"Attempting to automatically open the SSO authorization page in your default browser.\n" + f"If the browser does not open or you wish to use a different device to authorize this request, open the following URL:\n" + f"\n{paths.common_conf.get("sso_start_url")}\n" + f"\nThen enter the code:\n" + f"\n{device_authorization["userCode"]}\n" + ) + webbrowser.open_new_tab( + f"{paths.common_conf.get("sso_start_url")}/#/device?user_code={device_authorization["userCode"]}" + ) + + logger.debug(f"Attempting to create authorization token...") + _wait_interval = device_authorization["interval"] + token = None + while not token: + try: + token_response = sso_oidc_client.create_token( + grantType="urn:ietf:params:oauth:grant-type:device_code", + deviceCode=device_authorization["deviceCode"], + clientId=sso_oidc_client_creds["clientId"], + clientSecret=sso_oidc_client_creds["clientSecret"], + ) + + token_expires_at = datetime.datetime.now(tzutc()) + datetime.timedelta(seconds=token_response["expiresIn"]) + client_expires_at = datetime.datetime.fromtimestamp(sso_oidc_client_creds["clientSecretExpiresAt"], tzutc()) + + token = { + "startUrl": paths.common_conf.get("sso_start_url"), + "region": region, + "accessToken": token_response["accessToken"], + "expiresAt": token_expires_at.strftime("%Y-%m-%dT%H:%M:%SZ"), + "clientId": sso_oidc_client_creds["clientId"], + "clientSecret": sso_oidc_client_creds["clientSecret"], + "registrationExpiresAt": client_expires_at.strftime("%Y-%m-%dT%H:%M:%SZ"), + } + + except sso_oidc_client.exceptions.SlowDownException: + # Polling too frequently. + time.sleep(_wait_interval + 5) + except sso_oidc_client.exceptions.AuthorizationPendingException: + # User hasn't finished logging in. + time.sleep(_wait_interval) + except Exception as e: + raise ExitError( + 1, f"An error occurred while polling for authorization token: {e}\n" f"Aborting login process." 
+ ) + + logger.debug(f"Token expires at: {token['expiresAt']}") + logger.debug(f"Caching token.") + token_file = paths.sso_cache / "token" + token_file.write_text(json.dumps(token)) + + logger.info(f"Successfully logged in!.") @sso.command() -@pass_container -def logout(cli): +@pass_paths +def logout(paths: PathsHandler) -> None: """Logout""" - exit_code = cli.system_start(cli.AWS_SSO_LOGOUT_SCRIPT) - if exit_code: - raise Exit(exit_code) + region = paths.common_conf.get("sso_region") + sso_client = boto3.client("sso", region_name=region) + + logger.debug("Logging out of AWS SSO...") + sso_client.logout(accessToken=get_sso_access_token(paths.sso_token_file)) + + logger.debug("Removing SSO Tokens...") + if paths.sso_cache.exists(): + for file in paths.sso_cache.glob("*"): + file.unlink() + + logger.debug("Wiping current SSO credentials...") + updater = ConfigUpdater() + updater.read(paths.aws_credentials_file) + + sections = updater.sections() + for section in sections: + if section not in ("default", f"{paths.project}-sso"): + updater.remove_section(section) + updater.update_file() + + paths.aws_credentials_file.unlink(missing_ok=True) + + logger.debug("All SSO credentials wiped!.") logger.info( - f"Don't forget to log out of your [bold]AWS SSO[/bold] start page {cli.paths.common_conf.get('sso_start_url')}" - " and your external identity provider portal." + f"Don't forget to log out of your [bold]AWS SSO[/bold] start page {paths.common_conf.get('sso_start_url')}" + f" and your external identity provider portal." ) From 6ce54c10deb73bde7f50599a6bf5b6fb5d1ab933 Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Fri, 24 Oct 2025 11:41:12 -0300 Subject: [PATCH 14/46] Drop containers for TFRunner for tofu and terraform --- leverage/modules/tf.py | 362 ++++++++++++++++++++--------------------- 1 file changed, 179 insertions(+), 183 deletions(-) diff --git a/leverage/modules/tf.py b/leverage/modules/tf.py index 8300f0c..979cd74 100644 --- a/leverage/modules/tf.py +++ b/leverage/modules/tf.py @@ -1,60 +1,54 @@ import re from pathlib import Path -from typing import Sequence +from typing import Sequence, List import click from click.exceptions import Exit from leverage import logger -from leverage._internals import pass_container, pass_state +from leverage.path import PathsHandler +from leverage.modules.tfrunner import TFRunner from leverage._utils import ExitError, parse_tf_file -from leverage.container import TFContainer -from leverage.container import get_docker_client -from leverage.modules.utils import env_var_option, mount_option, auth_mfa, auth_sso +from leverage._internals import pass_paths, pass_runner, pass_state +from leverage._backend_config import get_backend_key, set_backend_key +from leverage.modules.auth import refresh_layer_credentials, check_sso_token -REGION = ( - r"global|(?:[a-z]{2}-(?:gov-)?" - r"(?:central|north|south|east|west|northeast|northwest|southeast|southwest|secret|topsecret)-[1-4])" -) +REGION = rf"(global|([a-z]{2}(-gov)?)-(central|(north|south)?(east|west)?)-\d)" # ########################################################################### # CREATE THE TOFU AND TERRAFORM GROUPS # ########################################################################### @click.group() -@mount_option -@env_var_option @pass_state -def tofu(state, env_var, mount): +def tofu(state): """Run OpenTofu commands in a custom containerized environment that provides extra functionality when interacting with your cloud provider such as handling multi factor authentication for you. 
All tofu subcommands that receive extra args will pass the given strings as is to their corresponding OpenTofu counterparts in the container. For example as in `leverage tofu apply -auto-approve` or `leverage tofu init -reconfigure` """ - if env_var: - env_var = dict(env_var) - - state.container = TFContainer(get_docker_client(), mounts=mount, env_vars=env_var) - state.container.ensure_image() + credentials_env_vars = { + "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file), + "AWS_CONFIG_FILE": str(state.paths.aws_config_file), + } + state.runner = TFRunner(env_vars=credentials_env_vars) @click.group() -@mount_option -@env_var_option @pass_state -def terraform(state, env_var, mount): +def terraform(state): """Run Terraform commands in a custom containerized environment that provides extra functionality when interacting with your cloud provider such as handling multi factor authentication for you. All terraform subcommands that receive extra args will pass the given strings as is to their corresponding Terraform counterparts in the container. For example as in `leverage terraform apply -auto-approve` or `leverage terraform init -reconfigure` """ - if env_var: - env_var = dict(env_var) - - state.container = TFContainer(get_docker_client(), terraform=True, mounts=mount, env_vars=env_var) - state.container.ensure_image() + credentials_env_vars = { + "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file), + "AWS_CONFIG_FILE": str(state.paths.aws_config_file), + } + state.runner = TFRunner(terraform=True, env_vars=credentials_env_vars) CONTEXT_SETTINGS = {"ignore_unknown_options": True} @@ -94,124 +88,135 @@ def terraform(state, env_var, mount): @click.option("--skip-validation", is_flag=True, help="Skip layout validation.") @layers_option @click.argument("args", nargs=-1) -@pass_container -@click.pass_context -def init(context, tf: TFContainer, skip_validation, layers, args): - """ - Initialize this layer. 
- """ - invoke_for_all_commands(layers, _init, args, skip_validation) +@pass_runner +def init(tf: TFRunner, args: Sequence[str], layers: str, skip_validation: bool): + """Initialize this layer.""" + invoke_for_all_commands(layers, _init, *args, skip_validation=skip_validation) @click.command(context_settings=CONTEXT_SETTINGS) @layers_option @click.argument("args", nargs=-1) -@pass_container -@click.pass_context -def plan(context, tf, layers, args): +@pass_runner +def plan(tf: TFRunner, args: Sequence[str], layers: str): """Generate an execution plan for this layer.""" - invoke_for_all_commands(layers, _plan, args) + invoke_for_all_commands(layers, _plan, *args) @click.command(context_settings=CONTEXT_SETTINGS) @layers_option @click.argument("args", nargs=-1) -@pass_container -@click.pass_context -def apply(context, tf, layers, args): +@pass_runner +def apply(tf: TFRunner, args: Sequence[str], layers: str): """Build or change the infrastructure in this layer.""" - invoke_for_all_commands(layers, _apply, args) + invoke_for_all_commands(layers, _apply, *args) @click.command(context_settings=CONTEXT_SETTINGS) @layers_option @click.argument("args", nargs=-1) -@pass_container -@click.pass_context -def output(context, tf, layers, args): +@pass_runner +def output(tf: TFRunner, args: Sequence[str], layers: str): """Show all output variables of this layer.""" - invoke_for_all_commands(layers, _output, args) + invoke_for_all_commands(layers, _output, *args) @click.command(context_settings=CONTEXT_SETTINGS) @layers_option @click.argument("args", nargs=-1) -@pass_container -@click.pass_context -def destroy(context, tf, layers, args): +@pass_runner +def destroy(tf: TFRunner, args: Sequence[str], layers: str): """Destroy infrastructure in this layer.""" - invoke_for_all_commands(layers, _destroy, args) + invoke_for_all_commands(layers, _destroy, *args) -@click.command() -@pass_container -def version(tf): - """Print version.""" - tf.disable_authentication() - tf.start("version") +@pass_paths +def tf_default_args(paths: PathsHandler) -> tuple: + """ + Returns a tuple of strings containing all valid config files for layer as + parameters for OpenTofu/Terraform. + Args: + paths: PathsHandler object -@click.command() -@auth_mfa -@auth_sso -@pass_container -def shell(tf, mfa, sso): - """Open a shell into the Terraform container in this layer.""" - tf.disable_authentication() - if sso: - tf.enable_sso() + Returns: + tuple: Tuple of strings containing all valid config files for layer as + parameters for OpenTofu/Terraform. 
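+
+        Example (illustrative project layout; actual paths depend on the project):
+            ("-var-file=/project/config/common.tfvars",
+             "-var-file=/project/apps-devstg/config/account.tfvars")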
+ """ + common_config_files = tuple( + f"-var-file={common_file.as_posix()}" + for common_file in paths.common_config_dir.glob("*.tfvars") + ) + account_config_files = tuple( + f"-var-file={account_file.as_posix()}" + for account_file in paths.account_config_dir.glob("*.tfvars") + ) + return common_config_files + account_config_files - if mfa: - tf.enable_mfa() - tf.start_shell() +@click.command() +@pass_runner +def version(tf): + """Print version.""" + tf.run("version") @click.command("format", context_settings=CONTEXT_SETTINGS) @click.argument("args", nargs=-1) -@pass_container +@pass_runner def _format(tf, args): """Check if all files meet the canonical format and rewrite them accordingly.""" args = args if "-recursive" in args else (*args, "-recursive") - tf.disable_authentication() - tf.start("fmt", *args) + tf.run("fmt", *args) + + +@click.command("force-unlock") +@click.argument("lock_id", metavar="LOCK_ID") +@pass_paths +@pass_runner +def force_unlock(tf, paths: PathsHandler, lock_id): + """Force unlock the state file.""" + check_sso_token(paths) + refresh_layer_credentials(paths) + if exit_code := tf.run("force-unlock", *tf_default_args(), lock_id): + raise Exit(exit_code) @click.command() -@pass_container -def validate(tf): +@pass_paths +@pass_runner +def validate(tf, paths: PathsHandler): """Validate code of the current directory. Previous initialization might be needed.""" - tf.disable_authentication() - tf.start("validate") + check_sso_token(paths) + refresh_layer_credentials(paths) + if exit_code := tf.run("validate", *tf_default_args()): + raise Exit(exit_code) @click.command("validate-layout") -@pass_container -def validate_layout(tf): +@pass_paths +def validate_layout(paths): """Validate layer conforms to Leverage convention.""" - tf.set_backend_key() - return _validate_layout() + return _validate_layout(paths.cwd) @click.command("import") @click.argument("address") @click.argument("_id", metavar="ID") -@pass_container +@pass_runner def _import(tf, address, _id): """Import a resource.""" - exit_code = tf.start_in_layer("import", *tf.tf_default_args, address, _id) - - if exit_code: + if exit_code := tf.run("import", *tf_default_args(), address, _id): raise Exit(exit_code) @click.command("refresh-credentials") -@pass_container -def refresh_credentials(tf): +@pass_paths +def refresh_credentials(paths): """Refresh the AWS credentials used on the current layer.""" - tf.paths.check_for_layer_location() - if exit_code := tf.refresh_credentials(): - raise Exit(exit_code) + paths.check_for_layer_location() + check_sso_token(paths) + refresh_layer_credentials(paths) # ########################################################################### @@ -225,8 +230,8 @@ def refresh_credentials(tf): output, destroy, version, - shell, _format, + force_unlock, validate, validate_layout, _import, @@ -239,8 +244,8 @@ def refresh_credentials(tf): # ########################################################################### # HANDLER FOR MANAGING THE BASE COMMANDS (init, plan, apply, destroy, output) # ########################################################################### -@pass_container -def invoke_for_all_commands(tf, layers, command, args, skip_validation=True): +@pass_paths +def invoke_for_all_commands(paths, layers, command, *args: Sequence[str], skip_validation=True): """ Invoke helper for "all" commands. 
@@ -254,10 +259,10 @@ def invoke_for_all_commands(tf, layers, command, args, skip_validation=True): layers = layers.split(",") if len(layers) > 0 else [] # based on the location type manage the layers parameter - location_type = tf.paths.get_location_type() + location_type = paths.get_location_type() if location_type == "layer" and len(layers) == 0: # running on a layer - layers = [tf.paths.cwd] + layers = [paths.cwd] elif location_type == "layer": # running on a layer but --layers was set raise ExitError(1, "Can not set [bold]--layers[/bold] inside a layer.") @@ -269,52 +274,32 @@ def invoke_for_all_commands(tf, layers, command, args, skip_validation=True): raise ExitError(1, "This command has to be run inside a layer or account directory.") else: # running on an account with --layers set - layers = [tf.paths.cwd / x for x in layers] - - # get current location - original_location = tf.paths.cwd - original_working_dir = tf.container_config["working_dir"] + layers = [paths.cwd / x for x in layers] # validate each layer before calling the execute command for layer in layers: logger.debug(f"Checking for layer {layer}...") - # change to current dir and set it in the container - tf.paths.cwd = layer # check layers existence if not layer.is_dir(): - logger.error(f"Directory [red]{layer}[/red] does not exist or is not a directory\n") - raise Exit(1) - - # set the s3 key - tf.set_backend_key(skip_validation) + raise ExitError(1, f"Directory [red]{layer}[/red] does not exist or is not a directory\n") # validate layer validate_for_all_commands(layer, skip_validation=skip_validation) - # change to original dir and set it in the container - tf.paths.cwd = original_location + # set the s3 key + if not get_backend_key(layer / "config.tf"): + backend_key_base = f"{paths.cwd.relative_to(paths.root_dir).as_posix()}/terraform.tfstate" + backend_key = backend_key_base.replace("/base-", "/").replace("/tools-", "/") + set_backend_key(layer / "config.tf", backend_key) # check layers existence for layer in layers: if len(layers) > 1: logger.info(f"Invoking command for layer {layer}...") - # change to current dir and set it in the container - tf.paths.cwd = layer - - # set the working dir - working_dir = f"{tf.paths.guest_base_path}/{tf.paths.cwd.relative_to(tf.paths.root_dir).as_posix()}" - tf.container_config["working_dir"] = working_dir - # execute the actual command - command(args=args) - - # change to original dir and set it in the container - tf.paths.cwd = original_location - - # change to original working dir - tf.container_config["working_dir"] = original_working_dir + command(args, working_dir=layer) return layers @@ -327,53 +312,52 @@ def validate_for_all_commands(layer, skip_validation=False): Args: layer: a full layer directory """ - logger.debug(f"Checking layer {layer}...") - if not skip_validation and not _validate_layout(): - logger.error( + if not skip_validation and not _validate_layout(layer): + raise ExitError(1, "Layer configuration doesn't seem to be valid. Exiting.\n" "If you are sure your configuration is actually correct " - "you may skip this validation using the --skip-validation flag." 
- ) - raise Exit(1) + "you may skip this validation using the --skip-validation flag.") # ########################################################################### # BASE COMMAND EXECUTORS # ########################################################################### -@pass_container -def _init(tf, args): +@pass_paths +@pass_runner +def _init(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: Path): """Initialize this layer.""" - args = [ + filtered_args = ( arg - for index, arg in enumerate(args) - if not arg.startswith("-backend-config") or not arg[index - 1] == "-backend-config" - ] - args.append(f"-backend-config={tf.paths.backend_tfvars}") + for index, arg in list(enumerate(args)) + if not str(arg).startswith("-backend-config") or not arg[index - 1] == "-backend-config" + ) + init_args = (*filtered_args, f"-backend-config={paths.backend_tfvars}") - tf.paths.check_for_layer_location() + check_sso_token(paths) + refresh_layer_credentials(paths) - exit_code = tf.start_in_layer("init", *args) - if exit_code: + if exit_code := tf.run("init", *init_args, working_dir=working_dir): raise Exit(exit_code) -@pass_container -def _plan(tf, args): +@pass_paths +@pass_runner +def _plan(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: Path): """Generate an execution plan for this layer.""" - exit_code = tf.start_in_layer("plan", *tf.tf_default_args, *args) + check_sso_token(paths) + refresh_layer_credentials(paths) - if exit_code: + if exit_code := tf.run("plan", *tf_default_args(), *args, working_dir=working_dir): raise Exit(exit_code) - def has_a_plan_file(args: Sequence[str]) -> bool: """Determine whether the list of arguments has a plan file at the end. - Terraform apply arguments have the form "-target ADDRESS" or "-target=ADDRESS" - in one case "-var 'NAME=value'" or "-var='NAME=value'". There are also flags - with the form "-flag". + OpenTofu/Terraform apply arguments have the form "-target ADDRESS" or + "-target=ADDRESS" in one case "-var 'NAME=value'" or "-var='NAME=value'". + There are also flags with the form "-flag". We just need to know if there is or not a plan file as a last argument to decide if we prepend our default terraform arguments or not. 
@@ -391,9 +375,11 @@ def has_a_plan_file(args: Sequence[str]) -> bool: """ - # Valid 'terraform apply' flags: + # Valid 'apply' flags: # https://developer.hashicorp.com/terraform/cli/commands/apply + # https://opentofu.org/docs/cli/commands/apply tf_flags = [ + # OpenTofu/Terraform flags: "-destroy", "-refresh-only", "-detailed-exitcode", @@ -401,6 +387,11 @@ def has_a_plan_file(args: Sequence[str]) -> bool: "-compact-warnings", "-json", "-no-color", + # OpenTofu exclusive flags: + "-consolidate-warnings", + "-consolidate-errors", + "-concise", + "-show-sensitive", ] if not args or args[-1].startswith("-"): @@ -415,31 +406,39 @@ def has_a_plan_file(args: Sequence[str]) -> bool: return True -@pass_container -def _apply(tf, args: Sequence[str]) -> None: +@pass_paths +@pass_runner +def _apply(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: Path): """Build or change the infrastructure in this layer.""" - default_args = [] if has_a_plan_file(args) else tf.tf_default_args + default_args = () if has_a_plan_file(args) else tf_default_args() logger.debug(f"Default args passed to apply command: {default_args}") - exit_code = tf.start_in_layer("apply", *default_args, *args) + check_sso_token(paths) + refresh_layer_credentials(paths) - if exit_code: - logger.error(f"Command execution failed with exit code: {exit_code}") + if exit_code := tf.run("apply", *default_args, *args, working_dir=working_dir): raise Exit(exit_code) -@pass_container -def _output(tf, args): +@pass_paths +@pass_runner +def _output(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: Path): """Show all output variables of this layer.""" - tf.start_in_layer("output", *args) + check_sso_token(paths) + refresh_layer_credentials(paths) + + if exit_code := tf.run("output", *args, working_dir=working_dir): + raise Exit(exit_code) -@pass_container -def _destroy(tf, args): +@pass_paths +@pass_runner +def _destroy(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: Path): """Destroy infrastructure in this layer.""" - exit_code = tf.start_in_layer("destroy", *tf.tf_default_args, *args) + check_sso_token(paths) + refresh_layer_credentials(paths) - if exit_code: + if exit_code := tf.run("destroy", *tf_default_args(), *args, working_dir=working_dir): raise Exit(exit_code) @@ -455,7 +454,7 @@ def _make_layer_backend_key(cwd, account_dir, account_name): account_name (str): Account Name Returns: - list of lists: Backend bucket key parts + list of strings: Backend bucket key parts """ resp = [] @@ -509,55 +508,52 @@ def _make_layer_backend_key(cwd, account_dir, account_name): curated_layer_paths_withDR.append(curated_layer_path) for layer_path in curated_layer_paths_withDR: - resp.append([account_name, *layer_path]) + resp.append(f"{'/'.join([account_name, *layer_path])}/terraform.tfstate") return resp -@pass_container -def _validate_layout(tf: TFContainer): - tf.paths.check_for_layer_location() +@pass_paths +def _validate_layout(paths, layer: str): + paths.check_for_layer_location() # Check for `environment = ` in account.tfvars - account_name = tf.paths.account_conf.get("environment") + account_name = paths.account_conf.get("environment") logger.info("Checking environment name definition in [bold]account.tfvars[/bold]...") if account_name is None: - logger.error("[red]✘ FAILED[/red]\n") - raise Exit(1) + raise ExitError(1, "[red]✘ FAILED[/red]\n") logger.info("[green]✔ OK[/green]\n") # Check if account directory name matches with environment name - if tf.paths.account_dir.stem != account_name: 
+ if paths.account_dir.stem != account_name: logger.warning( "[yellow]‼[/yellow] Account directory name does not match environment name.\n" - f" Expected [bold]{account_name}[/bold], found [bold]{tf.paths.account_dir.stem}[/bold]\n" + f" Expected [bold]{account_name}[/bold], found [bold]{paths.account_dir.stem}[/bold]\n" ) - backend_key = tf.backend_key.split("/") - # Flag to report layout validity valid_layout = True # Check backend bucket key - expected_backend_keys = _make_layer_backend_key(tf.paths.cwd, tf.paths.account_dir, account_name) - logger.info("Checking backend key...") - logger.info(f"Found: '{'/'.join(backend_key)}'") - backend_key = backend_key[:-1] + if backend_key := get_backend_key(Path(layer) / "config.tf"): + expected_backend_keys = _make_layer_backend_key(Path(layer), paths.account_dir, account_name) + logger.info("Checking backend key...") + logger.info(f"Found: '{backend_key}'") - if backend_key in expected_backend_keys: - logger.info("[green]✔ OK[/green]\n") + if backend_key in expected_backend_keys: + logger.info("[green]✔ OK[/green]\n") + else: + logger.info(f"Expected one of: {'; '.join(expected_backend_keys)}") + logger.error("[red]✘ FAILED[/red]\n") + valid_layout = False else: - exp_message = [f"{'/'.join(x)}/terraform.tfstate" for x in expected_backend_keys] - logger.info(f"Expected one of: {';'.join(exp_message)}") - logger.error("[red]✘ FAILED[/red]\n") - valid_layout = False + logger.info("No backend key found. Skipping backend key validation.\n") - backend_tfvars = Path(tf.paths.local_backend_tfvars) - backend_tfvars = parse_tf_file(backend_tfvars) if backend_tfvars.exists() else {} + backend_tfvars = parse_tf_file(paths.backend_tfvars) if paths.backend_tfvars.exists() else {} logger.info("Checking [bold]backend.tfvars[/bold]:\n") - names_prefix = f"{tf.project}-{account_name}" - names_prefix_bootstrap = f"{tf.project}-bootstrap" + names_prefix = f"{paths.project}-{account_name}" + names_prefix_bootstrap = f"{paths.project}-bootstrap" # Check profile, bucket and dynamo table names: for field in ("profile", "bucket", "dynamodb_table"): From 11917b7c6a13da0139d501eefde9beac50eefb9a Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Fri, 24 Oct 2025 11:41:28 -0300 Subject: [PATCH 15/46] Add CLAUDE.md --- CLAUDE.md | 294 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 294 insertions(+) create mode 100644 CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..21846ad --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,294 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 
+ +## Development Commands + +### Setup +- `poetry install --with=dev --with=main` - Install all dependencies including dev tools +- `poetry shell` - Activate virtual environment for development + +### Testing +- `poetry run pytest` - Run unit tests +- `poetry run pytest --verbose --cov=./ --cov-report=xml` - Run unit tests with coverage +- `make test-unit` - Run unit tests in Docker (with coverage) +- `make test-unit-no-cov` - Run unit tests in Docker (no coverage) +- `make test-int` - Run integration tests using bats in Docker +- `make tests` - Run full test suite (unit + integration) + +### Code Quality +- `poetry run black .` - Format code with Black (line length: 120) +- `poetry run pylint leverage/` - Run linting +- `poetry run pre-commit install` - Install pre-commit hooks +- `poetry run pre-commit run --all-files` - Run pre-commit checks manually + +### Build and Distribution +- `make build` - Build distributables (cleans first) +- `make check` - Check distributables with twine +- `poetry build` - Build package using Poetry +- `make clean` - Clean build artifacts + +### Docker +- `make build-image` - Build Docker testing image +- All test commands can run in Docker using the testing image + +## Architecture + +Leverage CLI is a Python-based command-line tool for managing Binbash Leverage projects. It uses a dockerized approach to encapsulate infrastructure tools. + +### Core Structure +- `leverage/leverage.py` - Main CLI entry point using Click framework +- `leverage/modules/` - Command modules (aws, terraform, kubectl, etc.) +- `leverage/container.py` - Docker container management and execution +- `leverage/conf.py` - Configuration loading from build.env files +- `leverage/tasks.py` - Task system for build scripts +- `leverage/path.py` - Path utilities and git repository handling + +### Key Components +- **Module System**: Commands are organized in modules under `leverage/modules/` +- **Container Integration**: Heavy use of Docker containers for tool execution +- **Configuration Management**: Hierarchical loading of build.env files +- **Task System**: Decorator-based task definition system for build scripts +- **AWS Integration**: Extensive AWS credential and service management + +### Command Structure +The CLI follows this pattern: +``` +leverage [global-options] [args] +``` + +Key modules include: +- `project` - Project initialization and management +- `terraform`/`tf`/`tofu` - Terraform/OpenTofu operations +- `aws` - AWS service interactions +- `credentials` - Credential management +- `kubectl`/`kc` - Kubernetes operations +- `run` - Custom task execution +- `shell` - Interactive shell access + +### Version Management +- Supports Python 3.9-3.13 +- Version defined in `leverage/__init__.py` +- Minimum tool versions enforced via `MINIMUM_VERSIONS` +- Docker image versioning through `__toolbox_version__` + +### Configuration +- Uses `build.env` files for project configuration +- Hierarchical loading from project root to current directory +- Environment-specific overrides supported + +## Docker Container Architecture for Terraform/OpenTofu + +The CLI uses a containerized approach for all Terraform/OpenTofu operations to ensure consistent tool versions and isolated execution environments. 
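+
+A rough sketch of that flow, assuming the pre-refactor `leverage.container` API described below (signatures simplified for illustration, not the exact implementation):
+
+```python
+# Sketch only: containerized dispatch via the pre-refactor TFContainer API.
+from leverage.container import TFContainer, get_docker_client
+
+
+def run_plan_in_container(*extra_args: str) -> int:
+    container = TFContainer(get_docker_client())  # wires project mounts and env vars
+    container.ensure_image()  # builds the local toolbox image with user permissions
+    container.enable_sso()  # resolves SSO authentication before execution
+    # Run inside the current layer, injecting the default -var-file arguments
+    return container.start_in_layer("plan", *container.tf_default_args, *extra_args)
+```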
+ +### Container Classes + +#### TFContainer (`leverage/container.py:436-687`) +Primary container for Terraform/OpenTofu execution: +- **Image**: `binbash/leverage-toolbox` with user-specific permissions +- **Binaries**: `/bin/terraform` (when `terraform=True`) or `/bin/tofu` (default) +- **Mount Points**: + - Project root → `/leverage` (guest base path) + - AWS credentials directory → `/tmp/.aws` + - Git config file → `/etc/gitconfig` + - Optional: TF plugin cache directory (maintains symlinks) + - Optional: SSH agent socket → `/ssh-agent` + +#### TFautomvContainer (`leverage/container.py:689-717`) +Extends TFContainer for TFAutomv operations: +- **Binary**: `/usr/local/bin/tfautomv` +- Inherits all TFContainer mounts and configuration + +### Configuration File Management + +#### Environment Variables in Containers: +- `COMMON_CONFIG_FILE` → `common.tfvars` +- `ACCOUNT_CONFIG_FILE` → `account.tfvars` +- `BACKEND_CONFIG_FILE` → `backend.tfvars` +- `AWS_SHARED_CREDENTIALS_FILE` → `/tmp/.aws/credentials` +- `AWS_CONFIG_FILE` → `/tmp/.aws/config` +- `SSO_CACHE_DIR` → `/tmp/.aws/sso/cache` + +#### Terraform Variable Files: +The `tf_default_args` property automatically includes: +- All `*.tfvars` files from `common/` directory +- All `*.tfvars` files from account-specific directory + +### Docker Execution Points + +#### Terraform/OpenTofu Commands (`leverage/modules/tf.py`) +- Container creation for `tofu` and `terraform` commands (lines 38, 56) +- Command execution via `tf.start()` for all operations +- **Supported Commands**: `init`, `plan`, `apply`, `destroy`, `output`, `version`, `shell`, `format`, `validate`, `import`, `refresh-credentials` + +#### TFAutomv Commands (`leverage/modules/tfautomv.py`) +- Container creation for `tfautomv` commands (line 24) +- Command execution via `tf.start_in_layer()` (line 36) + +### Container Lifecycle + +1. **Image Verification**: `ensure_image()` builds local image with user permissions +2. **Container Creation**: `_create_container()` with mounted volumes and environment +3. **Authentication Setup**: SSO token validation or MFA credential handling +4. **Command Execution**: Interactive (`_start()`) or silent (`_exec()`) +5. **Cleanup**: Automatic container stop and removal + +### Authentication & Credentials + +#### SSO Authentication: +- Token validation before container execution +- Automatic credential refresh via `refresh_layer_credentials()` +- Browser-based authentication flow with user code + +#### MFA Authentication: +- Script-based authentication via `aws-mfa-entrypoint.sh` +- Environment variable adjustments for credential paths + +#### Credential Mounting: +- Host AWS credentials directory mounted to container +- Separate credential files for different authentication methods + +### Backend Configuration Management + +#### S3 Backend Handling: +- Automatic `backend.tfvars` parameter injection for `init` commands +- Dynamic state key generation based on layer path structure +- Backend block validation in `config.tf` files +- Support for legacy naming conventions (tf- vs terraform-) + +**IMPORTANT**: As of the latest update, Leverage CLI now uses **host-based execution** instead of Docker containers: + +## Host-Based Execution Architecture + +The CLI has been updated to use host-based execution for improved performance and flexibility while maintaining all functionality. 
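+
+A minimal sketch of the pattern (the profile name and paths here are illustrative; the actual `Runner` API is described below):
+
+```python
+# Sketch only: host-based dispatch through the generic Runner (illustrative values).
+from leverage.modules.runner import Runner
+
+awscli = Runner(
+    binary="aws",  # resolved from PATH at initialization; a missing binary raises RuntimeError
+    env_vars={"AWS_CONFIG_FILE": "/home/user/.aws/myproject/config"},  # merged into every invocation
+)
+
+# Non-interactive: captures output and returns (exit_code, stdout, stderr)
+code, region, _ = awscli.exec("configure", "get", "sso_region", "--profile", "myproject-sso")
+
+# Interactive: streams to the terminal and returns only the exit code
+exit_code = awscli.run("sso", "logout")
+```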
+ +### Core Runner Classes + +#### Runner (`leverage/modules/runner.py`) +Generic command runner base class: +- **Purpose**: Provides common execution functionality for all binary runners +- **Binary Discovery**: Searches for binaries in PATH or accepts absolute paths +- **Environment Management**: Merges instance-level and run-time environment variables +- **Execution Modes**: + - `run()` - Interactive execution (returns exit code) or silent (returns exit code, stdout, stderr) + - `exec()` - Convenience method for non-interactive execution with output capture +- **Working Directory**: Supports execution in any specified directory +- **Validation**: Automatic binary existence validation on initialization + +#### TFRunner (`leverage/modules/tfrunner.py`) +Terraform/OpenTofu-specific runner extending Runner: +- **Binaries**: Uses system-installed `terraform` or `tofu` binaries +- **Configuration**: Accepts `terraform=True` for Terraform, defaults to OpenTofu +- **Error Messages**: Provides installation URLs when binaries are not found + - Terraform: https://developer.hashicorp.com/terraform/install + - OpenTofu: https://opentofu.org/docs/intro/install/ +- **Environment Variables**: Initialized with AWS credential file paths via `env_vars` parameter +- **No Containers**: Direct binary execution on host system + +### Command Flow Architecture + +#### Terraform/OpenTofu Command Flow (`leverage/modules/tf.py`) + +1. **CLI Entry Points**: + - `@click.group() tofu()` (lines 22-35) - Creates TFRunner with OpenTofu binary + - `@click.group() terraform()` (lines 38-51) - Creates TFRunner with Terraform binary + - Both set up credential environment variables for AWS config and credentials files + +2. **Command Decoration**: + - `@pass_runner` - Injects TFRunner instance from Click context + - `@pass_paths` - Injects PathsHandler instance for file/directory management + +3. **Supported Commands**: + - `init` - Layer initialization with backend configuration injection + - `plan` - Execution plan generation with auto-discovered tfvars + - `apply` - Infrastructure changes with conditional tfvars injection + - `destroy` - Infrastructure destruction + - `output` - Output variable display + - `version` - Binary version display + - `format` - Code formatting (recursive by default) + - `force-unlock` - State file lock removal + - `validate` - Configuration validation + - `validate-layout` - Leverage convention validation + - `import` - Resource import + - `refresh-credentials` - AWS credential refresh + +4. 
**Multi-Layer Support**: + - `--layers` option for operating on multiple layers from account directory + - Layer validation and backend key management via `invoke_for_all_commands()` + - Automatic backend key generation based on layer path structure + +### Authentication Management + +#### SSO Authentication (`leverage/modules/auth.py`) + +**Token Validation** (`check_sso_token()` - lines 98-127): +- Validates SSO token existence in cache directory +- Checks token expiration against current time +- Provides clear error messages for missing or expired tokens +- Token file location: `~/.aws/sso/cache/` + +**Credential Refresh** (`refresh_layer_credentials()` - lines 130-204): +- Parses Terraform files to discover required AWS profiles +- Uses boto3 SSO client to retrieve temporary credentials +- Updates AWS config file with credential expiration timestamps +- Writes temporary credentials to AWS credentials file +- Implements 30-minute early renewal to avoid mid-operation expiration +- Supports cross-account profile resolution + +**Profile Discovery** (`get_profiles()` - lines 68-88): +- Scans `config.tf`, `locals.tf`, `runtime.tf` for profile references +- Extracts profile variables from Terraform configurations +- Reads backend profile from `backend.tfvars` + +### Configuration Management + +#### Automatic tfvars Discovery (`tf_default_args()` - lines 133-154): +- Discovers all `*.tfvars` files in `common/` directory +- Discovers all `*.tfvars` files in account-specific directory +- Returns as `-var-file=` arguments for Terraform/OpenTofu +- Used automatically in plan, destroy, validate, and conditionally in apply + +#### Backend Configuration: +- Backend config file path injected during `init` command (line 336) +- Automatic backend key generation in `invoke_for_all_commands()` (lines 291-294) +- Backend key validation in `validate_layout()` (lines 538-550) +- Support for legacy naming conventions (tf- vs terraform-, base- vs tools-) + +### Execution Flow + +**Standard Command Execution**: +1. User runs `leverage tofu|terraform [args]` +2. Click creates TFRunner instance with credential environment variables +3. Command function decorated with `@pass_runner` and `@pass_paths` +4. Authentication check via `check_sso_token(paths)` +5. Credential refresh via `refresh_layer_credentials(paths)` +6. TFRunner.run() executes binary with: + - Merged environment variables (instance + runtime) + - Specified working directory + - Auto-discovered tfvars (for applicable commands) + - User-provided arguments +7. Exit code returned to CLI + +**Multi-Layer Execution**: +1. User runs command with `--layers layer1,layer2` from account directory +2. `invoke_for_all_commands()` validates all layers +3. Backend keys generated/validated for each layer +4. 
### Benefits of Host-Based Execution

- **Performance**: No container startup overhead or image building
- **Flexibility**: Use any installed tool version (including custom builds)
- **IDE Integration**: Better debugging and tooling support
- **Simplicity**: Direct binary execution with standard environment variables
- **Plugin Compatibility**: Native Terraform/OpenTofu plugin caching
- **Development Speed**: Faster iteration during development

### Host Requirements

For full functionality, ensure the following binaries are installed and available in PATH:
- `terraform` or `tofu` (for Terraform/OpenTofu operations)
- `aws` CLI (used by the `leverage aws` and `leverage credentials` commands)

Optional binaries:
- `tfautomv` (for TFAutomv operations)
\ No newline at end of file

From 81581a9431bcc75b1cc4454ef4d9f0cb76c1fe35 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Fri, 24 Oct 2025 11:59:27 -0300
Subject: [PATCH 16/46] Formatting

---
 leverage/_backend_config.py | 8 +++++---
 leverage/leverage.py | 5 ++++-
 leverage/modules/aws.py | 6 +++---
 leverage/modules/tf.py | 13 +++++++------
 leverage/modules/tfrunner.py | 4 +---
 5 files changed, 20 insertions(+), 16 deletions(-)

diff --git a/leverage/_backend_config.py b/leverage/_backend_config.py
index 4f867c7..fe50368 100644
--- a/leverage/_backend_config.py
+++ b/leverage/_backend_config.py
@@ -51,13 +51,15 @@ def set_backend_key(config_file_path: Union[str, Path], key: str) -> None:
         and isinstance(config_tf["terraform"][0]["backend"], list)
         and "s3" in config_tf["terraform"][0]["backend"][0]
     ):
-        raise ExitError(1, f"Malformed config.tf: File must contain a terraform block with an S3 backend. "
+        raise ExitError(
+            1,
+            f"Malformed config.tf: File must contain a terraform block with an S3 backend. "
             f"Expected structure:\n"
             f"terraform {{\n"
             f' backend "s3" {{\n'
             f" # configuration\n"
             f" }}\n"
-            f"}}"
+            f"}}",
         )

     # Check if key already exists
@@ -157,7 +159,7 @@ def get_backend_key(config_file: Union[str, Path]) -> Optional[str]:
             1,
             f"Malformed [bold]config.tf[/bold] file. Missing backend block.\n"
             f"In some cases you may want to skip this check by using the --skip-validation flag, "
             f"e.g. 
the first time you initialize a tf-backend layer.", ) except lark.exceptions.UnexpectedInput as error: diff --git a/leverage/leverage.py b/leverage/leverage.py index eaf0242..4353ed2 100644 --- a/leverage/leverage.py +++ b/leverage/leverage.py @@ -9,8 +9,10 @@ from leverage import __version__, conf from leverage._internals import pass_state + # from leverage.modules.credentials import credentials from leverage.modules import aws, run, tofu, terraform + # from leverage.modules import run, project, tofu, terraform, tfautomv, kubectl, shell from leverage.path import NotARepositoryError, PathsHandler @@ -34,6 +36,7 @@ def leverage(context, state, verbose): return state.paths = PathsHandler(state.config) + # Add modules to leverage leverage.add_command(run) # leverage.add_command(project) @@ -44,4 +47,4 @@ def leverage(context, state, verbose): leverage.add_command(aws) # leverage.add_command(tfautomv) # leverage.add_command(kubectl) -# leverage.add_command(kubectl, name="kc") \ No newline at end of file +# leverage.add_command(kubectl, name="kc") diff --git a/leverage/modules/aws.py b/leverage/modules/aws.py index 2fc40ad..202d076 100644 --- a/leverage/modules/aws.py +++ b/leverage/modules/aws.py @@ -251,12 +251,12 @@ def login(awscli: Runner, paths: PathsHandler) -> None: logger.info( f"Attempting to automatically open the SSO authorization page in your default browser.\n" f"If the browser does not open or you wish to use a different device to authorize this request, open the following URL:\n" - f"\n{paths.common_conf.get("sso_start_url")}\n" + f"\n{paths.common_conf.get('sso_start_url')}\n" f"\nThen enter the code:\n" - f"\n{device_authorization["userCode"]}\n" + f"\n{device_authorization['userCode']}\n" ) webbrowser.open_new_tab( - f"{paths.common_conf.get("sso_start_url")}/#/device?user_code={device_authorization["userCode"]}" + f"{paths.common_conf.get('sso_start_url')}/#/device?user_code={device_authorization['userCode']}" ) logger.debug(f"Attempting to create authorization token...") diff --git a/leverage/modules/tf.py b/leverage/modules/tf.py index 979cd74..e716f19 100644 --- a/leverage/modules/tf.py +++ b/leverage/modules/tf.py @@ -144,12 +144,10 @@ def tf_default_args(paths: PathsHandler) -> tuple: parameters for OpenTofu/Terraform. """ common_config_files = tuple( - f"-var-file={common_file.as_posix()}" - for common_file in paths.common_config_dir.glob("*.tfvars") + f"-var-file={common_file.as_posix()}" for common_file in paths.common_config_dir.glob("*.tfvars") ) account_config_files = tuple( - f"-var-file={account_file.as_posix()}" - for account_file in paths.account_config_dir.glob("*.tfvars") + f"-var-file={account_file.as_posix()}" for account_file in paths.account_config_dir.glob("*.tfvars") ) return common_config_files + account_config_files @@ -314,10 +312,12 @@ def validate_for_all_commands(layer, skip_validation=False): """ logger.debug(f"Checking layer {layer}...") if not skip_validation and not _validate_layout(layer): - raise ExitError(1, + raise ExitError( + 1, "Layer configuration doesn't seem to be valid. 
Exiting.\n" "If you are sure your configuration is actually correct " - "you may skip this validation using the --skip-validation flag.") + "you may skip this validation using the --skip-validation flag.", + ) # ########################################################################### @@ -352,6 +352,7 @@ def _plan(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: P if exit_code := tf.run("plan", *tf_default_args(), *args, working_dir=working_dir): raise Exit(exit_code) + def has_a_plan_file(args: Sequence[str]) -> bool: """Determine whether the list of arguments has a plan file at the end. diff --git a/leverage/modules/tfrunner.py b/leverage/modules/tfrunner.py index b54cc73..6c02488 100644 --- a/leverage/modules/tfrunner.py +++ b/leverage/modules/tfrunner.py @@ -55,9 +55,7 @@ def run( """ return super().run(*args, env_vars=env_vars, working_dir=working_dir, interactive=interactive) - def exec( - self, *args: str, env_vars: Optional[Dict[str, str]] = None, working_dir: Optional[Path] = None - ): + def exec(self, *args: str, env_vars: Optional[Dict[str, str]] = None, working_dir: Optional[Path] = None): """ Execute the Terraform/OpenTofu binary in non-interactive mode (captures output). From c4d9e5480b67ae20cbe12772579213272bfee738 Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Tue, 11 Nov 2025 15:53:48 -0300 Subject: [PATCH 17/46] Replace RuntimeError in Runner with ExitError --- leverage/modules/runner.py | 4 ++-- tests/test_modules/test_runner.py | 35 +++++++++++++++++++++++++------ 2 files changed, 31 insertions(+), 8 deletions(-) diff --git a/leverage/modules/runner.py b/leverage/modules/runner.py index ce68139..4c9da9c 100644 --- a/leverage/modules/runner.py +++ b/leverage/modules/runner.py @@ -4,6 +4,7 @@ from pathlib import Path from typing import Dict, Optional, Tuple, Union from leverage import logger +from leverage._utils import ExitError class Runner: @@ -48,8 +49,7 @@ def _validate_binary(self): f"Please install {self.binary_input} and ensure it's in your PATH." 
) - logger.error(error_msg) - raise RuntimeError(error_msg) + raise ExitError(1, error_msg) def _validate_version(self): """ diff --git a/tests/test_modules/test_runner.py b/tests/test_modules/test_runner.py index 1e8499b..b588e93 100644 --- a/tests/test_modules/test_runner.py +++ b/tests/test_modules/test_runner.py @@ -5,6 +5,7 @@ import pytest from leverage.modules.runner import Runner +from leverage._utils import ExitError def test_init_with_valid_binary_in_path(mocker): @@ -25,33 +26,55 @@ def test_init_with_absolute_path_existing_file(tmp_path): assert runner.binary_path == str(binary_file) -def test_init_with_absolute_path_non_existing_file(tmp_path): +def test_init_with_absolute_path_non_existing_file(tmp_path, mocker): binary_file = tmp_path / "non_existing_binary" + mock_logger = mocker.patch("leverage._utils.logger") - with pytest.raises(RuntimeError, match="Binary .* not found on system"): + with pytest.raises(ExitError): Runner(binary_file) + mock_logger.error.assert_called_once() + error_msg = mock_logger.error.call_args[0][0] + assert "not found on system" in error_msg + assert str(binary_file) in error_msg + def test_init_with_binary_not_in_path(mocker): + mock_logger = mocker.patch("leverage._utils.logger") mocker.patch("shutil.which", return_value=None) - with pytest.raises(RuntimeError, match="Binary 'nonexistent' not found on system"): + + with pytest.raises(ExitError): Runner("nonexistent") + mock_logger.error.assert_called_once() + error_msg = mock_logger.error.call_args[0][0] + assert "Binary 'nonexistent' not found on system" in error_msg + assert "Please install nonexistent" in error_msg + def test_init_with_custom_error_message(mocker): custom_error = "Custom error message for missing binary" + mock_logger = mocker.patch("leverage._utils.logger") mocker.patch("shutil.which", return_value=None) - with pytest.raises(RuntimeError, match=custom_error): + + with pytest.raises(ExitError): Runner("nonexistent", error_message=custom_error) + mock_logger.error.assert_called_once() + error_msg = mock_logger.error.call_args[0][0] + assert error_msg == custom_error + def test_init_logs_error_on_missing_binary(mocker): - mock_logger = mocker.patch("leverage.modules.runner.logger") + mock_logger = mocker.patch("leverage._utils.logger") mocker.patch("shutil.which", return_value=None) - with pytest.raises(RuntimeError): + + with pytest.raises(ExitError): Runner("nonexistent") mock_logger.error.assert_called_once() + error_msg = mock_logger.error.call_args[0][0] + assert "Binary 'nonexistent' not found on system" in error_msg def test_validate_version_base_implementation_does_nothing(mocker): From 998c200355412ad102e1abb8bf943d5ba8f1e8a0 Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Tue, 11 Nov 2025 15:54:54 -0300 Subject: [PATCH 18/46] Fix faulty regex for s3 backend key --- leverage/modules/tf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/leverage/modules/tf.py b/leverage/modules/tf.py index e716f19..a909aec 100644 --- a/leverage/modules/tf.py +++ b/leverage/modules/tf.py @@ -13,7 +13,7 @@ from leverage._backend_config import get_backend_key, set_backend_key from leverage.modules.auth import refresh_layer_credentials, check_sso_token -REGION = rf"(global|([a-z]{2}(-gov)?)-(central|(north|south)?(east|west)?)-\d)" +REGION = r"(global|([a-z]{2}(-gov)?)-(central|(north|south)?(east|west)?)-\d)" # ########################################################################### From bf3d5d55027071c48af3c4190da3adee41432f8f Mon Sep 17 00:00:00 2001 From: Angelo 
Fenoglio Date: Tue, 11 Nov 2025 15:55:31 -0300 Subject: [PATCH 19/46] Remove default args for force-unlock command --- leverage/modules/tf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/leverage/modules/tf.py b/leverage/modules/tf.py index a909aec..cdc109c 100644 --- a/leverage/modules/tf.py +++ b/leverage/modules/tf.py @@ -176,7 +176,7 @@ def force_unlock(tf, paths: PathsHandler, lock_id): """Force unlock the state file.""" check_sso_token(paths) refresh_layer_credentials(paths) - if exit_code := tf.run("force-unlock", *tf_default_args(), lock_id): + if exit_code := tf.run("force-unlock", lock_id): raise Exit(exit_code) From 4b0d0e69e2761014dbfd0972f2e7ed8d57b32d0d Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Tue, 11 Nov 2025 15:55:59 -0300 Subject: [PATCH 20/46] Catch previously uncaught errors on sso client get_role_credentials --- leverage/modules/auth.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/leverage/modules/auth.py b/leverage/modules/auth.py index d9eefb8..1d3079c 100644 --- a/leverage/modules/auth.py +++ b/leverage/modules/auth.py @@ -177,6 +177,8 @@ def refresh_layer_credentials(paths: PathsHandler): " in this account.\nPlease check with your administrator or try" " running [bold]leverage aws configure sso[/bold].", ) + else: + raise ExitError(50, f"Error retrieving role credentials: {error}") # update expiration on aws//config logger.info(f"Writing {layer_profile} profile") From 38d0e5fdebdcbafe4fa07e396ef7fea73c904b3f Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Tue, 11 Nov 2025 15:56:11 -0300 Subject: [PATCH 21/46] Remove pass_container decorator --- leverage/_internals.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/leverage/_internals.py b/leverage/_internals.py index b6e0700..037d4ea 100644 --- a/leverage/_internals.py +++ b/leverage/_internals.py @@ -43,17 +43,6 @@ def verbosity(self, verbose): pass_state = click.make_pass_decorator(State, ensure=True) -def pass_container(command): - """Decorator to pass the current container to the command.""" - - @wraps(command) - def new_command(*args, **kwargs): - ctx = click.get_current_context() - - return command(ctx.obj.container, *args, **kwargs) - - return new_command - def pass_runner(command): """Decorator to pass the current runner (Terraform/OpenTofu runner) to the command.""" From 4d4e3d3a355143d742d135e19b4288a5973bce29 Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Fri, 28 Nov 2025 14:32:16 -0300 Subject: [PATCH 22/46] Prevent the error message from printing when the exception is captured --- leverage/_utils.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/leverage/_utils.py b/leverage/_utils.py index 4dd6b1e..3f5f8b5 100644 --- a/leverage/_utils.py +++ b/leverage/_utils.py @@ -8,7 +8,7 @@ import hcl2 import lark -from click.exceptions import Exit +from click.exceptions import ClickException from configupdater import ConfigUpdater from docker import DockerClient from docker.models.containers import Container @@ -106,15 +106,17 @@ def __exit__(self, *args, **kwargs): ) -class ExitError(Exit): +class ExitError(ClickException): """ Raise an Exit exception but also print an error description. 
""" def __init__(self, exit_code: int, error_description: str): - logger.error(error_description) - super(ExitError, self).__init__(exit_code) + self.exit_code = exit_code + super(ExitError, self).__init__(message=error_description) + def show(self): + logger.error(self.message) def parse_tf_file(file: Path): """ From 8d8a87f0c1d155cca38bdd3b53427333c4559bbe Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Fri, 28 Nov 2025 14:32:43 -0300 Subject: [PATCH 23/46] Drop unused import --- leverage/modules/auth.py | 1 - 1 file changed, 1 deletion(-) diff --git a/leverage/modules/auth.py b/leverage/modules/auth.py index 1d3079c..23964ff 100644 --- a/leverage/modules/auth.py +++ b/leverage/modules/auth.py @@ -2,7 +2,6 @@ import json from pathlib import Path from datetime import datetime -from dateutil.tz import tzutc from configparser import NoSectionError, NoOptionError import boto3 From 8aea1a265bc3a4267e5febbe7b7855b57ed786fe Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Fri, 28 Nov 2025 14:36:53 -0300 Subject: [PATCH 24/46] Remove container use in credentials module, add again to commands --- leverage/leverage.py | 11 +-- leverage/modules/__init__.py | 4 +- leverage/modules/credentials.py | 147 ++++++++++++++++++-------------- 3 files changed, 85 insertions(+), 77 deletions(-) diff --git a/leverage/leverage.py b/leverage/leverage.py index 4353ed2..d7e9ee1 100644 --- a/leverage/leverage.py +++ b/leverage/leverage.py @@ -2,19 +2,12 @@ Binbash Leverage Command-line tool. """ -import rich -from packaging.version import Version - import click from leverage import __version__, conf from leverage._internals import pass_state - -# from leverage.modules.credentials import credentials -from leverage.modules import aws, run, tofu, terraform - -# from leverage.modules import run, project, tofu, terraform, tfautomv, kubectl, shell from leverage.path import NotARepositoryError, PathsHandler +from leverage.modules import aws, credentials, run, tofu, terraform @click.group(invoke_without_command=True) @@ -43,7 +36,7 @@ def leverage(context, state, verbose): leverage.add_command(tofu) leverage.add_command(tofu, name="tf") leverage.add_command(terraform) -# leverage.add_command(credentials) +leverage.add_command(credentials) leverage.add_command(aws) # leverage.add_command(tfautomv) # leverage.add_command(kubectl) diff --git a/leverage/modules/__init__.py b/leverage/modules/__init__.py index d925535..0364621 100644 --- a/leverage/modules/__init__.py +++ b/leverage/modules/__init__.py @@ -2,8 +2,6 @@ # from .project import project from .tf import tofu, terraform -from .aws import aws - -# from .tfautomv import tfautomv +from .credentials import credentials # from .kubectl import kubectl # from .shell import shell diff --git a/leverage/modules/credentials.py b/leverage/modules/credentials.py index 9f55617..9b53bc5 100644 --- a/leverage/modules/credentials.py +++ b/leverage/modules/credentials.py @@ -5,23 +5,26 @@ import csv import json import re -from functools import wraps from pathlib import Path +from functools import wraps +from typing import Optional, Union import click import questionary -from click.exceptions import Exit -from questionary import Choice from ruamel.yaml import YAML +from questionary import Choice +from click.exceptions import Exit from leverage import logger -from leverage._internals import pass_state from leverage._utils import ExitError -from leverage.container import AWSCLIContainer -from leverage.container import get_docker_client -from leverage.path import NotARepositoryError 
-from leverage.path import get_global_config_path -from leverage.path import get_project_root_or_current_dir_path +from leverage.modules.runner import Runner +from leverage._internals import State, pass_runner, pass_paths, pass_state +from leverage.path import ( + NotARepositoryError, + PathsHandler, + get_global_config_path, + get_project_root_or_current_dir_path, +) # Regexes for general validation PROJECT_SHORT = r"[a-z]{2,4}" @@ -212,9 +215,6 @@ def _ask_for_credentials(): return list(credentials.values()) -AWSCLI = None - - def _load_project_yaml(): """Load project.yaml file contents.""" if not PROJECT_CONFIG.exists(): @@ -252,7 +252,7 @@ def credentials(state): raise an exception If we reached the only common.tfvars scenario, we have no project name nor TF_IMAGE_TAG. - So the best chance is to read the common.tfvars directly without a conatiner, e.g. with sed or grep + So the best chance is to read the common.tfvars directly without a container, e.g. with sed or grep """ project_config = _load_project_yaml() build_env = Path(f"{PROJECT_ROOT}/build.env") @@ -287,13 +287,23 @@ def credentials(state): else: logger.info("Reading info from build.env") - state.container = AWSCLIContainer(get_docker_client()) - state.container.ensure_image() - global AWSCLI - AWSCLI = state.container + credentials_env_vars = { + "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file), + "AWS_CONFIG_FILE": str(state.paths.aws_config_file), + } + state.runner = Runner( + binary="aws", + error_message=( + f"AWS CLI not found on system. " + f"Please install it following the instructions at: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html" + ), + env_vars=credentials_env_vars, + ) -def _load_configs_for_credentials(): +@pass_state +@pass_paths +def _load_configs_for_credentials(paths: PathsHandler, state: State): """Load all required values to configure credentials. Raises: @@ -306,11 +316,11 @@ def _load_configs_for_credentials(): project_config = _load_project_yaml() logger.info("Loading project environment configuration file.") - env_config = AWSCLI.env_conf + env_config = state.config terraform_config = {} logger.info("Loading tf common configuration.") - terraform_config = AWSCLI.paths.common_conf + terraform_config = paths.common_conf config_values = {} config_values["short_name"] = ( @@ -321,7 +331,7 @@ def _load_configs_for_credentials(): ) config_values["project_name"] = project_config.get("project_name") or terraform_config.get("project_long") - # region_primary was added in refarch v1 + # region_primary was added in ref-arch v1 # for v2 it was replaced by region at project level region_primary = "region_primary" if not "region_primary" in project_config and not "region_primary" in terraform_config: @@ -355,7 +365,8 @@ def _load_configs_for_credentials(): return config_values -def _profile_is_configured(profile): +@pass_runner +def _profile_is_configured(awscli: Runner, profile: str): """Check if given profile is already configured. Args: @@ -364,7 +375,7 @@ def _profile_is_configured(profile): Returns: bool: Whether the profile was already configured or not. 
""" - exit_code, _ = AWSCLI.exec("configure list", profile) + exit_code, _ = awscli.exec("configure", "list", "--profile", profile) return not exit_code @@ -384,7 +395,7 @@ def _extract_credentials(file): Returns: str, str: Key ID, Secret Key """ - with open(file) as access_keys_file: + with open(file, encoding="utf-8-sig") as access_keys_file: try: keys = next(csv.DictReader(access_keys_file)) @@ -407,19 +418,15 @@ def _extract_credentials(file): return access_key_id, secret_access_key -def _backup_file(filename): - """Create backup of a credential file using docker image. - - Args: - filename (str): File to backup, either `config` or `credentials` - """ - credential_files_env_vars = {"config": "AWS_CONFIG_FILE", "credentials": "AWS_SHARED_CREDENTIALS_FILE"} - env_var = credential_files_env_vars.get(filename) - - AWSCLI.system_exec(f"sh -c 'cp ${env_var} \"${{{env_var}}}.bkp\"'") - - -def configure_credentials(profile, file=None, make_backup=False): +@pass_paths +@pass_runner +def configure_credentials( + awscli: Runner, + paths: PathsHandler, + profile: str, + file: Optional[Union[Path, str]] = None, + make_backup: bool = False, +): """Set credentials in `credentials` file for AWS cli. Make backup if required. Args: @@ -437,17 +444,18 @@ def configure_credentials(profile, file=None, make_backup=False): if make_backup: logger.info("Backing up credentials file.") - _backup_file("credentials") + Path(paths.aws_credentials_file).copy(Path(paths.aws_credentials_file).with_suffix(".bkp")) values = {"aws_access_key_id": key_id, "aws_secret_access_key": secret_key} for key, value in values.items(): - exit_code, output = AWSCLI.exec(f"configure set {key} {value}", profile) + exit_code, output = awscli.exec("configure", "set", key, value, "--profile", profile) if exit_code: raise ExitError(exit_code, f"AWS CLI error: {output}") -def _credentials_are_valid(profile): +@pass_runner +def _credentials_are_valid(awscli: Runner, profile: str): """Check if credentials for given profile are valid. If credentials are invalid, the command output will be as follows: Exit code: @@ -462,12 +470,13 @@ def _credentials_are_valid(profile): Returns: bool: Whether the credentials are valid. """ - error_code, output = AWSCLI.exec("sts get-caller-identity", profile) + error_code, output = awscli.exec("sts", "get-caller-identity", "--profile", profile) return error_code != 255 and "InvalidClientTokenId" not in output -def _get_management_account_id(profile): +@pass_runner +def _get_management_account_id(awscli: Runner, profile: str): """Get management account id through AWS cli. Args: @@ -476,7 +485,7 @@ def _get_management_account_id(profile): Returns: str: Management account id. """ - exit_code, caller_identity = AWSCLI.exec("--output json sts get-caller-identity", profile) + exit_code, caller_identity = awscli.exec("sts", "get-caller-identity", "--output", "json", "--profile", profile) if exit_code: raise ExitError(exit_code, f"AWS CLI error: {caller_identity}") @@ -484,7 +493,8 @@ def _get_management_account_id(profile): return caller_identity["Account"] -def _get_organization_accounts(profile, project_name): +@pass_runner +def _get_organization_accounts(awscli: Runner, profile: str, project_name: str): """Get organization accounts names and ids. Removing the prefixed project name from the account names. Args: @@ -494,7 +504,9 @@ def _get_organization_accounts(profile, project_name): Returns: dict: Mapping of organization accounts names to ids. 
""" - exit_code, organization_accounts = AWSCLI.exec("--output json organizations list-accounts", profile) + exit_code, organization_accounts = awscli.exec( + "organizations", "list-accounts", "--output", "json", "--profile", profile + ) if exit_code: return {} @@ -511,7 +523,8 @@ def _get_organization_accounts(profile, project_name): return accounts -def _get_mfa_serial(profile): +@pass_runner +def _get_mfa_serial(awscli: Runner, profile: str): """Get MFA serial for the given profile credentials. Args: @@ -520,7 +533,7 @@ def _get_mfa_serial(profile): Returns: str: MFA device serial. """ - exit_code, mfa_devices = AWSCLI.exec("--output json iam list-mfa-devices", profile) + exit_code, mfa_devices = awscli.exec("iam", "list-mfa-devices", "--output", "json", "--profile", profile) if exit_code: raise ExitError(exit_code, f"AWS CLI error: {mfa_devices}") mfa_devices = json.loads(mfa_devices) @@ -538,7 +551,8 @@ def _get_mfa_serial(profile): ) -def configure_profile(profile, values): +@pass_runner +def configure_profile(awscli: Runner, profile: str, values: dict): """Set profile in `config` file for AWS cli. Args: @@ -547,12 +561,20 @@ def configure_profile(profile, values): """ logger.info(f"\tConfiguring profile [bold]{profile}[/bold]") for key, value in values.items(): - exit_code, output = AWSCLI.exec(f"configure set {key} {value}", profile) + exit_code, output = awscli.exec("configure", "set", key, value, "--profile", profile) if exit_code: raise ExitError(exit_code, f"AWS CLI error: {output}") -def configure_accounts_profiles(profile, region, organization_accounts, project_accounts, fetch_mfa_device): +@pass_paths +def configure_accounts_profiles( + paths: PathsHandler, + profile: str, + region: str, + organization_accounts: dict, + project_accounts: list, + fetch_mfa_device: bool, +): """Set up the required profiles for all accounts to be used with AWS cli. Backup previous profiles. Args: @@ -595,16 +617,16 @@ def configure_accounts_profiles(profile, region, organization_accounts, project_ account_profiles[f"{short_name}-{account_name}-{PROFILES[_type]['profile_role']}"] = account_profile logger.info("Backing up account profiles file.") - _backup_file("config") + Path(paths.aws_config_file).copy(Path(paths.aws_config_file).with_suffix(".bkp")) for profile_identifier, profile_values in account_profiles.items(): configure_profile(profile_identifier, profile_values) -def _update_account_ids(config): +@pass_paths +def _update_account_ids(paths: PathsHandler, config: dict): """Update accounts ids in global configuration file. - It updates both `[account name]_account_id` and `accounts` variables. - This last one maintaning the format: + It updates `accounts` variables maintaining the format: ``` account = { account_name = { @@ -617,24 +639,15 @@ def _update_account_ids(config): Args: config (dict): Project configuration values. 
""" - if not PROJECT_COMMON_TFVARS.exists(): + if not paths.common_tfvars.exists(): return - container_base_dir = f"/{config['project_name']}/config" - container_common_tfvars_file = f"{container_base_dir}/{PROJECT_COMMON_TFVARS_FILE}" - accs = [] for account in config["organization"]["accounts"]: acc_name, acc_email, acc_id = account.values() acc = [f'\n email = "{acc_email}"'] if acc_id: - AWSCLI.system_exec( - "hcledit " - f"-f {container_common_tfvars_file} -u" - f' attribute set {acc_name}_account_id "\\"{acc_id}\\""' - ) - acc.append(f' id = "{acc_id}"') acc = ",\n".join(acc) @@ -643,7 +656,11 @@ def _update_account_ids(config): accs = ",".join(accs) accs = f"{{{accs}\n}}" - AWSCLI.system_exec("hcledit " f"-f {container_common_tfvars_file} -u" f" attribute set accounts '{accs}'") + common_tfvars = paths.common_tfvars.read_text() + common_tfvars = re.sub( + r"accounts\s*=\s*\{.*?\}(?=\s*(?:\n|$))", f"accounts = {accs}", common_tfvars, flags=re.DOTALL + ) + paths.common_tfvars.write_text(common_tfvars) def mutually_exclusive(context, param, value): From a0da3dd8dff0cdedac355601abcbd57301bc591d Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Fri, 28 Nov 2025 14:38:55 -0300 Subject: [PATCH 25/46] Add authentication process to aws command --- leverage/modules/__init__.py | 1 + leverage/modules/aws.py | 21 ++++++++++++++++++--- leverage/modules/utils.py | 10 ++++++---- 3 files changed, 25 insertions(+), 7 deletions(-) diff --git a/leverage/modules/__init__.py b/leverage/modules/__init__.py index 0364621..7c3aa9f 100644 --- a/leverage/modules/__init__.py +++ b/leverage/modules/__init__.py @@ -1,6 +1,7 @@ from .run import run # from .project import project +from .aws import aws from .tf import tofu, terraform from .credentials import credentials # from .kubectl import kubectl diff --git a/leverage/modules/aws.py b/leverage/modules/aws.py index 202d076..2ffa13a 100644 --- a/leverage/modules/aws.py +++ b/leverage/modules/aws.py @@ -13,9 +13,9 @@ from leverage.path import PathsHandler from leverage.modules.runner import Runner from leverage.modules.utils import _handle_subcommand -from leverage.modules.auth import get_sso_access_token from leverage._utils import get_or_create_section, ExitError from leverage._internals import pass_state, pass_runner, pass_paths +from leverage.modules.auth import get_sso_access_token, check_sso_token, refresh_layer_credentials CONTEXT_SETTINGS = {"ignore_unknown_options": True} @@ -24,6 +24,21 @@ AWS_SSO_LOGIN_URL = "{sso_url}/#/device?user_code={user_code}" +@pass_paths +def refresh_aws_credentials(paths: PathsHandler) -> None: + """ + Refresh the AWS credentials for the current project. + """ + check_sso_token(paths) + + try: # if we are not in a layer, we don't need to refresh the credentials + paths.check_for_layer_location() + except ExitError: + return + + refresh_layer_credentials(paths) + + def get_account_roles(sso_client: Any, access_token: str) -> Dict[str, Dict[str, str]]: """ Fetch the accounts and roles from the user. 
@@ -100,7 +115,7 @@ def configure_sso_profiles(paths: PathsHandler) -> None: @pass_state @click.pass_context def aws(context: click.Context, state: Any, args: Tuple[str, ...]) -> None: - """Run AWS CLI commands in a custom containerized environment.""" + """Run AWS CLI commands in the context of the current project.""" credentials_env_vars = { "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file), @@ -115,7 +130,7 @@ def aws(context: click.Context, state: Any, args: Tuple[str, ...]) -> None: env_vars=credentials_env_vars, ) - _handle_subcommand(context=context, runner=state.runner, args=args) + _handle_subcommand(context=context, runner=state.runner, args=args, pre_invocation_callback=refresh_aws_credentials) @aws.group(invoke_without_command=True, add_help_option=False, context_settings=CONTEXT_SETTINGS) diff --git a/leverage/modules/utils.py b/leverage/modules/utils.py index ded87cd..fb9e39e 100644 --- a/leverage/modules/utils.py +++ b/leverage/modules/utils.py @@ -1,4 +1,4 @@ -from typing import Optional, Tuple +from typing import Optional, Tuple, Callable from click.exceptions import Exit from click.core import Context @@ -7,7 +7,7 @@ def _handle_subcommand( - context: Context, runner: Runner, args: Tuple[str, ...], caller_name: Optional[str] = None + context: Context, runner: Runner, args: Tuple[str, ...], caller_name: Optional[str] = None, pre_invocation_callback: Optional[Callable] = None ) -> None: """Decide if command corresponds to a wrapped one or not and run accordingly. @@ -16,7 +16,7 @@ def _handle_subcommand( runner (Runner): Runner where commands will be executed args (tuple(str)): Arguments received by Leverage caller_name (str, optional): Calling command. Defaults to None. - + pre_invocation_callback (Callable, optional): Callback to be called before the invocation. Defaults to None. 
Raises: Exit: Whenever runner execution returns a non-zero exit code """ @@ -28,7 +28,9 @@ def _handle_subcommand( if subcommand is None: # Run the command directly - if exit_code := runner.run(args): + if pre_invocation_callback: + pre_invocation_callback() + if exit_code := runner.run(*args): raise Exit(exit_code) else: From 4c7501c72df2b306558c967131ce07d22a89730e Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Fri, 28 Nov 2025 14:41:06 -0300 Subject: [PATCH 26/46] Remove docker usage from project command and re-add to leverage --- leverage/leverage.py | 4 ++-- leverage/modules/__init__.py | 3 +-- leverage/modules/project.py | 26 ++++++++++++++------------ 3 files changed, 17 insertions(+), 16 deletions(-) diff --git a/leverage/leverage.py b/leverage/leverage.py index d7e9ee1..f288208 100644 --- a/leverage/leverage.py +++ b/leverage/leverage.py @@ -7,7 +7,7 @@ from leverage import __version__, conf from leverage._internals import pass_state from leverage.path import NotARepositoryError, PathsHandler -from leverage.modules import aws, credentials, run, tofu, terraform +from leverage.modules import aws, credentials, run, project, tofu, terraform @click.group(invoke_without_command=True) @@ -32,7 +32,7 @@ def leverage(context, state, verbose): # Add modules to leverage leverage.add_command(run) -# leverage.add_command(project) +leverage.add_command(project) leverage.add_command(tofu) leverage.add_command(tofu, name="tf") leverage.add_command(terraform) diff --git a/leverage/modules/__init__.py b/leverage/modules/__init__.py index 7c3aa9f..06d2c61 100644 --- a/leverage/modules/__init__.py +++ b/leverage/modules/__init__.py @@ -1,6 +1,5 @@ from .run import run - -# from .project import project +from .project import project from .aws import aws from .tf import tofu, terraform from .credentials import credentials diff --git a/leverage/modules/project.py b/leverage/modules/project.py index aee7bfb..3f5973b 100644 --- a/leverage/modules/project.py +++ b/leverage/modules/project.py @@ -14,14 +14,11 @@ from jinja2 import Environment from jinja2 import FileSystemLoader -from leverage import __toolbox_version__ from leverage import logger -from leverage.logger import console -from leverage.path import get_root_path, get_project_root_or_current_dir_path +from leverage.path import get_project_root_or_current_dir_path from leverage.path import NotARepositoryError +from leverage.modules.tfrunner import TFRunner from leverage._utils import git, ExitError -from leverage.container import get_docker_client -from leverage.container import TFContainer # Leverage related base definitions LEVERAGE_DIR = Path.home() / ".leverage" @@ -204,7 +201,7 @@ def _render_templates(template_files, config, source=TEMPLATE_DIR, destination=P template = JINJA_ENV.get_template(template_location.as_posix()) if "terraform_image_tag" not in config: - config["terraform_image_tag"] = __toolbox_version__ + config["terraform_image_tag"] = "" rendered_template = template.render(config) @@ -326,12 +323,17 @@ def create(): # Format the code correctly logger.info("Reformatting configuration to the standard style.") - - terraform = TFContainer(get_docker_client()) - terraform.ensure_image() - terraform.disable_authentication() - with console.status("Formatting..."): - terraform.exec("fmt", "-recursive") + # TODO: Get rid of this ugly workaround + try: + TFRunner(binary="tofu").run("fmt", "-recursive") + except ExitError: + try: + TFRunner(binary="terraform").run("fmt", "-recursive") + except ExitError: + logger.warning( + "Could not 
reformat configuration to the standard style."
+                "\nPlease run `terraform fmt -recursive` or `tofu fmt -recursive` manually."
+            )

     logger.info("Finished setting up project.")

From f87e8ece96e5f6185802eebbce0167d4c5af8c2a Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Fri, 28 Nov 2025 14:43:06 -0300
Subject: [PATCH 27/46] Remove container usage from tfautomv command and re-add to leverage

---
 leverage/leverage.py | 4 +--
 leverage/modules/__init__.py | 1 +
 leverage/modules/tfautomv.py | 65 +++++++++++++++++++-----------------
 3 files changed, 38 insertions(+), 32 deletions(-)

diff --git a/leverage/leverage.py b/leverage/leverage.py
index f288208..2b129a8 100644
--- a/leverage/leverage.py
+++ b/leverage/leverage.py
@@ -7,7 +7,7 @@
 from leverage import __version__, conf
 from leverage._internals import pass_state
 from leverage.path import NotARepositoryError, PathsHandler
-from leverage.modules import aws, credentials, run, project, tofu, terraform
+from leverage.modules import aws, credentials, run, project, tofu, terraform, tfautomv


 @click.group(invoke_without_command=True)
@@ -38,6 +38,6 @@ def leverage(context, state, verbose):
 leverage.add_command(terraform)
 leverage.add_command(credentials)
 leverage.add_command(aws)
-# leverage.add_command(tfautomv)
+leverage.add_command(tfautomv)
 # leverage.add_command(kubectl)
 # leverage.add_command(kubectl, name="kc")
diff --git a/leverage/modules/__init__.py b/leverage/modules/__init__.py
index 06d2c61..383ca09 100644
--- a/leverage/modules/__init__.py
+++ b/leverage/modules/__init__.py
@@ -3,5 +3,6 @@
 from .aws import aws
 from .tf import tofu, terraform
 from .credentials import credentials
+from .tfautomv import tfautomv
 # from .kubectl import kubectl
 # from .shell import shell
diff --git a/leverage/modules/tfautomv.py b/leverage/modules/tfautomv.py
index c01ccd8..88a1cf4 100644
--- a/leverage/modules/tfautomv.py
+++ b/leverage/modules/tfautomv.py
@@ -2,38 +2,43 @@
 from click.exceptions import Exit

 from leverage._internals import pass_state
-from leverage._internals import pass_container
-from leverage.container import get_docker_client
-from leverage.container import TFautomvContainer
+from leverage.modules.runner import Runner
+from leverage.modules.tf import tf_default_args
+from leverage.modules.auth import check_sso_token, refresh_layer_credentials

-REGION = (
-    r"global|(?:[a-z]{2}-(?:gov-)?"
-    r"(?:central|north|south|east|west|northeast|northwest|southeast|southwest|secret|topsecret)-[1-4])"
-)
-
-@click.group()
+@click.command()
+@click.argument("args", nargs=-1)
 @pass_state
-def tfautomv(state):
-    """Run TFAutomv commands in a custom containerized environment that provides extra functionality when interacting
-    with your cloud provider such as handling multi factor authentication for you.
-    All terraform subcommands that receive extra args will pass the given strings as is to their corresponding Terraform
-    counterparts in the container. 
For example as in `leverage terraform apply -auto-approve` or
-    `leverage terraform init -reconfigure`
-    """
-    state.container = TFautomvContainer(get_docker_client())
-    state.container.ensure_image()
-
-
-CONTEXT_SETTINGS = {"ignore_unknown_options": True}
-
-
-@tfautomv.command(context_settings=CONTEXT_SETTINGS)
-@click.argument("args", nargs=-1)
-@pass_container
-def run(tf, args):
-    """Generate a move tf file for this layer."""
-    exit_code = tf.start_in_layer(*args)
-
-    if exit_code:
+def tfautomv(state, args):
+    """Run TFAutomv commands in the context of the current project.
+    """
+    tf_default_args_string = " ".join(tf_default_args())
+    credentials_env_vars = {
+        "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file),
+        "AWS_CONFIG_FILE": str(state.paths.aws_config_file),
+        "TF_CLI_ARGS_init": tf_default_args_string,
+        "TF_CLI_ARGS_plan": tf_default_args_string,
+    }
+    state.runner = Runner(
+        binary="tfautomv",
+        error_message=(
+            f"TFAutomv not found on system. "
+            f"Please install it following the instructions at: https://github.com/busser/tfautomv?tab=readme-ov-file#installation"
+        ),
+        env_vars=credentials_env_vars,
+    )
+
+    tf_binary = "tofu" if not state.paths.tf_binary else state.paths.tf_binary
+    filtered_args = (
+        arg
+        for index, arg in list(enumerate(args))
+        if not str(arg).startswith("--terraform-bin") and (index == 0 or args[index - 1] != "--terraform-bin")
+    )
+    tfautomv_args = (*filtered_args, f"--terraform-bin={tf_binary}")
+
+    check_sso_token(state.paths)
+    refresh_layer_credentials(state.paths)
+
+    if exit_code := state.runner.run(*tfautomv_args):
         raise Exit(exit_code)

From 87b8839372e3415c135d64b50d6507f50e896760 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Fri, 28 Nov 2025 14:44:33 -0300
Subject: [PATCH 28/46] Allow for TF binary to be specified from TF_BINARY var in build.env

---
 leverage/modules/tf.py | 4 +-
 leverage/modules/tfrunner.py | 24 +++-
 leverage/path.py | 10 ++
 tests/test_modules/test_tfrunner.py | 204 ++++++++++++++++++++--------
 4 files changed, 178 insertions(+), 64 deletions(-)

diff --git a/leverage/modules/tf.py b/leverage/modules/tf.py
index cdc109c..c380fe3 100644
--- a/leverage/modules/tf.py
+++ b/leverage/modules/tf.py
@@ -32,7 +32,7 @@ def tofu(state):
         "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file),
         "AWS_CONFIG_FILE": str(state.paths.aws_config_file),
     }
-    state.runner = TFRunner(env_vars=credentials_env_vars)
+    state.runner = TFRunner(binary=state.paths.tf_binary, env_vars=credentials_env_vars)


 @click.group()
@@ -48,7 +48,7 @@ def terraform(state):
         "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file),
         "AWS_CONFIG_FILE": str(state.paths.aws_config_file),
     }
-    state.runner = TFRunner(terraform=True, env_vars=credentials_env_vars)
+    state.runner = TFRunner(binary=state.paths.tf_binary, terraform=True, env_vars=credentials_env_vars)


 CONTEXT_SETTINGS = {"ignore_unknown_options": True}
diff --git a/leverage/modules/tfrunner.py b/leverage/modules/tfrunner.py
index 6c02488..351a43a 100644
--- a/leverage/modules/tfrunner.py
+++ b/leverage/modules/tfrunner.py
@@ -1,6 +1,8 @@
+import subprocess
 from pathlib import Path
 from typing import Dict, Optional

+from leverage._utils import ExitError
 from leverage.modules.runner import Runner

@@ -10,22 +12,24 @@ class TFRunner(Runner):
     TERRAFORM_INSTALL_URL = "https://developer.hashicorp.com/terraform/install"
     OPENTOFU_INSTALL_URL = "https://opentofu.org/docs/intro/install/"

-    def __init__(self, terraform: bool = False, env_vars: Optional[Dict[str, str]] = None):
+    def 
__init__(self, binary: str, terraform: bool = False, env_vars: Optional[Dict[str, str]] = None): """ Initialize TFRunner for either Terraform or OpenTofu. Args: - terraform: If True, use Terraform. If False, use OpenTofu (default). + terraform: If True, treat the binary as Terraform. If False, as OpenTofu (default). env_vars: Environment variables to set for all executions """ - if terraform: - binary = "terraform" + self.__terraform = terraform + + if not binary: + binary = "tofu" if not self.__terraform else "terraform" + if self.__terraform: error_message = ( f"Terraform binary not found on system. " f"Please install Terraform following the instructions at: {self.TERRAFORM_INSTALL_URL}" ) else: - binary = "tofu" error_message = ( f"OpenTofu binary not found on system. " f"Please install OpenTofu following the instructions at: {self.OPENTOFU_INSTALL_URL}" @@ -33,6 +37,16 @@ def __init__(self, terraform: bool = False, env_vars: Optional[Dict[str, str]] = super().__init__(binary=binary, error_message=error_message, env_vars=env_vars) + def _validate_binary(self): + super()._validate_binary() + + binary_version_stdout = subprocess.run([self.binary_path, "--version"], capture_output=True, text=True).stdout + + if self.__terraform and "Terraform" not in binary_version_stdout: + raise ExitError(1, "The provided binary does not seem to be Terraform.") + elif not self.__terraform and "OpenTofu" not in binary_version_stdout: + raise ExitError(1, "The provided binary does not seem to be OpenTofu.") + def run( self, *args: str, diff --git a/leverage/path.py b/leverage/path.py index 8799cbe..b5cd0bd 100644 --- a/leverage/path.py +++ b/leverage/path.py @@ -167,6 +167,16 @@ def __init__(self, env_conf: dict): self.aws_credentials_dir.mkdir(parents=True) self.sso_cache = self.aws_credentials_dir / "sso" / "cache" + # If not empty make the binary path absolute + if tf_binary := env_conf.get("TF_BINARY", ""): + binary_path = Path(tf_binary) + if tf_binary.startswith("~"): + self.tf_binary = str(binary_path.expanduser()) + elif not binary_path.is_absolute() and len(binary_path.parts) > 1: + self.tf_binary = str((self.root_dir / tf_binary).resolve()) + else: + self.tf_binary = tf_binary + def update_cwd(self, new_cwd): self.cwd = new_cwd acc_folder = new_cwd.relative_to(self.root_dir).parts[0] diff --git a/tests/test_modules/test_tfrunner.py b/tests/test_modules/test_tfrunner.py index 1f0ed74..b7a1706 100644 --- a/tests/test_modules/test_tfrunner.py +++ b/tests/test_modules/test_tfrunner.py @@ -3,57 +3,62 @@ import pytest from leverage.modules.tfrunner import TFRunner +from leverage._utils import ExitError @pytest.fixture def mock_tofu_binary(mocker): - """Mock tofu binary availability""" + """Mock tofu binary availability and version check""" mocker.patch("shutil.which", return_value="/usr/bin/tofu") + mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "OpenTofu v1.6.0" return "/usr/bin/tofu" @pytest.fixture def mock_terraform_binary(mocker): - """Mock terraform binary availability""" + """Mock terraform binary availability and version check""" mocker.patch("shutil.which", return_value="/usr/bin/terraform") + mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "Terraform v1.6.0" return "/usr/bin/terraform" def test_init_defaults_to_opentofu(mock_tofu_binary): - runner = TFRunner() + runner = TFRunner(binary="") assert runner.binary_input == "tofu" assert runner.binary_path == mock_tofu_binary assert runner.instance_env_vars == {} 
def test_init_with_terraform_flag(mock_terraform_binary): - runner = TFRunner(terraform=True) + runner = TFRunner(binary="", terraform=True) assert runner.binary_input == "terraform" assert runner.binary_path == mock_terraform_binary def test_init_with_env_vars(mock_tofu_binary): env_vars = {"TF_VAR_region": "us-east-1", "TF_LOG": "DEBUG"} - runner = TFRunner(env_vars=env_vars) + runner = TFRunner(binary="", env_vars=env_vars) assert runner.instance_env_vars == env_vars def test_init_with_terraform_and_env_vars(mock_terraform_binary): env_vars = {"TF_VAR_region": "us-west-2"} - runner = TFRunner(terraform=True, env_vars=env_vars) + runner = TFRunner(binary="", terraform=True, env_vars=env_vars) assert runner.binary_input == "terraform" assert runner.instance_env_vars == env_vars def test_init_with_none_env_vars(mock_tofu_binary): - runner = TFRunner(env_vars=None) + runner = TFRunner(binary="", env_vars=None) assert runner.instance_env_vars == {} def test_opentofu_not_found_error_message(mocker): mocker.patch("shutil.which", return_value=None) - with pytest.raises(RuntimeError) as exc_info: - TFRunner() + with pytest.raises(ExitError) as exc_info: + TFRunner(binary="") assert "OpenTofu binary not found" in str(exc_info.value) assert TFRunner.OPENTOFU_INSTALL_URL in str(exc_info.value) @@ -61,59 +66,121 @@ def test_opentofu_not_found_error_message(mocker): def test_terraform_not_found_error_message(mocker): mocker.patch("shutil.which", return_value=None) - with pytest.raises(RuntimeError) as exc_info: - TFRunner(terraform=True) + with pytest.raises(ExitError) as exc_info: + TFRunner(binary="", terraform=True) assert "Terraform binary not found" in str(exc_info.value) assert TFRunner.TERRAFORM_INSTALL_URL in str(exc_info.value) -def test_run_without_env_vars(mock_tofu_binary, mocker): +def test_validate_binary_opentofu_success(mocker): + """Test that OpenTofu binary is validated correctly""" + mocker.patch("shutil.which", return_value="/usr/bin/tofu") + mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "OpenTofu v1.6.0\non linux_amd64" + + runner = TFRunner(binary="") + # Should not raise an exception + assert runner.binary_path == "/usr/bin/tofu" + + +def test_validate_binary_terraform_success(mocker): + """Test that Terraform binary is validated correctly""" + mocker.patch("shutil.which", return_value="/usr/bin/terraform") + mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "Terraform v1.6.0\non linux_amd64" + + runner = TFRunner(binary="", terraform=True) + # Should not raise an exception + assert runner.binary_path == "/usr/bin/terraform" + + +def test_validate_binary_wrong_binary_for_opentofu(mocker): + """Test that using Terraform binary when expecting OpenTofu raises an error""" + mocker.patch("shutil.which", return_value="/usr/bin/terraform") + mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "Terraform v1.6.0\non linux_amd64" + + with pytest.raises(ExitError) as exc_info: + TFRunner(binary="") + + assert "does not seem to be OpenTofu" in str(exc_info.value) + + +def test_validate_binary_wrong_binary_for_terraform(mocker): + """Test that using OpenTofu binary when expecting Terraform raises an error""" + mocker.patch("shutil.which", return_value="/usr/bin/tofu") + mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "OpenTofu v1.6.0\non linux_amd64" + + with pytest.raises(ExitError) as exc_info: + TFRunner(binary="", terraform=True) 
+ + assert "does not seem to be Terraform" in str(exc_info.value) + + +def test_run_without_env_vars(mocker): + mocker.patch("shutil.which", return_value="/usr/bin/tofu") mock_subprocess = mocker.patch("subprocess.run") + # First call is for --version validation, second is for actual run + mock_subprocess.return_value.stdout = "OpenTofu v1.6.0" mock_subprocess.return_value.returncode = 0 - runner = TFRunner() + runner = TFRunner(binary="") result = runner.run("plan", "-out=plan.tfplan") assert result == 0 - mock_subprocess.assert_called_once_with( - [mock_tofu_binary, "plan", "-out=plan.tfplan"], env=os.environ.copy(), cwd=None - ) + # Check the last call (the actual run, not the --version check) + assert mock_subprocess.call_args[0][0] == ["/usr/bin/tofu", "plan", "-out=plan.tfplan"] + assert mock_subprocess.call_args[1]["env"] == os.environ.copy() + assert mock_subprocess.call_args[1]["cwd"] is None -def test_run_with_instance_env_vars_only(mock_tofu_binary, mocker): +def test_run_with_instance_env_vars_only(mocker): + mocker.patch("shutil.which", return_value="/usr/bin/tofu") mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "OpenTofu v1.6.0" mock_subprocess.return_value.returncode = 0 instance_env = {"TF_VAR_region": "us-east-1", "TF_LOG": "DEBUG"} expected_env = os.environ.copy() expected_env.update(instance_env) - runner = TFRunner(env_vars=instance_env) + runner = TFRunner(binary="", env_vars=instance_env) result = runner.run("apply", "-auto-approve") assert result == 0 - mock_subprocess.assert_called_once_with([mock_tofu_binary, "apply", "-auto-approve"], env=expected_env, cwd=None) + # Check the last call (the actual run) + assert mock_subprocess.call_args[0][0] == ["/usr/bin/tofu", "apply", "-auto-approve"] + assert mock_subprocess.call_args[1]["env"] == expected_env + assert mock_subprocess.call_args[1]["cwd"] is None -def test_run_with_run_env_vars_only(mock_tofu_binary, mocker): +def test_run_with_run_env_vars_only(mocker): + mocker.patch("shutil.which", return_value="/usr/bin/tofu") mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "OpenTofu v1.6.0" mock_subprocess.return_value.returncode = 0 run_env = {"TF_VAR_environment": "production"} expected_env = os.environ.copy() expected_env.update(run_env) - runner = TFRunner() + runner = TFRunner(binary="") result = runner.run("plan", env_vars=run_env) assert result == 0 - mock_subprocess.assert_called_once_with([mock_tofu_binary, "plan"], env=expected_env, cwd=None) + # Check the last call + assert mock_subprocess.call_args[0][0] == ["/usr/bin/tofu", "plan"] + assert mock_subprocess.call_args[1]["env"] == expected_env + assert mock_subprocess.call_args[1]["cwd"] is None -def test_run_merges_instance_and_run_env_vars(mock_tofu_binary, mocker): +def test_run_merges_instance_and_run_env_vars(mocker): """Test that TFRunner properly merges env_vars through the parent class""" + mocker.patch("shutil.which", return_value="/usr/bin/tofu") mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "OpenTofu v1.6.0" mock_subprocess.return_value.returncode = 0 instance_env = {"TF_VAR_region": "us-east-1", "TF_LOG": "DEBUG"} @@ -123,16 +190,21 @@ def test_run_merges_instance_and_run_env_vars(mock_tofu_binary, mocker): expected_env.update(instance_env) expected_env.update(run_env) - runner = TFRunner(env_vars=instance_env) + runner = TFRunner(binary="", env_vars=instance_env) result = runner.run("apply", env_vars=run_env) assert result 
== 0 - mock_subprocess.assert_called_once_with([mock_tofu_binary, "apply"], env=expected_env, cwd=None) + # Check the last call + assert mock_subprocess.call_args[0][0] == ["/usr/bin/tofu", "apply"] + assert mock_subprocess.call_args[1]["env"] == expected_env + assert mock_subprocess.call_args[1]["cwd"] is None -def test_run_env_vars_override_instance_env_vars(mock_tofu_binary, mocker): +def test_run_env_vars_override_instance_env_vars(mocker): """Test that run-time env_vars override instance env_vars through parent class""" + mocker.patch("shutil.which", return_value="/usr/bin/tofu") mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "OpenTofu v1.6.0" mock_subprocess.return_value.returncode = 0 instance_env = {"TF_VAR_region": "us-east-1", "TF_LOG": "DEBUG"} @@ -141,97 +213,115 @@ def test_run_env_vars_override_instance_env_vars(mock_tofu_binary, mocker): expected_env = os.environ.copy() expected_env.update({"TF_VAR_region": "us-west-2", "TF_LOG": "DEBUG"}) - runner = TFRunner(env_vars=instance_env) + runner = TFRunner(binary="", env_vars=instance_env) result = runner.run("plan", env_vars=run_env) assert result == 0 - mock_subprocess.assert_called_once_with([mock_tofu_binary, "plan"], env=expected_env, cwd=None) + # Check the last call + assert mock_subprocess.call_args[0][0] == ["/usr/bin/tofu", "plan"] + assert mock_subprocess.call_args[1]["env"] == expected_env + assert mock_subprocess.call_args[1]["cwd"] is None -def test_run_interactive_false(mock_tofu_binary, mocker): +def test_run_interactive_false(mocker): + mocker.patch("shutil.which", return_value="/usr/bin/tofu") mock_subprocess = mocker.patch("subprocess.run") - mock_subprocess.return_value.returncode = 0 - mock_subprocess.return_value.stdout = "terraform output" - mock_subprocess.return_value.stderr = "" + # First call for --version, second for the actual command + version_output = type('obj', (object,), {'stdout': 'OpenTofu v1.6.0', 'returncode': 0})() + run_output = type('obj', (object,), {'stdout': 'terraform output', 'stderr': '', 'returncode': 0})() + mock_subprocess.side_effect = [version_output, run_output] - runner = TFRunner() + runner = TFRunner(binary="") exit_code, stdout, stderr = runner.run("output", "-json", interactive=False) assert exit_code == 0 - assert stdout == "terraform output" + assert stdout == "terraform output" # Already stripped assert stderr == "" - mock_subprocess.assert_called_once_with( - [mock_tofu_binary, "output", "-json"], env=os.environ.copy(), cwd=None, capture_output=True, text=True - ) + # Check the last call (the actual run) + assert mock_subprocess.call_args[0][0] == ["/usr/bin/tofu", "output", "-json"] + assert "capture_output" in mock_subprocess.call_args[1] + assert mock_subprocess.call_args[1]["capture_output"] is True -def test_run_with_multiple_args(mock_terraform_binary, mocker): +def test_run_with_multiple_args(mocker): + mocker.patch("shutil.which", return_value="/usr/bin/terraform") mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "Terraform v1.6.0" mock_subprocess.return_value.returncode = 0 - runner = TFRunner(terraform=True) + runner = TFRunner(binary="", terraform=True) result = runner.run("plan", "-var", "region=us-east-1", "-out=plan.tfplan") assert result == 0 - mock_subprocess.assert_called_once_with( - [mock_terraform_binary, "plan", "-var", "region=us-east-1", "-out=plan.tfplan"], env=os.environ.copy(), cwd=None - ) + # Check the last call + assert mock_subprocess.call_args[0][0] == 
["/usr/bin/terraform", "plan", "-var", "region=us-east-1", "-out=plan.tfplan"] + assert mock_subprocess.call_args[1]["env"] == os.environ.copy() + assert mock_subprocess.call_args[1]["cwd"] is None -def test_run_preserves_instance_env_vars_across_multiple_calls(mock_tofu_binary, mocker): +def test_run_preserves_instance_env_vars_across_multiple_calls(mocker): + mocker.patch("shutil.which", return_value="/usr/bin/tofu") mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "OpenTofu v1.6.0" mock_subprocess.return_value.returncode = 0 instance_env = {"TF_VAR_region": "us-east-1"} expected_env = os.environ.copy() expected_env.update(instance_env) - runner = TFRunner(env_vars=instance_env) + runner = TFRunner(binary="", env_vars=instance_env) - # First call + # First call (after --version check) runner.run("init") - mock_subprocess.assert_called_with([mock_tofu_binary, "init"], env=expected_env, cwd=None) - # Second call - instance env vars should still be present runner.run("plan") - assert mock_subprocess.call_count == 2 - mock_subprocess.assert_called_with([mock_tofu_binary, "plan"], env=expected_env, cwd=None) + # Check that we have 3 calls total (1 --version + 2 actual commands) + assert mock_subprocess.call_count == 3 + # Verify the last call has the right env vars + assert mock_subprocess.call_args[1]["env"] == expected_env -def test_run_does_not_modify_instance_env_vars(mock_tofu_binary, mocker): + +def test_run_does_not_modify_instance_env_vars(mocker): """Test that instance_env_vars are preserved (handled by parent class)""" + mocker.patch("shutil.which", return_value="/usr/bin/tofu") mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "OpenTofu v1.6.0" mock_subprocess.return_value.returncode = 0 instance_env = {"TF_VAR_region": "us-east-1"} run_env = {"TF_VAR_environment": "production"} - runner = TFRunner(env_vars=instance_env) + runner = TFRunner(binary="", env_vars=instance_env) runner.run("plan", env_vars=run_env) # Instance env vars should remain unchanged (verified in parent class) assert runner.instance_env_vars == {"TF_VAR_region": "us-east-1"} -def test_empty_dict_for_none_env_vars_on_run(mock_tofu_binary, mocker): +def test_empty_dict_for_none_env_vars_on_run(mocker): + mocker.patch("shutil.which", return_value="/usr/bin/tofu") mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "OpenTofu v1.6.0" mock_subprocess.return_value.returncode = 0 - runner = TFRunner() + runner = TFRunner(binary="") runner.run("plan", env_vars=None) - # Should not raise an error and should pass empty dict - mock_subprocess.assert_called_once() + # Should not raise an error - check we have 2 calls (--version + actual run) + assert mock_subprocess.call_count == 2 -def test_env_vars_converted_to_strings_in_run(mock_tofu_binary, mocker): +def test_env_vars_converted_to_strings_in_run(mocker): + mocker.patch("shutil.which", return_value="/usr/bin/tofu") mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.stdout = "OpenTofu v1.6.0" mock_subprocess.return_value.returncode = 0 instance_env = {"TF_VAR_count": 5, "TF_VAR_enabled": True} run_env = {"TF_VAR_timeout": 3.14} - runner = TFRunner(env_vars=instance_env) + runner = TFRunner(binary="", env_vars=instance_env) runner.run("plan", env_vars=run_env) called_env = mock_subprocess.call_args[1]["env"] From 48858f143b8fd71b3936a721a672fbcb1e8e4d5c Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Fri, 28 Nov 2025 14:47:17 
-0300 Subject: [PATCH 29/46] Remove container usage from kubectl command and re add to leverage --- leverage/leverage.py | 6 +- leverage/modules/__init__.py | 3 +- leverage/modules/kubectl.py | 183 +++++++++++++++++++++++++++++++---- 3 files changed, 166 insertions(+), 26 deletions(-) diff --git a/leverage/leverage.py b/leverage/leverage.py index 2b129a8..9f32f15 100644 --- a/leverage/leverage.py +++ b/leverage/leverage.py @@ -7,7 +7,7 @@ from leverage import __version__, conf from leverage._internals import pass_state from leverage.path import NotARepositoryError, PathsHandler -from leverage.modules import aws, credentials, run, project, tofu, terraform, tfautomv +from leverage.modules import aws, credentials, run, project, tofu, terraform, tfautomv, kubectl @click.group(invoke_without_command=True) @@ -39,5 +39,5 @@ def leverage(context, state, verbose): leverage.add_command(credentials) leverage.add_command(aws) leverage.add_command(tfautomv) -# leverage.add_command(kubectl) -# leverage.add_command(kubectl, name="kc") +leverage.add_command(kubectl) +leverage.add_command(kubectl, name="kc") diff --git a/leverage/modules/__init__.py b/leverage/modules/__init__.py index 383ca09..ecdccef 100644 --- a/leverage/modules/__init__.py +++ b/leverage/modules/__init__.py @@ -4,5 +4,4 @@ from .tf import tofu, terraform from .credentials import credentials from .tfautomv import tfautomv -# from .kubectl import kubectl -# from .shell import shell +from .kubectl import kubectl diff --git a/leverage/modules/kubectl.py b/leverage/modules/kubectl.py index 381f4ac..947ae5d 100644 --- a/leverage/modules/kubectl.py +++ b/leverage/modules/kubectl.py @@ -1,43 +1,184 @@ -from leverage._internals import pass_state -from leverage._internals import pass_container -from leverage.container import get_docker_client -from leverage.containers.kubectl import KubeCtlContainer +import os +from enum import Enum +from pathlib import Path +from dataclasses import dataclass import click +import ruamel.yaml +import simple_term_menu +from leverage import logger +from leverage.path import PathsHandler +from leverage._utils import ExitError +from leverage.modules.aws import aws +from leverage.modules.runner import Runner +from leverage.modules.tfrunner import TFRunner from leverage.modules.utils import _handle_subcommand +from leverage.modules.auth import check_sso_token, refresh_layer_credentials +from leverage._internals import pass_state, pass_paths + + +@dataclass +class ClusterInfo: + cluster_name: str + profile: str + region: str + + +class MetadataTypes(Enum): + K8S_CLUSTER = "k8s-eks-cluster" + CONTEXT_SETTINGS = {"ignore_unknown_options": True} +METADATA_FILENAME = "metadata.yaml" + + +@pass_paths +def refresh_kubectl_credentials(paths: PathsHandler) -> None: + """ + Refresh the AWS credentials for the current project to be used by kubectl. 
+ """ + check_sso_token(paths) + + try: # if we are not in a layer, we don't need to refresh the credentials + paths.check_for_layer_location() + except ExitError: + return + + refresh_layer_credentials(paths) @click.group(invoke_without_command=True, context_settings={"ignore_unknown_options": True}) @click.argument("args", nargs=-1, type=click.UNPROCESSED) @pass_state @click.pass_context def kubectl(context, state, args): - """Run Kubectl commands in a custom containerized environment.""" - state.container = KubeCtlContainer(get_docker_client()) - if not args or (args and args[0] != "discover"): - state.container.paths.check_for_layer_location() - state.container.ensure_image() - _handle_subcommand(context=context, cli_container=state.container, args=args) + """Run Kubectl commands in the context of the current project.""" + kubeconfig_dir = state.paths.home / ".kube" / state.paths.project + kubeconfig_dir.mkdir(parents=True, exist_ok=True) -@kubectl.command(context_settings=CONTEXT_SETTINGS) -@pass_container -def shell(kctl: KubeCtlContainer): - """Spawn a shell with the kubectl credentials pre-configured.""" - kctl.start_shell() + credentials_env_vars = { + "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file), + "AWS_CONFIG_FILE": str(state.paths.aws_config_file), + "KUBECONFIG": str(kubeconfig_dir / "config"), + } + state.runner = Runner( + binary="kubectl", + error_message=( + f"Kubectl not found on system. " + f"Please install it following the instructions at: https://kubernetes.io/docs/tasks/tools/#kubectl" + ), + env_vars=credentials_env_vars, + ) + _handle_subcommand(context=context, runner=state.runner, args=args, pre_invocation_callback=refresh_kubectl_credentials) + + +def _configure(ci: ClusterInfo = None, layer_path: Path = None): + """ + Add the given EKS cluster configuration to the .kube/ files. 
+ """ + if ci: + # if you have the details, generate the command right away + cmd = ["eks", "update-kubeconfig", "--region", ci.region, "--name", ci.cluster_name, "--profile", ci.profile] + else: + # otherwise go get them from the layer + logger.info("Retrieving k8s cluster information...") + cmd = _get_eks_kube_config(layer_path).split(" ")[1:] + + logger.info("Configuring context...") + try: + click.get_current_context().invoke(aws, args=cmd) + except ExitError as e: + raise ExitError(e.exit_code, f"Failed to configure kubectl context: {e.message}") + + logger.info("Done.") + +@pass_paths +def _get_eks_kube_config(paths: PathsHandler, layer_path: Path) -> str: + # TODO: Get rid of this ugly workaround + credentials_env_vars = { + "AWS_SHARED_CREDENTIALS_FILE": str(paths.aws_credentials_file), + "AWS_CONFIG_FILE": str(paths.aws_config_file), + } + try: + tfrunner = TFRunner(binary=paths.tf_binary, env_vars=credentials_env_vars) + except ExitError as e: + try: + tfrunner = TFRunner(binary=paths.tf_binary, terraform=True, env_vars=credentials_env_vars) + except ExitError: + raise ExitError(1, "Could not locate TF binary.") + + refresh_kubectl_credentials() + exit_code, output, error = tfrunner.exec("output", "-no-color", working_dir=layer_path) + if exit_code: + raise ExitError(exit_code, f"Failed to get EKS kube config: {error}") + + region = paths.common_conf.get("region_primary", paths.common_conf.get("region", "")) + if not region: + raise ExitError(1, "No region configured in global config file.") + + aws_eks_cmd = next(op for op in output.splitlines() if op.startswith("aws eks update-kubeconfig")) + return aws_eks_cmd + f" --region {region}" @kubectl.command(context_settings=CONTEXT_SETTINGS) -@pass_container -def configure(kctl: KubeCtlContainer): +@pass_paths +def configure(paths: PathsHandler): """Automatically add the EKS cluster from the layer into your kubectl config file.""" - kctl.configure() + _configure(layer_path=paths.cwd) + + +def _scan_clusters(paths: PathsHandler): + """ + Scan all the subdirectories in search of "cluster" metadata files. + """ + for root, dirs, files in os.walk(paths.paths.cwd): + # exclude hidden directories + dirs[:] = [d for d in dirs if d[0] != "."] + + for file in files: + if file != METADATA_FILENAME: + continue + + cluster_file = Path(root) / file + try: + with open(cluster_file) as cluster_yaml_file: + data = ruamel.yaml.safe_load(cluster_yaml_file) + if data.get("type") != MetadataTypes.K8S_CLUSTER.value: + continue + except Exception as exc: + logger.warning(exc) + continue + else: + yield Path(root), data @kubectl.command(context_settings=CONTEXT_SETTINGS) -@pass_container -def discover(kctl: KubeCtlContainer): - kctl.discover() +@pass_paths +def discover(paths: PathsHandler): + """ + Do a scan down the tree of subdirectories looking for k8s clusters metadata files. + Open up a menu with all the found items, where you can pick up and configure it on your .kubeconfig file. 
+ """ + cluster_files = [(path, data) for path, data in _scan_clusters(paths)] + if not cluster_files: + raise ExitError(1, "No clusters found.") + + terminal_menu = simple_term_menu.TerminalMenu( + [f"{c[1]['data']['cluster_name']}: {str(c[0])}" for c in cluster_files], title="Clusters found:" + ) + menu_entry_index = terminal_menu.show() + if menu_entry_index is None: + # selection cancelled + return + + layer_path = cluster_files[menu_entry_index][0] + cluster_data = cluster_files[menu_entry_index][1] + cluster_info = ClusterInfo( + cluster_name=cluster_data["data"]["cluster_name"], + profile=cluster_data["data"]["profile"], + region=cluster_data["data"]["region"], + ) + + _configure(cluster_info, layer_path) From 06cce994b3813bc17ee8545726d4c3c7b83c77ef Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Fri, 28 Nov 2025 14:57:51 -0300 Subject: [PATCH 30/46] Wipe references to containers --- leverage/_utils.py | 63 --- leverage/container.py | 716 --------------------------------- leverage/containers/kubectl.py | 145 ------- leverage/modules/tf.py | 2 +- leverage/path.py | 2 - 5 files changed, 1 insertion(+), 927 deletions(-) delete mode 100644 leverage/container.py delete mode 100644 leverage/containers/kubectl.py diff --git a/leverage/_utils.py b/leverage/_utils.py index 3f5f8b5..a0232a3 100644 --- a/leverage/_utils.py +++ b/leverage/_utils.py @@ -10,8 +10,6 @@ import lark from click.exceptions import ClickException from configupdater import ConfigUpdater -from docker import DockerClient -from docker.models.containers import Container from leverage import logger @@ -64,48 +62,6 @@ def git(command): run(command, stdout=PIPE, stderr=PIPE, check=True) -class CustomEntryPoint: - """ - Set a custom entrypoint on the container while entering the context. - Once outside, return it to its original value. - """ - - def __init__(self, container, entrypoint): - self.container = container - self.old_entrypoint = container.entrypoint - self.new_entrypoint = entrypoint - - def __enter__(self): - self.container.entrypoint = self.new_entrypoint - - def __exit__(self, *args, **kwargs): - self.container.entrypoint = self.old_entrypoint - - -class AwsCredsEntryPoint(CustomEntryPoint): - """ - Fetching AWS credentials by setting the SSO/MFA entrypoints. - """ - - def __init__(self, container, override_entrypoint=None): - auth_method = container.auth_method() - - new_entrypoint = f"{auth_method}{container.entrypoint if override_entrypoint is None else override_entrypoint}" - super(AwsCredsEntryPoint, self).__init__(container, entrypoint=new_entrypoint) - - def __exit__(self, *args, **kwargs): - super(AwsCredsEntryPoint, self).__exit__(*args, **kwargs) - if self.container.mfa_enabled: - self.container.environment.update( - { - "AWS_SHARED_CREDENTIALS_FILE": self.container.environment["AWS_SHARED_CREDENTIALS_FILE"].replace( - ".aws", "tmp" - ), - "AWS_CONFIG_FILE": self.container.environment["AWS_CONFIG_FILE"].replace(".aws", "tmp"), - } - ) - - class ExitError(ClickException): """ Raise an Exit exception but also print an error description. @@ -136,25 +92,6 @@ def parse_tf_file(file: Path): return parsed -class ContainerSession: - """ - Handle the start/stop cycle of a container. - Useful when you need to keep your container alive to share context between multiple commands. 
- """ - - def __init__(self, docker_client: DockerClient, container_data): - self.docker_client = docker_client - self.container_data = container_data - - def __enter__(self) -> Container: - self.docker_client.api.start(self.container_data) - return self.docker_client.containers.get(self.container_data["Id"]) - - def __exit__(self, exc_type, exc_value, exc_tb): - self.docker_client.api.stop(self.container_data) - self.docker_client.api.remove_container(self.container_data) - - def key_finder(d: dict, target: str, avoid: Optional[str] = None) -> List[str]: """ Iterate over a dict of dicts and/or lists of dicts, looking for a key with value "target". diff --git a/leverage/container.py b/leverage/container.py deleted file mode 100644 index dddf125..0000000 --- a/leverage/container.py +++ /dev/null @@ -1,716 +0,0 @@ -import json -import os -import re -import webbrowser -from io import BytesIO -from datetime import datetime -from time import sleep - -import hcl2 -from click.exceptions import Exit -import dockerpty -from configupdater import ConfigUpdater -from docker import DockerClient -from docker.errors import APIError -from docker.types import Mount -from typing import Tuple - -from leverage import logger -from leverage._utils import AwsCredsEntryPoint, CustomEntryPoint, ExitError, ContainerSession -from leverage.modules.auth import refresh_layer_credentials -from leverage.logger import raw_logger -from leverage.logger import get_script_log_level -from leverage.path import PathsHandler -from leverage.conf import load as load_env - -REGION = ( - r"(.*)" # project folder - # start region - r"(global|(?:[a-z]{2}-(?:gov-)?" - r"(?:central|north|south|east|west|northeast|northwest|southeast|southwest|secret|topsecret)-[1-4]))" - # end region - r"(.*)" # layer -) - - -def get_docker_client(): - """Attempt to get a Docker client from the environment configuration. Halt application otherwise. - - Raises: - Exit: If communication to Docker server could not be established. - - Returns: - docker.DockerClient: Client for Docker daemon. - """ - try: - docker_client = DockerClient.from_env() - docker_client.ping() - - except: - logger.error( - "Docker daemon doesn't seem to be responding. " - "Please check it is up and running correctly before re-running the command." - ) - raise Exit(1) - - return docker_client - - -class LeverageContainer: - """Basic Leverage Container. Holds the minimum information required to run the Docker image that Leverage uses - to perform its operations. Commands can be issued as interactive via `start` for when live output or user input is desired - or the can be simply executed via `exec` to run silently and retrieve the command output. - - NOTE: An aggregation approach to this design should be considered instead of the current inheritance approach. - """ - - LEVERAGE_IMAGE = "binbash/leverage-toolbox" - SHELL = "/bin/bash" - CONTAINER_USER = "leverage" - - def __init__(self, client, mounts: tuple = None, env_vars: dict = None): - """Project related paths are determined and stored. Project configuration is loaded. - - Args: - client (docker.DockerClient): Client to interact with Docker daemon. 
- """ - self.client = client - # Load configs - self.env_conf = load_env() - - self.paths = PathsHandler(self.env_conf, self.CONTAINER_USER) - self.project = self.paths.project - - # Set image to use - self.image = self.env_conf.get("TF_IMAGE", self.env_conf.get("TERRAFORM_IMAGE", self.LEVERAGE_IMAGE)) - self.image_tag = self.env_conf.get("TF_IMAGE_TAG", self.env_conf.get("TERRAFORM_IMAGE_TAG")) - if not self.image_tag: - logger.error( - "No docker image tag defined.\n" - "Please set `TF_IMAGE_TAG` variable in the project's [bold]build.env[/bold] file before running a Leverage command." - ) - raise Exit(1) - - mounts = [Mount(source=source, target=target, type="bind") for source, target in mounts] if mounts else [] - self.host_config = self.client.api.create_host_config(security_opt=["label=disable"], mounts=mounts) - self.container_config = { - "image": f"{self.image}:{self.local_image_tag}", - "command": "", - "stdin_open": True, - "environment": env_vars or {}, - "entrypoint": "", - "working_dir": f"{self.paths.guest_base_path}/{self.paths.cwd.relative_to(self.paths.root_dir).as_posix()}", - "host_config": self.host_config, - } - - @property - def environment(self): - return self.container_config["environment"] - - @environment.setter - def environment(self, value): - self.container_config["environment"] = value - - @property - def entrypoint(self): - return self.container_config["entrypoint"] - - @entrypoint.setter - def entrypoint(self, value): - self.container_config["entrypoint"] = value - - @property - def mounts(self): - return self.container_config["host_config"]["Mounts"] - - @mounts.setter - def mounts(self, value): - self.container_config["host_config"]["Mounts"] = value - - @property - def region(self): - """ - Return the region of the layer. - """ - if matches := re.match(REGION, self.paths.cwd.as_posix()): - # the region (group 1) is between the projects folders (group 0) and the layers (group 2) - return matches.groups()[1] - - raise ExitError(1, f"No valid region could be found at: {self.paths.cwd.as_posix()}") - - @property - def local_image_tag(self): - return f"{self.image_tag}-{os.getgid()}-{os.getuid()}" - - @property - def local_image(self) -> BytesIO: - """Return the local image that will be built, as a file-like object.""" - return BytesIO( - """ - ARG IMAGE_TAG - FROM binbash/leverage-toolbox:$IMAGE_TAG - - ARG UNAME - ARG UID - ARG GID - RUN groupadd -g $GID -o $UNAME - RUN useradd -m -u $UID -g $GID -o -s /bin/bash $UNAME - RUN chown -R $UID:$GID /home/leverage - USER $UNAME - """.encode( - "utf-8" - ) - ) - - def ensure_image(self): - """ - Make sure the required local Docker image is available in the system. If not, build it. - If the image already exists, re-build it so changes in the arguments can take effect. 
- """ - logger.info(f"Checking for local docker image, tag: {self.local_image_tag}...") - image_name = f"{self.image}:{self.local_image_tag}" - - # check first is our image is already available locally - found_image = self.client.api.images(f"{self.image}:{self.local_image_tag}") - if found_image: - logger.info("[green]✔ OK[/green]\n") - return - - logger.info(f"Image not found, building it...") - build_args = { - "IMAGE_TAG": self.image_tag, - "UNAME": self.CONTAINER_USER, - "GID": str(os.getgid()), - "UID": str(os.getuid()), - } - - stream = self.client.api.build( - fileobj=self.local_image, - tag=image_name, - pull=True, - buildargs=build_args, - decode=True, - ) - - for line in stream: - if "stream" in line and line["stream"].startswith("Successfully built"): - logger.info("[green]✔ OK[/green]\n") - elif "errorDetail" in line: - raise ExitError(1, f"Failed building local image: {line['errorDetail']}") - - def _create_container(self, tty, command="", *args): - """Create the container that will run the command. - - Args: - tty (bool): Whether the container will run interactively or not. - command (str, optional): Command to run. Defaults to "". - - Raises: - Exit: If the container could not be created. - - Returns: - dict: Reference to the created container. - """ - command = " ".join([command] + list(args)) - logger.debug(f"[bold cyan]Running command:[/bold cyan] {command}") - self.container_config["command"] = command - self.container_config["tty"] = tty - - try: - return self.client.api.create_container(**self.container_config) - - except APIError as exc: - exc.__traceback__ = None - exc.__context__.__traceback__ = None - logger.exception("Error creating container:", exc_info=exc) - raise Exit(1) - - def _run(self, container, run_func): - """Apply the given run function to the given container, return its outputs and handle container cleanup. - - Args: - container (dict): Reference to a Docker container. - run_func (function): Function to apply to the given container. - - Returns: - any: Whatever the given function returns. - """ - try: - return run_func(self.client, container) - - except APIError as exc: - exc.__traceback__ = None - exc.__context__.__traceback__ = None - logger.exception("Error during container execution:", exc_info=exc) - - finally: - self.client.api.stop(container) - self.client.api.remove_container(container) - - def _start(self, command: str, *args): - """Create an interactive container, and run command with the given arguments. - - Args: - command: Command to run. - - Returns: - int: Execution exit code. - """ - container = self._create_container(True, command, *args) - - def run_func(client, container): - dockerpty.start(client=client.api, container=container) - return client.api.inspect_container(container)["State"]["ExitCode"] - - return self._run(container, run_func) - - def _start_with_output(self, command, *args): - """ - Same than _start but also returns the outputs (by dumping the logs) of the container. - """ - container = self._create_container(True, command, *args) - - def run_func(client, container): - dockerpty.start(client=client.api, container=container) - exit_code = client.api.inspect_container(container)["State"]["ExitCode"] - logs = client.api.logs(container).decode("utf-8") - return exit_code, logs - - return self._run(container, run_func) - - def start(self, command: str, *arguments) -> int: - """Run command with the given arguments in an interactive container. - Returns execution exit code. 
- """ - return self._start(command, *arguments) - - def _exec(self, command: str, *args) -> Tuple[int, str]: - """Create a non interactive container and execute command with the given arguments. - Returns execution exit code and output. - """ - container = self._create_container(False, command, *args) - - def run_func(client, container): - client.api.start(container) - exit_code = client.api.wait(container)["StatusCode"] - output = client.api.logs(container).decode("utf-8") - return exit_code, output - - return self._run(container, run_func) - - def exec(self, command: str, *arguments) -> Tuple[int, str]: - """Execute command with the given arguments in a container. - Returns execution exit code and output. - """ - return self._exec(command, *arguments) - - def docker_logs(self, container): - return self.client.api.logs(container).decode("utf-8") - - -class SSOContainer(LeverageContainer): - # SSO scripts - AWS_SSO_LOGIN_SCRIPT = "/home/leverage/scripts/aws-sso/aws-sso-login.sh" - AWS_SSO_LOGOUT_SCRIPT = "/home/leverage/scripts/aws-sso/aws-sso-logout.sh" - - # SSO constants - AWS_SSO_LOGIN_URL = "{sso_url}/#/device?user_code={user_code}" - AWS_SSO_CODE_WAIT_SECONDS = 2 - AWS_SSO_CODE_ATTEMPTS = 10 - FALLBACK_LINK_MSG = "Opening the browser... if it fails, open this link in your browser:\n{link}" - - def get_sso_access_token(self): - with open(self.paths.sso_token_file) as token_file: - return json.loads(token_file.read())["accessToken"] - - @property - def sso_region_from_main_profile(self): - """ - Same than AWSCLIContainer.get_sso_region but without using a container. - """ - conf = ConfigUpdater() - conf.read(self.paths.host_aws_profiles_file) - return conf.get(f"profile {self.project}-sso", "sso_region").value - - def get_sso_code(self, container) -> str: - """ - Find and return the SSO user code by periodically checking the logs. - Up until N attempts. - """ - logger.info("Fetching SSO code...") - for _ in range(self.AWS_SSO_CODE_ATTEMPTS): - # pull logs periodically until we find our SSO code - logs = self.docker_logs(container) - if "Then enter the code:" in logs: - return logs.split("Then enter the code:")[1].split("\n")[2] - else: - logger.debug(logs) - sleep(self.AWS_SSO_CODE_WAIT_SECONDS) - - raise ExitError(1, "Get SSO code timed-out") - - def get_sso_region(self): - # TODO: what about using the .region property we have now? 
that takes the value from the path of the layer - _, region = self.exec(f"configure get sso_region --profile {self.project}-sso") - return region - - def sso_login(self) -> int: - region = self.get_sso_region() - - with CustomEntryPoint(self, "sh -c"): - container = self._create_container(False, command=self.AWS_SSO_LOGIN_SCRIPT) - - with ContainerSession(self.client, container): - # once inside this block, the SSO_LOGIN_SCRIPT is being executed in the "background" - # now let's grab the user code from the logs - user_code = self.get_sso_code(container) - # with the user code, we can now autocomplete the url - link = self.AWS_SSO_LOGIN_URL.format(sso_url=self.paths.common_conf["sso_start_url"], user_code=user_code) - webbrowser.open_new_tab(link) - # The SSO code is only valid once: if the browser was able to open it, the fallback link will be invalid - logger.info(self.FALLBACK_LINK_MSG.format(link=link)) - # now let's wait until the command locking the container resolve itself: - # aws sso login will wait for the user code - # once submitted to the browser, the authentication finish and the lock is released - exit_code = self.client.api.wait(container)["StatusCode"] - raw_logger.info(self.docker_logs(container)) - - return exit_code - - -class AWSCLIContainer(SSOContainer): - """Leverage Container specially tailored to run AWS CLI commands.""" - - AWS_CLI_BINARY = "/usr/local/bin/aws" - - def __init__(self, client): - super().__init__(client) - - self.environment = { - "COMMON_CONFIG_FILE": self.paths.common_tfvars, - "ACCOUNT_CONFIG_FILE": self.paths.account_tfvars, - "BACKEND_CONFIG_FILE": self.paths.backend_tfvars, - "AWS_SHARED_CREDENTIALS_FILE": f"{self.paths.guest_aws_credentials_dir}/credentials", - "AWS_CONFIG_FILE": f"{self.paths.guest_aws_credentials_dir}/config", - "SSO_CACHE_DIR": f"{self.paths.guest_aws_credentials_dir}/sso/cache", - "SCRIPT_LOG_LEVEL": get_script_log_level(), - } - self.entrypoint = self.AWS_CLI_BINARY - self.mounts = [ - Mount(source=self.paths.root_dir.as_posix(), target=self.paths.guest_base_path, type="bind"), - Mount( - source=self.paths.host_aws_credentials_dir.as_posix(), - target=self.paths.guest_aws_credentials_dir, - type="bind", - ), - ] - - logger.debug(f"[bold cyan]Container configuration:[/bold cyan]\n{json.dumps(self.container_config, indent=2)}") - - def start(self, command, profile=""): - args = [] if not profile else ["--profile", profile] - return self._start(command, *args) - - # FIXME: we have a context manager for this now, remove this method later! - def system_start(self, command): - """Momentarily override the container's default entrypoint. To run arbitrary commands and not only AWS CLI ones.""" - self.entrypoint = "" - exit_code = self._start(command) - self.entrypoint = self.AWS_CLI_BINARY - return exit_code - - def exec(self, command, profile=""): - args = [] if not profile else ["--profile", profile] - return self._exec(command, *args) - - # FIXME: we have a context manager for this now, remove this method later! - def system_exec(self, command): - """Momentarily override the container's default entrypoint. To run arbitrary commands and not only AWS CLI ones.""" - self.entrypoint = "" - exit_code, output = self._exec(command) - - self.entrypoint = self.AWS_CLI_BINARY - return exit_code, output - - -class TFContainer(SSOContainer): - """Leverage container specifically tailored to run Terraform/OpenTofu commands. 
- It handles authentication and some checks regarding where the command is being executed.""" - - TERRAFORM_BINARY = "/bin/terraform" - TOFU_BINARY = "/bin/tofu" - - TF_MFA_ENTRYPOINT = "/home/leverage/scripts/aws-mfa/aws-mfa-entrypoint.sh" - - def __init__(self, client, terraform=False, mounts=None, env_vars=None): - super().__init__(client, mounts=mounts, env_vars=env_vars) - - self.paths.assert_running_leverage_project() - - # Set authentication methods - self.sso_enabled = self.paths.common_conf.get("sso_enabled", False) - self.mfa_enabled = ( - self.env_conf.get("MFA_ENABLED", "false") == "true" - ) # TODO: Convert values to bool upon loading - - # SSH AGENT - SSH_AUTH_SOCK = os.getenv("SSH_AUTH_SOCK") - - # make sure .gitconfig exists before mounting it - self.paths.host_git_config_file.touch(exist_ok=True) - - self.environment.update( - { - "COMMON_CONFIG_FILE": self.paths.common_tfvars, - "ACCOUNT_CONFIG_FILE": self.paths.account_tfvars, - "BACKEND_CONFIG_FILE": self.paths.backend_tfvars, - "AWS_SHARED_CREDENTIALS_FILE": f"{self.paths.guest_aws_credentials_dir}/credentials", - "AWS_CONFIG_FILE": f"{self.paths.guest_aws_credentials_dir}/config", - "SRC_AWS_SHARED_CREDENTIALS_FILE": f"{self.paths.guest_aws_credentials_dir}/credentials", # Legacy? - "SRC_AWS_CONFIG_FILE": f"{self.paths.guest_aws_credentials_dir}/config", # Legacy? - "AWS_CACHE_DIR": f"{self.paths.guest_aws_credentials_dir}/cache", - "SSO_CACHE_DIR": f"{self.paths.guest_aws_credentials_dir}/sso/cache", - "SCRIPT_LOG_LEVEL": get_script_log_level(), - "MFA_SCRIPT_LOG_LEVEL": get_script_log_level(), # Legacy - "SSH_AUTH_SOCK": "" if SSH_AUTH_SOCK is None else "/ssh-agent", - } - ) - self.entrypoint = self.TERRAFORM_BINARY if terraform else self.TOFU_BINARY - extra_mounts = [ - Mount(source=self.paths.root_dir.as_posix(), target=self.paths.guest_base_path, type="bind"), - Mount( - source=self.paths.host_aws_credentials_dir.as_posix(), - target=self.paths.guest_aws_credentials_dir, - type="bind", - ), - Mount(source=self.paths.host_git_config_file.as_posix(), target="/etc/gitconfig", type="bind"), - ] - self.mounts.extend(extra_mounts) - # if you have set the tf plugin cache locally - if self.paths.tf_cache_dir: - # then mount it too into the container - self.environment["TF_PLUGIN_CACHE_DIR"] = self.paths.tf_cache_dir - # given that opentofu/terraform uses symlinks to point from the .terraform folder - # into the plugin folder we need to use the same directory inside the container - # otherwise symlinks will be broken once outside the container - # which will break opentofu/terraform usage outside Leverage - self.mounts.append(Mount(source=self.paths.tf_cache_dir, target=self.paths.tf_cache_dir, type="bind")) - if SSH_AUTH_SOCK is not None: - self.mounts.append(Mount(source=SSH_AUTH_SOCK, target="/ssh-agent", type="bind")) - - self._backend_key = None - - logger.debug(f"[bold cyan]Container configuration:[/bold cyan]\n{json.dumps(self.container_config, indent=2)}") - - def auth_method(self) -> str: - """ - Return the expected auth method based on the SSO or MFA flags. - - In the case of MFA, we also need to tweak some env variables for AWS credentials. - Once you are done with authentication, remember to revert the env var changes. 
- """ - if self.sso_enabled: - self._check_sso_token() - # sso credentials needs to be refreshed right before we execute our command on the container - refresh_layer_credentials(self) - elif self.mfa_enabled: - self.environment.update( - { - "AWS_SHARED_CREDENTIALS_FILE": self.environment["AWS_SHARED_CREDENTIALS_FILE"].replace( - "tmp", ".aws" - ), - "AWS_CONFIG_FILE": self.environment["AWS_CONFIG_FILE"].replace("tmp", ".aws"), - } - ) - return f"{self.TF_MFA_ENTRYPOINT} -- " - - return "" - - @property - def tf_default_args(self): - """Array of strings containing all valid config files for layer as parameters for OpenTofu/Terraform""" - common_config_files = [ - f"-var-file={self.paths.guest_config_file(common_file)}" - for common_file in self.paths.common_config_dir.glob("*.tfvars") - ] - account_config_files = [ - f"-var-file={self.paths.guest_config_file(account_file)}" - for account_file in self.paths.account_config_dir.glob("*.tfvars") - ] - return common_config_files + account_config_files - - def enable_mfa(self): - """Enable Multi-Factor Authentication.""" - self.mfa_enabled = True - - def enable_sso(self): - """Enable Single Sign-On Authentication.""" - self.sso_enabled = True - - def disable_authentication(self): - """Disable all authentication.""" - self.mfa_enabled = False - self.sso_enabled = False - - def _check_sso_token(self): - """Check for the existence and validity of the SSO token to be used to get credentials.""" - - # Adding `token` file name to this function in order to - # meet the requirement regarding to have just one - # token file in the sso/cache - sso_role = self.paths.account_conf.get("sso_role") - token_file = self.paths.sso_cache / sso_role - - token_files = list(self.paths.sso_cache.glob("*")) - if not token_files: - logger.error("No AWS SSO token found. Please log in or configure SSO.") - raise Exit(1) - - if token_file not in token_files: - sso_role = "token" - token_file = self.paths.sso_cache / sso_role - if token_file not in token_files: - logger.error( - "No valid AWS SSO token found for current account.\n" - "Please log out and reconfigure SSO before proceeding." - ) - raise Exit(1) - - entrypoint = self.entrypoint - self.entrypoint = "" - - _, cached_token = self._exec(f"sh -c 'cat $SSO_CACHE_DIR/{sso_role}'") - token = json.loads(cached_token) - expiry = datetime.strptime(token.get("expiresAt"), "%Y-%m-%dT%H:%M:%SZ") - renewal = datetime.utcnow() - - if expiry < renewal: - logger.error( - "AWS SSO token has expired, please log back in by running [bold]leverage aws sso login[/bold]" - " to refresh your credentials before re-running the last command." - ) - raise Exit(1) - - self.entrypoint = entrypoint - - def refresh_credentials(self): - with AwsCredsEntryPoint(self, override_entrypoint=""): - if exit_code := self._start('echo "Done."'): - return exit_code - - def start(self, command, *arguments): - with AwsCredsEntryPoint(self, self.entrypoint): - return self._start(command, *arguments) - - def start_in_layer(self, command, *arguments): - """Run a command that can only be performed in layer level.""" - self.paths.check_for_layer_location() - - return self.start(command, *arguments) - - def exec(self, command, *arguments): - with AwsCredsEntryPoint(self): - return self._exec(command, *arguments) - - # FIXME: we have a context manager for this now, remove this method later! - def system_exec(self, command): - """Momentarily override the container's default entrypoint. 
To run arbitrary commands and not only AWS CLI ones.""" - original_entrypoint = self.entrypoint - self.entrypoint = "" - exit_code, output = self._exec(command) - - self.entrypoint = original_entrypoint - return exit_code, output - - def start_shell(self): - """Launch a shell in the container.""" - if self.mfa_enabled or self.sso_enabled: - self.paths.check_for_layer_location() - - with AwsCredsEntryPoint(self, override_entrypoint=""): - self._start(self.SHELL) - - def set_backend_key(self, skip_validation=False): - # Scenarios: - # - # scenario | s3 backend set | s3 key set | skip_validation | result - # 0 | false | false | false | fail - # 1 | false | false | true | ok - # 2 | true | false | false/true | set the key - # 3 | true | true | false/true | read the key - try: - config_tf_file = self.paths.cwd / "config.tf" - config_tf = hcl2.loads(config_tf_file.read_text()) if config_tf_file.exists() else {} - if ( - "terraform" in config_tf - and "backend" in config_tf["terraform"][0] - and "s3" in config_tf["terraform"][0]["backend"][0] - ): - if "key" in config_tf["terraform"][0]["backend"][0]["s3"]: - backend_key = config_tf["terraform"][0]["backend"][0]["s3"]["key"] - self._backend_key = backend_key - else: - self._backend_key = ( - f"{self.paths.cwd.relative_to(self.paths.root_dir).as_posix()}/terraform.tfstate".replace( - "/base-", "/" - ).replace("/tools-", "/") - ) - - in_container_file_path = ( - f"{self.paths.guest_base_path}/{config_tf_file.relative_to(self.paths.root_dir).as_posix()}" - ) - resp = self.system_exec( - "hcledit " - f"-f {in_container_file_path} -u" - f' attribute append terraform.backend.key "\\"{self._backend_key}\\""' - ) - else: - if not skip_validation: - raise KeyError() - except (KeyError, IndexError): - logger.error( - "[red]✘[/red] Malformed [bold]config.tf[/bold] file. Missing backend block. In some cases you may want to skip this check by using the --skip-validation flag, e.g. the first time you initialize a tf-backend layer." - ) - raise Exit(1) - except Exception as e: - logger.error("[red]✘[/red] Malformed [bold]config.tf[/bold] file. 
Unable to parse.") - logger.debug(e) - raise Exit(1) - - @property - def backend_key(self): - return self._backend_key - - @backend_key.setter - def backend_key(self, backend_key): - self._backend_key = backend_key - - -class TFautomvContainer(TFContainer): - """Leverage Container tailored to run general commands.""" - - TFAUTOMV_CLI_BINARY = "/usr/local/bin/tfautomv" - - def __init__(self, client): - super().__init__(client) - - self.environment["TF_CLI_ARGS_init"] = " ".join(self.tf_default_args) - self.environment["TF_CLI_ARGS_plan"] = " ".join(self.tf_default_args) - - self.entrypoint = self.TFAUTOMV_CLI_BINARY - - logger.debug(f"[bold cyan]Container configuration:[/bold cyan]\n{json.dumps(self.container_config, indent=2)}") - - def start(self, *arguments): - with AwsCredsEntryPoint(self): - return self._start("", *arguments) - - def start_in_layer(self, *arguments): - """Run a command that can only be performed in layer level.""" - self.paths.check_for_layer_location() - - return self.start(*arguments) - - def exec(self, command, *arguments): - with AwsCredsEntryPoint(self): - return self._exec(command, *arguments) diff --git a/leverage/containers/kubectl.py b/leverage/containers/kubectl.py deleted file mode 100644 index 8a21a8b..0000000 --- a/leverage/containers/kubectl.py +++ /dev/null @@ -1,145 +0,0 @@ -import os -from dataclasses import dataclass -from enum import Enum -from pathlib import Path - -from click.exceptions import Exit -from docker.types import Mount -import ruamel.yaml -import simple_term_menu - -from leverage import logger -from leverage._utils import AwsCredsEntryPoint, ExitError, CustomEntryPoint -from leverage.container import TFContainer - - -@dataclass -class ClusterInfo: - cluster_name: str - profile: str - region: str - - -class MetadataTypes(Enum): - K8S_CLUSTER = "k8s-eks-cluster" - - -class KubeCtlContainer(TFContainer): - """Container specifically tailored to run kubectl commands.""" - - KUBECTL_CLI_BINARY = "/usr/local/bin/kubectl" - KUBECTL_CONFIG_PATH = Path(f"/home/{TFContainer.CONTAINER_USER}/.kube") - KUBECTL_CONFIG_FILE = KUBECTL_CONFIG_PATH / Path("config") - METADATA_FILENAME = "metadata.yaml" - - def __init__(self, client): - super().__init__(client) - - self.entrypoint = self.KUBECTL_CLI_BINARY - - self.host_kubectl_config_dir = Path.home() / Path(f".kube/{self.project}") - if not self.host_kubectl_config_dir.exists(): - # make sure the folder exists before mounting it - self.host_kubectl_config_dir.mkdir(parents=True) - - self.container_config["host_config"]["Mounts"].append( - # the container is expecting a file named "config" here - Mount( - source=str(self.host_kubectl_config_dir), - target=str(self.KUBECTL_CONFIG_PATH), - type="bind", - ) - ) - - def start_shell(self): - with AwsCredsEntryPoint(self, override_entrypoint=""): - self._start(self.SHELL) - - def configure(self, ci: ClusterInfo = None): - """ - Add the given EKS cluster configuration to the .kube/ files. 
- """ - if ci: - # if you have the details, generate the command right away - cmd = f"aws eks update-kubeconfig --region {ci.region} --name {ci.cluster_name} --profile {ci.profile}" - else: - # otherwise go get them from the layer - logger.info("Retrieving k8s cluster information...") - with CustomEntryPoint(self, entrypoint=""): - cmd = self._get_eks_kube_config() - - logger.info("Configuring context...") - with AwsCredsEntryPoint(self, override_entrypoint=""): - exit_code = self._start(cmd) - - if exit_code: - raise Exit(exit_code) - - logger.info("Done.") - - def _get_eks_kube_config(self) -> str: - tf_binary = self.TOFU_BINARY if "tofu" in self.image_tag else self.TERRAFORM_BINARY - exit_code, output = self._start_with_output(f"{tf_binary} output -no-color") # TODO: override on CM? - if exit_code: - raise ExitError(exit_code, output) - - aws_eks_cmd = next(op for op in output.split("\r\n") if op.startswith("aws eks update-kubeconfig")) - return aws_eks_cmd + f" --region {self.region}" - - def _scan_clusters(self): - """ - Scan all the subdirectories in search of "cluster" metadata files. - """ - for root, dirs, files in os.walk(self.paths.cwd): - # exclude hidden directories - dirs[:] = [d for d in dirs if d[0] != "."] - - for file in files: - if file != self.METADATA_FILENAME: - continue - - cluster_file = Path(root) / file - try: - with open(cluster_file) as cluster_yaml_file: - data = ruamel.yaml.safe_load(cluster_yaml_file) - if data.get("type") != MetadataTypes.K8S_CLUSTER.value: - continue - except Exception as exc: - logger.warning(exc) - continue - else: - yield Path(root), data - - def discover(self): - """ - Do a scan down the tree of subdirectories looking for k8s clusters metadata files. - Open up a menu with all the found items, where you can pick up and configure it on your .kubeconfig file. 
- """ - cluster_files = [(path, data) for path, data in self._scan_clusters()] - if not cluster_files: - raise ExitError(1, "No clusters found.") - - terminal_menu = simple_term_menu.TerminalMenu( - [f"{c[1]['data']['cluster_name']}: {str(c[0])}" for c in cluster_files], title="Clusters found:" - ) - menu_entry_index = terminal_menu.show() - if menu_entry_index is None: - # selection cancelled - return - - layer_path = cluster_files[menu_entry_index][0] - cluster_data = cluster_files[menu_entry_index][1] - cluster_info = ClusterInfo( - cluster_name=cluster_data["data"]["cluster_name"], - profile=cluster_data["data"]["profile"], - region=cluster_data["data"]["region"], - ) - - # cluster is the host path, so in order to be able to run commands in that layer - # we need to convert it into a relative inside the container - self.container_config["working_dir"] = ( - self.paths.guest_base_path / layer_path.relative_to(self.paths.cwd) - ).as_posix() - # now simulate we are standing on the chosen layer folder - self.paths.update_cwd(layer_path) - self.configure(cluster_info) diff --git a/leverage/modules/tf.py b/leverage/modules/tf.py index c380fe3..7439a3f 100644 --- a/leverage/modules/tf.py +++ b/leverage/modules/tf.py @@ -1,6 +1,6 @@ import re from pathlib import Path -from typing import Sequence, List +from typing import Sequence import click from click.exceptions import Exit diff --git a/leverage/path.py b/leverage/path.py index b5cd0bd..1cbac61 100644 --- a/leverage/path.py +++ b/leverage/path.py @@ -2,8 +2,6 @@ Utilities to obtain relevant files' and directories' locations """ -import os -import pathlib from pathlib import Path from subprocess import CalledProcessError from subprocess import PIPE From 849e4c1d19f452ff729fb80f977187bc3c8d6bf1 Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Fri, 28 Nov 2025 14:58:00 -0300 Subject: [PATCH 31/46] Tests cleanup WIP --- tests/test_backend_config.py | 1 - tests/test_conf.py | 32 ------------------------------- tests/test_containers/__init__.py | 13 ------------- tests/test_containers/conftest.py | 10 ---------- tests/test_modules/test_tf.py | 17 ---------------- 5 files changed, 73 deletions(-) delete mode 100644 tests/test_containers/conftest.py diff --git a/tests/test_backend_config.py b/tests/test_backend_config.py index a1ca13c..9c65db8 100644 --- a/tests/test_backend_config.py +++ b/tests/test_backend_config.py @@ -1,7 +1,6 @@ """Tests for backend configuration utilities.""" import pytest -from pathlib import Path from leverage._backend_config import set_backend_key, get_backend_key from leverage._utils import ExitError diff --git a/tests/test_conf.py b/tests/test_conf.py index c8f783e..2d3b85c 100644 --- a/tests/test_conf.py +++ b/tests/test_conf.py @@ -1,7 +1,4 @@ -from unittest import mock - import pytest -from click.testing import CliRunner from leverage import leverage from leverage.conf import load @@ -55,32 +52,3 @@ def test_load_config(monkeypatch, click_context, tmp_path, write_files, expected loaded_values = load() assert dict(loaded_values) == expected_values - - -@pytest.mark.parametrize( - "tofu, conf", - [ - (True, {"TERRAFORM_IMAGE_TAG": "1.1.1-tofu-2.2.2"}), - (False, {"TERRAFORM_IMAGE_TAG": "1.1.1-2.2.2"}), - ], -) -def test_version_validation(tofu, conf): - """ - Test that we get a warning if we are working with a version lower than the required by the project. 
- """ - runner = CliRunner() - with ( - mock.patch("leverage.conf.load", return_value=conf), - mock.patch.dict("leverage.MINIMUM_VERSIONS", {"TERRAFORM": "3.3.3", "TOOLBOX": "4.4.4"}), - ): - result = runner.invoke(leverage) - - if not tofu: - assert ( - "Your current TERRAFORM version (1.1.1) is lower than the required minimum (3.3.3)" - in result.output.replace("\n", "") - ) - assert "Your current TOOLBOX version (2.2.2) is lower than the required minimum (4.4.4)" in result.output.replace( - "\n", "" - ) - assert result.exit_code == 0 diff --git a/tests/test_containers/__init__.py b/tests/test_containers/__init__.py index 4d7af7d..d82826a 100644 --- a/tests/test_containers/__init__.py +++ b/tests/test_containers/__init__.py @@ -7,16 +7,3 @@ "SecurityOpt": ["label:disable"], "Mounts": [], } - - -def container_fixture_factory(container_class, *args, **kwargs): - """ - Given a container class, return an instance of it with patched working variables. - """ - mocked_client = MagicMock() - mocked_client.api.create_host_config.return_value = FAKE_HOST_CONFIG - with patch("leverage.container.load_env", return_value=FAKE_ENV): - container = container_class(mocked_client, *args, **kwargs) - container._run = Mock() - container._check_sso_token = Mock() - return container diff --git a/tests/test_containers/conftest.py b/tests/test_containers/conftest.py deleted file mode 100644 index 30d0e6c..0000000 --- a/tests/test_containers/conftest.py +++ /dev/null @@ -1,10 +0,0 @@ -from unittest.mock import patch, Mock - -import pytest - - -@pytest.fixture -def fake_os_user(): - with patch("os.getuid", Mock(return_value=1234)): - with patch("os.getgid", Mock(return_value=5678)): - yield diff --git a/tests/test_modules/test_tf.py b/tests/test_modules/test_tf.py index 18293a2..bc68e15 100644 --- a/tests/test_modules/test_tf.py +++ b/tests/test_modules/test_tf.py @@ -4,25 +4,8 @@ from click import get_current_context from leverage._internals import State -from leverage.container import TFContainer from leverage.modules.tf import _init from leverage.modules.tf import has_a_plan_file -from tests.test_containers import container_fixture_factory - - -@pytest.fixture -def tf_container(muted_click_context): - tf_container = container_fixture_factory(TFContainer) - - # this is required because of the @pass_container decorator - ctx = get_current_context() - state = State() - state.container = tf_container - ctx.obj = state - - # assume we are on a valid location - with patch.object(tf_container.paths, "check_for_layer_location", Mock()): - yield tf_container @pytest.mark.parametrize( From ecaca0ded92b8dc6db14d9173063322aaeaa7ca6 Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Fri, 28 Nov 2025 15:05:14 -0300 Subject: [PATCH 32/46] Update CLAUDE.md --- CLAUDE.md | 245 +++++++++++++++++++++--------------------------------- 1 file changed, 94 insertions(+), 151 deletions(-) diff --git a/CLAUDE.md b/CLAUDE.md index 21846ad..342e559 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -34,22 +34,24 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co ## Architecture -Leverage CLI is a Python-based command-line tool for managing Binbash Leverage projects. It uses a dockerized approach to encapsulate infrastructure tools. +Leverage CLI is a Python-based command-line tool for managing Binbash Leverage projects. It uses host-based execution to run infrastructure tools directly on the system. 
### Core Structure - `leverage/leverage.py` - Main CLI entry point using Click framework - `leverage/modules/` - Command modules (aws, terraform, kubectl, etc.) -- `leverage/container.py` - Docker container management and execution +- `leverage/modules/runner.py` - Generic binary runner base class +- `leverage/modules/tfrunner.py` - Terraform/OpenTofu-specific runner - `leverage/conf.py` - Configuration loading from build.env files - `leverage/tasks.py` - Task system for build scripts - `leverage/path.py` - Path utilities and git repository handling ### Key Components - **Module System**: Commands are organized in modules under `leverage/modules/` -- **Container Integration**: Heavy use of Docker containers for tool execution +- **Host-Based Execution**: Direct execution of system binaries (terraform, tofu, kubectl, etc.) +- **Runner Architecture**: Generic Runner class with specialized subclasses (TFRunner) - **Configuration Management**: Hierarchical loading of build.env files - **Task System**: Decorator-based task definition system for build scripts -- **AWS Integration**: Extensive AWS credential and service management +- **AWS Integration**: Extensive AWS credential and service management via SSO/MFA ### Command Structure The CLI follows this pattern: @@ -69,98 +71,16 @@ Key modules include: ### Version Management - Supports Python 3.9-3.13 - Version defined in `leverage/__init__.py` -- Minimum tool versions enforced via `MINIMUM_VERSIONS` -- Docker image versioning through `__toolbox_version__` +- Binary version validation on initialization (for TFRunner) ### Configuration - Uses `build.env` files for project configuration - Hierarchical loading from project root to current directory - Environment-specific overrides supported -## Docker Container Architecture for Terraform/OpenTofu +## Execution Architecture -The CLI uses a containerized approach for all Terraform/OpenTofu operations to ensure consistent tool versions and isolated execution environments. 
- -### Container Classes - -#### TFContainer (`leverage/container.py:436-687`) -Primary container for Terraform/OpenTofu execution: -- **Image**: `binbash/leverage-toolbox` with user-specific permissions -- **Binaries**: `/bin/terraform` (when `terraform=True`) or `/bin/tofu` (default) -- **Mount Points**: - - Project root → `/leverage` (guest base path) - - AWS credentials directory → `/tmp/.aws` - - Git config file → `/etc/gitconfig` - - Optional: TF plugin cache directory (maintains symlinks) - - Optional: SSH agent socket → `/ssh-agent` - -#### TFautomvContainer (`leverage/container.py:689-717`) -Extends TFContainer for TFAutomv operations: -- **Binary**: `/usr/local/bin/tfautomv` -- Inherits all TFContainer mounts and configuration - -### Configuration File Management - -#### Environment Variables in Containers: -- `COMMON_CONFIG_FILE` → `common.tfvars` -- `ACCOUNT_CONFIG_FILE` → `account.tfvars` -- `BACKEND_CONFIG_FILE` → `backend.tfvars` -- `AWS_SHARED_CREDENTIALS_FILE` → `/tmp/.aws/credentials` -- `AWS_CONFIG_FILE` → `/tmp/.aws/config` -- `SSO_CACHE_DIR` → `/tmp/.aws/sso/cache` - -#### Terraform Variable Files: -The `tf_default_args` property automatically includes: -- All `*.tfvars` files from `common/` directory -- All `*.tfvars` files from account-specific directory - -### Docker Execution Points - -#### Terraform/OpenTofu Commands (`leverage/modules/tf.py`) -- Container creation for `tofu` and `terraform` commands (lines 38, 56) -- Command execution via `tf.start()` for all operations -- **Supported Commands**: `init`, `plan`, `apply`, `destroy`, `output`, `version`, `shell`, `format`, `validate`, `import`, `refresh-credentials` - -#### TFAutomv Commands (`leverage/modules/tfautomv.py`) -- Container creation for `tfautomv` commands (line 24) -- Command execution via `tf.start_in_layer()` (line 36) - -### Container Lifecycle - -1. **Image Verification**: `ensure_image()` builds local image with user permissions -2. **Container Creation**: `_create_container()` with mounted volumes and environment -3. **Authentication Setup**: SSO token validation or MFA credential handling -4. **Command Execution**: Interactive (`_start()`) or silent (`_exec()`) -5. **Cleanup**: Automatic container stop and removal - -### Authentication & Credentials - -#### SSO Authentication: -- Token validation before container execution -- Automatic credential refresh via `refresh_layer_credentials()` -- Browser-based authentication flow with user code - -#### MFA Authentication: -- Script-based authentication via `aws-mfa-entrypoint.sh` -- Environment variable adjustments for credential paths - -#### Credential Mounting: -- Host AWS credentials directory mounted to container -- Separate credential files for different authentication methods - -### Backend Configuration Management - -#### S3 Backend Handling: -- Automatic `backend.tfvars` parameter injection for `init` commands -- Dynamic state key generation based on layer path structure -- Backend block validation in `config.tf` files -- Support for legacy naming conventions (tf- vs terraform-) - -**IMPORTANT**: As of the latest update, Leverage CLI now uses **host-based execution** instead of Docker containers: - -## Host-Based Execution Architecture - -The CLI has been updated to use host-based execution for improved performance and flexibility while maintaining all functionality. +The CLI executes infrastructure tools directly on the host system using the Runner architecture. 
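Before the class-by-class breakdown, here is a rough sketch of the binary-flavor validation described under TFRunner below. This is an assumption-level illustration of checking the `--version` banner, not the actual `TFRunner` code; the helper name is hypothetical:

```python
import subprocess

def binary_flavor_matches(binary_path: str, terraform: bool = False) -> bool:
    """Best-effort check that the resolved binary is the expected tool.

    OpenTofu prints "OpenTofu vX.Y.Z" and Terraform prints "Terraform vX.Y.Z"
    as the first line of `--version` output, so a prefix match on that line
    is enough to catch a mislabeled binary.
    """
    result = subprocess.run([binary_path, "--version"], capture_output=True, text=True)
    first_line = result.stdout.splitlines()[0] if result.stdout else ""
    expected = "Terraform" if terraform else "OpenTofu"
    return first_line.startswith(expected)
```

The tests in this series stub exactly this banner (`"OpenTofu v1.6.0"`), which is why every TFRunner test accounts for one extra `--version` subprocess call.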
### Core Runner Classes @@ -179,55 +99,74 @@ Generic command runner base class: Terraform/OpenTofu-specific runner extending Runner: - **Binaries**: Uses system-installed `terraform` or `tofu` binaries - **Configuration**: Accepts `terraform=True` for Terraform, defaults to OpenTofu +- **Binary Validation**: Validates binary type by checking `--version` output + - Ensures `tofu` binary is actually OpenTofu (not Terraform) + - Ensures `terraform` binary is actually Terraform (not OpenTofu) - **Error Messages**: Provides installation URLs when binaries are not found - Terraform: https://developer.hashicorp.com/terraform/install - OpenTofu: https://opentofu.org/docs/intro/install/ - **Environment Variables**: Initialized with AWS credential file paths via `env_vars` parameter -- **No Containers**: Direct binary execution on host system - -### Command Flow Architecture - -#### Terraform/OpenTofu Command Flow (`leverage/modules/tf.py`) - -1. **CLI Entry Points**: - - `@click.group() tofu()` (lines 22-35) - Creates TFRunner with OpenTofu binary - - `@click.group() terraform()` (lines 38-51) - Creates TFRunner with Terraform binary - - Both set up credential environment variables for AWS config and credentials files - -2. **Command Decoration**: - - `@pass_runner` - Injects TFRunner instance from Click context - - `@pass_paths` - Injects PathsHandler instance for file/directory management - -3. **Supported Commands**: - - `init` - Layer initialization with backend configuration injection - - `plan` - Execution plan generation with auto-discovered tfvars - - `apply` - Infrastructure changes with conditional tfvars injection - - `destroy` - Infrastructure destruction - - `output` - Output variable display - - `version` - Binary version display - - `format` - Code formatting (recursive by default) - - `force-unlock` - State file lock removal - - `validate` - Configuration validation - - `validate-layout` - Leverage convention validation - - `import` - Resource import - - `refresh-credentials` - AWS credential refresh - -4. 
**Multi-Layer Support**: - - `--layers` option for operating on multiple layers from account directory - - Layer validation and backend key management via `invoke_for_all_commands()` - - Automatic backend key generation based on layer path structure - -### Authentication Management - -#### SSO Authentication (`leverage/modules/auth.py`) - -**Token Validation** (`check_sso_token()` - lines 98-127): -- Validates SSO token existence in cache directory + +### Command Modules + +#### Terraform/OpenTofu Commands (`leverage/modules/tf.py`) + +**CLI Entry Points**: +- `tofu` - Creates TFRunner with OpenTofu binary (`tofu`) +- `terraform` - Creates TFRunner with Terraform binary (`terraform`) +- Both set up credential environment variables for AWS config and credentials files + +**Command Decorators**: +- `@pass_runner` - Injects TFRunner instance from Click context +- `@pass_paths` - Injects PathsHandler instance for file/directory management + +**Supported Commands**: +- `init` - Layer initialization with backend configuration injection +- `plan` - Execution plan generation with auto-discovered tfvars +- `apply` - Infrastructure changes with conditional tfvars injection +- `destroy` - Infrastructure destruction +- `output` - Output variable display +- `version` - Binary version display +- `format` - Code formatting (recursive by default) +- `force-unlock` - State file lock removal +- `validate` - Configuration validation +- `validate-layout` - Leverage convention validation +- `import` - Resource import +- `refresh-credentials` - AWS credential refresh + +**Multi-Layer Support**: +- `--layers` option for operating on multiple layers from account directory +- Layer validation and backend key management via `invoke_for_all_commands()` +- Automatic backend key generation based on layer path structure + +#### Kubectl Commands (`leverage/modules/kubectl.py`) + +Uses generic Runner class to execute `kubectl` binary: +- **Binary**: System-installed `kubectl` +- **Configuration**: Sets KUBECONFIG environment variable to project-specific path +- **AWS Integration**: Configures kubectl contexts for EKS clusters +- **Commands**: + - `configure` - Add EKS cluster from current layer to kubectl config + - `discover` - Scan for cluster metadata files and configure selected cluster +- All other kubectl commands pass through to the binary + +#### TFAutomv Commands (`leverage/modules/tfautomv.py`) + +Uses generic Runner class to execute `tfautomv` binary: +- **Binary**: System-installed `tfautomv` +- **Configuration**: Passes terraform binary path via `--terraform-bin` flag +- **Integration**: Uses same tfvars discovery as Terraform/OpenTofu commands + +### Authentication Management (`leverage/modules/auth.py`) + +#### SSO Authentication + +**Token Validation** (`check_sso_token()`): +- Validates SSO token existence in cache directory (`~/.aws/sso/cache/`) - Checks token expiration against current time - Provides clear error messages for missing or expired tokens -- Token file location: `~/.aws/sso/cache/` -**Credential Refresh** (`refresh_layer_credentials()` - lines 130-204): +**Credential Refresh** (`refresh_layer_credentials()`): - Parses Terraform files to discover required AWS profiles - Uses boto3 SSO client to retrieve temporary credentials - Updates AWS config file with credential expiration timestamps @@ -235,28 +174,28 @@ Terraform/OpenTofu-specific runner extending Runner: - Implements 30-minute early renewal to avoid mid-operation expiration - Supports cross-account profile resolution -**Profile 
-**Profile Discovery** (`get_profiles()` - lines 68-88):
+**Profile Discovery** (`get_profiles()`):
 - Scans `config.tf`, `locals.tf`, `runtime.tf` for profile references
 - Extracts profile variables from Terraform configurations
 - Reads backend profile from `backend.tfvars`
 
 ### Configuration Management
 
-#### Automatic tfvars Discovery (`tf_default_args()` - lines 133-154):
+#### Automatic tfvars Discovery (`tf_default_args()`):
 - Discovers all `*.tfvars` files in `common/` directory
 - Discovers all `*.tfvars` files in account-specific directory
 - Returns as `-var-file=` arguments for Terraform/OpenTofu (see the sketch below)
 - Used automatically in plan, destroy, validate, and conditionally in apply
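+
+A rough sketch of that discovery (shown with an explicit `account_dir`
+parameter and a `common/` sibling directory as assumptions for illustration;
+the real helper resolves these paths itself):
+
+```python
+from pathlib import Path
+from typing import List
+
+
+def default_var_files(account_dir: Path) -> List[str]:
+    # Gather every *.tfvars from common/ and the account directory and
+    # turn each one into a -var-file argument for terraform/tofu.
+    tfvars = sorted((account_dir.parent / "common").glob("*.tfvars"))
+    tfvars += sorted(account_dir.glob("*.tfvars"))
+    return [f"-var-file={path}" for path in tfvars]
+```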
 
 #### Backend Configuration:
-- Backend config file path injected during `init` command (line 336)
-- Automatic backend key generation in `invoke_for_all_commands()` (lines 291-294)
-- Backend key validation in `validate_layout()` (lines 538-550)
+- Backend config file path injected during `init` command
+- Automatic backend key generation in `invoke_for_all_commands()`
+- Backend key validation in `validate_layout()`
 - Support for legacy naming conventions (tf- vs terraform-, base- vs tools-)
 
 ### Execution Flow
 
-**Standard Command Execution**:
+#### Standard Command Execution
 1. User runs `leverage tofu|terraform [args]`
 2. Click creates TFRunner instance with credential environment variables
 3. Command function decorated with `@pass_runner` and `@pass_paths`
@@ -269,26 +208,30 @@ Terraform/OpenTofu-specific runner extending Runner:
    - User-provided arguments
 7. Exit code returned to CLI
 
-**Multi-Layer Execution**:
+#### Multi-Layer Execution
 1. User runs command with `--layers layer1,layer2` from account directory
 2. `invoke_for_all_commands()` validates all layers
 3. Backend keys generated/validated for each layer
 4. Command executed sequentially for each layer with layer-specific working directory
 
-### Benefits of Host-Based Execution
+### System Requirements
 
-- **Performance**: No container startup overhead or image building
-- **Flexibility**: Use any installed tool version (including custom builds)
-- **IDE Integration**: Better debugging and tooling support
-- **Simplicity**: Direct binary execution with standard environment variables
-- **Plugin Compatibility**: Native Terraform/OpenTofu plugin caching
-- **Development Speed**: Faster iteration during development
+For full functionality, ensure the following binaries are installed and available in PATH:
 
-### Host Requirements
+**Required**:
+- `terraform` or `tofu` - For Terraform/OpenTofu operations
+- `aws` - AWS CLI for SSO authentication (via boto3)
+- Python 3.9-3.13
 
-For full functionality, ensure the following binaries are installed and available in PATH:
-- `terraform` or `tofu` (for Terraform/OpenTofu operations)
-- `aws` CLI (for SSO authentication via boto3)
+**Optional**:
+- `kubectl` - For Kubernetes operations
+- `tfautomv` - For TFAutomv operations
+
+### Benefits of Current Architecture
 
-Optional binaries:
-- `tfautomv` (for TFAutomv operations)
\ No newline at end of file
+- **Performance**: Direct binary execution without overhead
+- **Flexibility**: Use any installed tool version (including custom builds)
+- **IDE Integration**: Better debugging and tooling support
+- **Simplicity**: Standard environment variables and execution
+- **Plugin Compatibility**: Native Terraform/OpenTofu plugin caching
+- **Development Speed**: Faster iteration during development
\ No newline at end of file

From b640b9e073ba7d2ec963c8ccc99bad50fbc87f3e Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Fri, 28 Nov 2025 15:08:53 -0300
Subject: [PATCH 33/46] Formatting

---
 leverage/_utils.py                  |  1 +
 leverage/modules/aws.py             |  2 +-
 leverage/modules/kubectl.py         |  8 ++++++--
 leverage/modules/tfautomv.py        |  3 +--
 leverage/modules/utils.py           |  6 +++++-
 tests/test_modules/test_tfrunner.py | 12 +++++++++---
 6 files changed, 23 insertions(+), 9 deletions(-)

diff --git a/leverage/_utils.py b/leverage/_utils.py
index a0232a3..f5d1b42 100644
--- a/leverage/_utils.py
+++ b/leverage/_utils.py
@@ -74,6 +74,7 @@ def __init__(self, exit_code: int, error_description: str):
 
     def show(self):
         logger.error(self.message)
 
+
 def parse_tf_file(file: Path):
     """
     Open and parse an HCL file.
diff --git a/leverage/modules/aws.py b/leverage/modules/aws.py
index 2ffa13a..7db8ad0 100644
--- a/leverage/modules/aws.py
+++ b/leverage/modules/aws.py
@@ -31,7 +31,7 @@ def refresh_aws_credentials(paths: PathsHandler) -> None:
     """
     check_sso_token(paths)
 
-    try: # if we are not in a layer, we don't need to refresh the credentials
+    try:  # if we are not in a layer, we don't need to refresh the credentials
        paths.check_for_layer_location()
     except ExitError:
         return
diff --git a/leverage/modules/kubectl.py b/leverage/modules/kubectl.py
index 947ae5d..b5940c6 100644
--- a/leverage/modules/kubectl.py
+++ b/leverage/modules/kubectl.py
@@ -41,13 +41,14 @@ def refresh_kubectl_credentials(paths: PathsHandler) -> None:
     """
     check_sso_token(paths)
 
-    try: # if we are not in a layer, we don't need to refresh the credentials
+    try:  # if we are not in a layer, we don't need to refresh the credentials
        paths.check_for_layer_location()
     except ExitError:
         return
 
     refresh_layer_credentials(paths)
 
+
 @click.group(invoke_without_command=True, context_settings={"ignore_unknown_options": True})
 @click.argument("args", nargs=-1, type=click.UNPROCESSED)
 @pass_state
@@ -71,7 +72,9 @@ def kubectl(context, state, args):
         ),
         env_vars=credentials_env_vars,
     )
-    _handle_subcommand(context=context, runner=state.runner, args=args, pre_invocation_callback=refresh_kubectl_credentials)
+    _handle_subcommand(
+        context=context, runner=state.runner, args=args, pre_invocation_callback=refresh_kubectl_credentials
+    )
 
 
 def _configure(ci: ClusterInfo = None, layer_path: Path = None):
@@ -94,6 +97,7 @@ def _configure(ci: ClusterInfo = None, layer_path: Path = None):
 
     logger.info("Done.")
 
+
 @pass_paths
 def _get_eks_kube_config(paths: PathsHandler, layer_path: Path) -> str:
     # TODO: Get rid of this ugly workaround
diff --git a/leverage/modules/tfautomv.py b/leverage/modules/tfautomv.py
index 88a1cf4..7f0eb6e 100644
--- a/leverage/modules/tfautomv.py
+++ b/leverage/modules/tfautomv.py
@@ -11,8 +11,7 @@
 @click.argument("args", nargs=-1)
 @pass_state
 def tfautomv(state, args):
-    """Run TFAutomv commands in the context of the current project.`
-    """
+    """Run TFAutomv commands in the context of the current project."""
     tf_default_args_string = " ".join(tf_default_args())
     credentials_env_vars = {
         "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file),
diff --git a/leverage/modules/utils.py b/leverage/modules/utils.py
index fb9e39e..5e894a0 100644
--- a/leverage/modules/utils.py
+++ b/leverage/modules/utils.py
@@ -7,7 +7,11 @@
 
 
 def _handle_subcommand(
-    context: Context, runner: Runner, args: Tuple[str, ...], caller_name: Optional[str] = None, pre_invocation_callback: Optional[Callable] = None
+    context: Context,
+    runner: Runner,
+    args: Tuple[str, ...],
+    caller_name: Optional[str] = None,
+    pre_invocation_callback: Optional[Callable] = None,
 ) -> None:
     """Decide if command corresponds to a wrapped one or not and run accordingly.
diff --git a/tests/test_modules/test_tfrunner.py b/tests/test_modules/test_tfrunner.py
index b7a1706..57d43e8 100644
--- a/tests/test_modules/test_tfrunner.py
+++ b/tests/test_modules/test_tfrunner.py
@@ -227,8 +227,8 @@ def test_run_interactive_false(mocker):
     mocker.patch("shutil.which", return_value="/usr/bin/tofu")
     mock_subprocess = mocker.patch("subprocess.run")
     # First call for --version, second for the actual command
-    version_output = type('obj', (object,), {'stdout': 'OpenTofu v1.6.0', 'returncode': 0})()
-    run_output = type('obj', (object,), {'stdout': 'terraform output', 'stderr': '', 'returncode': 0})()
+    version_output = type("obj", (object,), {"stdout": "OpenTofu v1.6.0", "returncode": 0})()
+    run_output = type("obj", (object,), {"stdout": "terraform output", "stderr": "", "returncode": 0})()
     mock_subprocess.side_effect = [version_output, run_output]
 
     runner = TFRunner(binary="")
@@ -254,7 +254,13 @@ def test_run_with_multiple_args(mocker):
     assert result == 0
 
     # Check the last call
-    assert mock_subprocess.call_args[0][0] == ["/usr/bin/terraform", "plan", "-var", "region=us-east-1", "-out=plan.tfplan"]
+    assert mock_subprocess.call_args[0][0] == [
+        "/usr/bin/terraform",
+        "plan",
+        "-var",
+        "region=us-east-1",
+        "-out=plan.tfplan",
+    ]
     assert mock_subprocess.call_args[1]["env"] == os.environ.copy()
     assert mock_subprocess.call_args[1]["cwd"] is None
 

From 985a101d40d076b66c76a28c31d354a76431865e Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Fri, 28 Nov 2025 15:29:04 -0300
Subject: [PATCH 34/46] Update pyproject and lockfile

---
 poetry.lock    | 1418 +++++++++++++++++++++++++-----------------------
 pyproject.toml |    4 +-
 2 files changed, 755 insertions(+), 667 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index eb38254..0389d9d 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -127,95 +127,112 @@ crt = ["awscrt (==0.19.17)"]
 
 [[package]]
 name = "certifi"
-version = "2025.8.3"
+version = "2025.11.12"
 description = "Python package for providing Mozilla's CA Bundle."
 category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"},
-    {file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"},
+    {file = "certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b"},
+    {file = "certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"},
 ]
 
 [[package]]
 name = "cffi"
-version = "1.17.1"
+version = "2.0.0"
 description = "Foreign Function Interface for Python calling C code."
category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, - {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, - {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, - {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, - {file = 
"cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, - {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, - {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, - {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, - {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, - {file = 
"cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, - {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, - {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, - {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, - {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, - {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, - {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, + {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, + {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"}, + {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"}, + {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"}, + {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"}, + {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"}, + {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = 
"sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"}, + {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"}, + {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"}, + {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"}, + {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"}, + {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"}, + {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"}, + {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"}, + {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"}, + {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"}, + {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"}, + {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"}, + {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = 
"sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"}, + {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"}, + {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"}, + {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"}, ] [package.dependencies] -pycparser = "*" +pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} [[package]] name = "cfgv" @@ -231,91 +248,125 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.4.3" +version = "3.4.4" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9"}, - 
{file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0f2be7e0cf7754b9a30eb01f4295cc3d4358a479843b31f328afd210e2c7598c"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c60e092517a73c632ec38e290eba714e9627abe9d301c8c8a12ec32c314a2a4b"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:252098c8c7a873e17dd696ed98bbe91dbacd571da4b87df3736768efa7a792e4"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3653fad4fe3ed447a596ae8638b437f827234f01a8cd801842e43f3d0a6b281b"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8999f965f922ae054125286faf9f11bc6932184b93011d138925a1773830bbe9"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:d95bfb53c211b57198bb91c46dd5a2d8018b3af446583aab40074bf7988401cb"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:5b413b0b1bfd94dbf4023ad6945889f374cd24e3f62de58d6bb102c4d9ae534a"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:b5e3b2d152e74e100a9e9573837aba24aab611d39428ded46f4e4022ea7d1942"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a2d08ac246bb48479170408d6c19f6385fa743e7157d716e144cad849b2dd94b"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-win32.whl", hash = "sha256:ec557499516fc90fd374bf2e32349a2887a876fbf162c160e3c01b6849eaf557"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:5d8d01eac18c423815ed4f4a2ec3b439d654e55ee4ad610e153cf02faf67ea40"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:70bfc5f2c318afece2f5838ea5e4c3febada0be750fcf4775641052bbba14d05"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23b6b24d74478dc833444cbd927c338349d6ae852ba53a0d02a2de1fce45b96e"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:34a7f768e3f985abdb42841e20e17b330ad3aaf4bb7e7aeeb73db2e70f077b99"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb731e5deb0c7ef82d698b0f4c5bb724633ee2a489401594c5c88b02e6cb15f7"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:257f26fed7d7ff59921b78244f3cd93ed2af1800ff048c33f624c87475819dd7"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1ef99f0456d3d46a50945c98de1774da86f8e992ab5c77865ea8b8195341fc19"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2c322db9c8c89009a990ef07c3bcc9f011a3269bc06782f916cd3d9eed7c9312"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:511729f456829ef86ac41ca78c63a5cb55240ed23b4b737faca0eb1abb1c41bc"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88ab34806dea0671532d3f82d82b85e8fc23d7b2dd12fa837978dad9bb392a34"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-win32.whl", hash = "sha256:16a8770207946ac75703458e2c743631c79c59c5890c80011d536248f8eaa432"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:d22dbedd33326a4a5190dd4fe9e9e693ef12160c77382d9e87919bce54f3d4ca"}, - {file = "charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a"}, - {file = "charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"}, + {file = 
"charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"}, + {file = 
"charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"}, + {file = 
"charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"}, + {file = 
"charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"}, + {file = 
"charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = 
"sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"}, + {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"}, + {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, ] [[package]] @@ -362,100 +413,116 @@ testing = ["flake8", "pytest", "pytest-cov", "pytest-randomly", "pytest-xdist", [[package]] name = "coverage" -version = "7.10.5" +version = "7.10.7" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c6a5c3414bfc7451b879141ce772c546985163cf553f08e0f135f0699a911801"}, - {file = "coverage-7.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bc8e4d99ce82f1710cc3c125adc30fd1487d3cf6c2cd4994d78d68a47b16989a"}, - {file = "coverage-7.10.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:02252dc1216e512a9311f596b3169fad54abcb13827a8d76d5630c798a50a754"}, - {file = "coverage-7.10.5-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:73269df37883e02d460bee0cc16be90509faea1e3bd105d77360b512d5bb9c33"}, - {file = "coverage-7.10.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f8a81b0614642f91c9effd53eec284f965577591f51f547a1cbeb32035b4c2f"}, - {file = "coverage-7.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6a29f8e0adb7f8c2b95fa2d4566a1d6e6722e0a637634c6563cb1ab844427dd9"}, - {file = "coverage-7.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fcf6ab569436b4a647d4e91accba12509ad9f2554bc93d3aee23cc596e7f99c3"}, - {file = "coverage-7.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:90dc3d6fb222b194a5de60af8d190bedeeddcbc7add317e4a3cd333ee6b7c879"}, - {file = "coverage-7.10.5-cp310-cp310-win32.whl", hash = "sha256:414a568cd545f9dc75f0686a0049393de8098414b58ea071e03395505b73d7a8"}, - {file = "coverage-7.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:e551f9d03347196271935fd3c0c165f0e8c049220280c1120de0084d65e9c7ff"}, - {file = "coverage-7.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c177e6ffe2ebc7c410785307758ee21258aa8e8092b44d09a2da767834f075f2"}, - {file = "coverage-7.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:14d6071c51ad0f703d6440827eaa46386169b5fdced42631d5a5ac419616046f"}, - {file = "coverage-7.10.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:61f78c7c3bc272a410c5ae3fde7792b4ffb4acc03d35a7df73ca8978826bb7ab"}, - {file = 
"coverage-7.10.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f39071caa126f69d63f99b324fb08c7b1da2ec28cbb1fe7b5b1799926492f65c"}, - {file = "coverage-7.10.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343a023193f04d46edc46b2616cdbee68c94dd10208ecd3adc56fcc54ef2baa1"}, - {file = "coverage-7.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:585ffe93ae5894d1ebdee69fc0b0d4b7c75d8007983692fb300ac98eed146f78"}, - {file = "coverage-7.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0ef4e66f006ed181df29b59921bd8fc7ed7cd6a9289295cd8b2824b49b570df"}, - {file = "coverage-7.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eb7b0bbf7cc1d0453b843eca7b5fa017874735bef9bfdfa4121373d2cc885ed6"}, - {file = "coverage-7.10.5-cp311-cp311-win32.whl", hash = "sha256:1d043a8a06987cc0c98516e57c4d3fc2c1591364831e9deb59c9e1b4937e8caf"}, - {file = "coverage-7.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:fefafcca09c3ac56372ef64a40f5fe17c5592fab906e0fdffd09543f3012ba50"}, - {file = "coverage-7.10.5-cp311-cp311-win_arm64.whl", hash = "sha256:7e78b767da8b5fc5b2faa69bb001edafcd6f3995b42a331c53ef9572c55ceb82"}, - {file = "coverage-7.10.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c2d05c7e73c60a4cecc7d9b60dbfd603b4ebc0adafaef371445b47d0f805c8a9"}, - {file = "coverage-7.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32ddaa3b2c509778ed5373b177eb2bf5662405493baeff52278a0b4f9415188b"}, - {file = "coverage-7.10.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dd382410039fe062097aa0292ab6335a3f1e7af7bba2ef8d27dcda484918f20c"}, - {file = "coverage-7.10.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7fa22800f3908df31cea6fb230f20ac49e343515d968cc3a42b30d5c3ebf9b5a"}, - {file = "coverage-7.10.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f366a57ac81f5e12797136552f5b7502fa053c861a009b91b80ed51f2ce651c6"}, - {file = "coverage-7.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f1dc8f1980a272ad4a6c84cba7981792344dad33bf5869361576b7aef42733a"}, - {file = "coverage-7.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2285c04ee8676f7938b02b4936d9b9b672064daab3187c20f73a55f3d70e6b4a"}, - {file = "coverage-7.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c2492e4dd9daab63f5f56286f8a04c51323d237631eb98505d87e4c4ff19ec34"}, - {file = "coverage-7.10.5-cp312-cp312-win32.whl", hash = "sha256:38a9109c4ee8135d5df5505384fc2f20287a47ccbe0b3f04c53c9a1989c2bbaf"}, - {file = "coverage-7.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:6b87f1ad60b30bc3c43c66afa7db6b22a3109902e28c5094957626a0143a001f"}, - {file = "coverage-7.10.5-cp312-cp312-win_arm64.whl", hash = "sha256:672a6c1da5aea6c629819a0e1461e89d244f78d7b60c424ecf4f1f2556c041d8"}, - {file = "coverage-7.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ef3b83594d933020f54cf65ea1f4405d1f4e41a009c46df629dd964fcb6e907c"}, - {file = "coverage-7.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b96bfdf7c0ea9faebce088a3ecb2382819da4fbc05c7b80040dbc428df6af44"}, - {file = "coverage-7.10.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:63df1fdaffa42d914d5c4d293e838937638bf75c794cf20bee12978fc8c4e3bc"}, - {file = "coverage-7.10.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:8002dc6a049aac0e81ecec97abfb08c01ef0c1fbf962d0c98da3950ace89b869"}, - {file = "coverage-7.10.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63d4bb2966d6f5f705a6b0c6784c8969c468dbc4bcf9d9ded8bff1c7e092451f"}, - {file = "coverage-7.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1f672efc0731a6846b157389b6e6d5d5e9e59d1d1a23a5c66a99fd58339914d5"}, - {file = "coverage-7.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3f39cef43d08049e8afc1fde4a5da8510fc6be843f8dea350ee46e2a26b2f54c"}, - {file = "coverage-7.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2968647e3ed5a6c019a419264386b013979ff1fb67dd11f5c9886c43d6a31fc2"}, - {file = "coverage-7.10.5-cp313-cp313-win32.whl", hash = "sha256:0d511dda38595b2b6934c2b730a1fd57a3635c6aa2a04cb74714cdfdd53846f4"}, - {file = "coverage-7.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:9a86281794a393513cf117177fd39c796b3f8e3759bb2764259a2abba5cce54b"}, - {file = "coverage-7.10.5-cp313-cp313-win_arm64.whl", hash = "sha256:cebd8e906eb98bb09c10d1feed16096700b1198d482267f8bf0474e63a7b8d84"}, - {file = "coverage-7.10.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0520dff502da5e09d0d20781df74d8189ab334a1e40d5bafe2efaa4158e2d9e7"}, - {file = "coverage-7.10.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d9cd64aca68f503ed3f1f18c7c9174cbb797baba02ca8ab5112f9d1c0328cd4b"}, - {file = "coverage-7.10.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0913dd1613a33b13c4f84aa6e3f4198c1a21ee28ccb4f674985c1f22109f0aae"}, - {file = "coverage-7.10.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1b7181c0feeb06ed8a02da02792f42f829a7b29990fef52eff257fef0885d760"}, - {file = "coverage-7.10.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36d42b7396b605f774d4372dd9c49bed71cbabce4ae1ccd074d155709dd8f235"}, - {file = "coverage-7.10.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b4fdc777e05c4940b297bf47bf7eedd56a39a61dc23ba798e4b830d585486ca5"}, - {file = "coverage-7.10.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:42144e8e346de44a6f1dbd0a56575dd8ab8dfa7e9007da02ea5b1c30ab33a7db"}, - {file = "coverage-7.10.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:66c644cbd7aed8fe266d5917e2c9f65458a51cfe5eeff9c05f15b335f697066e"}, - {file = "coverage-7.10.5-cp313-cp313t-win32.whl", hash = "sha256:2d1b73023854068c44b0c554578a4e1ef1b050ed07cf8b431549e624a29a66ee"}, - {file = "coverage-7.10.5-cp313-cp313t-win_amd64.whl", hash = "sha256:54a1532c8a642d8cc0bd5a9a51f5a9dcc440294fd06e9dda55e743c5ec1a8f14"}, - {file = "coverage-7.10.5-cp313-cp313t-win_arm64.whl", hash = "sha256:74d5b63fe3f5f5d372253a4ef92492c11a4305f3550631beaa432fc9df16fcff"}, - {file = "coverage-7.10.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:68c5e0bc5f44f68053369fa0d94459c84548a77660a5f2561c5e5f1e3bed7031"}, - {file = "coverage-7.10.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cf33134ffae93865e32e1e37df043bef15a5e857d8caebc0099d225c579b0fa3"}, - {file = "coverage-7.10.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ad8fa9d5193bafcf668231294241302b5e683a0518bf1e33a9a0dfb142ec3031"}, - {file = "coverage-7.10.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:146fa1531973d38ab4b689bc764592fe6c2f913e7e80a39e7eeafd11f0ef6db2"}, - {file = 
"coverage-7.10.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6013a37b8a4854c478d3219ee8bc2392dea51602dd0803a12d6f6182a0061762"}, - {file = "coverage-7.10.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:eb90fe20db9c3d930fa2ad7a308207ab5b86bf6a76f54ab6a40be4012d88fcae"}, - {file = "coverage-7.10.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:384b34482272e960c438703cafe63316dfbea124ac62006a455c8410bf2a2262"}, - {file = "coverage-7.10.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:467dc74bd0a1a7de2bedf8deaf6811f43602cb532bd34d81ffd6038d6d8abe99"}, - {file = "coverage-7.10.5-cp314-cp314-win32.whl", hash = "sha256:556d23d4e6393ca898b2e63a5bca91e9ac2d5fb13299ec286cd69a09a7187fde"}, - {file = "coverage-7.10.5-cp314-cp314-win_amd64.whl", hash = "sha256:f4446a9547681533c8fa3e3c6cf62121eeee616e6a92bd9201c6edd91beffe13"}, - {file = "coverage-7.10.5-cp314-cp314-win_arm64.whl", hash = "sha256:5e78bd9cf65da4c303bf663de0d73bf69f81e878bf72a94e9af67137c69b9fe9"}, - {file = "coverage-7.10.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5661bf987d91ec756a47c7e5df4fbcb949f39e32f9334ccd3f43233bbb65e508"}, - {file = "coverage-7.10.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a46473129244db42a720439a26984f8c6f834762fc4573616c1f37f13994b357"}, - {file = "coverage-7.10.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1f64b8d3415d60f24b058b58d859e9512624bdfa57a2d1f8aff93c1ec45c429b"}, - {file = "coverage-7.10.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:44d43de99a9d90b20e0163f9770542357f58860a26e24dc1d924643bd6aa7cb4"}, - {file = "coverage-7.10.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a931a87e5ddb6b6404e65443b742cb1c14959622777f2a4efd81fba84f5d91ba"}, - {file = "coverage-7.10.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9559b906a100029274448f4c8b8b0a127daa4dade5661dfd821b8c188058842"}, - {file = "coverage-7.10.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b08801e25e3b4526ef9ced1aa29344131a8f5213c60c03c18fe4c6170ffa2874"}, - {file = "coverage-7.10.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ed9749bb8eda35f8b636fb7632f1c62f735a236a5d4edadd8bbcc5ea0542e732"}, - {file = "coverage-7.10.5-cp314-cp314t-win32.whl", hash = "sha256:609b60d123fc2cc63ccee6d17e4676699075db72d14ac3c107cc4976d516f2df"}, - {file = "coverage-7.10.5-cp314-cp314t-win_amd64.whl", hash = "sha256:0666cf3d2c1626b5a3463fd5b05f5e21f99e6aec40a3192eee4d07a15970b07f"}, - {file = "coverage-7.10.5-cp314-cp314t-win_arm64.whl", hash = "sha256:bc85eb2d35e760120540afddd3044a5bf69118a91a296a8b3940dfc4fdcfe1e2"}, - {file = "coverage-7.10.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:62835c1b00c4a4ace24c1a88561a5a59b612fbb83a525d1c70ff5720c97c0610"}, - {file = "coverage-7.10.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5255b3bbcc1d32a4069d6403820ac8e6dbcc1d68cb28a60a1ebf17e47028e898"}, - {file = "coverage-7.10.5-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3876385722e335d6e991c430302c24251ef9c2a9701b2b390f5473199b1b8ebf"}, - {file = "coverage-7.10.5-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8048ce4b149c93447a55d279078c8ae98b08a6951a3c4d2d7e87f4efc7bfe100"}, - {file = "coverage-7.10.5-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:4028e7558e268dd8bcf4d9484aad393cafa654c24b4885f6f9474bf53183a82a"}, - {file = "coverage-7.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03f47dc870eec0367fcdd603ca6a01517d2504e83dc18dbfafae37faec66129a"}, - {file = "coverage-7.10.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2d488d7d42b6ded7ea0704884f89dcabd2619505457de8fc9a6011c62106f6e5"}, - {file = "coverage-7.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b3dcf2ead47fa8be14224ee817dfc1df98043af568fe120a22f81c0eb3c34ad2"}, - {file = "coverage-7.10.5-cp39-cp39-win32.whl", hash = "sha256:02650a11324b80057b8c9c29487020073d5e98a498f1857f37e3f9b6ea1b2426"}, - {file = "coverage-7.10.5-cp39-cp39-win_amd64.whl", hash = "sha256:b45264dd450a10f9e03237b41a9a24e85cbb1e278e5a32adb1a303f58f0017f3"}, - {file = "coverage-7.10.5-py3-none-any.whl", hash = "sha256:0be24d35e4db1d23d0db5c0f6a74a962e2ec83c426b5cac09f4234aadef38e4a"}, - {file = "coverage-7.10.5.tar.gz", hash = "sha256:f2e57716a78bc3ae80b2207be0709a3b2b63b9f2dcf9740ee6ac03588a2015b6"}, + {file = "coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a"}, + {file = "coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5"}, + {file = "coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17"}, + {file = "coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b"}, + {file = "coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87"}, + {file = "coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e"}, + {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e"}, + {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df"}, + {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0"}, + {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13"}, + {file = "coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b"}, + {file = "coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807"}, + {file = "coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59"}, + {file = "coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a"}, + {file = "coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699"}, + {file = "coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d"}, + {file = "coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e"}, + {file = "coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23"}, + {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab"}, + {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82"}, + {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2"}, + {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61"}, + {file = "coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14"}, + {file = "coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2"}, + {file = "coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a"}, + {file = "coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417"}, + {file = "coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973"}, + {file = "coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c"}, + {file = "coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7"}, + {file = "coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6"}, + {file = "coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59"}, + {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b"}, + {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a"}, + {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb"}, + {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1"}, + {file = "coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256"}, + {file = "coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba"}, + {file = "coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf"}, + {file = 
"coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d"}, + {file = "coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b"}, + {file = "coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e"}, + {file = "coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b"}, + {file = "coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49"}, + {file = "coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911"}, + {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0"}, + {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f"}, + {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c"}, + {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f"}, + {file = "coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698"}, + {file = "coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843"}, + {file = "coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546"}, + {file = "coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c"}, + {file = "coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15"}, + {file = "coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4"}, + {file = "coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0"}, + {file = "coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0"}, + {file = "coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65"}, + {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541"}, + {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6"}, + {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999"}, + {file = 
"coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2"}, + {file = "coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a"}, + {file = "coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb"}, + {file = "coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb"}, + {file = "coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520"}, + {file = "coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32"}, + {file = "coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f"}, + {file = "coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a"}, + {file = "coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360"}, + {file = "coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69"}, + {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14"}, + {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe"}, + {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e"}, + {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd"}, + {file = "coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2"}, + {file = "coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681"}, + {file = "coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880"}, + {file = "coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63"}, + {file = "coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2"}, + {file = "coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d"}, + {file = "coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0"}, + {file = "coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699"}, + {file = "coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", 
hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9"}, + {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f"}, + {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1"}, + {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0"}, + {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399"}, + {file = "coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235"}, + {file = "coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d"}, + {file = "coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a"}, + {file = "coverage-7.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fff7b9c3f19957020cac546c70025331113d2e61537f6e2441bc7657913de7d3"}, + {file = "coverage-7.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bc91b314cef27742da486d6839b677b3f2793dfe52b51bbbb7cf736d5c29281c"}, + {file = "coverage-7.10.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:567f5c155eda8df1d3d439d40a45a6a5f029b429b06648235f1e7e51b522b396"}, + {file = "coverage-7.10.7-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af88deffcc8a4d5974cf2d502251bc3b2db8461f0b66d80a449c33757aa9f40"}, + {file = "coverage-7.10.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7315339eae3b24c2d2fa1ed7d7a38654cba34a13ef19fbcb9425da46d3dc594"}, + {file = "coverage-7.10.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:912e6ebc7a6e4adfdbb1aec371ad04c68854cd3bf3608b3514e7ff9062931d8a"}, + {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f49a05acd3dfe1ce9715b657e28d138578bc40126760efb962322c56e9ca344b"}, + {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cce2109b6219f22ece99db7644b9622f54a4e915dad65660ec435e89a3ea7cc3"}, + {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:f3c887f96407cea3916294046fc7dab611c2552beadbed4ea901cbc6a40cc7a0"}, + {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:635adb9a4507c9fd2ed65f39693fa31c9a3ee3a8e6dc64df033e8fdf52a7003f"}, + {file = "coverage-7.10.7-cp39-cp39-win32.whl", hash = "sha256:5a02d5a850e2979b0a014c412573953995174743a3f7fa4ea5a6e9a3c5617431"}, + {file = "coverage-7.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:c134869d5ffe34547d14e174c866fd8fe2254918cc0a95e99052903bc1543e07"}, + {file = "coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260"}, + {file = "coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239"}, ] [package.dependencies] @@ -526,42 +593,6 @@ files = [ {file = "distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d"}, ] -[[package]] -name = "docker" -version = "6.1.0" -description = "A Python library for the Docker Engine API." 
-category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "docker-6.1.0-py3-none-any.whl", hash = "sha256:b65c999f87cb5c31700b6944dc17a631071170d1aab3ad6e23506068579f885d"}, - {file = "docker-6.1.0.tar.gz", hash = "sha256:cb697eccfeff55d232f7a7f4f88cd3770d27327c38d6c266b8f55c9f14a8491e"}, -] - -[package.dependencies] -packaging = ">=14.0" -pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} -requests = ">=2.26.0" -urllib3 = ">=1.26.0" -websocket-client = ">=0.32.0" - -[package.extras] -ssh = ["paramiko (>=2.4.3)"] - -[[package]] -name = "dockerpty" -version = "0.4.1" -description = "Python library to use the pseudo-tty of a docker container" -category = "main" -optional = false -python-versions = "*" -files = [ - {file = "dockerpty-0.4.1.tar.gz", hash = "sha256:69a9d69d573a0daa31bcd1c0774eeed5c15c295fe719c61aca550ed1393156ce"}, -] - -[package.dependencies] -six = ">=1.3.0" - [[package]] name = "docutils" version = "0.17.1" @@ -576,14 +607,14 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.3.0" +version = "1.3.1" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, - {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, + {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, + {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, ] [package.dependencies] @@ -626,14 +657,14 @@ test = ["coverage[toml]", "pretend", "pytest", "pytest-cov"] [[package]] name = "identify" -version = "2.6.13" +version = "2.6.15" description = "File identification library for Python" category = "dev" optional = false python-versions = ">=3.9" files = [ - {file = "identify-2.6.13-py2.py3-none-any.whl", hash = "sha256:60381139b3ae39447482ecc406944190f690d4a2997f2584062089848361b33b"}, - {file = "identify-2.6.13.tar.gz", hash = "sha256:da8d6c828e773620e13bfa86ea601c5a5310ba4bcd65edf378198b56a1f9fb32"}, + {file = "identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757"}, + {file = "identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf"}, ] [package.extras] @@ -641,14 +672,14 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.10" +version = "3.11" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, ] [package.extras] @@ -814,14 +845,14 @@ files = [ [[package]] name = "keyring" -version = "25.6.0" +version = "25.7.0" description = "Store and access your passwords safely." 
category = "dev" optional = false python-versions = ">=3.9" files = [ - {file = "keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd"}, - {file = "keyring-25.6.0.tar.gz", hash = "sha256:0b39998aa941431eb3d9b0d4b2460bc773b9df6fed7621c2dfb291a7e0187a66"}, + {file = "keyring-25.7.0-py3-none-any.whl", hash = "sha256:be4a0b195f149690c166e850609a477c532ddbfbaed96a404d4e43f8d5e2689f"}, + {file = "keyring-25.7.0.tar.gz", hash = "sha256:fe01bd85eb3f8fb3dd0405defdeac9a5b4f6f0439edbb3149577f244a2e8245b"}, ] [package.dependencies] @@ -838,20 +869,20 @@ check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] completion = ["shtab (>=1.1.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] +enabler = ["pytest-enabler (>=3.4)"] test = ["pyfakefs", "pytest (>=6,!=8.1.*)"] -type = ["pygobject-stubs", "pytest-mypy", "shtab", "types-pywin32"] +type = ["pygobject-stubs", "pytest-mypy (>=1.0.1)", "shtab", "types-pywin32"] [[package]] name = "lark" -version = "1.2.2" +version = "1.3.1" description = "a modern parsing library" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "lark-1.2.2-py3-none-any.whl", hash = "sha256:c2276486b02f0f1b90be155f2c8ba4a8e194d42775786db622faccd652d8e80c"}, - {file = "lark-1.2.2.tar.gz", hash = "sha256:ca807d0162cd16cef15a8feecb862d7319e7a09bdb13aef927968e45040fed80"}, + {file = "lark-1.3.1-py3-none-any.whl", hash = "sha256:c629b661023a014c37da873b4ff58a817398d12635d3bbb2c5a03be7fe5d1e12"}, + {file = "lark-1.3.1.tar.gz", hash = "sha256:b426a7a6d6d53189d318f2b6236ab5d6429eaf09259f1ca33eb716eed10d2905"}, ] [package.extras] @@ -941,73 +972,101 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "3.0.2" +version = "3.0.3" description = "Safely add untrusted strings to HTML/XML markup." 
category = "main" optional = false python-versions = ">=3.9" files = [ - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, - {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, - {file = 
"MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, - {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, + {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, + {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"}, + {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"}, + {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"}, + {file = 
"markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"}, + {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"}, + {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = 
"sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"}, + {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"}, + {file = 
"markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d"}, + {file = "markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8"}, + {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"}, ] [[package]] @@ -1036,14 +1095,14 @@ files = [ [[package]] name = "more-itertools" -version = "10.7.0" +version = "10.8.0" description = "More routines for operating on iterables, beyond itertools" category = "dev" optional = false python-versions = ">=3.9" files = [ - {file = "more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e"}, - {file = "more_itertools-10.7.0.tar.gz", hash = "sha256:9fddd5403be01a94b204faadcff459ec3568cf110265d3c54323e1e866ad29d3"}, + {file = "more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b"}, + {file = "more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd"}, ] [[package]] @@ -1060,38 +1119,38 @@ files = [ [[package]] name = "nh3" -version = "0.3.0" +version = "0.3.2" description = "Python binding to Ammonia HTML sanitizer Rust crate" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "nh3-0.3.0-cp313-cp313t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = 
"sha256:a537ece1bf513e5a88d8cff8a872e12fe8d0f42ef71dd15a5e7520fecd191bbb"}, - {file = "nh3-0.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c915060a2c8131bef6a29f78debc29ba40859b6dbe2362ef9e5fd44f11487c2"}, - {file = "nh3-0.3.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ba0caa8aa184196daa6e574d997a33867d6d10234018012d35f86d46024a2a95"}, - {file = "nh3-0.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:80fe20171c6da69c7978ecba33b638e951b85fb92059259edd285ff108b82a6d"}, - {file = "nh3-0.3.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e90883f9f85288f423c77b3f5a6f4486375636f25f793165112679a7b6363b35"}, - {file = "nh3-0.3.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0649464ac8eee018644aacbc103874ccbfac80e3035643c3acaab4287e36e7f5"}, - {file = "nh3-0.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1adeb1062a1c2974bc75b8d1ecb014c5fd4daf2df646bbe2831f7c23659793f9"}, - {file = "nh3-0.3.0-cp313-cp313t-win32.whl", hash = "sha256:7275fdffaab10cc5801bf026e3c089d8de40a997afc9e41b981f7ac48c5aa7d5"}, - {file = "nh3-0.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:423201bbdf3164a9e09aa01e540adbb94c9962cc177d5b1cbb385f5e1e79216e"}, - {file = "nh3-0.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:16f8670201f7e8e0e05ed1a590eb84bfa51b01a69dd5caf1d3ea57733de6a52f"}, - {file = "nh3-0.3.0-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:ec6cfdd2e0399cb79ba4dcffb2332b94d9696c52272ff9d48a630c5dca5e325a"}, - {file = "nh3-0.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce5e7185599f89b0e391e2f29cc12dc2e206167380cea49b33beda4891be2fe1"}, - {file = "nh3-0.3.0-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:389d93d59b8214d51c400fb5b07866c2a4f79e4e14b071ad66c92184fec3a392"}, - {file = "nh3-0.3.0-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e9e6a7e4d38f7e8dda9edd1433af5170c597336c1a74b4693c5cb75ab2b30f2a"}, - {file = "nh3-0.3.0-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7852f038a054e0096dac12b8141191e02e93e0b4608c4b993ec7d4ffafea4e49"}, - {file = "nh3-0.3.0-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af5aa8127f62bbf03d68f67a956627b1bd0469703a35b3dad28d0c1195e6c7fb"}, - {file = "nh3-0.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f416c35efee3e6a6c9ab7716d9e57aa0a49981be915963a82697952cba1353e1"}, - {file = "nh3-0.3.0-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:37d3003d98dedca6cd762bf88f2e70b67f05100f6b949ffe540e189cc06887f9"}, - {file = "nh3-0.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:634e34e6162e0408e14fb61d5e69dbaea32f59e847cfcfa41b66100a6b796f62"}, - {file = "nh3-0.3.0-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:b0612ccf5de8a480cf08f047b08f9d3fecc12e63d2ee91769cb19d7290614c23"}, - {file = "nh3-0.3.0-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c7a32a7f0d89f7d30cb8f4a84bdbd56d1eb88b78a2434534f62c71dac538c450"}, - {file = "nh3-0.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3f1b4f8a264a0c86ea01da0d0c390fe295ea0bcacc52c2103aca286f6884f518"}, - {file = "nh3-0.3.0-cp38-abi3-win32.whl", hash = "sha256:6d68fa277b4a3cf04e5c4b84dd0c6149ff7d56c12b3e3fab304c525b850f613d"}, - {file = "nh3-0.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:bae63772408fd63ad836ec569a7c8f444dd32863d0c67f6e0b25ebbd606afa95"}, - {file = "nh3-0.3.0-cp38-abi3-win_arm64.whl", 
hash = "sha256:d97d3efd61404af7e5721a0e74d81cdbfc6e5f97e11e731bb6d090e30a7b62b2"}, - {file = "nh3-0.3.0.tar.gz", hash = "sha256:d8ba24cb31525492ea71b6aac11a4adac91d828aadeff7c4586541bf5dc34d2f"}, + {file = "nh3-0.3.2-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:d18957a90806d943d141cc5e4a0fefa1d77cf0d7a156878bf9a66eed52c9cc7d"}, + {file = "nh3-0.3.2-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45c953e57028c31d473d6b648552d9cab1efe20a42ad139d78e11d8f42a36130"}, + {file = "nh3-0.3.2-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c9850041b77a9147d6bbd6dbbf13eeec7009eb60b44e83f07fcb2910075bf9b"}, + {file = "nh3-0.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:403c11563e50b915d0efdb622866d1d9e4506bce590ef7da57789bf71dd148b5"}, + {file = "nh3-0.3.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:0dca4365db62b2d71ff1620ee4f800c4729849906c5dd504ee1a7b2389558e31"}, + {file = "nh3-0.3.2-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0fe7ee035dd7b2290715baf29cb27167dddd2ff70ea7d052c958dbd80d323c99"}, + {file = "nh3-0.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a40202fd58e49129764f025bbaae77028e420f1d5b3c8e6f6fd3a6490d513868"}, + {file = "nh3-0.3.2-cp314-cp314t-win32.whl", hash = "sha256:1f9ba555a797dbdcd844b89523f29cdc90973d8bd2e836ea6b962cf567cadd93"}, + {file = "nh3-0.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:dce4248edc427c9b79261f3e6e2b3ecbdd9b88c267012168b4a7b3fc6fd41d13"}, + {file = "nh3-0.3.2-cp314-cp314t-win_arm64.whl", hash = "sha256:019ecbd007536b67fdf76fab411b648fb64e2257ca3262ec80c3425c24028c80"}, + {file = "nh3-0.3.2-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7064ccf5ace75825bd7bf57859daaaf16ed28660c1c6b306b649a9eda4b54b1e"}, + {file = "nh3-0.3.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8745454cdd28bbbc90861b80a0111a195b0e3961b9fa2e672be89eb199fa5d8"}, + {file = "nh3-0.3.2-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72d67c25a84579f4a432c065e8b4274e53b7cf1df8f792cf846abfe2c3090866"}, + {file = "nh3-0.3.2-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:13398e676a14d6233f372c75f52d5ae74f98210172991f7a3142a736bd92b131"}, + {file = "nh3-0.3.2-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03d617e5c8aa7331bd2659c654e021caf9bba704b109e7b2b28b039a00949fe5"}, + {file = "nh3-0.3.2-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2f55c4d2d5a207e74eefe4d828067bbb01300e06e2a7436142f915c5928de07"}, + {file = "nh3-0.3.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bb18403f02b655a1bbe4e3a4696c2ae1d6ae8f5991f7cacb684b1ae27e6c9f7"}, + {file = "nh3-0.3.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d66f41672eb4060cf87c037f760bdbc6847852ca9ef8e9c5a5da18f090abf87"}, + {file = "nh3-0.3.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f97f8b25cb2681d25e2338148159447e4d689aafdccfcf19e61ff7db3905768a"}, + {file = "nh3-0.3.2-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:2ab70e8c6c7d2ce953d2a58102eefa90c2d0a5ed7aa40c7e29a487bc5e613131"}, + {file = "nh3-0.3.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:1710f3901cd6440ca92494ba2eb6dc260f829fa8d9196b659fa10de825610ce0"}, + {file = "nh3-0.3.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:91e9b001101fb4500a2aafe3e7c92928d85242d38bf5ac0aba0b7480da0a4cd6"}, + 
{file = "nh3-0.3.2-cp38-abi3-win32.whl", hash = "sha256:169db03df90da63286e0560ea0efa9b6f3b59844a9735514a1d47e6bb2c8c61b"}, + {file = "nh3-0.3.2-cp38-abi3-win_amd64.whl", hash = "sha256:562da3dca7a17f9077593214a9781a94b8d76de4f158f8c895e62f09573945fe"}, + {file = "nh3-0.3.2-cp38-abi3-win_arm64.whl", hash = "sha256:cf5964d54edd405e68583114a7cba929468bcd7db5e676ae38ee954de1cfc104"}, + {file = "nh3-0.3.2.tar.gz", hash = "sha256:f394759a06df8b685a4ebfb1874fb67a9cbfd58c64fc5ed587a663c0e63ec376"}, ] [[package]] @@ -1110,7 +1169,7 @@ files = [ name = "packaging" version = "25.0" description = "Core utilities for Python packages" -category = "main" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1199,14 +1258,14 @@ wcwidth = "*" [[package]] name = "pycparser" -version = "2.22" +version = "2.23" description = "C parser in Python" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, + {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, + {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, ] [[package]] @@ -1334,36 +1393,6 @@ files = [ lark = ">=1.1.5,<2.0" regex = ">=2024.4.16" -[[package]] -name = "pywin32" -version = "311" -description = "Python for Window Extensions" -category = "main" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"}, - {file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"}, - {file = "pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b"}, - {file = "pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151"}, - {file = "pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503"}, - {file = "pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2"}, - {file = "pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31"}, - {file = "pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067"}, - {file = "pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852"}, - {file = "pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d"}, - {file = "pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d"}, - {file = "pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a"}, - {file = "pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee"}, - {file = "pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87"}, - {file = 
"pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42"}, - {file = "pywin32-311-cp38-cp38-win32.whl", hash = "sha256:6c6f2969607b5023b0d9ce2541f8d2cbb01c4f46bc87456017cf63b73f1e2d8c"}, - {file = "pywin32-311-cp38-cp38-win_amd64.whl", hash = "sha256:c8015b09fb9a5e188f83b7b04de91ddca4658cee2ae6f3bc483f0b21a77ef6cd"}, - {file = "pywin32-311-cp39-cp39-win32.whl", hash = "sha256:aba8f82d551a942cb20d4a83413ccbac30790b50efb89a75e4f586ac0bb8056b"}, - {file = "pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91"}, - {file = "pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d"}, -] - [[package]] name = "pywin32-ctypes" version = "0.2.3" @@ -1378,65 +1407,85 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.2" +version = "6.0.3" description = "YAML parser and emitter for Python" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = 
"sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - 
{file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash 
= "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = 
"pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = 
"pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, ] [[package]] @@ -1479,99 +1528,127 @@ md = ["cmarkgfm (>=0.8.0)"] [[package]] name = "regex" -version = "2025.7.34" +version = "2025.11.3" description = "Alternative regular expression module, to replace re." 
category = "main" optional = false python-versions = ">=3.9" files = [ - {file = "regex-2025.7.34-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d856164d25e2b3b07b779bfed813eb4b6b6ce73c2fd818d46f47c1eb5cd79bd6"}, - {file = "regex-2025.7.34-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d15a9da5fad793e35fb7be74eec450d968e05d2e294f3e0e77ab03fa7234a83"}, - {file = "regex-2025.7.34-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:95b4639c77d414efa93c8de14ce3f7965a94d007e068a94f9d4997bb9bd9c81f"}, - {file = "regex-2025.7.34-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d7de1ceed5a5f84f342ba4a9f4ae589524adf9744b2ee61b5da884b5b659834"}, - {file = "regex-2025.7.34-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:02e5860a250cd350c4933cf376c3bc9cb28948e2c96a8bc042aee7b985cfa26f"}, - {file = "regex-2025.7.34-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0a5966220b9a1a88691282b7e4350e9599cf65780ca60d914a798cb791aa1177"}, - {file = "regex-2025.7.34-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:48fb045bbd4aab2418dc1ba2088a5e32de4bfe64e1457b948bb328a8dc2f1c2e"}, - {file = "regex-2025.7.34-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:20ff8433fa45e131f7316594efe24d4679c5449c0ca69d91c2f9d21846fdf064"}, - {file = "regex-2025.7.34-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c436fd1e95c04c19039668cfb548450a37c13f051e8659f40aed426e36b3765f"}, - {file = "regex-2025.7.34-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0b85241d3cfb9f8a13cefdfbd58a2843f208f2ed2c88181bf84e22e0c7fc066d"}, - {file = "regex-2025.7.34-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:075641c94126b064c65ab86e7e71fc3d63e7ff1bea1fb794f0773c97cdad3a03"}, - {file = "regex-2025.7.34-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:70645cad3407d103d1dbcb4841839d2946f7d36cf38acbd40120fee1682151e5"}, - {file = "regex-2025.7.34-cp310-cp310-win32.whl", hash = "sha256:3b836eb4a95526b263c2a3359308600bd95ce7848ebd3c29af0c37c4f9627cd3"}, - {file = "regex-2025.7.34-cp310-cp310-win_amd64.whl", hash = "sha256:cbfaa401d77334613cf434f723c7e8ba585df162be76474bccc53ae4e5520b3a"}, - {file = "regex-2025.7.34-cp310-cp310-win_arm64.whl", hash = "sha256:bca11d3c38a47c621769433c47f364b44e8043e0de8e482c5968b20ab90a3986"}, - {file = "regex-2025.7.34-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:da304313761b8500b8e175eb2040c4394a875837d5635f6256d6fa0377ad32c8"}, - {file = "regex-2025.7.34-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:35e43ebf5b18cd751ea81455b19acfdec402e82fe0dc6143edfae4c5c4b3909a"}, - {file = "regex-2025.7.34-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96bbae4c616726f4661fe7bcad5952e10d25d3c51ddc388189d8864fbc1b3c68"}, - {file = "regex-2025.7.34-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9feab78a1ffa4f2b1e27b1bcdaad36f48c2fed4870264ce32f52a393db093c78"}, - {file = "regex-2025.7.34-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f14b36e6d4d07f1a5060f28ef3b3561c5d95eb0651741474ce4c0a4c56ba8719"}, - {file = "regex-2025.7.34-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85c3a958ef8b3d5079c763477e1f09e89d13ad22198a37e9d7b26b4b17438b33"}, - {file = 
"regex-2025.7.34-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:37555e4ae0b93358fa7c2d240a4291d4a4227cc7c607d8f85596cdb08ec0a083"}, - {file = "regex-2025.7.34-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ee38926f31f1aa61b0232a3a11b83461f7807661c062df9eb88769d86e6195c3"}, - {file = "regex-2025.7.34-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a664291c31cae9c4a30589bd8bc2ebb56ef880c9c6264cb7643633831e606a4d"}, - {file = "regex-2025.7.34-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f3e5c1e0925e77ec46ddc736b756a6da50d4df4ee3f69536ffb2373460e2dafd"}, - {file = "regex-2025.7.34-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d428fc7731dcbb4e2ffe43aeb8f90775ad155e7db4347a639768bc6cd2df881a"}, - {file = "regex-2025.7.34-cp311-cp311-win32.whl", hash = "sha256:e154a7ee7fa18333ad90b20e16ef84daaeac61877c8ef942ec8dfa50dc38b7a1"}, - {file = "regex-2025.7.34-cp311-cp311-win_amd64.whl", hash = "sha256:24257953d5c1d6d3c129ab03414c07fc1a47833c9165d49b954190b2b7f21a1a"}, - {file = "regex-2025.7.34-cp311-cp311-win_arm64.whl", hash = "sha256:3157aa512b9e606586900888cd469a444f9b898ecb7f8931996cb715f77477f0"}, - {file = "regex-2025.7.34-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:7f7211a746aced993bef487de69307a38c5ddd79257d7be83f7b202cb59ddb50"}, - {file = "regex-2025.7.34-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fb31080f2bd0681484b275461b202b5ad182f52c9ec606052020fe13eb13a72f"}, - {file = "regex-2025.7.34-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0200a5150c4cf61e407038f4b4d5cdad13e86345dac29ff9dab3d75d905cf130"}, - {file = "regex-2025.7.34-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:739a74970e736df0773788377969c9fea3876c2fc13d0563f98e5503e5185f46"}, - {file = "regex-2025.7.34-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4fef81b2f7ea6a2029161ed6dea9ae13834c28eb5a95b8771828194a026621e4"}, - {file = "regex-2025.7.34-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ea74cf81fe61a7e9d77989050d0089a927ab758c29dac4e8e1b6c06fccf3ebf0"}, - {file = "regex-2025.7.34-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e4636a7f3b65a5f340ed9ddf53585c42e3ff37101d383ed321bfe5660481744b"}, - {file = "regex-2025.7.34-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6cef962d7834437fe8d3da6f9bfc6f93f20f218266dcefec0560ed7765f5fe01"}, - {file = "regex-2025.7.34-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:cbe1698e5b80298dbce8df4d8d1182279fbdaf1044e864cbc9d53c20e4a2be77"}, - {file = "regex-2025.7.34-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:32b9f9bcf0f605eb094b08e8da72e44badabb63dde6b83bd530580b488d1c6da"}, - {file = "regex-2025.7.34-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:524c868ba527eab4e8744a9287809579f54ae8c62fbf07d62aacd89f6026b282"}, - {file = "regex-2025.7.34-cp312-cp312-win32.whl", hash = "sha256:d600e58ee6d036081c89696d2bdd55d507498a7180df2e19945c6642fac59588"}, - {file = "regex-2025.7.34-cp312-cp312-win_amd64.whl", hash = "sha256:9a9ab52a466a9b4b91564437b36417b76033e8778e5af8f36be835d8cb370d62"}, - {file = "regex-2025.7.34-cp312-cp312-win_arm64.whl", hash = "sha256:c83aec91af9c6fbf7c743274fd952272403ad9a9db05fe9bfc9df8d12b45f176"}, - {file = "regex-2025.7.34-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:c3c9740a77aeef3f5e3aaab92403946a8d34437db930a0280e7e81ddcada61f5"}, - {file = "regex-2025.7.34-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:69ed3bc611540f2ea70a4080f853741ec698be556b1df404599f8724690edbcd"}, - {file = "regex-2025.7.34-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d03c6f9dcd562c56527c42b8530aad93193e0b3254a588be1f2ed378cdfdea1b"}, - {file = "regex-2025.7.34-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6164b1d99dee1dfad33f301f174d8139d4368a9fb50bf0a3603b2eaf579963ad"}, - {file = "regex-2025.7.34-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1e4f4f62599b8142362f164ce776f19d79bdd21273e86920a7b604a4275b4f59"}, - {file = "regex-2025.7.34-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:72a26dcc6a59c057b292f39d41465d8233a10fd69121fa24f8f43ec6294e5415"}, - {file = "regex-2025.7.34-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5273fddf7a3e602695c92716c420c377599ed3c853ea669c1fe26218867002f"}, - {file = "regex-2025.7.34-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c1844be23cd40135b3a5a4dd298e1e0c0cb36757364dd6cdc6025770363e06c1"}, - {file = "regex-2025.7.34-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dde35e2afbbe2272f8abee3b9fe6772d9b5a07d82607b5788e8508974059925c"}, - {file = "regex-2025.7.34-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f6e8e7af516a7549412ce57613e859c3be27d55341a894aacaa11703a4c31a"}, - {file = "regex-2025.7.34-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:469142fb94a869beb25b5f18ea87646d21def10fbacb0bcb749224f3509476f0"}, - {file = "regex-2025.7.34-cp313-cp313-win32.whl", hash = "sha256:da7507d083ee33ccea1310447410c27ca11fb9ef18c95899ca57ff60a7e4d8f1"}, - {file = "regex-2025.7.34-cp313-cp313-win_amd64.whl", hash = "sha256:9d644de5520441e5f7e2db63aec2748948cc39ed4d7a87fd5db578ea4043d997"}, - {file = "regex-2025.7.34-cp313-cp313-win_arm64.whl", hash = "sha256:7bf1c5503a9f2cbd2f52d7e260acb3131b07b6273c470abb78568174fe6bde3f"}, - {file = "regex-2025.7.34-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:8283afe7042d8270cecf27cca558873168e771183d4d593e3c5fe5f12402212a"}, - {file = "regex-2025.7.34-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6c053f9647e3421dd2f5dff8172eb7b4eec129df9d1d2f7133a4386319b47435"}, - {file = "regex-2025.7.34-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a16dd56bbcb7d10e62861c3cd000290ddff28ea142ffb5eb3470f183628011ac"}, - {file = "regex-2025.7.34-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69c593ff5a24c0d5c1112b0df9b09eae42b33c014bdca7022d6523b210b69f72"}, - {file = "regex-2025.7.34-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98d0ce170fcde1a03b5df19c5650db22ab58af375aaa6ff07978a85c9f250f0e"}, - {file = "regex-2025.7.34-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d72765a4bff8c43711d5b0f5b452991a9947853dfa471972169b3cc0ba1d0751"}, - {file = "regex-2025.7.34-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4494f8fd95a77eb434039ad8460e64d57baa0434f1395b7da44015bef650d0e4"}, - {file = "regex-2025.7.34-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4f42b522259c66e918a0121a12429b2abcf696c6f967fa37bdc7b72e61469f98"}, - {file = 
"regex-2025.7.34-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:aaef1f056d96a0a5d53ad47d019d5b4c66fe4be2da87016e0d43b7242599ffc7"}, - {file = "regex-2025.7.34-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:656433e5b7dccc9bc0da6312da8eb897b81f5e560321ec413500e5367fcd5d47"}, - {file = "regex-2025.7.34-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e91eb2c62c39705e17b4d42d4b86c4e86c884c0d15d9c5a47d0835f8387add8e"}, - {file = "regex-2025.7.34-cp314-cp314-win32.whl", hash = "sha256:f978ddfb6216028c8f1d6b0f7ef779949498b64117fc35a939022f67f810bdcb"}, - {file = "regex-2025.7.34-cp314-cp314-win_amd64.whl", hash = "sha256:4b7dc33b9b48fb37ead12ffc7bdb846ac72f99a80373c4da48f64b373a7abeae"}, - {file = "regex-2025.7.34-cp314-cp314-win_arm64.whl", hash = "sha256:4b8c4d39f451e64809912c82392933d80fe2e4a87eeef8859fcc5380d0173c64"}, - {file = "regex-2025.7.34-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fd5edc3f453de727af267c7909d083e19f6426fc9dd149e332b6034f2a5611e6"}, - {file = "regex-2025.7.34-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa1cdfb8db96ef20137de5587954c812821966c3e8b48ffc871e22d7ec0a4938"}, - {file = "regex-2025.7.34-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:89c9504fc96268e8e74b0283e548f53a80c421182a2007e3365805b74ceef936"}, - {file = "regex-2025.7.34-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33be70d75fa05a904ee0dc43b650844e067d14c849df7e82ad673541cd465b5f"}, - {file = "regex-2025.7.34-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:57d25b6732ea93eeb1d090e8399b6235ca84a651b52d52d272ed37d3d2efa0f1"}, - {file = "regex-2025.7.34-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:baf2fe122a3db1c0b9f161aa44463d8f7e33eeeda47bb0309923deb743a18276"}, - {file = "regex-2025.7.34-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a764a83128af9c1a54be81485b34dca488cbcacefe1e1d543ef11fbace191e1"}, - {file = "regex-2025.7.34-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c7f663ccc4093877f55b51477522abd7299a14c5bb7626c5238599db6a0cb95d"}, - {file = "regex-2025.7.34-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4913f52fbc7a744aaebf53acd8d3dc1b519e46ba481d4d7596de3c862e011ada"}, - {file = "regex-2025.7.34-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:efac4db9e044d47fd3b6b0d40b6708f4dfa2d8131a5ac1d604064147c0f552fd"}, - {file = "regex-2025.7.34-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7373afae7cfb716e3b8e15d0184510d518f9d21471f2d62918dbece85f2c588f"}, - {file = "regex-2025.7.34-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9960d162f3fecf6af252534a1ae337e9c2e20d74469fed782903b24e2cc9d3d7"}, - {file = "regex-2025.7.34-cp39-cp39-win32.whl", hash = "sha256:95d538b10eb4621350a54bf14600cc80b514211d91a019dc74b8e23d2159ace5"}, - {file = "regex-2025.7.34-cp39-cp39-win_amd64.whl", hash = "sha256:f7f3071b5faa605b0ea51ec4bb3ea7257277446b053f4fd3ad02b1dcb4e64353"}, - {file = "regex-2025.7.34-cp39-cp39-win_arm64.whl", hash = "sha256:716a47515ba1d03f8e8a61c5013041c8c90f2e21f055203498105d7571b44531"}, - {file = "regex-2025.7.34.tar.gz", hash = "sha256:9ead9765217afd04a86822dfcd4ed2747dfe426e887da413b15ff0ac2457e21a"}, + {file = "regex-2025.11.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2b441a4ae2c8049106e8b39973bfbddfb25a179dda2bdb99b0eeb60c40a6a3af"}, + {file = "regex-2025.11.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:2fa2eed3f76677777345d2f81ee89f5de2f5745910e805f7af7386a920fa7313"}, + {file = "regex-2025.11.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8b4a27eebd684319bdf473d39f1d79eed36bf2cd34bd4465cdb4618d82b3d56"}, + {file = "regex-2025.11.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5cf77eac15bd264986c4a2c63353212c095b40f3affb2bc6b4ef80c4776c1a28"}, + {file = "regex-2025.11.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b7f9ee819f94c6abfa56ec7b1dbab586f41ebbdc0a57e6524bd5e7f487a878c7"}, + {file = "regex-2025.11.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:838441333bc90b829406d4a03cb4b8bf7656231b84358628b0406d803931ef32"}, + {file = "regex-2025.11.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cfe6d3f0c9e3b7e8c0c694b24d25e677776f5ca26dce46fd6b0489f9c8339391"}, + {file = "regex-2025.11.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2ab815eb8a96379a27c3b6157fcb127c8f59c36f043c1678110cea492868f1d5"}, + {file = "regex-2025.11.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:728a9d2d173a65b62bdc380b7932dd8e74ed4295279a8fe1021204ce210803e7"}, + {file = "regex-2025.11.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:509dc827f89c15c66a0c216331260d777dd6c81e9a4e4f830e662b0bb296c313"}, + {file = "regex-2025.11.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:849202cd789e5f3cf5dcc7822c34b502181b4824a65ff20ce82da5524e45e8e9"}, + {file = "regex-2025.11.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b6f78f98741dcc89607c16b1e9426ee46ce4bf31ac5e6b0d40e81c89f3481ea5"}, + {file = "regex-2025.11.3-cp310-cp310-win32.whl", hash = "sha256:149eb0bba95231fb4f6d37c8f760ec9fa6fabf65bab555e128dde5f2475193ec"}, + {file = "regex-2025.11.3-cp310-cp310-win_amd64.whl", hash = "sha256:ee3a83ce492074c35a74cc76cf8235d49e77b757193a5365ff86e3f2f93db9fd"}, + {file = "regex-2025.11.3-cp310-cp310-win_arm64.whl", hash = "sha256:38af559ad934a7b35147716655d4a2f79fcef2d695ddfe06a06ba40ae631fa7e"}, + {file = "regex-2025.11.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eadade04221641516fa25139273505a1c19f9bf97589a05bc4cfcd8b4a618031"}, + {file = "regex-2025.11.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:feff9e54ec0dd3833d659257f5c3f5322a12eee58ffa360984b716f8b92983f4"}, + {file = "regex-2025.11.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3b30bc921d50365775c09a7ed446359e5c0179e9e2512beec4a60cbcef6ddd50"}, + {file = "regex-2025.11.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f99be08cfead2020c7ca6e396c13543baea32343b7a9a5780c462e323bd8872f"}, + {file = "regex-2025.11.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6dd329a1b61c0ee95ba95385fb0c07ea0d3fe1a21e1349fa2bec272636217118"}, + {file = "regex-2025.11.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4c5238d32f3c5269d9e87be0cf096437b7622b6920f5eac4fd202468aaeb34d2"}, + {file = "regex-2025.11.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10483eefbfb0adb18ee9474498c9a32fcf4e594fbca0543bb94c48bac6183e2e"}, + {file = "regex-2025.11.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:78c2d02bb6e1da0720eedc0bad578049cad3f71050ef8cd065ecc87691bed2b0"}, + {file = 
"regex-2025.11.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e6b49cd2aad93a1790ce9cffb18964f6d3a4b0b3dbdbd5de094b65296fce6e58"}, + {file = "regex-2025.11.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:885b26aa3ee56433b630502dc3d36ba78d186a00cc535d3806e6bfd9ed3c70ab"}, + {file = "regex-2025.11.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ddd76a9f58e6a00f8772e72cff8ebcff78e022be95edf018766707c730593e1e"}, + {file = "regex-2025.11.3-cp311-cp311-win32.whl", hash = "sha256:3e816cc9aac1cd3cc9a4ec4d860f06d40f994b5c7b4d03b93345f44e08cc68bf"}, + {file = "regex-2025.11.3-cp311-cp311-win_amd64.whl", hash = "sha256:087511f5c8b7dfbe3a03f5d5ad0c2a33861b1fc387f21f6f60825a44865a385a"}, + {file = "regex-2025.11.3-cp311-cp311-win_arm64.whl", hash = "sha256:1ff0d190c7f68ae7769cd0313fe45820ba07ffebfddfaa89cc1eb70827ba0ddc"}, + {file = "regex-2025.11.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bc8ab71e2e31b16e40868a40a69007bc305e1109bd4658eb6cad007e0bf67c41"}, + {file = "regex-2025.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:22b29dda7e1f7062a52359fca6e58e548e28c6686f205e780b02ad8ef710de36"}, + {file = "regex-2025.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a91e4a29938bc1a082cc28fdea44be420bf2bebe2665343029723892eb073e1"}, + {file = "regex-2025.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08b884f4226602ad40c5d55f52bf91a9df30f513864e0054bad40c0e9cf1afb7"}, + {file = "regex-2025.11.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e0b11b2b2433d1c39c7c7a30e3f3d0aeeea44c2a8d0bae28f6b95f639927a69"}, + {file = "regex-2025.11.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87eb52a81ef58c7ba4d45c3ca74e12aa4b4e77816f72ca25258a85b3ea96cb48"}, + {file = "regex-2025.11.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a12ab1f5c29b4e93db518f5e3872116b7e9b1646c9f9f426f777b50d44a09e8c"}, + {file = "regex-2025.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7521684c8c7c4f6e88e35ec89680ee1aa8358d3f09d27dfbdf62c446f5d4c695"}, + {file = "regex-2025.11.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7fe6e5440584e94cc4b3f5f4d98a25e29ca12dccf8873679a635638349831b98"}, + {file = "regex-2025.11.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8e026094aa12b43f4fd74576714e987803a315c76edb6b098b9809db5de58f74"}, + {file = "regex-2025.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:435bbad13e57eb5606a68443af62bed3556de2f46deb9f7d4237bc2f1c9fb3a0"}, + {file = "regex-2025.11.3-cp312-cp312-win32.whl", hash = "sha256:3839967cf4dc4b985e1570fd8d91078f0c519f30491c60f9ac42a8db039be204"}, + {file = "regex-2025.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:e721d1b46e25c481dc5ded6f4b3f66c897c58d2e8cfdf77bbced84339108b0b9"}, + {file = "regex-2025.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:64350685ff08b1d3a6fff33f45a9ca183dc1d58bbfe4981604e70ec9801bbc26"}, + {file = "regex-2025.11.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c1e448051717a334891f2b9a620fe36776ebf3dd8ec46a0b877c8ae69575feb4"}, + {file = "regex-2025.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9b5aca4d5dfd7fbfbfbdaf44850fcc7709a01146a797536a8f84952e940cca76"}, + {file = "regex-2025.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:04d2765516395cf7dda331a244a3282c0f5ae96075f728629287dfa6f76ba70a"}, + {file = 
"regex-2025.11.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d9903ca42bfeec4cebedba8022a7c97ad2aab22e09573ce9976ba01b65e4361"}, + {file = "regex-2025.11.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:639431bdc89d6429f6721625e8129413980ccd62e9d3f496be618a41d205f160"}, + {file = "regex-2025.11.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f117efad42068f9715677c8523ed2be1518116d1c49b1dd17987716695181efe"}, + {file = "regex-2025.11.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4aecb6f461316adf9f1f0f6a4a1a3d79e045f9b71ec76055a791affa3b285850"}, + {file = "regex-2025.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3b3a5f320136873cc5561098dfab677eea139521cb9a9e8db98b7e64aef44cbc"}, + {file = "regex-2025.11.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:75fa6f0056e7efb1f42a1c34e58be24072cb9e61a601340cc1196ae92326a4f9"}, + {file = "regex-2025.11.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:dbe6095001465294f13f1adcd3311e50dd84e5a71525f20a10bd16689c61ce0b"}, + {file = "regex-2025.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:454d9b4ae7881afbc25015b8627c16d88a597479b9dea82b8c6e7e2e07240dc7"}, + {file = "regex-2025.11.3-cp313-cp313-win32.whl", hash = "sha256:28ba4d69171fc6e9896337d4fc63a43660002b7da53fc15ac992abcf3410917c"}, + {file = "regex-2025.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:bac4200befe50c670c405dc33af26dad5a3b6b255dd6c000d92fe4629f9ed6a5"}, + {file = "regex-2025.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:2292cd5a90dab247f9abe892ac584cb24f0f54680c73fcb4a7493c66c2bf2467"}, + {file = "regex-2025.11.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:1eb1ebf6822b756c723e09f5186473d93236c06c579d2cc0671a722d2ab14281"}, + {file = "regex-2025.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1e00ec2970aab10dc5db34af535f21fcf32b4a31d99e34963419636e2f85ae39"}, + {file = "regex-2025.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a4cb042b615245d5ff9b3794f56be4138b5adc35a4166014d31d1814744148c7"}, + {file = "regex-2025.11.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44f264d4bf02f3176467d90b294d59bf1db9fe53c141ff772f27a8b456b2a9ed"}, + {file = "regex-2025.11.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7be0277469bf3bd7a34a9c57c1b6a724532a0d235cd0dc4e7f4316f982c28b19"}, + {file = "regex-2025.11.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0d31e08426ff4b5b650f68839f5af51a92a5b51abd8554a60c2fbc7c71f25d0b"}, + {file = "regex-2025.11.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e43586ce5bd28f9f285a6e729466841368c4a0353f6fd08d4ce4630843d3648a"}, + {file = "regex-2025.11.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0f9397d561a4c16829d4e6ff75202c1c08b68a3bdbfe29dbfcdb31c9830907c6"}, + {file = "regex-2025.11.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:dd16e78eb18ffdb25ee33a0682d17912e8cc8a770e885aeee95020046128f1ce"}, + {file = "regex-2025.11.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:ffcca5b9efe948ba0661e9df0fa50d2bc4b097c70b9810212d6b62f05d83b2dd"}, + {file = "regex-2025.11.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:c56b4d162ca2b43318ac671c65bd4d563e841a694ac70e1a976ac38fcf4ca1d2"}, + {file = "regex-2025.11.3-cp313-cp313t-win32.whl", hash = "sha256:9ddc42e68114e161e51e272f667d640f97e84a2b9ef14b7477c53aac20c2d59a"}, + {file = "regex-2025.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7a7c7fdf755032ffdd72c77e3d8096bdcb0eb92e89e17571a196f03d88b11b3c"}, + {file = "regex-2025.11.3-cp313-cp313t-win_arm64.whl", hash = "sha256:df9eb838c44f570283712e7cff14c16329a9f0fb19ca492d21d4b7528ee6821e"}, + {file = "regex-2025.11.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9697a52e57576c83139d7c6f213d64485d3df5bf84807c35fa409e6c970801c6"}, + {file = "regex-2025.11.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e18bc3f73bd41243c9b38a6d9f2366cd0e0137a9aebe2d8ff76c5b67d4c0a3f4"}, + {file = "regex-2025.11.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:61a08bcb0ec14ff4e0ed2044aad948d0659604f824cbd50b55e30b0ec6f09c73"}, + {file = "regex-2025.11.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9c30003b9347c24bcc210958c5d167b9e4f9be786cb380a7d32f14f9b84674f"}, + {file = "regex-2025.11.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4e1e592789704459900728d88d41a46fe3969b82ab62945560a31732ffc19a6d"}, + {file = "regex-2025.11.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6538241f45eb5a25aa575dbba1069ad786f68a4f2773a29a2bd3dd1f9de787be"}, + {file = "regex-2025.11.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce22519c989bb72a7e6b36a199384c53db7722fe669ba891da75907fe3587db"}, + {file = "regex-2025.11.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:66d559b21d3640203ab9075797a55165d79017520685fb407b9234d72ab63c62"}, + {file = "regex-2025.11.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:669dcfb2e38f9e8c69507bace46f4889e3abbfd9b0c29719202883c0a603598f"}, + {file = "regex-2025.11.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:32f74f35ff0f25a5021373ac61442edcb150731fbaa28286bbc8bb1582c89d02"}, + {file = "regex-2025.11.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e6c7a21dffba883234baefe91bc3388e629779582038f75d2a5be918e250f0ed"}, + {file = "regex-2025.11.3-cp314-cp314-win32.whl", hash = "sha256:795ea137b1d809eb6836b43748b12634291c0ed55ad50a7d72d21edf1cd565c4"}, + {file = "regex-2025.11.3-cp314-cp314-win_amd64.whl", hash = "sha256:9f95fbaa0ee1610ec0fc6b26668e9917a582ba80c52cc6d9ada15e30aa9ab9ad"}, + {file = "regex-2025.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:dfec44d532be4c07088c3de2876130ff0fbeeacaa89a137decbbb5f665855a0f"}, + {file = "regex-2025.11.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ba0d8a5d7f04f73ee7d01d974d47c5834f8a1b0224390e4fe7c12a3a92a78ecc"}, + {file = "regex-2025.11.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:442d86cf1cfe4faabf97db7d901ef58347efd004934da045c745e7b5bd57ac49"}, + {file = "regex-2025.11.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fd0a5e563c756de210bb964789b5abe4f114dacae9104a47e1a649b910361536"}, + {file = "regex-2025.11.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf3490bcbb985a1ae97b2ce9ad1c0f06a852d5b19dde9b07bdf25bf224248c95"}, + {file = "regex-2025.11.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:3809988f0a8b8c9dcc0f92478d6501fac7200b9ec56aecf0ec21f4a2ec4b6009"}, + {file = "regex-2025.11.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f4ff94e58e84aedb9c9fce66d4ef9f27a190285b451420f297c9a09f2b9abee9"}, + {file = "regex-2025.11.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eb542fd347ce61e1321b0a6b945d5701528dca0cd9759c2e3bb8bd57e47964d"}, + {file = "regex-2025.11.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2d5919075a1f2e413c00b056ea0c2f065b3f5fe83c3d07d325ab92dce51d6"}, + {file = "regex-2025.11.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3f8bf11a4827cc7ce5a53d4ef6cddd5ad25595d3c1435ef08f76825851343154"}, + {file = "regex-2025.11.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:22c12d837298651e5550ac1d964e4ff57c3f56965fc1812c90c9fb2028eaf267"}, + {file = "regex-2025.11.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:62ba394a3dda9ad41c7c780f60f6e4a70988741415ae96f6d1bf6c239cf01379"}, + {file = "regex-2025.11.3-cp314-cp314t-win32.whl", hash = "sha256:4bf146dca15cdd53224a1bf46d628bd7590e4a07fbb69e720d561aea43a32b38"}, + {file = "regex-2025.11.3-cp314-cp314t-win_amd64.whl", hash = "sha256:adad1a1bcf1c9e76346e091d22d23ac54ef28e1365117d99521631078dfec9de"}, + {file = "regex-2025.11.3-cp314-cp314t-win_arm64.whl", hash = "sha256:c54f768482cef41e219720013cd05933b6f971d9562544d691c68699bf2b6801"}, + {file = "regex-2025.11.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:81519e25707fc076978c6143b81ea3dc853f176895af05bf7ec51effe818aeec"}, + {file = "regex-2025.11.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3bf28b1873a8af8bbb58c26cc56ea6e534d80053b41fb511a35795b6de507e6a"}, + {file = "regex-2025.11.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:856a25c73b697f2ce2a24e7968285579e62577a048526161a2c0f53090bea9f9"}, + {file = "regex-2025.11.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a3d571bd95fade53c86c0517f859477ff3a93c3fde10c9e669086f038e0f207"}, + {file = "regex-2025.11.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:732aea6de26051af97b94bc98ed86448821f839d058e5d259c72bf6d73ad0fc0"}, + {file = "regex-2025.11.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:51c1c1847128238f54930edb8805b660305dca164645a9fd29243f5610beea34"}, + {file = "regex-2025.11.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22dd622a402aad4558277305350699b2be14bc59f64d64ae1d928ce7d072dced"}, + {file = "regex-2025.11.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f3b5a391c7597ffa96b41bd5cbd2ed0305f515fcbb367dfa72735679d5502364"}, + {file = "regex-2025.11.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:cc4076a5b4f36d849fd709284b4a3b112326652f3b0466f04002a6c15a0c96c1"}, + {file = "regex-2025.11.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a295ca2bba5c1c885826ce3125fa0b9f702a1be547d821c01d65f199e10c01e2"}, + {file = "regex-2025.11.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:b4774ff32f18e0504bfc4e59a3e71e18d83bc1e171a3c8ed75013958a03b2f14"}, + {file = "regex-2025.11.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e7d1cdfa88ef33a2ae6aa0d707f9255eb286ffbd90045f1088246833223aee"}, + {file = "regex-2025.11.3-cp39-cp39-win32.whl", hash = 
"sha256:74d04244852ff73b32eeede4f76f51c5bcf44bc3c207bc3e6cf1c5c45b890708"}, + {file = "regex-2025.11.3-cp39-cp39-win_amd64.whl", hash = "sha256:7a50cd39f73faa34ec18d6720ee25ef10c4c1839514186fcda658a06c06057a2"}, + {file = "regex-2025.11.3-cp39-cp39-win_arm64.whl", hash = "sha256:43b4fb020e779ca81c1b5255015fe2b82816c76ec982354534ad9ec09ad7c9e3"}, + {file = "regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01"}, ] [[package]] @@ -1666,53 +1743,73 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] name = "ruamel-yaml-clib" -version = "0.2.12" +version = "0.2.15" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" category = "main" optional = false python-versions = ">=3.9" files = [ - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd5415dded15c3822597455bc02bcd66e81ef8b7a48cb71a33628fc9fdde39df"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"}, - {file = 
"ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:e7e3736715fbf53e9be2a79eb4db68e4ed857017344d697e8b9749444ae57475"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7e75b4965e1d4690e93021adfcecccbca7d61c7bddd8e22406ef2ff20d74ef"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bc5f1e1c28e966d61d2519f2a3d451ba989f9ea0f2307de7bc45baa526de9e45"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a0e060aace4c24dcaf71023bbd7d42674e3b230f7e7b97317baf1e953e5b519"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = 
"sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"}, - {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:88eea8baf72f0ccf232c22124d122a7f26e8a24110a0273d9bcddcb0f7e1fa03"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b6f7d74d094d1f3a4e157278da97752f16ee230080ae331fcc219056ca54f77"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4be366220090d7c3424ac2b71c90d1044ea34fca8c0b88f250064fd06087e614"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f66f600833af58bea694d5892453f2270695b92200280ee8c625ec5a477eed3"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da3d6adadcf55a93c214d23941aef4abfd45652110aed6580e814152f385b862"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e9fde97ecb7bb9c41261c2ce0da10323e9227555c674989f8d9eb7572fc2098d"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:05c70f7f86be6f7bee53794d80050a28ae7e13e4a0087c1839dcdefd68eb36b6"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f1d38cbe622039d111b69e9ca945e7e3efebb30ba998867908773183357f3ed"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-win32.whl", hash = "sha256:fe239bdfdae2302e93bd6e8264bd9b71290218fff7084a9db250b55caaccf43f"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-win_amd64.whl", hash = "sha256:468858e5cbde0198337e6a2a78eda8c3fb148bdf4c6498eaf4bc9ba3f8e780bd"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c583229f336682b7212a43d2fa32c30e643d3076178fb9f7a6a14dde85a2d8bd"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56ea19c157ed8c74b6be51b5fa1c3aff6e289a041575f0556f66e5fb848bb137"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5fea0932358e18293407feb921d4f4457db837b67ec1837f87074667449f9401"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef71831bd61fbdb7aa0399d5c4da06bea37107ab5c79ff884cc07f2450910262"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:617d35dc765715fa86f8c3ccdae1e4229055832c452d4ec20856136acc75053f"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b45498cc81a4724a2d42273d6cfc243c0547ad7c6b87b4f774cb7bcc131c98d"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:def5663361f6771b18646620fca12968aae730132e104688766cf8a3b1d65922"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:014181cdec565c8745b7cbc4de3bf2cc8ced05183d986e6d1200168e5bb59490"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-win32.whl", hash = "sha256:d290eda8f6ada19e1771b54e5706b8f9807e6bb08e873900d5ba114ced13e02c"}, + {file = 
"ruamel_yaml_clib-0.2.15-cp311-cp311-win_amd64.whl", hash = "sha256:bdc06ad71173b915167702f55d0f3f027fc61abd975bd308a0968c02db4a4c3e"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cb15a2e2a90c8475df45c0949793af1ff413acfb0a716b8b94e488ea95ce7cff"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:64da03cbe93c1e91af133f5bec37fd24d0d4ba2418eaf970d7166b0a26a148a2"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f6d3655e95a80325b84c4e14c080b2470fe4f33b6846f288379ce36154993fb1"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71845d377c7a47afc6592aacfea738cc8a7e876d586dfba814501d8c53c1ba60"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e5499db1ccbc7f4b41f0565e4f799d863ea720e01d3e99fa0b7b5fcd7802c9"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4b293a37dc97e2b1e8a1aec62792d1e52027087c8eea4fc7b5abd2bdafdd6642"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:512571ad41bba04eac7268fe33f7f4742210ca26a81fe0c75357fa682636c690"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e5e9f630c73a490b758bf14d859a39f375e6999aea5ddd2e2e9da89b9953486a"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-win32.whl", hash = "sha256:f4421ab780c37210a07d138e56dd4b51f8642187cdfb433eb687fe8c11de0144"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-win_amd64.whl", hash = "sha256:2b216904750889133d9222b7b873c199d48ecbb12912aca78970f84a5aa1a4bc"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4dcec721fddbb62e60c2801ba08c87010bd6b700054a09998c4d09c08147b8fb"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:65f48245279f9bb301d1276f9679b82e4c080a1ae25e679f682ac62446fac471"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:46895c17ead5e22bea5e576f1db7e41cb273e8d062c04a6a49013d9f60996c25"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3eb199178b08956e5be6288ee0b05b2fb0b5c1f309725ad25d9c6ea7e27f962a"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d1032919280ebc04a80e4fb1e93f7a738129857eaec9448310e638c8bccefcf"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab0df0648d86a7ecbd9c632e8f8d6b21bb21b5fc9d9e095c796cacf32a728d2d"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:331fb180858dd8534f0e61aa243b944f25e73a4dae9962bd44c46d1761126bbf"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fd4c928ddf6bce586285daa6d90680b9c291cfd045fc40aad34e445d57b1bf51"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-win32.whl", hash = "sha256:bf0846d629e160223805db9fe8cc7aec16aaa11a07310c50c8c7164efa440aec"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-win_amd64.whl", hash = "sha256:45702dfbea1420ba3450bb3dd9a80b33f0badd57539c6aac09f42584303e0db6"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-macosx_10_15_x86_64.whl", hash = 
"sha256:753faf20b3a5906faf1fc50e4ddb8c074cb9b251e00b14c18b28492f933ac8ef"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:480894aee0b29752560a9de46c0e5f84a82602f2bc5c6cde8db9a345319acfdf"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4d3b58ab2454b4747442ac76fab66739c72b1e2bb9bd173d7694b9f9dbc9c000"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bfd309b316228acecfa30670c3887dcedf9b7a44ea39e2101e75d2654522acd4"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2812ff359ec1f30129b62372e5f22a52936fac13d5d21e70373dbca5d64bb97c"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7e74ea87307303ba91073b63e67f2c667e93f05a8c63079ee5b7a5c8d0d7b043"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:713cd68af9dfbe0bb588e144a61aad8dcc00ef92a82d2e87183ca662d242f524"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:542d77b72786a35563f97069b9379ce762944e67055bea293480f7734b2c7e5e"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-win32.whl", hash = "sha256:424ead8cef3939d690c4b5c85ef5b52155a231ff8b252961b6516ed7cf05f6aa"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-win_amd64.whl", hash = "sha256:ac9b8d5fa4bb7fd2917ab5027f60d4234345fd366fe39aa711d5dca090aa1467"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:923816815974425fbb1f1bf57e85eca6e14d8adc313c66db21c094927ad01815"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dcc7f3162d3711fd5d52e2267e44636e3e566d1e5675a5f0b30e98f2c4af7974"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5d3c9210219cbc0f22706f19b154c9a798ff65a6beeafbf77fc9c057ec806f7d"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bb7b728fd9f405aa00b4a0b17ba3f3b810d0ccc5f77f7373162e9b5f0ff75d5"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3cb75a3c14f1d6c3c2a94631e362802f70e83e20d1f2b2ef3026c05b415c4900"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:badd1d7283f3e5894779a6ea8944cc765138b96804496c91812b2829f70e18a7"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ba6604bbc3dfcef844631932d06a1a4dcac3fee904efccf582261948431628a"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a8220fd4c6f98485e97aea65e1df76d4fed1678ede1fe1d0eed2957230d287c4"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-win32.whl", hash = "sha256:04d21dc9c57d9608225da28285900762befbb0165ae48482c15d8d4989d4af14"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-win_amd64.whl", hash = "sha256:27dc656e84396e6d687f97c6e65fb284d100483628f02d95464fd731743a4afe"}, + {file = "ruamel_yaml_clib-0.2.15.tar.gz", hash = "sha256:46e4cc8c43ef6a94885f72512094e482114a8a706d3c555a34ed4b0d20200600"}, ] [[package]] @@ -1787,44 +1884,54 @@ files = [ [[package]] name = "tomli" -version = "2.2.1" +version = "2.3.0" description = "A lil' TOML parser" category = "dev" optional = false python-versions = 
">=3.8" files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = 
"tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}, + {file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}, + {file = "tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"}, + {file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = 
"sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"}, + {file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"}, + {file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"}, + {file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"}, + {file = "tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"}, + {file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = 
"sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"}, + {file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"}, + {file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"}, + {file = "tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}, + {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, ] [[package]] @@ -1903,14 +2010,14 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.34.0" +version = "20.35.4" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026"}, - {file = "virtualenv-20.34.0.tar.gz", hash = "sha256:44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a"}, + {file = "virtualenv-20.35.4-py3-none-any.whl", hash = "sha256:c21c9cede36c9753eeade68ba7d523529f228a403463376cf821eaae2b650f1b"}, + {file = "virtualenv-20.35.4.tar.gz", hash = "sha256:643d3914d73d3eeb0c552cbb12d7e82adf0e504dbf86a3182f8771a153a1971c"}, ] [package.dependencies] @@ -1925,33 +2032,16 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "wcwidth" -version = "0.2.13" +version = "0.2.14" description = "Measures the displayed width of unicode strings in a terminal" category = "main" optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "websocket-client" -version = "1.8.0" -description = "WebSocket client for Python with low level API options" -category = "main" -optional = false -python-versions = ">=3.8" +python-versions = ">=3.6" files = [ - {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, - {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, + {file = "wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1"}, + {file = "wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605"}, ] -[package.extras] -docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - [[package]] name = "wrapt" version = "1.12.1" @@ -1998,4 +2088,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "~3.9 || ~3.10 || ~3.11 || ~3.12 || ~3.13" -content-hash = "1f5899f1c878231d764da654eeed24f372eeb90a4eee0efbf84e9579e9af7d1c" +content-hash = "ce4d9bb8a0938a79412b2184795be64fb896119dec65814486a44fc34f5480f6" diff --git a/pyproject.toml b/pyproject.toml index d16252b..795287d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,6 @@ classifiers = [ packages = [ { include = "leverage" }, { include = "leverage/modules" }, - { include = "leverage/containers" } ] [tool.poetry.dependencies] @@ -32,9 +31,8 @@ click = "8.0.1" yaenv = 
"1.5.3post1" "ruamel.yaml" = "0.17.10" jinja2 = "3.0.1" -docker = "6.1.0" -dockerpty = "0.4.1" questionary = "1.10.0" +prompt-toolkit = "3.0.51" python-hcl2 = "7.3.1" boto3 = "1.33.2" configupdater = "3.2" From 34005814eebb9f269411e750d591825a75b923d5 Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Sat, 20 Dec 2025 12:59:52 -0300 Subject: [PATCH 35/46] Fix the need for TF_BINARY to be set --- leverage/path.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/leverage/path.py b/leverage/path.py index 1cbac61..6f75935 100644 --- a/leverage/path.py +++ b/leverage/path.py @@ -165,15 +165,15 @@ def __init__(self, env_conf: dict): self.aws_credentials_dir.mkdir(parents=True) self.sso_cache = self.aws_credentials_dir / "sso" / "cache" - # If not empty make the binary path absolute - if tf_binary := env_conf.get("TF_BINARY", ""): - binary_path = Path(tf_binary) - if tf_binary.startswith("~"): - self.tf_binary = str(binary_path.expanduser()) - elif not binary_path.is_absolute() and len(binary_path.parts) > 1: - self.tf_binary = str((self.root_dir / tf_binary).resolve()) - else: - self.tf_binary = tf_binary + # Make the binary path absolute + tf_binary = env_conf.get("TF_BINARY", "") + binary_path = Path(tf_binary) + if tf_binary.startswith("~"): + self.tf_binary = str(binary_path.expanduser()) + elif not binary_path.is_absolute() and len(binary_path.parts) > 1: + self.tf_binary = str((self.root_dir / tf_binary).resolve()) + else: + self.tf_binary = tf_binary def update_cwd(self, new_cwd): self.cwd = new_cwd From 242a3b4881727291d4f6bfc1df6960a536b37973 Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Sat, 20 Dec 2025 13:02:23 -0300 Subject: [PATCH 36/46] Handle environment variables globally --- leverage/_internals.py | 13 +++++++++++ leverage/leverage.py | 4 ++++ leverage/modules/aws.py | 7 +----- leverage/modules/kubectl.py | 44 +++++++++++++++++------------------- leverage/modules/tf.py | 12 ++-------- leverage/modules/tfautomv.py | 11 ++++----- 6 files changed, 45 insertions(+), 46 deletions(-) diff --git a/leverage/_internals.py b/leverage/_internals.py index 037d4ea..69735b5 100644 --- a/leverage/_internals.py +++ b/leverage/_internals.py @@ -30,6 +30,7 @@ def __init__(self): self.config = None self.paths = None self.runner = None + self.environment = None @property def verbosity(self): @@ -65,3 +66,15 @@ def new_command(*args, **kwargs): return command(ctx.obj.paths, *args, **kwargs) return new_command + + +def pass_environment(command): + """Decorator to pass the current environment to the command.""" + + @wraps(command) + def new_command(*args, **kwargs): + ctx = click.get_current_context() + + return command(ctx.obj.environment, *args, **kwargs) + + return new_command diff --git a/leverage/leverage.py b/leverage/leverage.py index 9f32f15..fd3da87 100644 --- a/leverage/leverage.py +++ b/leverage/leverage.py @@ -28,6 +28,10 @@ def leverage(context, state, verbose): except NotARepositoryError: return state.paths = PathsHandler(state.config) + state.environment = { + "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file), + "AWS_CONFIG_FILE": str(state.paths.aws_config_file), + } # Add modules to leverage diff --git a/leverage/modules/aws.py b/leverage/modules/aws.py index 7db8ad0..7cb416e 100644 --- a/leverage/modules/aws.py +++ b/leverage/modules/aws.py @@ -116,18 +116,13 @@ def configure_sso_profiles(paths: PathsHandler) -> None: @click.pass_context def aws(context: click.Context, state: Any, args: Tuple[str, ...]) -> None: """Run 
AWS CLI commands in the context of the current project.""" - - credentials_env_vars = { - "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file), - "AWS_CONFIG_FILE": str(state.paths.aws_config_file), - } state.runner = Runner( binary="aws", error_message=( f"AWS CLI not found on system. " f"Please install it following the instructions at: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html" ), - env_vars=credentials_env_vars, + env_vars=state.environment, ) _handle_subcommand(context=context, runner=state.runner, args=args, pre_invocation_callback=refresh_aws_credentials) diff --git a/leverage/modules/kubectl.py b/leverage/modules/kubectl.py index b5940c6..2229869 100644 --- a/leverage/modules/kubectl.py +++ b/leverage/modules/kubectl.py @@ -15,7 +15,7 @@ from leverage.modules.tfrunner import TFRunner from leverage.modules.utils import _handle_subcommand from leverage.modules.auth import check_sso_token, refresh_layer_credentials -from leverage._internals import pass_state, pass_paths +from leverage._internals import pass_state, pass_paths, pass_environment @dataclass @@ -58,26 +58,23 @@ def kubectl(context, state, args): kubeconfig_dir = state.paths.home / ".kube" / state.paths.project kubeconfig_dir.mkdir(parents=True, exist_ok=True) + state.environment["KUBECONFIG"] = str(kubeconfig_dir / "config") - credentials_env_vars = { - "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file), - "AWS_CONFIG_FILE": str(state.paths.aws_config_file), - "KUBECONFIG": str(kubeconfig_dir / "config"), - } state.runner = Runner( binary="kubectl", error_message=( f"Kubectl not found on system. " f"Please install it following the instructions at: https://kubernetes.io/docs/tasks/tools/#kubectl" ), - env_vars=credentials_env_vars, + env_vars=state.environment, ) + _handle_subcommand( context=context, runner=state.runner, args=args, pre_invocation_callback=refresh_kubectl_credentials ) -def _configure(ci: ClusterInfo = None, layer_path: Path = None): +def _configure(environment: dict, ci: ClusterInfo = None,layer_path: Path = None): """ Add the given EKS cluster configuration to the .kube/ files. 
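
From this patch onward every runner receives the same `state.environment` mapping built once in leverage.py; commands that need extra variables extend that shared mapping instead of rebuilding the credentials pair, as the kubectl diff below shows. A rough sketch of the layering (hypothetical paths, not the project's code):

    from pathlib import Path

    # Base mapping assembled once per invocation, mirroring the leverage.py hunk above.
    environment = {
        "AWS_SHARED_CREDENTIALS_FILE": str(Path.home() / ".aws" / "demo" / "credentials"),
        "AWS_CONFIG_FILE": str(Path.home() / ".aws" / "demo" / "config"),
    }
    # A command such as `leverage kubectl` then layers its own entry on top.
    environment["KUBECONFIG"] = str(Path.home() / ".kube" / "demo" / "config")
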
""" @@ -87,31 +84,29 @@ def _configure(ci: ClusterInfo = None, layer_path: Path = None): else: # otherwise go get them from the layer logger.info("Retrieving k8s cluster information...") - cmd = _get_eks_kube_config(layer_path).split(" ")[1:] + cmd = _get_eks_kube_config(environment, layer_path).split(" ")[1:] logger.info("Configuring context...") try: - click.get_current_context().invoke(aws, args=cmd) + exit_code, _, _ = Runner(binary="aws", env_vars=environment).exec(*cmd) except ExitError as e: - raise ExitError(e.exit_code, f"Failed to configure kubectl context: {e.message}") + raise ExitError(e.exit_code, f"Could not locate AWS cli binary.") + if exit_code: + raise ExitError(exit_code, f"Failed to configure kubectl context: {exit_code}") logger.info("Done.") @pass_paths -def _get_eks_kube_config(paths: PathsHandler, layer_path: Path) -> str: +def _get_eks_kube_config(paths: PathsHandler, environment: dict, layer_path: Path) -> str: # TODO: Get rid of this ugly workaround - credentials_env_vars = { - "AWS_SHARED_CREDENTIALS_FILE": str(paths.aws_credentials_file), - "AWS_CONFIG_FILE": str(paths.aws_config_file), - } try: - tfrunner = TFRunner(binary=paths.tf_binary, env_vars=credentials_env_vars) + tfrunner = TFRunner(binary=paths.tf_binary, env_vars=environment) except ExitError as e: try: - tfrunner = TFRunner(binary=paths.tf_binary, terraform=True, env_vars=credentials_env_vars) + tfrunner = TFRunner(binary=paths.tf_binary, terraform=True, env_vars=environment) except ExitError: - raise ExitError(1, "Could not locate TF binary.") + raise ExitError(e.exit_code, f"Could not locate TF binary.") refresh_kubectl_credentials() exit_code, output, error = tfrunner.exec("output", "-no-color", working_dir=layer_path) @@ -128,9 +123,11 @@ def _get_eks_kube_config(paths: PathsHandler, layer_path: Path) -> str: @kubectl.command(context_settings=CONTEXT_SETTINGS) @pass_paths -def configure(paths: PathsHandler): +@pass_environment +def configure(environment: dict, paths: PathsHandler): """Automatically add the EKS cluster from the layer into your kubectl config file.""" - _configure(layer_path=paths.cwd) + paths.check_for_cluster_layer() + _configure(environment, layer_path=paths.cwd) def _scan_clusters(paths: PathsHandler): @@ -160,7 +157,8 @@ def _scan_clusters(paths: PathsHandler): @kubectl.command(context_settings=CONTEXT_SETTINGS) @pass_paths -def discover(paths: PathsHandler): +@pass_environment +def discover(environment: dict, paths: PathsHandler): """ Do a scan down the tree of subdirectories looking for k8s clusters metadata files. Open up a menu with all the found items, where you can pick up and configure it on your .kubeconfig file. @@ -185,4 +183,4 @@ def discover(paths: PathsHandler): region=cluster_data["data"]["region"], ) - _configure(cluster_info, layer_path) + _configure(environment, cluster_info, layer_path) diff --git a/leverage/modules/tf.py b/leverage/modules/tf.py index 7439a3f..5848c26 100644 --- a/leverage/modules/tf.py +++ b/leverage/modules/tf.py @@ -28,11 +28,7 @@ def tofu(state): counterparts in the container. 
diff --git a/leverage/modules/tf.py b/leverage/modules/tf.py
index 7439a3f..5848c26 100644
--- a/leverage/modules/tf.py
+++ b/leverage/modules/tf.py
@@ -28,11 +28,7 @@ def tofu(state):
     counterparts in the container.
     For example as in `leverage tofu apply -auto-approve` or `leverage tofu init -reconfigure`
     """
-    credentials_env_vars = {
-        "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file),
-        "AWS_CONFIG_FILE": str(state.paths.aws_config_file),
-    }
-    state.runner = TFRunner(binary=state.paths.tf_binary, env_vars=credentials_env_vars)
+    state.runner = TFRunner(binary=state.paths.tf_binary, env_vars=state.environment)
 
 
 @click.group()
@@ -44,11 +40,7 @@ def terraform(state):
     counterparts in the container.
     For example as in `leverage terraform apply -auto-approve` or `leverage terraform init -reconfigure`
     """
-    credentials_env_vars = {
-        "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file),
-        "AWS_CONFIG_FILE": str(state.paths.aws_config_file),
-    }
-    state.runner = TFRunner(binary=state.paths.tf_binary, terraform=True, env_vars=credentials_env_vars)
+    state.runner = TFRunner(binary=state.paths.tf_binary, terraform=True, env_vars=state.environment)
 
 
 CONTEXT_SETTINGS = {"ignore_unknown_options": True}
diff --git a/leverage/modules/tfautomv.py b/leverage/modules/tfautomv.py
index 7f0eb6e..9cc03ee 100644
--- a/leverage/modules/tfautomv.py
+++ b/leverage/modules/tfautomv.py
@@ -13,19 +13,16 @@ def tfautomv(state, args):
     """Run TFAutomv commands in the context of the current project."""
     tf_default_args_string = " ".join(tf_default_args())
 
-    credentials_env_vars = {
-        "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file),
-        "AWS_CONFIG_FILE": str(state.paths.aws_config_file),
-        "TF_CLI_ARGS_init": tf_default_args_string,
-        "TF_CLI_ARGS_plan": tf_default_args_string,
-    }
+    state.environment["TF_CLI_ARGS_init"] = tf_default_args_string
+    state.environment["TF_CLI_ARGS_plan"] = tf_default_args_string
+
     state.runner = Runner(
         binary="tfautomv",
         error_message=(
             f"TFAutomv not found on system. "
             f"Please install it following the instructions at: https://github.com/busser/tfautomv?tab=readme-ov-file#installation"
         ),
-        env_vars=credentials_env_vars,
+        env_vars=state.environment,
     )
 
     tf_binary = "tofu" if not state.paths.tf_binary else state.paths.tf_binary

From 07e521d8f69298cc9e4b426048ef36fe5844a1d3 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Sat, 20 Dec 2025 13:02:47 -0300
Subject: [PATCH 37/46] Make sure no extra arguments are accepted

---
 leverage/modules/kubectl.py |  8 +++-----
 leverage/modules/utils.py   | 19 ++++++++++---------
 2 files changed, 13 insertions(+), 14 deletions(-)

diff --git a/leverage/modules/kubectl.py b/leverage/modules/kubectl.py
index 2229869..0867f47 100644
--- a/leverage/modules/kubectl.py
+++ b/leverage/modules/kubectl.py
@@ -29,8 +29,6 @@ class MetadataTypes(Enum):
     K8S_CLUSTER = "k8s-eks-cluster"
 
 
-CONTEXT_SETTINGS = {"ignore_unknown_options": True}
-
 METADATA_FILENAME = "metadata.yaml"
 
 
@@ -121,7 +119,7 @@ def _get_eks_kube_config(paths: PathsHandler, environment: dict, layer_path: Pat
     return aws_eks_cmd + f" --region {region}"
 
 
-@kubectl.command(context_settings=CONTEXT_SETTINGS)
+@kubectl.command()
 @pass_paths
 @pass_environment
 def configure(environment: dict, paths: PathsHandler):
@@ -134,7 +132,7 @@ def _scan_clusters(paths: PathsHandler):
     """
     Scan all the subdirectories in search of "cluster" metadata files.
     """
-    for root, dirs, files in os.walk(paths.paths.cwd):
+    for root, dirs, files in os.walk(paths.cwd):
         # exclude hidden directories
         dirs[:] = [d for d in dirs if d[0] != "."]
 
@@ -155,7 +153,7 @@ def _scan_clusters(paths: PathsHandler):
         yield Path(root), data
 
 
-@kubectl.command(context_settings=CONTEXT_SETTINGS)
+@kubectl.command()
 @pass_paths
 @pass_environment
 def discover(environment: dict, paths: PathsHandler):
diff --git a/leverage/modules/utils.py b/leverage/modules/utils.py
index 5e894a0..8f60779 100644
--- a/leverage/modules/utils.py
+++ b/leverage/modules/utils.py
@@ -34,13 +34,14 @@ def _handle_subcommand(
         # Run the command directly
         if pre_invocation_callback:
             pre_invocation_callback()
-        if exit_code := runner.run(*args):
-            raise Exit(exit_code)
-
+        exit_code = runner.run(*args)
+        raise Exit(exit_code)
+
+    subcommand = context.command.commands.get(subcommand)
+    # Check that the subcommand arguments are valid
+    subcommand.make_context(info_name=subcommand.name, args=list(args)[args.index(subcommand.name) + 1:], parent=context)
+    # Invoke wrapped command
+    if not subcommand.params:
+        context.invoke(subcommand)
     else:
-        # Invoke wrapped command
-        subcommand = context.command.commands.get(subcommand)
-        if not subcommand.params:
-            context.invoke(subcommand)
-        else:
-            context.forward(subcommand)
+        context.forward(subcommand)
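
Patch 37 leans on click's `Command.make_context`, which parses and validates a command's arguments without invoking it, so stray arguments fail early instead of being silently ignored. A self-contained sketch of that behaviour (assumes click is installed; the `greet` command is hypothetical):

    import click

    @click.command()
    @click.option("--name")
    def greet(name):
        click.echo(f"Hello {name}")

    # Well-formed arguments parse cleanly and yield a context object.
    ctx = greet.make_context(info_name="greet", args=["--name", "world"])

    # An unexpected extra argument raises a UsageError during parsing.
    try:
        greet.make_context(info_name="greet", args=["--name", "world", "stray"])
    except click.UsageError as error:
        print(f"Rejected: {error}")
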
""" - for root, dirs, files in os.walk(paths.paths.cwd): + for root, dirs, files in os.walk(paths.cwd): # exclude hidden directories dirs[:] = [d for d in dirs if d[0] != "."] @@ -155,7 +153,7 @@ def _scan_clusters(paths: PathsHandler): yield Path(root), data -@kubectl.command(context_settings=CONTEXT_SETTINGS) +@kubectl.command() @pass_paths @pass_environment def discover(environment: dict, paths: PathsHandler): diff --git a/leverage/modules/utils.py b/leverage/modules/utils.py index 5e894a0..8f60779 100644 --- a/leverage/modules/utils.py +++ b/leverage/modules/utils.py @@ -34,13 +34,14 @@ def _handle_subcommand( # Run the command directly if pre_invocation_callback: pre_invocation_callback() - if exit_code := runner.run(*args): - raise Exit(exit_code) - + exit_code = runner.run(*args) + raise Exit(exit_code) + + subcommand = context.command.commands.get(subcommand) + # Check that the subcommand arguments are valid + subcommand.make_context(info_name=subcommand.name, args=list(args)[args.index(subcommand.name) + 1:], parent=context) + # Invoke wrapped command + if not subcommand.params: + context.invoke(subcommand) else: - # Invoke wrapped command - subcommand = context.command.commands.get(subcommand) - if not subcommand.params: - context.invoke(subcommand) - else: - context.forward(subcommand) + context.forward(subcommand) From 9a28de85e07ffbde16f2fe4c117d4e566a312446 Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Tue, 6 Jan 2026 15:51:21 -0300 Subject: [PATCH 38/46] Fix typo --- leverage/modules/auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/leverage/modules/auth.py b/leverage/modules/auth.py index 23964ff..b2f8df3 100644 --- a/leverage/modules/auth.py +++ b/leverage/modules/auth.py @@ -24,7 +24,7 @@ def get_layer_profile(raw_profile: str, config_updater: ConfigUpdater, tf_profil raise SkipProfile # if it is exactly that variable, we already know the layer profile is tf_profile - layer_profile = tf_profile if raw_profile in ("${var.profile}", "each.value.profile") else None + layer_profile = tf_profile if raw_profile in ("var.profile", "each.value.profile") else None # replace variables with their corresponding values raw = ( From b3367080ff0d950691d56f1af3e3f18f6572158f Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Tue, 6 Jan 2026 15:53:03 -0300 Subject: [PATCH 39/46] Fix small issues in credentials module --- leverage/modules/credentials.py | 27 ++++++++++++--------------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/leverage/modules/credentials.py b/leverage/modules/credentials.py index 9b53bc5..140c2ac 100644 --- a/leverage/modules/credentials.py +++ b/leverage/modules/credentials.py @@ -2,9 +2,10 @@ Credentials managing module. """ +import re import csv import json -import re +import shutil from pathlib import Path from functools import wraps from typing import Optional, Union @@ -287,17 +288,13 @@ def credentials(state): else: logger.info("Reading info from build.env") - credentials_env_vars = { - "AWS_SHARED_CREDENTIALS_FILE": str(state.paths.aws_credentials_file), - "AWS_CONFIG_FILE": str(state.paths.aws_config_file), - } state.runner = Runner( binary="aws", error_message=( f"AWS CLI not found on system. 
" f"Please install it following the instructions at: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html" ), - env_vars=credentials_env_vars, + env_vars=state.environment, ) @@ -375,7 +372,7 @@ def _profile_is_configured(awscli: Runner, profile: str): Returns: bool: Whether the profile was already configured or not. """ - exit_code, _ = awscli.exec("configure", "list", "--profile", profile) + exit_code, _, _ = awscli.exec("configure", "list", "--profile", profile) return not exit_code @@ -444,12 +441,12 @@ def configure_credentials( if make_backup: logger.info("Backing up credentials file.") - Path(paths.aws_credentials_file).copy(Path(paths.aws_credentials_file).with_suffix(".bkp")) + shutil.copy(paths.aws_credentials_file, paths.aws_credentials_file.with_suffix(".bkp")) values = {"aws_access_key_id": key_id, "aws_secret_access_key": secret_key} for key, value in values.items(): - exit_code, output = awscli.exec("configure", "set", key, value, "--profile", profile) + exit_code, output, _ = awscli.exec("configure", "set", key, value, "--profile", profile) if exit_code: raise ExitError(exit_code, f"AWS CLI error: {output}") @@ -470,7 +467,7 @@ def _credentials_are_valid(awscli: Runner, profile: str): Returns: bool: Whether the credentials are valid. """ - error_code, output = awscli.exec("sts", "get-caller-identity", "--profile", profile) + error_code, output, _ = awscli.exec("sts", "get-caller-identity", "--profile", profile) return error_code != 255 and "InvalidClientTokenId" not in output @@ -485,7 +482,7 @@ def _get_management_account_id(awscli: Runner, profile: str): Returns: str: Management account id. """ - exit_code, caller_identity = awscli.exec("sts", "get-caller-identity", "--output", "json", "--profile", profile) + exit_code, caller_identity, _ = awscli.exec("sts", "get-caller-identity", "--output", "json", "--profile", profile) if exit_code: raise ExitError(exit_code, f"AWS CLI error: {caller_identity}") @@ -504,7 +501,7 @@ def _get_organization_accounts(awscli: Runner, profile: str, project_name: str): Returns: dict: Mapping of organization accounts names to ids. """ - exit_code, organization_accounts = awscli.exec( + exit_code, organization_accounts, _ = awscli.exec( "organizations", "list-accounts", "--output", "json", "--profile", profile ) @@ -533,7 +530,7 @@ def _get_mfa_serial(awscli: Runner, profile: str): Returns: str: MFA device serial. 
""" - exit_code, mfa_devices = awscli.exec("iam", "list-mfa-devices", "--output", "json", "--profile", profile) + exit_code, mfa_devices, _ = awscli.exec("iam", "list-mfa-devices", "--output", "json", "--profile", profile) if exit_code: raise ExitError(exit_code, f"AWS CLI error: {mfa_devices}") mfa_devices = json.loads(mfa_devices) @@ -561,7 +558,7 @@ def configure_profile(awscli: Runner, profile: str, values: dict): """ logger.info(f"\tConfiguring profile [bold]{profile}[/bold]") for key, value in values.items(): - exit_code, output = awscli.exec("configure", "set", key, value, "--profile", profile) + exit_code, output, _ = awscli.exec("configure", "set", key, value, "--profile", profile) if exit_code: raise ExitError(exit_code, f"AWS CLI error: {output}") @@ -617,7 +614,7 @@ def configure_accounts_profiles( account_profiles[f"{short_name}-{account_name}-{PROFILES[_type]['profile_role']}"] = account_profile logger.info("Backing up account profiles file.") - Path(paths.aws_config_file).copy(Path(paths.aws_config_file).with_suffix(".bkp")) + shutil.copy(paths.aws_config_file, paths.aws_config_file.with_suffix(".bkp")) for profile_identifier, profile_values in account_profiles.items(): configure_profile(profile_identifier, profile_values) From e84f874e973d23e451f09c542e55dbb7abb2447f Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Tue, 6 Jan 2026 16:02:51 -0300 Subject: [PATCH 40/46] Simplify error handling when running commands --- leverage/modules/runner.py | 21 +++++++++++---------- leverage/modules/tf.py | 25 ++++++++----------------- leverage/modules/tfrunner.py | 16 +++++++++------- 3 files changed, 28 insertions(+), 34 deletions(-) diff --git a/leverage/modules/runner.py b/leverage/modules/runner.py index 4c9da9c..b579315 100644 --- a/leverage/modules/runner.py +++ b/leverage/modules/runner.py @@ -64,6 +64,7 @@ def run( env_vars: Optional[Dict[str, str]] = None, working_dir: Optional[Path] = None, interactive: bool = True, + raises: bool = False, ) -> Union[int, Tuple[int, str, str]]: """ Execute command with the binary. @@ -73,6 +74,7 @@ def run( env_vars: Environment variables to set during execution (overrides instance env_vars) working_dir: Working directory for command execution interactive: If True, run interactively. 
                If False, capture output
+            raises: If True, raise an ExitError if the command fails
 
         Returns:
             If interactive=True: Exit code (int)
             If interactive=False: Tuple of (exit_code, stdout, stderr)
@@ -93,17 +95,15 @@ def run(
         logger.debug(f"Working directory: {working_dir or Path.cwd()}")
         logger.debug(f"Additional environment variables: {merged_env_vars}")
 
-        if interactive:
-            # Interactive execution
-            process = subprocess.run(command, env=env, cwd=working_dir)
-            return process.returncode
-        else:
-            # Silent execution with output capture
-            process = subprocess.run(command, env=env, cwd=working_dir, capture_output=True, text=True)
-            return process.returncode, process.stdout.strip(), process.stderr.strip()
+        process = subprocess.run(command, env=env, cwd=working_dir, capture_output=not interactive, text=not interactive)
+
+        if raises and process.returncode:
+            raise ExitError(process.returncode, f"Command execution failed: {process.stderr.strip() if process.stderr else ''}")
+
+        return process.returncode if interactive else (process.returncode, process.stdout.strip(), process.stderr.strip())
 
     def exec(
-        self, *args: str, env_vars: Optional[Dict[str, str]] = None, working_dir: Optional[Path] = None
+        self, *args: str, env_vars: Optional[Dict[str, str]] = None, working_dir: Optional[Path] = None, raises: bool = True
     ) -> Tuple[int, str, str]:
         """
         Execute command with the binary in non-interactive mode (captures output).
@@ -114,11 +114,12 @@ def exec(
             *args: Command arguments to pass to the binary
             env_vars: Environment variables to set during execution (overrides instance env_vars)
             working_dir: Working directory for command execution
+            raises: If True, raise an ExitError if the command fails. If False, return the exit code.
 
         Returns:
             Tuple of (exit_code, stdout, stderr)
         """
-        return self.run(*args, env_vars=env_vars, working_dir=working_dir, interactive=False)
+        return self.run(*args, env_vars=env_vars, working_dir=working_dir, interactive=False, raises=raises)
 
     def __repr__(self):
         return f"Runner(binary_input='{self.binary_input}', binary_path='{self.binary_path}')"
diff --git a/leverage/modules/tf.py b/leverage/modules/tf.py
index 5848c26..3a7124b 100644
--- a/leverage/modules/tf.py
+++ b/leverage/modules/tf.py
@@ -11,7 +11,6 @@
 from leverage._utils import ExitError, parse_tf_file
 from leverage._internals import pass_paths, pass_runner, pass_state
 from leverage._backend_config import get_backend_key, set_backend_key
-from leverage.modules.auth import refresh_layer_credentials, check_sso_token
 
 
 REGION = r"(global|([a-z]{2}(-gov)?)-(central|(north|south)?(east|west)?)-\d)"
@@ -168,8 +167,7 @@ def force_unlock(tf, paths: PathsHandler, lock_id):
     """Force unlock the state file."""
     check_sso_token(paths)
     refresh_layer_credentials(paths)
-    if exit_code := tf.run("force-unlock", lock_id):
-        raise Exit(exit_code)
+    tf.run("force-unlock", lock_id)
 
 
 @click.command()
 @pass_paths
 @pass_runner
 def validate(tf, paths: PathsHandler):
     """Validate code of the current directory.
Previous initialization might be needed.""" check_sso_token(paths) refresh_layer_credentials(paths) - if exit_code := tf.run("validate", *tf_default_args()): - raise Exit(exit_code) + tf.run("validate", *tf_default_args()) @click.command("validate-layout") @@ -196,8 +193,7 @@ def validate_layout(paths): @pass_runner def _import(tf, address, _id): """Import a resource.""" - if exit_code := tf.run("import", *tf_default_args(), address, _id): - raise Exit(exit_code) + tf.run("import", *tf_default_args(), address, _id) @click.command("refresh-credentials") @@ -330,8 +326,7 @@ def _init(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: P check_sso_token(paths) refresh_layer_credentials(paths) - if exit_code := tf.run("init", *init_args, working_dir=working_dir): - raise Exit(exit_code) + tf.run("init", *init_args, working_dir=working_dir) @pass_paths @@ -341,8 +336,7 @@ def _plan(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: P check_sso_token(paths) refresh_layer_credentials(paths) - if exit_code := tf.run("plan", *tf_default_args(), *args, working_dir=working_dir): - raise Exit(exit_code) + tf.run("plan", *tf_default_args(), *args, working_dir=working_dir) def has_a_plan_file(args: Sequence[str]) -> bool: @@ -409,8 +403,7 @@ def _apply(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: check_sso_token(paths) refresh_layer_credentials(paths) - if exit_code := tf.run("apply", *default_args, *args, working_dir=working_dir): - raise Exit(exit_code) + tf.run("apply", *default_args, *args, working_dir=working_dir) @pass_paths @@ -420,8 +413,7 @@ def _output(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: check_sso_token(paths) refresh_layer_credentials(paths) - if exit_code := tf.run("output", *args, working_dir=working_dir): - raise Exit(exit_code) + tf.run("output", *args, working_dir=working_dir) @pass_paths @@ -431,8 +423,7 @@ def _destroy(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir check_sso_token(paths) refresh_layer_credentials(paths) - if exit_code := tf.run("destroy", *tf_default_args(), *args, working_dir=working_dir): - raise Exit(exit_code) + tf.run("destroy", *tf_default_args(), *args, working_dir=working_dir) # ########################################################################### diff --git a/leverage/modules/tfrunner.py b/leverage/modules/tfrunner.py index 351a43a..a46e38e 100644 --- a/leverage/modules/tfrunner.py +++ b/leverage/modules/tfrunner.py @@ -2,6 +2,8 @@ from pathlib import Path from typing import Dict, Optional +from click.exceptions import Exit + from leverage._utils import ExitError from leverage.modules.runner import Runner @@ -52,7 +54,7 @@ def run( *args: str, env_vars: Optional[Dict[str, str]] = None, working_dir: Optional[Path] = None, - interactive: bool = True, + raises: bool = True, ): """ Run the Terraform/OpenTofu binary with the given arguments. @@ -61,15 +63,14 @@ def run( *args: Command and arguments to pass (e.g., 'plan', '-out=plan.tfplan') env_vars: Environment variables for this specific execution working_dir: Working directory for command execution - interactive: If True, run interactively. If False, capture output + raises: If True, raise an ExitError if the command fails. If False, return the exit code. 
Returns: - If interactive=True: Exit code (int) - If interactive=False: Tuple of (exit_code, stdout, stderr) + Exit code (int) """ - return super().run(*args, env_vars=env_vars, working_dir=working_dir, interactive=interactive) + return super().run(*args, env_vars=env_vars, working_dir=working_dir, raises=raises) - def exec(self, *args: str, env_vars: Optional[Dict[str, str]] = None, working_dir: Optional[Path] = None): + def exec(self, *args: str, env_vars: Optional[Dict[str, str]] = None, working_dir: Optional[Path] = None, raises: bool = False): """ Execute the Terraform/OpenTofu binary in non-interactive mode (captures output). @@ -79,8 +80,9 @@ def exec(self, *args: str, env_vars: Optional[Dict[str, str]] = None, working_di *args: Command and arguments to pass (e.g., 'plan', '-out=plan.tfplan') env_vars: Environment variables for this specific execution working_dir: Working directory for command execution + raises: If True, raise an ExitError if the command fails. If False, return the exit code. Returns: Tuple of (exit_code, stdout, stderr) """ - return self.run(*args, env_vars=env_vars, working_dir=working_dir, interactive=False) + return super().run(*args, env_vars=env_vars, working_dir=working_dir, interactive=False, raises=raises) From 7e44cf6dfaf6c8c0a3e92ee6cf4660fb140498f9 Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Tue, 6 Jan 2026 16:03:33 -0300 Subject: [PATCH 41/46] Implement authentication with MFA --- leverage/modules/auth.py | 170 ++++++++++++++++++++++++++++++++++- leverage/modules/aws.py | 21 +---- leverage/modules/kubectl.py | 28 ++---- leverage/modules/tf.py | 30 ++----- leverage/modules/tfautomv.py | 6 +- leverage/path.py | 22 ++--- 6 files changed, 202 insertions(+), 75 deletions(-) diff --git a/leverage/modules/auth.py b/leverage/modules/auth.py index b2f8df3..43143c2 100644 --- a/leverage/modules/auth.py +++ b/leverage/modules/auth.py @@ -1,12 +1,16 @@ import time import json from pathlib import Path -from datetime import datetime +from datetime import datetime, timedelta +from functools import wraps from configparser import NoSectionError, NoOptionError import boto3 -from botocore.exceptions import ClientError +import click +from dateutil.tz import tzutc from configupdater import ConfigUpdater +from botocore.session import get_session +from botocore.exceptions import ClientError from leverage import logger from leverage.path import PathsHandler @@ -94,6 +98,49 @@ def get_sso_access_token(sso_token_file: Path) -> str: return json.loads(sso_token_file.read_text())["accessToken"] +def _perform_authentication(paths: PathsHandler): + """Perform authentication checks and credential refresh. + + This function contains the core authentication logic that checks for SSO or MFA + configuration and refreshes credentials accordingly. Only authenticates when + in a layer location. + + Args: + paths: PathsHandler instance containing project paths and configuration + """ + if paths.get_location_type() == "layer": + if paths.common_conf.get("sso_enabled", False): + check_sso_token(paths) + refresh_layer_credentials(paths) + elif paths.mfa_enabled: + refresh_layer_credentials_mfa(paths) + + +def authenticate(command): + """Decorator to require authentication before running a command. + + This decorator extracts the PathsHandler from the Click context and performs + authentication checks before executing the wrapped command. It handles both SSO + and MFA authentication based on the project configuration. 
+ + Usage: + @click.command() + @authenticate + @pass_paths + @pass_runner + def some_command(tf: TFRunner, paths: PathsHandler, args): + # command logic + """ + @wraps(command) + def new_command(*args, **kwargs): + ctx = click.get_current_context() + paths = ctx.obj.paths + _perform_authentication(paths) + return command(*args, **kwargs) + + return new_command + + def check_sso_token(paths: PathsHandler): """Check for the existence and validity of the SSO token to be used to get credentials.""" @@ -203,3 +250,122 @@ def refresh_layer_credentials(paths: PathsHandler): }, ) logger.info(f"Credentials for {account_name} account written successfully.") + +def refresh_layer_credentials_mfa(paths: PathsHandler): + tf_profile, raw_profiles = get_profiles(paths) + config_updater = ConfigUpdater() + config_updater.read(paths.aws_config_file) + + # Create STS client with source profile credentials + session = get_session() + session.set_config_variable("credentials_file", paths.aws_credentials_file.as_posix()) + session.set_config_variable("config_file", paths.aws_config_file.as_posix()) + + for raw_profile in raw_profiles: + if "local." in raw_profile: + # ignore values referencing to local variables + # we will search for profiles directly in locals.tf instead + continue + + # if it is exactly that variable, we already know the layer profile is tf_profile + layer_profile = tf_profile if raw_profile in ("${var.profile}", "each.value.profile") else None + + # replace variables with their corresponding values + profile_name = raw_profile.replace("${var.profile}", tf_profile).replace("${var.project}", paths.project).replace("each.value.profile", tf_profile) + + # if layer_profile wasn't set, use profile_name + if layer_profile is None: + layer_profile = profile_name + + logger.info(f"Attempting to get temporary credentials for {profile_name} profile.") + if profile := config_updater.get_section(f"profile {profile_name}"): + role_arn = profile.get("role_arn").value + mfa_serial = profile.get("mfa_serial").value + source_profile = profile.get("source_profile").value + else: + raise ExitError( + 40, + f"Credentials for profile {profile_name} have not been properly configured. 
Please check your configuration.\n" + f"Check the following link for possible solutions: https://leverage.binbash.co/user-guide/troubleshooting/credentials/") + + cache_file = paths.aws_cache_dir / profile_name + if cache_file.exists(): + logger.debug(f"Found cached credentials in {cache_file}.") + cached_credentials = json.loads(cache_file.read_text()) + + expiration = datetime.strptime(cached_credentials.get("Expiration"), "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=tzutc()) + renewal = datetime.now(tzutc()) + timedelta(seconds=(30 * 60)) + if renewal < expiration: + logger.info("Using cached credentials.") + continue + + else: + logger.debug("No cached credentials found.") + + client_session = boto3.Session( + botocore_session=session, + profile_name=source_profile + ) + client = client_session.client("sts") + credentials = None + for _ in range(3): + try: + mfa_token_code = click.prompt("Enter MFA token code", type=str) + except click.exceptions.Abort: + raise ExitError(1, "Aborted by user.") + + try: + logger.debug(f"Assuming role {role_arn} for {profile_name} profile with serial {mfa_serial} and token code {mfa_token_code}") + credentials = client.assume_role( + RoleArn=role_arn, + SourceIdentity=source_profile, + RoleSessionName=f"leverage-{profile_name}", + SerialNumber=mfa_serial, + TokenCode=mfa_token_code, + ) + credentials = credentials["Credentials"] + credentials["Expiration"] = credentials["Expiration"].strftime("%Y-%m-%dT%H:%M:%SZ") + cache_file.write_text(json.dumps(credentials)) + break + + except ClientError as error: + if "invalid MFA" in error.response["Error"]["Message"]: + logger.error("Unable to get valid credentials. Please try again.") + continue + elif error.response["Error"]["Code"] == "AccessDeniedException": + raise ExitError( + 40, + f"User does not have permission to assume role [bold]{role_arn}[/bold]" + " in this account.\nPlease check with your administrator or try" + " checking your credentials configuration.", + ) + elif error.response["Error"]["Code"] == "ExpiredToken": + logger.error("Token has expired. Please try again.") + continue + elif error.response["Error"]["Code"] == "ValidationError" and "Invalid length for parameter TokenCode" in error.response["Error"]["Message"]: + logger.error("Invalid token length, it must be 6 digits long. Please try again.") + continue + elif "An error occurred" in error.response["Error"]["Message"]: + raise ExitError( + 50, + f"Error assuming role: {error}" + ) + + if credentials is None: + raise ExitError(60, "Failed to get credentials after 3 attempts. 
Please try again later.") + + # write credentials on aws//credentials (create the file if it doesn't exist first) + paths.aws_credentials_file.touch(exist_ok=True) + credentials_updater = ConfigUpdater() + credentials_updater.read(paths.aws_credentials_file) + + update_config_section( + credentials_updater, + layer_profile, + data={ + "aws_access_key_id": credentials["AccessKeyId"], + "aws_secret_access_key": credentials["SecretAccessKey"], + "aws_session_token": credentials["SessionToken"], + }, + ) + logger.info(f"Credentials written successfully.") diff --git a/leverage/modules/aws.py b/leverage/modules/aws.py index 7cb416e..bb951d5 100644 --- a/leverage/modules/aws.py +++ b/leverage/modules/aws.py @@ -15,7 +15,8 @@ from leverage.modules.utils import _handle_subcommand from leverage._utils import get_or_create_section, ExitError from leverage._internals import pass_state, pass_runner, pass_paths -from leverage.modules.auth import get_sso_access_token, check_sso_token, refresh_layer_credentials +from leverage.modules.auth import get_sso_access_token +from leverage.modules.auth import _perform_authentication as perform_authentication CONTEXT_SETTINGS = {"ignore_unknown_options": True} @@ -24,21 +25,6 @@ AWS_SSO_LOGIN_URL = "{sso_url}/#/device?user_code={user_code}" -@pass_paths -def refresh_aws_credentials(paths: PathsHandler) -> None: - """ - Refresh the AWS credentials for the current project. - """ - check_sso_token(paths) - - try: # if we are not in a layer, we don't need to refresh the credentials - paths.check_for_layer_location() - except ExitError: - return - - refresh_layer_credentials(paths) - - def get_account_roles(sso_client: Any, access_token: str) -> Dict[str, Dict[str, str]]: """ Fetch the accounts and roles from the user. @@ -125,7 +111,8 @@ def aws(context: click.Context, state: Any, args: Tuple[str, ...]) -> None: env_vars=state.environment, ) - _handle_subcommand(context=context, runner=state.runner, args=args, pre_invocation_callback=refresh_aws_credentials) + authenticate = pass_paths(lambda paths: perform_authentication(paths)) + _handle_subcommand(context=context, runner=state.runner, args=args, pre_invocation_callback=authenticate) @aws.group(invoke_without_command=True, add_help_option=False, context_settings=CONTEXT_SETTINGS) diff --git a/leverage/modules/kubectl.py b/leverage/modules/kubectl.py index 0867f47..9600a7c 100644 --- a/leverage/modules/kubectl.py +++ b/leverage/modules/kubectl.py @@ -14,7 +14,7 @@ from leverage.modules.runner import Runner from leverage.modules.tfrunner import TFRunner from leverage.modules.utils import _handle_subcommand -from leverage.modules.auth import check_sso_token, refresh_layer_credentials +from leverage.modules.auth import _perform_authentication as perform_authentication from leverage._internals import pass_state, pass_paths, pass_environment @@ -32,21 +32,6 @@ class MetadataTypes(Enum): METADATA_FILENAME = "metadata.yaml" -@pass_paths -def refresh_kubectl_credentials(paths: PathsHandler) -> None: - """ - Refresh the AWS credentials for the current project to be used by kubectl. 
- """ - check_sso_token(paths) - - try: # if we are not in a layer, we don't need to refresh the credentials - paths.check_for_layer_location() - except ExitError: - return - - refresh_layer_credentials(paths) - - @click.group(invoke_without_command=True, context_settings={"ignore_unknown_options": True}) @click.argument("args", nargs=-1, type=click.UNPROCESSED) @pass_state @@ -67,8 +52,9 @@ def kubectl(context, state, args): env_vars=state.environment, ) + authenticate = pass_paths(lambda paths: perform_authentication(paths)) _handle_subcommand( - context=context, runner=state.runner, args=args, pre_invocation_callback=refresh_kubectl_credentials + context=context, runner=state.runner, args=args, pre_invocation_callback=authenticate ) @@ -106,7 +92,7 @@ def _get_eks_kube_config(paths: PathsHandler, environment: dict, layer_path: Pat except ExitError: raise ExitError(e.exit_code, f"Could not locate TF binary.") - refresh_kubectl_credentials() + perform_authentication(paths) exit_code, output, error = tfrunner.exec("output", "-no-color", working_dir=layer_path) if exit_code: raise ExitError(exit_code, f"Failed to get EKS kube config: {error}") @@ -128,11 +114,11 @@ def configure(environment: dict, paths: PathsHandler): _configure(environment, layer_path=paths.cwd) -def _scan_clusters(paths: PathsHandler): +def _scan_clusters(cwd: Path): """ Scan all the subdirectories in search of "cluster" metadata files. """ - for root, dirs, files in os.walk(paths.cwd): + for root, dirs, files in os.walk(cwd): # exclude hidden directories dirs[:] = [d for d in dirs if d[0] != "."] @@ -161,7 +147,7 @@ def discover(environment: dict, paths: PathsHandler): Do a scan down the tree of subdirectories looking for k8s clusters metadata files. Open up a menu with all the found items, where you can pick up and configure it on your .kubeconfig file. """ - cluster_files = [(path, data) for path, data in _scan_clusters(paths)] + cluster_files = [(path, data) for path, data in _scan_clusters(paths.cwd)] if not cluster_files: raise ExitError(1, "No clusters found.") diff --git a/leverage/modules/tf.py b/leverage/modules/tf.py index 3a7124b..06f3d94 100644 --- a/leverage/modules/tf.py +++ b/leverage/modules/tf.py @@ -11,6 +11,7 @@ from leverage._utils import ExitError, parse_tf_file from leverage._internals import pass_paths, pass_runner, pass_state from leverage._backend_config import get_backend_key, set_backend_key +from leverage.modules.auth import authenticate REGION = r"(global|([a-z]{2}(-gov)?)-(central|(north|south)?(east|west)?)-\d)" @@ -161,22 +162,20 @@ def _format(tf, args): @click.command("force-unlock") @click.argument("lock_id", metavar="LOCK_ID") +@authenticate @pass_paths @pass_runner def force_unlock(tf, paths: PathsHandler, lock_id): """Force unlock the state file.""" - check_sso_token(paths) - refresh_layer_credentials(paths) tf.run("force-unlock", lock_id) @click.command() +@authenticate @pass_paths @pass_runner def validate(tf, paths: PathsHandler): """Validate code of the current directory. 
     Previous initialization might be needed."""
-    check_sso_token(paths)
-    refresh_layer_credentials(paths)
     tf.run("validate", *tf_default_args())
 
 
@@ -197,12 +196,11 @@ def _import(tf, address, _id):
 
 
 @click.command("refresh-credentials")
+@authenticate
 @pass_paths
 def refresh_credentials(paths):
     """Refresh the AWS credentials used on the current layer."""
     paths.check_for_layer_location()
-    check_sso_token(paths)
-    refresh_layer_credentials(paths)
 
 
 # ###########################################################################
@@ -311,6 +309,7 @@ def validate_for_all_commands(layer, skip_validation=False):
 # ###########################################################################
 # BASE COMMAND EXECUTORS
 # ###########################################################################
+@authenticate
 @pass_paths
 @pass_runner
 def _init(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: Path):
@@ -323,19 +322,14 @@ def _init(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: P
     )
     init_args = (*filtered_args, f"-backend-config={paths.backend_tfvars}")
 
-    check_sso_token(paths)
-    refresh_layer_credentials(paths)
-
     tf.run("init", *init_args, working_dir=working_dir)
 
 
+@authenticate
 @pass_paths
 @pass_runner
 def _plan(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: Path):
     """Generate an execution plan for this layer."""
-    check_sso_token(paths)
-    refresh_layer_credentials(paths)
-
     tf.run("plan", *tf_default_args(), *args, working_dir=working_dir)
 
 
@@ -393,6 +387,7 @@ def has_a_plan_file(args: Sequence[str]) -> bool:
     return True
 
 
+@authenticate
 @pass_paths
 @pass_runner
 def _apply(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: Path):
@@ -400,29 +395,22 @@ def _apply(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir:
     default_args = () if has_a_plan_file(args) else tf_default_args()
     logger.debug(f"Default args passed to apply command: {default_args}")
 
-    check_sso_token(paths)
-    refresh_layer_credentials(paths)
-
     tf.run("apply", *default_args, *args, working_dir=working_dir)
 
 
+@authenticate
 @pass_paths
 @pass_runner
 def _output(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: Path):
     """Show all output variables of this layer."""
-    check_sso_token(paths)
-    refresh_layer_credentials(paths)
-
     tf.run("output", *args, working_dir=working_dir)
 
 
+@authenticate
 @pass_paths
 @pass_runner
 def _destroy(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: Path):
     """Destroy infrastructure in this layer."""
-    check_sso_token(paths)
-    refresh_layer_credentials(paths)
-
     tf.run("destroy", *tf_default_args(), *args, working_dir=working_dir)
 
 
 # ###########################################################################
diff --git a/leverage/modules/tfautomv.py b/leverage/modules/tfautomv.py
index 9cc03ee..9f67d51 100644
--- a/leverage/modules/tfautomv.py
+++ b/leverage/modules/tfautomv.py
@@ -4,11 +4,12 @@
 from leverage._internals import pass_state
 from leverage.modules.runner import Runner
 from leverage.modules.tf import tf_default_args
-from leverage.modules.auth import check_sso_token, refresh_layer_credentials
+from leverage.modules.auth import authenticate
 
 
 @click.command()
 @click.argument("args", nargs=-1)
+@authenticate
 @pass_state
 def tfautomv(state, args):
     """Run TFAutomv commands in the context of the current project."""
     tf_default_args_string = " ".join(tf_default_args())
@@ -33,8 +34,5 @@ def tfautomv(state, args):
     )
     tfautomv_args = (*filtered_args, f"--terraform-bin={tf_binary}")
 
-    check_sso_token(state.paths)
-    refresh_layer_credentials(state.paths)
-
     if exit_code := state.runner.run(*tfautomv_args):
         raise Exit(exit_code)
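With `@authenticate` in place, commands no longer call `check_sso_token`/`refresh_layer_credentials` themselves. A minimal sketch of the resulting command shape, following the decorator order given in the `authenticate` docstring above (`do_work` is a hypothetical example, not part of this series):

    import click

    from leverage._internals import pass_paths, pass_runner
    from leverage.modules.auth import authenticate


    @click.command()
    @authenticate  # refreshes SSO or MFA credentials, and only when standing in a layer
    @pass_paths
    @pass_runner
    def do_work(tf, paths):
        # credentials are already fresh here; a failed run surfaces as an
        # ExitError through the runner's raises=True default
        tf.run("plan")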
diff --git a/leverage/path.py b/leverage/path.py
index 6f75935..a9a48b6 100644
--- a/leverage/path.py
+++ b/leverage/path.py
@@ -129,7 +129,7 @@ def get_build_script_path(filename="build.py"):
         cur_path = cur_path.parent
 
 
-class PathsHandler:
+class PathsHandler:  # TODO: Turn this class into something that represents a leverage project
     COMMON_TF_VARS = "common.tfvars"
     ACCOUNT_TF_VARS = "account.tfvars"
     BACKEND_TF_VARS = "backend.tfvars"
@@ -153,6 +153,12 @@ def __init__(self, env_conf: dict):
         account_config = self.account_config_dir / self.ACCOUNT_TF_VARS
         self.account_conf = hcl2.loads(account_config.read_text()) if account_config.exists() else {}
 
+        backend_config = self.account_config_dir / self.BACKEND_TF_VARS
+        self.backend_conf = hcl2.loads(backend_config.read_text()) if backend_config.exists() else {}
+
+        # Get MFA enabled status
+        self.mfa_enabled = env_conf.get("MFA_ENABLED", "false") == "true"
+
         # Get project name
         self.project = self.common_conf.get("project", env_conf.get("PROJECT", False))
         if not self.project:
@@ -175,14 +181,6 @@ def __init__(self, env_conf: dict):
         else:
             self.tf_binary = tf_binary
 
-    def update_cwd(self, new_cwd):
-        self.cwd = new_cwd
-        acc_folder = new_cwd.relative_to(self.root_dir).parts[0]
-
-        self.account_config_dir = self.root_dir / acc_folder / "config"
-        account_config_path = self.account_config_dir / self.ACCOUNT_TF_VARS
-        self.account_conf = hcl2.loads(account_config_path.read_text())
-
     @property
     def common_tfvars(self):
         return f"{self.root_dir}/config/{self.COMMON_TF_VARS}"
@@ -202,6 +200,10 @@ def aws_config_file(self):
     @property
     def aws_credentials_file(self):
         return self.aws_credentials_dir / "credentials"
+
+    @property
+    def aws_cache_dir(self):
+        return self.aws_credentials_dir / "cache"
 
     @property
     def sso_token_file(self):
@@ -243,7 +245,7 @@ def check_for_layer_location(self, path: Path = None):
         if path in (self.root_dir, self.account_dir):
             raise ExitError(
                 1,
-                "This command cannot run neither in the root of the project or in" " the root directory of an account.",
+                "This command can be run neither in the root of the project nor in the root directory of an account.",
             )
 
         if not list(path.glob("*.tf")):

From 33bd30ca7ea83c96ef1672e6e06f9ad14662988a Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Tue, 6 Jan 2026 16:05:46 -0300
Subject: [PATCH 42/46] Simplify error handling when running commands

---
 tests/test_modules/test_tfrunner.py | 32 +++++++++++------------------
 1 file changed, 12 insertions(+), 20 deletions(-)

diff --git a/tests/test_modules/test_tfrunner.py b/tests/test_modules/test_tfrunner.py
index 57d43e8..fa54546 100644
--- a/tests/test_modules/test_tfrunner.py
+++ b/tests/test_modules/test_tfrunner.py
@@ -134,6 +134,8 @@ def test_run_without_env_vars(mocker):
     assert mock_subprocess.call_args[0][0] == ["/usr/bin/tofu", "plan", "-out=plan.tfplan"]
     assert mock_subprocess.call_args[1]["env"] == os.environ.copy()
     assert mock_subprocess.call_args[1]["cwd"] is None
+    assert mock_subprocess.call_args[1]["capture_output"] is False
+    assert mock_subprocess.call_args[1]["text"] is False
 
 
 def test_run_with_instance_env_vars_only(mocker):
@@ -154,6 +156,8 @@ def test_run_with_instance_env_vars_only(mocker):
     assert mock_subprocess.call_args[0][0] == ["/usr/bin/tofu", "apply", "-auto-approve"]
     assert mock_subprocess.call_args[1]["env"] == expected_env
     assert mock_subprocess.call_args[1]["cwd"] is None
+    assert mock_subprocess.call_args[1]["capture_output"] is False
+    assert mock_subprocess.call_args[1]["text"] is False
 
 
 def
test_run_with_run_env_vars_only(mocker): @@ -174,6 +178,8 @@ def test_run_with_run_env_vars_only(mocker): assert mock_subprocess.call_args[0][0] == ["/usr/bin/tofu", "plan"] assert mock_subprocess.call_args[1]["env"] == expected_env assert mock_subprocess.call_args[1]["cwd"] is None + assert mock_subprocess.call_args[1]["capture_output"] is False + assert mock_subprocess.call_args[1]["text"] is False def test_run_merges_instance_and_run_env_vars(mocker): @@ -198,6 +204,8 @@ def test_run_merges_instance_and_run_env_vars(mocker): assert mock_subprocess.call_args[0][0] == ["/usr/bin/tofu", "apply"] assert mock_subprocess.call_args[1]["env"] == expected_env assert mock_subprocess.call_args[1]["cwd"] is None + assert mock_subprocess.call_args[1]["capture_output"] is False + assert mock_subprocess.call_args[1]["text"] is False def test_run_env_vars_override_instance_env_vars(mocker): @@ -221,26 +229,8 @@ def test_run_env_vars_override_instance_env_vars(mocker): assert mock_subprocess.call_args[0][0] == ["/usr/bin/tofu", "plan"] assert mock_subprocess.call_args[1]["env"] == expected_env assert mock_subprocess.call_args[1]["cwd"] is None - - -def test_run_interactive_false(mocker): - mocker.patch("shutil.which", return_value="/usr/bin/tofu") - mock_subprocess = mocker.patch("subprocess.run") - # First call for --version, second for the actual command - version_output = type("obj", (object,), {"stdout": "OpenTofu v1.6.0", "returncode": 0})() - run_output = type("obj", (object,), {"stdout": "terraform output", "stderr": "", "returncode": 0})() - mock_subprocess.side_effect = [version_output, run_output] - - runner = TFRunner(binary="") - exit_code, stdout, stderr = runner.run("output", "-json", interactive=False) - - assert exit_code == 0 - assert stdout == "terraform output" # Already stripped - assert stderr == "" - # Check the last call (the actual run) - assert mock_subprocess.call_args[0][0] == ["/usr/bin/tofu", "output", "-json"] - assert "capture_output" in mock_subprocess.call_args[1] - assert mock_subprocess.call_args[1]["capture_output"] is True + assert mock_subprocess.call_args[1]["capture_output"] is False + assert mock_subprocess.call_args[1]["text"] is False def test_run_with_multiple_args(mocker): @@ -263,6 +253,8 @@ def test_run_with_multiple_args(mocker): ] assert mock_subprocess.call_args[1]["env"] == os.environ.copy() assert mock_subprocess.call_args[1]["cwd"] is None + assert mock_subprocess.call_args[1]["capture_output"] is False + assert mock_subprocess.call_args[1]["text"] is False def test_run_preserves_instance_env_vars_across_multiple_calls(mocker): From fa516834654ab7f8014bed3af86c63bf977edce1 Mon Sep 17 00:00:00 2001 From: Angelo Fenoglio Date: Tue, 6 Jan 2026 16:07:00 -0300 Subject: [PATCH 43/46] Adapt tests to dockerless design --- tests/conftest.py | 185 +++++++++++++++++++++++++ tests/test_containers/__init__.py | 9 -- tests/test_containers/test_aws.py | 64 --------- tests/test_containers/test_kubectl.py | 164 ---------------------- tests/test_containers/test_leverage.py | 79 ----------- tests/test_containers/test_tf.py | 61 -------- tests/test_modules/test_kubectl.py | 47 +++++++ tests/test_modules/test_runner.py | 139 +++++++++++++++---- tests/test_modules/test_tf.py | 47 ++++--- tests/test_path.py | 8 +- 10 files changed, 376 insertions(+), 427 deletions(-) delete mode 100644 tests/test_containers/__init__.py delete mode 100644 tests/test_containers/test_aws.py delete mode 100644 tests/test_containers/test_kubectl.py delete mode 100644 
tests/test_containers/test_leverage.py delete mode 100644 tests/test_containers/test_tf.py create mode 100644 tests/test_modules/test_kubectl.py diff --git a/tests/conftest.py b/tests/conftest.py index f699846..6d78b93 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,12 +1,17 @@ +import tempfile +import subprocess from pathlib import Path import pytest import click +from click.testing import CliRunner from leverage import path as lepath +from leverage import conf from leverage._internals import State from leverage._internals import Module from leverage.logger import _configure_logger, _leverage_logger +from leverage.path import PathsHandler BUILD_SCRIPTS = Path("./tests/build_scripts/").resolve() BUILD_SCRIPT = BUILD_SCRIPTS / "simple_build.py" @@ -54,3 +59,183 @@ def muted_click_context(click_context): def propagate_logs(): _configure_logger(logger=_leverage_logger) _leverage_logger.propagate = True + + +@pytest.fixture +def leverage_project(tmp_path): + """ + Creates a mock Leverage project directory structure based on leverage-dir-structure. + + Structure: + bb/ + ├── .git/ + ├── build.env (PROJECT=bb, MFA_ENABLED=false) + ├── build.py + ├── config/ + │ ├── common_variables.tf + │ └── common.tfvars + └── account/ + ├── config/ + │ ├── account.tfvars + │ └── backend.tfvars + ├── global/ + │ ├── sso/ + │ └── organizations/ + └── us-east-1/ + ├── base-tf-backend/ + │ ├── base-tf-backend.tf + │ └── backend.tfvars + └── security-base/ + ├── security-base.tf + └── backend.tfvars + + Returns: + Path: Root directory of the mock project + """ + # Create root directory + tmp_path = tmp_path if tmp_path else tempfile.mkdtemp() + root = tmp_path / "bb" + root.mkdir(parents=True) + + # Initialize git repository + subprocess.run(["git", "init"], cwd=root) + + # Create build.env file with specified content + build_env = root / "build.env" + build_env.write_text("PROJECT=bb\nMFA_ENABLED=false\n") + + # Create build.py file with specified content + build_py = root / "build.py" + build_py.write_text("# Build script\n") + + # Create config directory and files + config_dir = root / "config" + config_dir.mkdir() + (config_dir / "common_variables.tf").write_text("# Common variables\n") + (config_dir / "common.tfvars").write_text("# Common tfvars\n") + + # Create account directory structure + account_dir = root / "account" + account_dir.mkdir() + + # Create account/config + account_config = account_dir / "config" + account_config.mkdir() + (account_config / "account.tfvars").write_text( + 'environment = "account"\n' + 'sso_role = "test-sso-role"\n' + ) + (account_config / "backend.tfvars").write_text( + 'profile = "bb-account-profile"\n' + 'bucket = "bb-account-terraform-backend"\n' + 'dynamodb_table = "bb-account-terraform-backend-lock"\n' + 'region = "us-east-1"\n' + ) + + # Create account/global + global_dir = account_dir / "global" + global_dir.mkdir() + (global_dir / "sso").mkdir() + (global_dir / "organizations").mkdir() + + # Create account/us-east-1 + us_east_1 = account_dir / "us-east-1" + us_east_1.mkdir() + + # Create account/us-east-1/base-tf-backend + base_tf_backend = us_east_1 / "base-tf-backend" + base_tf_backend.mkdir() + (base_tf_backend / "base-tf-backend.tf").write_text("# Base TF backend configuration\n") + (base_tf_backend / "backend.tfvars").write_text("# Backend tfvars\n") + + # Create account/us-east-1/security-base + security_base = us_east_1 / "security-base" + security_base.mkdir() + (security_base / "security-base.tf").write_text("# Security base configuration\n") + 
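+    # A minimal backend "s3" block: gives the layer a realistic state key for
+    # tests that parse config.tf (assumption: this is what the _backend_config
+    # helpers expect to find).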
(security_base / "config.tf").write_text( + 'terraform {\n' + ' backend "s3" {\n' + ' key = "account/us-east-1/security-base/terraform.tfstate"\n' + ' }\n' + '}\n' + ) + (security_base / "backend.tfvars").write_text( + 'profile = "bb-account-profile"\n' + 'bucket = "bb-account-terraform-backend"\n' + 'dynamodb_table = "bb-account-terraform-backend-lock"\n' + 'region = "us-east-1"\n' + ) + + return root + +@pytest.fixture +def leverage_runner(monkeypatch): + """ + Creates a CliRunner context manager with patched path functions and authentication. + + Usage: + with leverage_runner(leverage_project) as runner: + runner.invoke(leverage, ["command", "args"]) + + The fixture automatically patches: + - get_root_path and get_working_path in both leverage.path and leverage.conf + - Path.cwd() to return the working directory + - check_sso_token and refresh_layer_credentials to skip authentication + + Args: + leverage_directory: Path to the root of the mock project + working_directory: Optional working directory (defaults to account/us-east-1/security-base) + """ + from contextlib import contextmanager + from leverage.modules import tf, auth + + @contextmanager + def runner(leverage_directory): + # Determine working directory + working_directory = Path(leverage_directory) / "account" / "us-east-1" / "security-base" + + # Ensure paths are Path objects + leverage_directory = Path(leverage_directory) + working_directory = Path(working_directory) + + # Apply patches to leverage.path module + monkeypatch.setattr(lepath, "get_root_path", lambda: leverage_directory) + monkeypatch.setattr(lepath, "get_working_path", lambda: working_directory) + monkeypatch.setattr(Path, "cwd", lambda: working_directory) + + # Also patch in conf module since it imports these functions directly + monkeypatch.setattr(conf, "get_root_path", lambda: leverage_directory) + monkeypatch.setattr(conf, "get_working_path", lambda: working_directory) + + # Patch authentication functions to avoid SSO/credential checks + monkeypatch.setattr(auth, "check_sso_token", lambda *args, **kwargs: None) + monkeypatch.setattr(auth, "refresh_layer_credentials", lambda *args, **kwargs: None) + monkeypatch.setattr(auth, "refresh_layer_credentials_mfa", lambda *args, **kwargs: None) + + # Create and yield the CLI runner + cli_runner = CliRunner() + yield cli_runner + + return runner + +@pytest.fixture +def leverage_context(leverage_project, monkeypatch): + def context(verbose=True, build_script_name="build.py"): + # Set current working directory to security-base layer + working_dir = leverage_project / "account" / "us-east-1" / "security-base" + + # Mock Path.cwd() to return the working directory + monkeypatch.setattr(Path, "cwd", lambda: working_dir) + + # Update get_working_path to return the security-base directory + monkeypatch.setattr(lepath, "get_working_path", lambda: working_dir) + + state = State() + state.verbosity = verbose + state.module = Module(name=build_script_name) + state.config = conf.load() + state.paths = PathsHandler() + + return click.Context(command=click.Command("leverage"), obj=state) + + return context \ No newline at end of file diff --git a/tests/test_containers/__init__.py b/tests/test_containers/__init__.py deleted file mode 100644 index d82826a..0000000 --- a/tests/test_containers/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from unittest.mock import MagicMock, patch, Mock - -FAKE_ENV = {"TERRAFORM_IMAGE_TAG": "test", "PROJECT": "test"} - -FAKE_HOST_CONFIG = { - "NetworkMode": "default", - "SecurityOpt": ["label:disable"], 
- "Mounts": [], -} diff --git a/tests/test_containers/test_aws.py b/tests/test_containers/test_aws.py deleted file mode 100644 index c2f0686..0000000 --- a/tests/test_containers/test_aws.py +++ /dev/null @@ -1,64 +0,0 @@ -import pytest -from unittest.mock import Mock, patch - -from leverage._utils import ExitError -from leverage.container import AWSCLIContainer -from tests.test_containers import container_fixture_factory - -SSO_CODE_MSG = """ -Attempting to automatically open the SSO authorization page in your default browser. -If the browser does not open or you wish to use a different device to authorize this request, open the following URL: - -https://device.sso.us-east-2.amazonaws.com/ - -Then enter the code: - -TEST-CODE - -""" - - -@pytest.fixture -def aws_container(muted_click_context): - return container_fixture_factory(AWSCLIContainer) - - -@patch.object(AWSCLIContainer, "docker_logs", Mock(return_value=SSO_CODE_MSG)) -def test_get_sso_code(aws_container): - """ - Test that the get_sso_code method is able to extract correctly the SSO code from the `aws sso login` output. - """ - assert aws_container.get_sso_code(Mock()) == "TEST-CODE" - - -@patch.object(AWSCLIContainer, "docker_logs", Mock(return_value="NO CODE!")) -@patch.object(AWSCLIContainer, "AWS_SSO_CODE_WAIT_SECONDS", 0) -def test_get_sso_code_exit_error(aws_container, propagate_logs, caplog): - """ - Test that we don't get into an infinite loop if the SSO code never shows up. - """ - with pytest.raises(ExitError, match="1"): - aws_container.get_sso_code(Mock()) - assert caplog.messages[0] == "Get SSO code timed-out" - - -@patch.object(AWSCLIContainer, "get_sso_region", Mock(return_value="us-east-1")) -@patch.object(AWSCLIContainer, "get_sso_code", Mock(return_value="TEST-CODE")) -@patch.object(AWSCLIContainer, "docker_logs", Mock(side_effect=(SSO_CODE_MSG, "Logged in successfully!"))) -@patch("webbrowser.open_new_tab") -def test_sso_login(mocked_new_tab, aws_container, fake_os_user, propagate_logs, caplog): - """ - Test that we call the correct script and open the correct url. 
- """ - sso_start_url = "https://test.sso.us-east-1.amazonaws.com" - test_link = "https://test.sso.us-east-1.amazonaws.com/#/device?user_code=TEST-CODE" - with patch.dict(aws_container.paths.common_conf, {"sso_start_url": sso_start_url}): - aws_container.sso_login() - - container_args = aws_container.client.api.create_container.call_args_list[0][1] - # make sure we: point to the correct script - assert container_args["command"] == "/home/leverage/scripts/aws-sso/aws-sso-login.sh" - # the browser tab points to the correct code and the correct region - assert mocked_new_tab.call_args[0][0] == test_link - # and the fallback method is printed - assert caplog.messages[0] == aws_container.FALLBACK_LINK_MSG.format(link=test_link) diff --git a/tests/test_containers/test_kubectl.py b/tests/test_containers/test_kubectl.py deleted file mode 100644 index 26a9f37..0000000 --- a/tests/test_containers/test_kubectl.py +++ /dev/null @@ -1,164 +0,0 @@ -from pathlib import Path, PosixPath -from unittest import mock -from unittest.mock import Mock, patch - -import pytest -from click.exceptions import Exit - -from leverage.containers.kubectl import KubeCtlContainer, ClusterInfo -from leverage.path import PathsHandler -from tests.test_containers import container_fixture_factory - -AWS_EKS_UPDATE_KUBECONFIG = "aws eks update-kubeconfig --name test-cluster --profile test-profile --region us-east-1" - - -@pytest.fixture -def kubectl_container(muted_click_context): - return container_fixture_factory(KubeCtlContainer) - - -############## -# test utils # -############## - - -def test_get_eks_kube_config(kubectl_container): - tf_output = "\r\naws eks update-kubeconfig --name test-cluster --profile test-profile\r\n" - with patch.object(kubectl_container, "_start_with_output", return_value=(0, tf_output)): - kubectl_container.paths.cwd = Path("/project/account/us-east-1/cluster") - cmd = kubectl_container._get_eks_kube_config() - - assert cmd == AWS_EKS_UPDATE_KUBECONFIG - - -def test_get_eks_kube_config_tf_output_error(kubectl_container): - """ - Test that if the TF OUTPUT fails, we get an error back. - """ - with patch.object(kubectl_container, "_start_with_output", return_value=(1, "ERROR!")): - with pytest.raises(Exit): - kubectl_container._get_eks_kube_config() - - -################# -# test commands # -################# - - -def test_start_shell(kubectl_container): - """ - Since this is a shell, we can only test with which parameters the container is spawned. - It must have aws credentials and the .kube config folder sets properly. 
- """ - kubectl_container.start_shell() - container_args = kubectl_container.client.api.create_container.call_args_list[0][1] - - # we want a shell, so -> /bin/bash with no entrypoint - assert container_args["command"] == "/bin/bash" - assert container_args["entrypoint"] == "" - - # make sure we are pointing to the AWS credentials - assert container_args["environment"]["AWS_CONFIG_FILE"] == "/home/leverage/tmp/test/config" - assert container_args["environment"]["AWS_SHARED_CREDENTIALS_FILE"] == "/home/leverage/tmp/test/credentials" - - # make sure we mounted the .kube config folder - print(container_args["host_config"]) - assert next(m for m in container_args["host_config"]["Mounts"] if m["Target"] == "/home/leverage/.kube") - - # and the aws config folder - assert next(m for m in container_args["host_config"]["Mounts"] if m["Target"] == "/home/leverage/tmp/test") - - -# don't rely on the filesystem -@patch.object(PathsHandler, "check_for_cluster_layer", Mock()) -# nor terraform -@patch.object(KubeCtlContainer, "_get_eks_kube_config", Mock(return_value=AWS_EKS_UPDATE_KUBECONFIG)) -def test_configure(kubectl_container, fake_os_user): - with patch.object(kubectl_container, "_start", return_value=0) as mock_start: - kubectl_container.configure() - - assert mock_start.call_args[0][0] == AWS_EKS_UPDATE_KUBECONFIG - - -##################### -# test auth methods # -##################### - - -def test_start_shell_mfa(kubectl_container): - """ - Make sure the command is executed through the proper MFA script. - """ - kubectl_container.enable_mfa() - # mock the __exit__ of the context manager to avoid the restoration of the values - # otherwise the asserts around /.aws/ wouldn't be possible - with patch("leverage._utils.AwsCredsEntryPoint.__exit__"): - kubectl_container.start_shell() - container_args = kubectl_container.client.api.create_container.call_args_list[0][1] - - # we want a shell, so -> /bin/bash with no entrypoint - assert container_args["command"] == "/bin/bash" - assert container_args["entrypoint"] == "/home/leverage/scripts/aws-mfa/aws-mfa-entrypoint.sh -- " - - # make sure we are pointing to the right AWS credentials: /.aws/ folder for MFA - assert container_args["environment"]["AWS_CONFIG_FILE"] == "/home/leverage/.aws/test/config" - assert container_args["environment"]["AWS_SHARED_CREDENTIALS_FILE"] == "/home/leverage/.aws/test/credentials" - - -@patch("leverage.container.refresh_layer_credentials") -def test_start_shell_sso(mock_refresh, kubectl_container): - """ - Make sure the SSO flag is set properly before the command. - """ - kubectl_container.enable_sso() - kubectl_container._check_sso_token = Mock(return_value=True) - kubectl_container.start_shell() - container_args = kubectl_container.client.api.create_container.call_args_list[0][1] - - # we want a shell, so -> /bin/bash and refresh_sso_credentials flag - assert container_args["command"] == "/bin/bash" - assert mock_refresh.assert_called_once - - # make sure we are pointing to the right AWS credentials: /tmp/ folder for SSO - assert container_args["environment"]["AWS_CONFIG_FILE"] == "/home/leverage/tmp/test/config" - assert container_args["environment"]["AWS_SHARED_CREDENTIALS_FILE"] == "/home/leverage/tmp/test/credentials" - - -def test_scan_clusters(kubectl_container: KubeCtlContainer): - """ - Test that we can find valid metadata.yaml presents in the down the path of the filesystem tree where we are staying. 
- """ - # mock and call - with mock.patch("os.walk") as mock_walk: - with patch("builtins.open"): - with mock.patch("ruamel.yaml.safe_load") as mock_yaml: - mock_walk.return_value = [ - ("/foo", ["bar"], ("baz",)), - ("/foo/bar", [], ("spam", "metadata.yaml")), - ] - mock_yaml.return_value = {"type": "k8s-eks-cluster"} - - first_found = next(kubectl_container._scan_clusters()) - - # compare - assert first_found[0] == PosixPath("/foo/bar/") - assert first_found[1]["type"] == "k8s-eks-cluster" - - -def test_discover(kubectl_container: KubeCtlContainer): - """ - Test that, given a layer with a valid cluster file, we are able to call the k8s configuration routine. - """ - mocked_cluster_data = { - "type": "k8s-eks-cluster", - "data": {"cluster_name": "test", "profile": "test", "region": "us-east-1"}, - } - with patch.object(kubectl_container, "_scan_clusters", return_value=[(Path.cwd(), mocked_cluster_data)]): - with patch("simple_term_menu.TerminalMenu") as mkd_show: - mkd_show.return_value.show.return_value = 0 # simulate choosing the first result - with patch.object(kubectl_container.paths, "update_cwd") as mkd_update: - with patch.object(kubectl_container, "configure") as mkd_configure: - kubectl_container.discover() - - assert mkd_update.called - assert isinstance(mkd_configure.call_args_list[0][0][0], ClusterInfo) diff --git a/tests/test_containers/test_leverage.py b/tests/test_containers/test_leverage.py deleted file mode 100644 index 5ccf74f..0000000 --- a/tests/test_containers/test_leverage.py +++ /dev/null @@ -1,79 +0,0 @@ -from unittest import mock - -import pytest - -from leverage._utils import ExitError -from leverage.container import LeverageContainer -from tests.test_containers import container_fixture_factory - - -@pytest.fixture -def leverage_container(muted_click_context): - return container_fixture_factory(LeverageContainer) - - -def test_mounts(muted_click_context): - container = container_fixture_factory( - LeverageContainer, mounts=(("/usr/bin", "/usr/bin"), ("/tmp/file.txt", "/tmp/file.txt")) - ) - - assert container.client.api.create_host_config.call_args_list[0][1]["mounts"] == [ - {"Target": "/usr/bin", "Source": "/usr/bin", "Type": "bind", "ReadOnly": False}, - {"Target": "/tmp/file.txt", "Source": "/tmp/file.txt", "Type": "bind", "ReadOnly": False}, - ] - - -def test_env_vars(muted_click_context): - container = container_fixture_factory(LeverageContainer, env_vars={"testing": 123, "foo": "bar"}) - container.start(container.SHELL) - - container_args = container.client.api.create_container.call_args_list[0][1] - assert container_args["environment"] == {"foo": "bar", "testing": 123} - - -def test_ensure_image_already_available(leverage_container: LeverageContainer, fake_os_user, propagate_logs, caplog): - """ - Test that the local image is not re-built when is already available locally. - """ - # already available - with mock.patch.object(leverage_container.client.api, "images", return_value=True) as mocked_images: - leverage_container.ensure_image() - - assert mocked_images.call_args_list[0][0][0] == "binbash/leverage-toolbox:test-5678-1234" - assert caplog.messages[0] == "Checking for local docker image, tag: test-5678-1234..." - assert "OK" in caplog.messages[1] - - -def test_ensure_image_failed(leverage_container: LeverageContainer, fake_os_user, propagate_logs, caplog): - """ - Test that we get a friendly error if re-building the image fails. 
- """ - build_response = [{"errorDetail": "Something went wrong"}] - # not available - with mock.patch.object(leverage_container.client.api, "images", return_value=False): - with mock.patch.object(leverage_container.client.api, "build", return_value=build_response) as mocked_build: - with pytest.raises(ExitError, match="Failed"): - leverage_container.ensure_image() - - assert caplog.messages[1] == "Image not found, building it..." - assert caplog.messages[2] == "Failed building local image: Something went wrong" - - -def test_ensure_image(leverage_container: LeverageContainer, fake_os_user, propagate_logs, caplog): - """ - Test that the local image is not available locally, thus it has to be re-built. - """ - build_response = [{"stream": "Successfully built"}] - # not available - with mock.patch.object(leverage_container.client.api, "images", return_value=False): - with mock.patch.object(leverage_container.client.api, "build", return_value=build_response) as mocked_build: - leverage_container.ensure_image() - - assert mocked_build.call_args_list[0][1]["buildargs"] == { - "GID": "5678", - "UID": "1234", - "UNAME": "leverage", - "IMAGE_TAG": "test", - } - assert caplog.messages[1] == "Image not found, building it..." - assert "OK" in caplog.messages[2] diff --git a/tests/test_containers/test_tf.py b/tests/test_containers/test_tf.py deleted file mode 100644 index 49754e4..0000000 --- a/tests/test_containers/test_tf.py +++ /dev/null @@ -1,61 +0,0 @@ -from unittest import mock - -import pytest - -from leverage.container import TFContainer -from tests.test_containers import container_fixture_factory - - -@pytest.fixture -def tf_container(muted_click_context, monkeypatch): - monkeypatch.setenv("TF_PLUGIN_CACHE_DIR", "/home/testing/.terraform/cache") - return container_fixture_factory(TFContainer) - - -def test_tf_plugin_cache_dir(tf_container): - """ - Given `TF_PLUGIN_CACHE_DIR` is set as an env var on the host - we expect it to be on the container too, and also as a mounted folder. 
- """ - # call any command to trigger a container creation - tf_container.start_shell() - container_args = tf_container.client.api.create_container.call_args[1] - - # make sure the env var is on place - assert container_args["environment"]["TF_PLUGIN_CACHE_DIR"] == "/home/testing/.terraform/cache" - - # and the cache folder mounted - assert next(m for m in container_args["host_config"]["Mounts"] if m["Target"] == "/home/testing/.terraform/cache") - - -@mock.patch("leverage.container.refresh_layer_credentials") -def test_refresh_credentials(mock_refresh, tf_container): - tf_container.enable_sso() - tf_container.refresh_credentials() - container_args = tf_container.client.api.create_container.call_args_list[0][1] - - # we want a shell, so -> /bin/bash and refresh_sso_credentials flag - assert container_args["command"] == 'echo "Done."' - mock_refresh.assert_called_once() - - -@mock.patch("leverage.container.refresh_layer_credentials") -def test_auth_method_sso_enabled(mock_refresh, tf_container): - tf_container.sso_enabled = True - tf_container.auth_method() - - mock_refresh.assert_called_once() - - -def test_auth_method_mfa_enabled(tf_container): - tf_container.sso_enabled = False - tf_container.mfa_enabled = True - - assert tf_container.auth_method() == "/home/leverage/scripts/aws-mfa/aws-mfa-entrypoint.sh -- " - - -def test_auth_method_else(tf_container): - tf_container.sso_enabled = False - tf_container.mfa_enabled = False - - assert tf_container.auth_method() == "" diff --git a/tests/test_modules/test_kubectl.py b/tests/test_modules/test_kubectl.py new file mode 100644 index 0000000..1039d31 --- /dev/null +++ b/tests/test_modules/test_kubectl.py @@ -0,0 +1,47 @@ +from pathlib import Path, PosixPath +from unittest import mock +from unittest.mock import Mock, patch + +from click.testing import CliRunner + +from leverage import leverage +from leverage.modules.kubectl import _scan_clusters, ClusterInfo + +def test_scan_clusters(): + """ + Test that we can find valid metadata.yaml presents in the down the path of the filesystem tree where we are staying. + """ + # mock and call + with mock.patch("os.walk") as mock_walk: + with patch("builtins.open"): + with mock.patch("ruamel.yaml.safe_load") as mock_yaml: + mock_walk.return_value = [ + ("/foo", ["bar"], ("baz",)), + ("/foo/bar", [], ("spam", "metadata.yaml")), + ] + mock_yaml.return_value = {"type": "k8s-eks-cluster"} + + first_found = next(_scan_clusters(Path.cwd())) + + # compare + assert first_found[0] == PosixPath("/foo/bar/") + assert first_found[1]["type"] == "k8s-eks-cluster" + + +def test_discover(leverage_project): + """ + Test that, given a layer with a valid cluster file, we are able to call the k8s configuration routine. 
+ """ + mocked_cluster_data = { + "type": "k8s-eks-cluster", + "data": {"cluster_name": "test", "profile": "test", "region": "us-east-1"}, + } + cli_runner = CliRunner() + with cli_runner.isolated_filesystem(leverage_project) as leverage_project_folder: + with patch("leverage.modules.kubectl._scan_clusters", return_value=[(leverage_project_folder, mocked_cluster_data)]) as mkd_scan_clusters: + with patch("simple_term_menu.TerminalMenu") as mkd_show: + mkd_show.return_value.show.return_value = 0 # simulate choosing the first result + with patch("leverage.modules.kubectl._configure") as mkd_configure: + cli_runner.invoke(leverage, ["kubectl", "discover"]) + + assert isinstance(mkd_configure.call_args_list[0][0][1], ClusterInfo) diff --git a/tests/test_modules/test_runner.py b/tests/test_modules/test_runner.py index b588e93..8246712 100644 --- a/tests/test_modules/test_runner.py +++ b/tests/test_modules/test_runner.py @@ -28,52 +28,44 @@ def test_init_with_absolute_path_existing_file(tmp_path): def test_init_with_absolute_path_non_existing_file(tmp_path, mocker): binary_file = tmp_path / "non_existing_binary" - mock_logger = mocker.patch("leverage._utils.logger") - with pytest.raises(ExitError): + with pytest.raises(ExitError) as exc_info: Runner(binary_file) - mock_logger.error.assert_called_once() - error_msg = mock_logger.error.call_args[0][0] + error_msg = str(exc_info.value) assert "not found on system" in error_msg assert str(binary_file) in error_msg def test_init_with_binary_not_in_path(mocker): - mock_logger = mocker.patch("leverage._utils.logger") mocker.patch("shutil.which", return_value=None) - with pytest.raises(ExitError): + with pytest.raises(ExitError) as exc_info: Runner("nonexistent") - mock_logger.error.assert_called_once() - error_msg = mock_logger.error.call_args[0][0] + error_msg = str(exc_info.value) assert "Binary 'nonexistent' not found on system" in error_msg assert "Please install nonexistent" in error_msg def test_init_with_custom_error_message(mocker): custom_error = "Custom error message for missing binary" - mock_logger = mocker.patch("leverage._utils.logger") mocker.patch("shutil.which", return_value=None) - with pytest.raises(ExitError): + with pytest.raises(ExitError) as exc_info: Runner("nonexistent", error_message=custom_error) - mock_logger.error.assert_called_once() - error_msg = mock_logger.error.call_args[0][0] + error_msg = str(exc_info.value) assert error_msg == custom_error def test_init_logs_error_on_missing_binary(mocker): - mock_logger = mocker.patch("leverage._utils.logger") mocker.patch("shutil.which", return_value=None) - with pytest.raises(ExitError): + with pytest.raises(ExitError) as exc_info: Runner("nonexistent") - mock_logger.error.assert_called_once() - error_msg = mock_logger.error.call_args[0][0] + error_msg = str(exc_info.value) assert "Binary 'nonexistent' not found on system" in error_msg @@ -97,7 +89,9 @@ def test_run_interactive_success(mock_runner, mocker): result = mock_runner.run("arg1", "arg2", interactive=True) assert result == 0 - mock_subprocess.assert_called_once_with(["/usr/bin/test_binary", "arg1", "arg2"], env=os.environ.copy(), cwd=None) + mock_subprocess.assert_called_once_with( + ["/usr/bin/test_binary", "arg1", "arg2"], env=os.environ.copy(), cwd=None, capture_output=False, text=False + ) def test_run_interactive_failure(mock_runner, mocker): @@ -133,7 +127,9 @@ def test_run_with_env_vars(mock_runner, mocker): mock_runner.run("arg1", env_vars=env_vars, interactive=True) - 
mock_subprocess.assert_called_once_with(["/usr/bin/test_binary", "arg1"], env=expected_env, cwd=None) + mock_subprocess.assert_called_once_with( + ["/usr/bin/test_binary", "arg1"], env=expected_env, cwd=None, capture_output=False, text=False + ) def test_run_with_working_directory(mock_runner, tmp_path, mocker): @@ -142,7 +138,9 @@ def test_run_with_working_directory(mock_runner, tmp_path, mocker): mock_runner.run("arg1", working_dir=tmp_path, interactive=True) - mock_subprocess.assert_called_once_with(["/usr/bin/test_binary", "arg1"], env=os.environ.copy(), cwd=tmp_path) + mock_subprocess.assert_called_once_with( + ["/usr/bin/test_binary", "arg1"], env=os.environ.copy(), cwd=tmp_path, capture_output=False, text=False + ) def test_run_with_no_args_defaults_to_empty_list(mock_runner, mocker): @@ -151,7 +149,9 @@ def test_run_with_no_args_defaults_to_empty_list(mock_runner, mocker): mock_runner.run() - mock_subprocess.assert_called_once_with(["/usr/bin/test_binary"], env=os.environ.copy(), cwd=None) + mock_subprocess.assert_called_once_with( + ["/usr/bin/test_binary"], env=os.environ.copy(), cwd=None, capture_output=False, text=False + ) def test_run_with_none_args_defaults_to_empty_list(mock_runner, mocker): @@ -160,7 +160,9 @@ def test_run_with_none_args_defaults_to_empty_list(mock_runner, mocker): mock_runner.run() - mock_subprocess.assert_called_once_with(["/usr/bin/test_binary"], env=os.environ.copy(), cwd=None) + mock_subprocess.assert_called_once_with( + ["/usr/bin/test_binary"], env=os.environ.copy(), cwd=None, capture_output=False, text=False + ) def test_run_with_none_env_vars_defaults_to_empty_dict(mock_runner, mocker): @@ -169,7 +171,9 @@ def test_run_with_none_env_vars_defaults_to_empty_dict(mock_runner, mocker): mock_runner.run(env_vars=None) - mock_subprocess.assert_called_once_with(["/usr/bin/test_binary"], env=os.environ.copy(), cwd=None) + mock_subprocess.assert_called_once_with( + ["/usr/bin/test_binary"], env=os.environ.copy(), cwd=None, capture_output=False, text=False + ) def test_run_logs_debug_information(mock_runner, tmp_path, mocker): @@ -307,7 +311,9 @@ def test_run_with_instance_env_vars_only(mocker): runner.run("arg1") - mock_subprocess.assert_called_once_with(["/usr/bin/test", "arg1"], env=expected_env, cwd=None) + mock_subprocess.assert_called_once_with( + ["/usr/bin/test", "arg1"], env=expected_env, cwd=None, capture_output=False, text=False + ) def test_run_merges_instance_and_run_env_vars(mocker): @@ -325,7 +331,9 @@ def test_run_merges_instance_and_run_env_vars(mocker): runner.run("arg1", env_vars=run_env) - mock_subprocess.assert_called_once_with(["/usr/bin/test", "arg1"], env=expected_env, cwd=None) + mock_subprocess.assert_called_once_with( + ["/usr/bin/test", "arg1"], env=expected_env, cwd=None, capture_output=False, text=False + ) def test_run_env_vars_override_instance_env_vars(mocker): @@ -343,7 +351,9 @@ def test_run_env_vars_override_instance_env_vars(mocker): runner.run("arg1", env_vars=run_env) - mock_subprocess.assert_called_once_with(["/usr/bin/test", "arg1"], env=expected_env, cwd=None) + mock_subprocess.assert_called_once_with( + ["/usr/bin/test", "arg1"], env=expected_env, cwd=None, capture_output=False, text=False + ) def test_instance_env_vars_preserved_across_multiple_runs(mocker): @@ -359,12 +369,16 @@ def test_instance_env_vars_preserved_across_multiple_runs(mocker): # First run runner.run("arg1") - mock_subprocess.assert_called_with(["/usr/bin/test", "arg1"], env=expected_env, cwd=None) + mock_subprocess.assert_called_with( + 
["/usr/bin/test", "arg1"], env=expected_env, cwd=None, capture_output=False, text=False + ) # Second run - instance env vars should still be present runner.run("arg2") assert mock_subprocess.call_count == 2 - mock_subprocess.assert_called_with(["/usr/bin/test", "arg2"], env=expected_env, cwd=None) + mock_subprocess.assert_called_with( + ["/usr/bin/test", "arg2"], env=expected_env, cwd=None, capture_output=False, text=False + ) def test_instance_env_vars_not_modified_by_run(mocker): @@ -499,3 +513,74 @@ def test_integration_exec_with_echo(): assert exit_code == 0 assert stdout.strip() == "hello world" assert stderr == "" + + +def test_run_raises_on_failure_when_raises_true(mock_runner, mocker): + mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.returncode = 1 + mock_subprocess.return_value.stdout = "" + mock_subprocess.return_value.stderr = "Command failed" + + with pytest.raises(ExitError) as exc_info: + mock_runner.run("arg1", interactive=False, raises=True) + + assert exc_info.value.exit_code == 1 + assert "Command execution failed: Command failed" in str(exc_info.value) + + +def test_run_does_not_raise_on_failure_when_raises_false(mock_runner, mocker): + mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.returncode = 1 + mock_subprocess.return_value.stdout = "" + mock_subprocess.return_value.stderr = "Command failed" + + result = mock_runner.run("arg1", interactive=False, raises=False) + + assert result == (1, "", "Command failed") + + +def test_run_does_not_raise_on_success_when_raises_true(mock_runner, mocker): + mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.returncode = 0 + mock_subprocess.return_value.stdout = "Success output" + mock_subprocess.return_value.stderr = "" + + result = mock_runner.run("arg1", interactive=False, raises=True) + + assert result == (0, "Success output", "") + + +def test_raises_ignored_in_interactive_mode(mock_runner, mocker): + mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.returncode = 1 + + # Should return exit code 1, not raise (raises is ignored in interactive mode) + result = mock_runner.run("arg1", interactive=True, raises=True) + + assert result == 1 + # Verify no exception was raised + + +def test_exec_raises_by_default(mock_runner, mocker): + mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.returncode = 1 + mock_subprocess.return_value.stdout = "" + mock_subprocess.return_value.stderr = "Exec failed" + + with pytest.raises(ExitError) as exc_info: + mock_runner.exec("arg1") + + assert exc_info.value.exit_code == 1 + + +def test_exec_does_not_raise_when_raises_false(mock_runner, mocker): + mock_subprocess = mocker.patch("subprocess.run") + mock_subprocess.return_value.returncode = 1 + mock_subprocess.return_value.stdout = "" + mock_subprocess.return_value.stderr = "Exec failed" + + exit_code, stdout, stderr = mock_runner.exec("arg1", raises=False) + + assert exit_code == 1 + assert stdout == "" + assert stderr == "Exec failed" diff --git a/tests/test_modules/test_tf.py b/tests/test_modules/test_tf.py index bc68e15..806a7dd 100644 --- a/tests/test_modules/test_tf.py +++ b/tests/test_modules/test_tf.py @@ -1,41 +1,52 @@ -from unittest.mock import patch, Mock +from unittest.mock import patch import pytest -from click import get_current_context -from leverage._internals import State -from leverage.modules.tf import _init +from leverage import leverage from leverage.modules.tf import has_a_plan_file 
 
 
 @pytest.mark.parametrize(
-    "args, expected_value",
+    "args",
     [
-        ([], ["-backend-config=/project/./config/backend.tfvars"]),
-        (["-migrate-state"], ["-migrate-state", "-backend-config=/project/./config/backend.tfvars"]),
-        (["-r1", "-r2"], ["-r1", "-r2", "-backend-config=/project/./config/backend.tfvars"]),
+        ([]),
+        (["-migrate-state"]),
+        (["-r1", "-r2"]),
     ],
 )
-def test_init_arguments(tf_container, args, expected_value):
+def test_init_arguments(leverage_project, leverage_runner, args):
     """
     Test that the arguments for the init command are prepared correctly.
     """
-    with patch.object(tf_container, "start_in_layer", return_value=0) as mocked:
-        _init(args)
+    with leverage_runner(leverage_project) as runner:
+        with patch("leverage.modules.tfrunner.TFRunner.run", return_value=0) as mocked_run:
+            result = runner.invoke(leverage, ["tf", "init", *args])
 
-    assert mocked.call_args_list[0][0][0] == "init"
-    assert " ".join(mocked.call_args_list[0][0][1:]) == " ".join(expected_value)
+    # Check that init was called
+    assert mocked_run.call_args_list[0][0][0] == "init"
 
+    # Check that backend-config is included with the correct path
+    backend_config_path = str(leverage_project / "account" / "config" / "backend.tfvars")
+    backend_config_arg = f"-backend-config={backend_config_path}"
 
-def test_init_with_args(tf_container):
+    # Build expected args: user args + backend-config
+    expected_args = list(args) + [backend_config_arg]
+    actual_args = list(mocked_run.call_args_list[0][0][1:])
+
+    assert actual_args == expected_args
+
+
+def test_init_with_args(leverage_project, leverage_runner):
     """
     Test tf init with arguments.
     """
-    # with patch("dockerpty.exec_command") as mocked_pty:
-    with patch.object(tf_container, "start_in_layer", return_value=0) as mocked:
-        _init(["-migrate-state"])
+    with leverage_runner(leverage_project) as runner:
+        with patch("leverage.modules.tfrunner.TFRunner.run", return_value=0) as mocked_run:
+            result = runner.invoke(leverage, ["tf", "init", "-migrate-state"])
 
-    assert mocked.call_args_list[0][0] == ("init", "-migrate-state", "-backend-config=/project/./config/backend.tfvars")
+    assert mocked_run.call_args_list[0][0][0] == "init"
+    assert mocked_run.call_args_list[0][0][1] == "-migrate-state"
+    assert mocked_run.call_args_list[0][0][2] == f"-backend-config={leverage_project / 'account' / 'config' / 'backend.tfvars'}"
 
 
 @pytest.mark.parametrize(
diff --git a/tests/test_path.py b/tests/test_path.py
index d52f0d6..84c1b3e 100644
--- a/tests/test_path.py
+++ b/tests/test_path.py
@@ -109,18 +109,16 @@ def test_get_build_script_path_no_build_script(dir_structure):
     assert get_build_script_path() is None
 
 
-def test_check_for_cluster_layer(muted_click_context, propagate_logs, caplog):
+def test_check_for_cluster_layer(muted_click_context, propagate_logs):
     """
     Test that if we are not on a cluster layer, we raise an error.
     """
-    paths = PathsHandler({"PROJECT": "test"}, "leverage")
+    paths = PathsHandler({"PROJECT": "test"})
     with patch.object(paths, "check_for_layer_location"):  # assume parent method is already tested
-        with pytest.raises(ExitError):
+        with pytest.raises(ExitError, match="This command can only run at the \[bold\]cluster layer\[/bold\]\."):
            paths.cwd = Path("/random")
            paths.check_for_cluster_layer()
 
-    assert caplog.messages[0] == "This command can only run at the [bold]cluster layer[/bold]."
-
 
 class TestGetProjectPathOrCurrentDir:
     @patch("leverage.path.get_root_path")

From 002d45d31b96e4b684203db4b0f37db846b387f2 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Tue, 6 Jan 2026 16:08:27 -0300
Subject: [PATCH 44/46] Format

---
 leverage/modules/auth.py           | 40 ++++++++++++++++++------------
 leverage/modules/kubectl.py        |  6 ++---
 leverage/modules/runner.py         | 16 +++++++++---
 leverage/modules/tfrunner.py       |  8 +++++-
 leverage/modules/utils.py          |  6 +++--
 leverage/path.py                   |  4 +--
 tests/conftest.py                  | 15 ++++++-----
 tests/test_modules/test_kubectl.py |  5 +++-
 tests/test_modules/test_tf.py      |  5 +++-
 9 files changed, 66 insertions(+), 39 deletions(-)

diff --git a/leverage/modules/auth.py b/leverage/modules/auth.py
index 43143c2..3616563 100644
--- a/leverage/modules/auth.py
+++ b/leverage/modules/auth.py
@@ -131,6 +131,7 @@ def authenticate(command):
         def some_command(tf: TFRunner, paths: PathsHandler, args):
             # command logic
     """
+
     @wraps(command)
     def new_command(*args, **kwargs):
         ctx = click.get_current_context()
@@ -251,6 +252,7 @@ def refresh_layer_credentials(paths: PathsHandler):
         )
         logger.info(f"Credentials for {account_name} account written successfully.")
 
+
 def refresh_layer_credentials_mfa(paths: PathsHandler):
     tf_profile, raw_profiles = get_profiles(paths)
     config_updater = ConfigUpdater()
@@ -271,7 +273,11 @@ def refresh_layer_credentials_mfa(paths: PathsHandler):
         layer_profile = tf_profile if raw_profile in ("${var.profile}", "each.value.profile") else None
 
         # replace variables with their corresponding values
-        profile_name = raw_profile.replace("${var.profile}", tf_profile).replace("${var.project}", paths.project).replace("each.value.profile", tf_profile)
+        profile_name = (
+            raw_profile.replace("${var.profile}", tf_profile)
+            .replace("${var.project}", paths.project)
+            .replace("each.value.profile", tf_profile)
+        )
 
         # if layer_profile wasn't set, use profile_name
         if layer_profile is None:
@@ -286,14 +292,17 @@ def refresh_layer_credentials_mfa(paths: PathsHandler):
             raise ExitError(
                 40,
                 f"Credentials for profile {profile_name} have not been properly configured. Please check your configuration.\n"
-                f"Check the following link for possible solutions: https://leverage.binbash.co/user-guide/troubleshooting/credentials/")
-
+                f"Check the following link for possible solutions: https://leverage.binbash.co/user-guide/troubleshooting/credentials/",
+            )
+
         cache_file = paths.aws_cache_dir / profile_name
         if cache_file.exists():
             logger.debug(f"Found cached credentials in {cache_file}.")
             cached_credentials = json.loads(cache_file.read_text())
-
-            expiration = datetime.strptime(cached_credentials.get("Expiration"), "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=tzutc())
+
+            expiration = datetime.strptime(cached_credentials.get("Expiration"), "%Y-%m-%dT%H:%M:%SZ").replace(
+                tzinfo=tzutc()
+            )
             renewal = datetime.now(tzutc()) + timedelta(seconds=(30 * 60))
             if renewal < expiration:
                 logger.info("Using cached credentials.")
@@ -302,10 +311,7 @@ def refresh_layer_credentials_mfa(paths: PathsHandler):
         else:
             logger.debug("No cached credentials found.")
 
-        client_session = boto3.Session(
-            botocore_session=session,
-            profile_name=source_profile
-        )
+        client_session = boto3.Session(botocore_session=session, profile_name=source_profile)
         client = client_session.client("sts")
         credentials = None
         for _ in range(3):
@@ -315,7 +321,9 @@ def refresh_layer_credentials_mfa(paths: PathsHandler):
                 raise ExitError(1, "Aborted by user.")
 
             try:
-                logger.debug(f"Assuming role {role_arn} for {profile_name} profile with serial {mfa_serial} and token code {mfa_token_code}")
+                logger.debug(
+                    f"Assuming role {role_arn} for {profile_name} profile with serial {mfa_serial} and token code {mfa_token_code}"
+                )
                 credentials = client.assume_role(
                     RoleArn=role_arn,
                     SourceIdentity=source_profile,
@@ -342,15 +350,15 @@ def refresh_layer_credentials_mfa(paths: PathsHandler):
                 elif error.response["Error"]["Code"] == "ExpiredToken":
                     logger.error("Token has expired. Please try again.")
                     continue
-                elif error.response["Error"]["Code"] == "ValidationError" and "Invalid length for parameter TokenCode" in error.response["Error"]["Message"]:
+                elif (
+                    error.response["Error"]["Code"] == "ValidationError"
+                    and "Invalid length for parameter TokenCode" in error.response["Error"]["Message"]
+                ):
                     logger.error("Invalid token length, it must be 6 digits long. Please try again.")
                     continue
                 elif "An error occurred" in error.response["Error"]["Message"]:
-                    raise ExitError(
-                        50,
-                        f"Error assuming role: {error}"
-                    )
-
+                    raise ExitError(50, f"Error assuming role: {error}")
+
     if credentials is None:
         raise ExitError(60, "Failed to get credentials after 3 attempts. Please try again later.")

diff --git a/leverage/modules/kubectl.py b/leverage/modules/kubectl.py
index 9600a7c..75f9903 100644
--- a/leverage/modules/kubectl.py
+++ b/leverage/modules/kubectl.py
@@ -53,12 +53,10 @@ def kubectl(context, state, args):
     )
     authenticate = pass_paths(lambda paths: perform_authentication(paths))
 
-    _handle_subcommand(
-        context=context, runner=state.runner, args=args, pre_invocation_callback=authenticate
-    )
+    _handle_subcommand(context=context, runner=state.runner, args=args, pre_invocation_callback=authenticate)
 
 
-def _configure(environment: dict, ci: ClusterInfo = None,layer_path: Path = None):
+def _configure(environment: dict, ci: ClusterInfo = None, layer_path: Path = None):
     """
     Add the given EKS cluster configuration to the .kube/ files.
""" diff --git a/leverage/modules/runner.py b/leverage/modules/runner.py index b579315..ca3057b 100644 --- a/leverage/modules/runner.py +++ b/leverage/modules/runner.py @@ -95,15 +95,23 @@ def run( logger.debug(f"Working directory: {working_dir or Path.cwd()}") logger.debug(f"Additional environment variables: {merged_env_vars}") - process = subprocess.run(command, env=env, cwd=working_dir, capture_output=not interactive, text=not interactive) + process = subprocess.run( + command, env=env, cwd=working_dir, capture_output=not interactive, text=not interactive + ) if raises and not interactive and process.returncode: - raise ExitError(process.returncode, f"Command execution failed: {process.stderr.strip()}") + raise ExitError(process.returncode, f"Command execution failed: {process.stderr.strip()}") - return process.returncode if interactive else (process.returncode, process.stdout.strip(), process.stderr.strip()) + return ( + process.returncode if interactive else (process.returncode, process.stdout.strip(), process.stderr.strip()) + ) def exec( - self, *args: str, env_vars: Optional[Dict[str, str]] = None, working_dir: Optional[Path] = None, raises: bool = True + self, + *args: str, + env_vars: Optional[Dict[str, str]] = None, + working_dir: Optional[Path] = None, + raises: bool = True, ) -> Tuple[int, str, str]: """ Execute command with the binary in non-interactive mode (captures output). diff --git a/leverage/modules/tfrunner.py b/leverage/modules/tfrunner.py index a46e38e..ee4dd7c 100644 --- a/leverage/modules/tfrunner.py +++ b/leverage/modules/tfrunner.py @@ -70,7 +70,13 @@ def run( """ return super().run(*args, env_vars=env_vars, working_dir=working_dir, raises=raises) - def exec(self, *args: str, env_vars: Optional[Dict[str, str]] = None, working_dir: Optional[Path] = None, raises: bool = False): + def exec( + self, + *args: str, + env_vars: Optional[Dict[str, str]] = None, + working_dir: Optional[Path] = None, + raises: bool = False, + ): """ Execute the Terraform/OpenTofu binary in non-interactive mode (captures output). 
diff --git a/leverage/modules/utils.py b/leverage/modules/utils.py
index 8f60779..eae2de2 100644
--- a/leverage/modules/utils.py
+++ b/leverage/modules/utils.py
@@ -36,10 +36,12 @@ def _handle_subcommand(
             pre_invocation_callback()
         exit_code = runner.run(*args)
         raise Exit(exit_code)
-    
+
     subcommand = context.command.commands.get(subcommand)
     # Check that the subcommand arguments are valid
-    subcommand.make_context(info_name=subcommand.name, args=list(args)[args.index(subcommand.name) + 1:], parent=context)
+    subcommand.make_context(
+        info_name=subcommand.name, args=list(args)[args.index(subcommand.name) + 1 :], parent=context
+    )
     # Invoke wrapped command
     if not subcommand.params:
         context.invoke(subcommand)
diff --git a/leverage/path.py b/leverage/path.py
index a9a48b6..04c35e0 100644
--- a/leverage/path.py
+++ b/leverage/path.py
@@ -129,7 +129,7 @@ def get_build_script_path(filename="build.py"):
         cur_path = cur_path.parent
 
 
-class PathsHandler: # TODO: Turn this class into a something that represents a leverage project
+class PathsHandler:  # TODO: Turn this class into something that represents a leverage project
     COMMON_TF_VARS = "common.tfvars"
     ACCOUNT_TF_VARS = "account.tfvars"
     BACKEND_TF_VARS = "backend.tfvars"
@@ -200,7 +200,7 @@ def aws_config_file(self):
     @property
     def aws_credentials_file(self):
         return self.aws_credentials_dir / "credentials"
-    
+
     @property
     def aws_cache_dir(self):
         return self.aws_credentials_dir / "cache"
diff --git a/tests/conftest.py b/tests/conftest.py
index 6d78b93..559a198 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -121,10 +121,7 @@ def leverage_project(tmp_path):
     # Create account/config
     account_config = account_dir / "config"
     account_config.mkdir()
-    (account_config / "account.tfvars").write_text(
-        'environment = "account"\n'
-        'sso_role = "test-sso-role"\n'
-    )
+    (account_config / "account.tfvars").write_text('environment = "account"\n' 'sso_role = "test-sso-role"\n')
     (account_config / "backend.tfvars").write_text(
         'profile = "bb-account-profile"\n'
         'bucket = "bb-account-terraform-backend"\n'
@@ -153,11 +150,11 @@ def leverage_project(tmp_path):
     security_base.mkdir()
     (security_base / "security-base.tf").write_text("# Security base configuration\n")
     (security_base / "config.tf").write_text(
-        'terraform {\n'
+        "terraform {\n"
         '  backend "s3" {\n'
         '    key = "account/us-east-1/security-base/terraform.tfstate"\n'
-        '  }\n'
-        '}\n'
+        "  }\n"
+        "}\n"
     )
     (security_base / "backend.tfvars").write_text(
         'profile = "bb-account-profile"\n'
@@ -168,6 +165,7 @@ def leverage_project(tmp_path):
 
     return root
 
+
 @pytest.fixture
 def leverage_runner(monkeypatch):
     """
@@ -218,6 +216,7 @@ def runner(leverage_directory):
 
     return runner
 
+
 @pytest.fixture
 def leverage_context(leverage_project, monkeypatch):
     def context(verbose=True, build_script_name="build.py"):
@@ -238,4 +237,4 @@ def context(verbose=True, build_script_name="build.py"):
 
         return click.Context(command=click.Command("leverage"), obj=state)
 
-    return context
\ No newline at end of file
+    return context
diff --git a/tests/test_modules/test_kubectl.py b/tests/test_modules/test_kubectl.py
index 1039d31..35ed597 100644
--- a/tests/test_modules/test_kubectl.py
+++ b/tests/test_modules/test_kubectl.py
@@ -7,6 +7,7 @@
 from leverage import leverage
 from leverage.modules.kubectl import _scan_clusters, ClusterInfo
 
+
 def test_scan_clusters():
     """
     Test that we can find valid metadata.yaml files present down the filesystem tree from where we are standing.
@@ -38,7 +39,9 @@ def test_discover(leverage_project):
     }
     cli_runner = CliRunner()
     with cli_runner.isolated_filesystem(leverage_project) as leverage_project_folder:
-        with patch("leverage.modules.kubectl._scan_clusters", return_value=[(leverage_project_folder, mocked_cluster_data)]) as mkd_scan_clusters:
+        with patch(
+            "leverage.modules.kubectl._scan_clusters", return_value=[(leverage_project_folder, mocked_cluster_data)]
+        ) as mkd_scan_clusters:
             with patch("simple_term_menu.TerminalMenu") as mkd_show:
                 mkd_show.return_value.show.return_value = 0  # simulate choosing the first result
                 with patch("leverage.modules.kubectl._configure") as mkd_configure:
diff --git a/tests/test_modules/test_tf.py b/tests/test_modules/test_tf.py
index 806a7dd..d043f24 100644
--- a/tests/test_modules/test_tf.py
+++ b/tests/test_modules/test_tf.py
@@ -46,7 +46,10 @@ def test_init_with_args(leverage_project, leverage_runner):
 
     assert mocked_run.call_args_list[0][0][0] == "init"
     assert mocked_run.call_args_list[0][0][1] == "-migrate-state"
-    assert mocked_run.call_args_list[0][0][2] == f"-backend-config={leverage_project / 'account' / 'config' / 'backend.tfvars'}"
+    assert (
+        mocked_run.call_args_list[0][0][2]
+        == f"-backend-config={leverage_project / 'account' / 'config' / 'backend.tfvars'}"
+    )
 
 
 @pytest.mark.parametrize(

From 672c41c1dfe4b4eb18af5cf13d7780b058290246 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Sat, 10 Jan 2026 18:57:48 -0300
Subject: [PATCH 45/46] Make MFA credentials profiles work

---
 leverage/modules/auth.py        | 11 +++++------
 leverage/modules/credentials.py |  2 +-
 2 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/leverage/modules/auth.py b/leverage/modules/auth.py
index 3616563..948b2de 100644
--- a/leverage/modules/auth.py
+++ b/leverage/modules/auth.py
@@ -270,7 +270,7 @@ def refresh_layer_credentials_mfa(paths: PathsHandler):
             continue
 
         # if it is exactly that variable, we already know the layer profile is tf_profile
-        layer_profile = tf_profile if raw_profile in ("${var.profile}", "each.value.profile") else None
+        layer_profile = tf_profile if raw_profile in ("var.profile", "each.value.profile") else None
 
         # replace variables with their corresponding values
         profile_name = (
@@ -284,7 +284,7 @@ def refresh_layer_credentials_mfa(paths: PathsHandler):
             layer_profile = profile_name
 
         logger.info(f"Attempting to get temporary credentials for {profile_name} profile.")
-        if profile := config_updater.get_section(f"profile {profile_name}"):
+        if profile := config_updater.get_section(f"profile {profile_name}-mfa"):
             role_arn = profile.get("role_arn").value
             mfa_serial = profile.get("mfa_serial").value
             source_profile = profile.get("source_profile").value
@@ -295,6 +295,9 @@ def refresh_layer_credentials_mfa(paths: PathsHandler):
                 f"Check the following link for possible solutions: https://leverage.binbash.co/user-guide/troubleshooting/credentials/",
             )
 
+        if not paths.aws_cache_dir.exists():
+            paths.aws_cache_dir.mkdir(parents=True)
+
         cache_file = paths.aws_cache_dir / profile_name
         if cache_file.exists():
             logger.debug(f"Found cached credentials in {cache_file}.")
@@ -321,12 +324,8 @@ def refresh_layer_credentials_mfa(paths: PathsHandler):
                 raise ExitError(1, "Aborted by user.")
 
             try:
-                logger.debug(
-                    f"Assuming role {role_arn} for {profile_name} profile with serial {mfa_serial} and token code {mfa_token_code}"
-                )
                 credentials = client.assume_role(
                     RoleArn=role_arn,
-                    SourceIdentity=source_profile,
                     RoleSessionName=f"leverage-{profile_name}",
                     SerialNumber=mfa_serial,
                     TokenCode=mfa_token_code,
diff --git a/leverage/modules/credentials.py b/leverage/modules/credentials.py
index 140c2ac..4a0d7d7 100644
--- a/leverage/modules/credentials.py
+++ b/leverage/modules/credentials.py
@@ -611,7 +611,7 @@ def configure_accounts_profiles(
         if mfa_serial:
             account_profile["mfa_serial"] = mfa_serial
         # A profile identifier looks like `le-security-oaar`
-        account_profiles[f"{short_name}-{account_name}-{PROFILES[_type]['profile_role']}"] = account_profile
+        account_profiles[f"{short_name}-{account_name}-{PROFILES[_type]['profile_role']}-mfa"] = account_profile
 
     logger.info("Backing up account profiles file.")
     shutil.copy(paths.aws_config_file, paths.aws_config_file.with_suffix(".bkp"))

From 1fb46d58cd6a06d01725923ff0bb6cbb98dbc529 Mon Sep 17 00:00:00 2001
From: Angelo Fenoglio
Date: Sat, 10 Jan 2026 18:58:13 -0300
Subject: [PATCH 46/46] Fix init for ref-arch v2

---
 leverage/modules/tf.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/leverage/modules/tf.py b/leverage/modules/tf.py
index 06f3d94..3e8033e 100644
--- a/leverage/modules/tf.py
+++ b/leverage/modules/tf.py
@@ -322,7 +322,7 @@ def _init(tf: TFRunner, paths: PathsHandler, args: Sequence[str], working_dir: P
     )
 
     init_args = (*filtered_args, f"-backend-config={paths.backend_tfvars}")
-    tf.run("init", *init_args, working_dir=working_dir)
+    tf.run("init", *tf_default_args(), *init_args, working_dir=working_dir)
 
 
 @authenticate
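
A closing note on the MFA flow in PATCH 45: refresh_layer_credentials_mfa only reuses cached STS credentials while they remain valid for at least another 30 minutes; otherwise it falls back to assume_role with a fresh token code. A standalone sketch of that expiry check, under the cache format shown in auth.py (the helper name is ours, not part of the patch):

    import json
    from datetime import datetime, timedelta
    from pathlib import Path

    from dateutil.tz import tzutc

    def load_cached_credentials(cache_file: Path, margin_minutes: int = 30):
        # Return the cached STS credentials unless they expire within the margin.
        if not cache_file.exists():
            return None
        cached = json.loads(cache_file.read_text())
        expiration = datetime.strptime(cached["Expiration"], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=tzutc())
        if datetime.now(tzutc()) + timedelta(minutes=margin_minutes) < expiration:
            return cached
        return None
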