diff --git a/.github/workflows/changelog.py b/.github/workflows/changelog.py index 27e604a9b9..bf3d267870 100644 --- a/.github/workflows/changelog.py +++ b/.github/workflows/changelog.py @@ -135,9 +135,7 @@ def _skip_existing_entry_for_this_pr(line: str, same_section: bool = True) -> st # If the line already contains a link to the PR, don't add it again. line = _skip_existing_entry_for_this_pr(line, same_section=False) - if ( - line.startswith("## ") and not line.strip() == "# nf-core/tools: Changelog" - ): # Version header, e.g. "## v2.12dev" + if line.startswith("## ") and line.strip() != "# nf-core/tools: Changelog": # Version header, e.g. "## v2.12dev" print(f"Found version header: {line.strip()}") updated_lines.append(line) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3e8923d7f4..447156fe87 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ ### General - Lock file maintenance ([#4044](https://github.com/nf-core/tools/pull/4044)) +- Add more ruff rules (B, PTH) ([#4034](https://github.com/nf-core/tools/pull/4034)) - Lock file maintenance ([#4045](https://github.com/nf-core/tools/pull/4045)) - Update actions/stale digest to b5d41d4 ([#4047](https://github.com/nf-core/tools/pull/4047)) diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index d5f75f5ccf..e39d4a17fe 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -11,12 +11,12 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # -import os import sys +from pathlib import Path import nf_core -sys.path.insert(0, os.path.abspath("../../../nf_core")) +sys.path.insert(0, str(Path("../../../nf_core").resolve())) # -- Project information ----------------------------------------------------- diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 3481d3ef38..61aee67f1a 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -6,6 +6,7 @@ import sys from pathlib import Path +import requests import rich import rich.console import rich.logging @@ -123,7 +124,7 @@ def run_nf_core(): f"[bold bright_yellow] There is a new version of nf-core/tools available! 
({remote_vers})", highlight=False, ) - except Exception as e: + except requests.exceptions.RequestException as e: log.debug(f"Could not check latest version: {e}") stderr.print("\n") # Launch the click cli @@ -131,7 +132,7 @@ def run_nf_core(): @tui(command="interface", help="Launch the nf-core interface") -@click.group(context_settings=dict(help_option_names=["-h", "--help"])) +@click.group(context_settings={"help_option_names": ["-h", "--help"]}) @click.version_option(__version__) @click.option( "-v", @@ -249,7 +250,7 @@ def command_pipelines_create(ctx, name, description, author, version, force, out is_flag=True, default=Path(os.environ.get("GITHUB_REF", "").strip(" '\"")).parent.name in ["master", "main"] and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") - and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", + and os.environ.get("GITHUB_REPOSITORY", "") != "nf-core/tools", help="Execute additional checks for release-ready workflows.", ) @click.option( @@ -485,7 +486,7 @@ def command_pipelines_create_params_file(ctx, pipeline, revision, output, force, "-o", "--params-out", type=click.Path(), - default=os.path.join(os.getcwd(), "nf-params.json"), + default=str(Path.cwd() / "nf-params.json"), help="Path to save run parameters file", ) @click.option( @@ -2220,7 +2221,7 @@ def command_list(ctx, keywords, sort, json, show_archived): "-o", "--params-out", type=click.Path(), - default=os.path.join(os.getcwd(), "nf-params.json"), + default=str(Path.cwd() / "nf-params.json"), help="Path to save run parameters file", ) @click.option( @@ -2433,7 +2434,7 @@ def command_download( is_flag=True, default=Path(os.environ.get("GITHUB_REF", "").strip(" '\"")).parent.name in ["master", "main"] and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") - and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", + and os.environ.get("GITHUB_REPOSITORY", "") != "nf-core/tools", help="Execute additional checks for release-ready workflows.", ) @click.option( diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index f49d021fee..8d1a4fd9a2 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -1,5 +1,4 @@ import logging -import os import sys from pathlib import Path @@ -448,7 +447,7 @@ def pipelines_schema_docs(schema_path, output, format, force, columns): """ Outputs parameter documentation for a pipeline schema. """ - if not os.path.exists(schema_path): + if not schema_path.exists(): log.error("Could not find 'nextflow_schema.json' in current directory. 
Please specify a path.") sys.exit(1) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 375a711c99..473f5e0c83 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -50,7 +50,7 @@ def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ try: if self.directory: - if self.directory == Path(".") and not nf_dir_req: + if self.directory == Path() and not nf_dir_req: self.no_prompts = True self.directory, self.repo_type, self.org = get_repo_info(self.directory, use_prompt=not self.no_prompts) except UserWarning: @@ -69,6 +69,7 @@ def get_local_components(self) -> list[str]: Get the local modules/subworkflows in a pipeline """ local_component_dir = Path(self.directory, self.component_type, "local") + # TODO: Return list of Path objects instead of strings to avoid unnecessary conversion return [ str(Path(directory).relative_to(local_component_dir)) for directory, _, files in os.walk(local_component_dir) @@ -85,6 +86,7 @@ def get_components_clone_modules(self) -> list[str]: component_base_path = Path(self.directory, self.default_modules_path) elif self.component_type == "subworkflows": component_base_path = Path(self.directory, self.default_subworkflows_path) + # TODO: Return list of Path objects instead of strings to avoid unnecessary conversion return [ str(Path(directory).relative_to(component_base_path)) for directory, _, files in os.walk(component_base_path) @@ -155,6 +157,7 @@ def components_from_repo(self, install_dir: str) -> list[str]: if not repo_dir.exists(): raise LookupError(f"Nothing installed from {install_dir} in pipeline") + # TODO: Return list of Path objects instead of strings to avoid unnecessary conversion return [ str(Path(dir_path).relative_to(repo_dir)) for dir_path, _, files in os.walk(repo_dir) if "main.nf" in files ] @@ -257,6 +260,7 @@ def check_patch_paths(self, patch_path: Path, module_name: str) -> None: and self.modules_repo.repo_path is not None and modules_json.modules_json is not None ): + # TODO: Consider if this needs to be a string or if Path object would work modules_json.modules_json["repos"][self.modules_repo.remote_url]["modules"][ self.modules_repo.repo_path ][module_name]["patch"] = str(patch_path.relative_to(self.directory.resolve())) @@ -275,18 +279,15 @@ def check_if_in_include_stmts(self, component_path: str) -> dict[str, list[dict[ if self.repo_type == "pipeline": workflow_files = Path(self.directory, "workflows").glob("*.nf") for workflow_file in workflow_files: - with open(workflow_file) as fh: + with open(workflow_file) as fh, mmap.mmap(fh.fileno(), 0, access=mmap.ACCESS_READ) as s: # Check if component path is in the file using mmap - with mmap.mmap(fh.fileno(), 0, access=mmap.ACCESS_READ) as s: - if s.find(component_path.encode()) != -1: - # If the component path is in the file, check for include statements - for i, line in enumerate(fh): - if line.startswith("include") and component_path in line: - if str(workflow_file) not in include_stmts: - include_stmts[str(workflow_file)] = [] - include_stmts[str(workflow_file)].append( - {"line_number": i + 1, "line": line.rstrip()} - ) + if s.find(component_path.encode()) != -1: + # If the component path is in the file, check for include statements + for i, line in enumerate(fh): + if line.startswith("include") and component_path in line: + if str(workflow_file) not in include_stmts: + include_stmts[str(workflow_file)] = [] + include_stmts[str(workflow_file)].append({"line_number": i + 
1, "line": line.rstrip()}) return include_stmts else: diff --git a/nf_core/components/components_completion.py b/nf_core/components/components_completion.py index 191fa32e21..8abecb4649 100644 --- a/nf_core/components/components_completion.py +++ b/nf_core/components/components_completion.py @@ -1,5 +1,6 @@ import sys +import git from click.shell_completion import CompletionItem from nf_core.modules.list import ModuleList @@ -24,7 +25,7 @@ def autocomplete_components(ctx, param, incomplete: str, component_type: str, li available_components = components_list.modules_repo.get_avail_components(component_type) return [CompletionItem(comp) for comp in available_components if comp.startswith(incomplete)] - except Exception as e: + except (git.exc.GitError, LookupError, OSError) as e: print(f"[ERROR] Autocomplete failed: {e}", file=sys.stderr) return [] diff --git a/nf_core/components/components_differ.py b/nf_core/components/components_differ.py index c92480a50d..b028b55626 100644 --- a/nf_core/components/components_differ.py +++ b/nf_core/components/components_differ.py @@ -187,7 +187,7 @@ def write_diff_file( elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: # The file was removed between the commits fh.write(f"'{Path(dsp_from_dir, file)}' was removed\n") - elif limit_output and not file.suffix == ".nf": + elif limit_output and file.suffix != ".nf": # Skip printing the diff for files other than main.nf fh.write(f"Changes in '{Path(component, file)}' but not shown\n") else: @@ -286,7 +286,7 @@ def print_diff( elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: # The file was removed between the commits log.info(f"'{Path(dsp_from_dir, file)}' was removed") - elif limit_output and not file.suffix == ".nf": + elif limit_output and file.suffix != ".nf": # Skip printing the diff for files other than main.nf log.info(f"Changes in '{Path(component, file)}' but not shown") else: diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 081be82684..925049b911 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -247,10 +247,10 @@ def get_channel_info_from_biotools( inputs = {} outputs = {} - def _iterate_input_output(type) -> DictWithStrAndTuple: + def _iterate_input_output(funct_data, type) -> DictWithStrAndTuple: type_info = {} - if type in funct: - for element in funct[type]: + if type in funct_data: + for element in funct_data[type]: if "data" in element: element_name = "_".join(element["data"]["term"].lower().split(" ")) uris = [element["data"]["uri"]] @@ -272,8 +272,8 @@ def _iterate_input_output(type) -> DictWithStrAndTuple: if "function" in tool: # Parse all tool functions for funct in tool["function"]: - inputs.update(_iterate_input_output("input")) - outputs.update(_iterate_input_output("output")) + inputs.update(_iterate_input_output(funct, "input")) + outputs.update(_iterate_input_output(funct, "output")) return inputs, outputs # If the tool name was not found in the response diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 9fc973f77d..b30f85b0c1 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -2,7 +2,6 @@ The ComponentCreate class handles generating of module and subworkflow templates """ -import glob import json import logging import re @@ -38,7 +37,7 @@ class ComponentCreate(ComponentCommand): def __init__( self, component_type: str, - directory: Path = Path("."), + directory: Path = Path(), component: str = "", author: str | None = 
None, process_label: str | None = None, @@ -403,7 +402,7 @@ def _get_component_dirs(self) -> dict[str, Path]: ) # If no subtool, check that there isn't already a tool/subtool - tool_glob = glob.glob(f"{Path(self.directory, self.component_type, self.org, self.component)}/*/main.nf") + tool_glob = list(Path(self.directory, self.component_type, self.org, self.component).glob("*/main.nf")) if not self.subtool and tool_glob and not self.migrate_pytest: raise UserWarning( f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{self.component_name}'" @@ -427,7 +426,7 @@ def _get_username(self): try: gh_auth_user = json.loads(subprocess.check_output(["gh", "api", "/user"], stderr=subprocess.DEVNULL)) author_default = f"@{gh_auth_user['login']}" - except Exception as e: + except (subprocess.CalledProcessError, FileNotFoundError) as e: log.debug(f"Could not find GitHub username using 'gh' cli command: [red]{e}") # Regex to valid GitHub username: https://github.com/shinnn/github-username-regex @@ -626,7 +625,7 @@ def generate_meta_yml_file(self) -> None: if hasattr(self, "outputs") and len(self.outputs) > 0: outputs_dict: dict[str, list | dict] = {} - for i, (output_name, ontologies) in enumerate(self.outputs.items()): + for _i, (output_name, ontologies) in enumerate(self.outputs.items()): channel_contents: list[list[dict] | dict] = [] if self.has_meta: channel_contents.append( diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 69a30929e0..fa47333945 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -1,5 +1,4 @@ import logging -import os from pathlib import Path import questionary @@ -200,9 +199,9 @@ def get_local_yaml(self) -> dict | None: log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally") else: component_base_path = Path(self.directory, self.component_type, self.org) - if self.component in os.listdir(component_base_path): - comp_dir = Path(component_base_path, self.component) - meta_fn = Path(comp_dir, "meta.yml") + comp_dir = component_base_path / self.component + if comp_dir.is_dir(): + meta_fn = comp_dir / "meta.yml" if meta_fn.exists(): log.debug(f"Found local file: {meta_fn}") with open(meta_fn) as fh: @@ -375,7 +374,7 @@ def generate_component_info_help(self): ) if self.component_type == "subworkflows": subworkflow_config = Path(install_folder, self.component, "nextflow.config").relative_to(self.directory) - if os.path.isfile(subworkflow_config): + if subworkflow_config.exists(): renderables.append( Text.from_markup("\n [blue]Add the following config statement to use this subworkflow:") ) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 61e088ef16..c5c37d5236 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -1,5 +1,4 @@ import logging -import os from pathlib import Path import questionary @@ -248,10 +247,11 @@ def collect_and_verify_name( raise ValueError - if self.current_remote.remote_url == modules_repo.remote_url: - if not modules_repo.component_exists(component, self.component_type, commit=self.current_sha): - warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote '{modules_repo.remote_url}' ({modules_repo.branch})" - log.warning(warn_msg) + if self.current_remote.remote_url == modules_repo.remote_url and not modules_repo.component_exists( + component, self.component_type, commit=self.current_sha + ): + warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote 
'{modules_repo.remote_url}' ({modules_repo.branch})" + log.warning(warn_msg) return component @@ -263,7 +263,7 @@ def check_component_installed(self, component, current_version, component_dir, m True: if the component is not installed False: if the component is installed """ - if (current_version is not None and os.path.exists(component_dir)) and not force: + if (current_version is not None and component_dir.exists()) and not force: # make sure included components are also installed if self.component_type == "subworkflows": self.install_included_components(component_dir) @@ -343,9 +343,9 @@ def check_alternate_remotes(self, modules_json): False: if problematic components are not found """ modules_json.load() - for repo_url, repo_content in modules_json.modules_json.get("repos", dict()).items(): + for repo_url, repo_content in modules_json.modules_json.get("repos", {}).items(): for component_type in repo_content: - for directory in repo_content.get(component_type, dict()).keys(): + for directory in repo_content.get(component_type, {}): if directory == self.modules_repo.repo_path and repo_url != self.modules_repo.remote_url: return True return False diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index 63c651a401..efcdc11d90 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -259,7 +259,7 @@ def _print_results(self, show_passed=False, sort_by="test", plain_text=False): try: for lint_result in tests: max_name_len = max(len(lint_result.component_name), max_name_len) - except Exception: + except (AttributeError, TypeError): pass # Helper function to format test links nicely @@ -285,8 +285,9 @@ def format_result(test_results: list[LintResult], table: Table) -> Table: module_name = lint_result.component_name # Make the filename clickable to open in VSCode - file_path = os.path.relpath(lint_result.file_path, self.directory) - file_path_link = f"[link=vscode://file/{os.path.abspath(file_path)}]{file_path}[/link]" + file_path_obj = Path(lint_result.file_path) + file_path_rel = file_path_obj.relative_to(self.directory) + file_path_link = f"[link=vscode://file/{file_path_obj.resolve()}]{file_path_rel}[/link]" # Add link to the test documentation tools_version = __version__ diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index fa468be970..37744bf291 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -295,11 +295,8 @@ def get_inputs_from_main_nf(self) -> None: return # get all lines between "take" and "main" or "emit" input_data = data.split("take:")[1].split("main:")[0].split("emit:")[0] - for line in input_data.split("\n"): - try: - inputs.append(line.split()[0]) - except IndexError: - pass # Empty lines + # Extract first word from non-empty lines + inputs = [line.split()[0] for line in input_data.split("\n") if line.split()] log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") self.inputs = inputs @@ -341,12 +338,8 @@ def get_outputs_from_main_nf(self): log.debug(f"Could not find any outputs in {self.main_nf}") return outputs output_data = data.split("emit:")[1].split("}")[0] - for line in output_data.split("\n"): - try: - outputs.append(line.split("=")[0].split()[0]) - except IndexError: - # Empty lines - pass + # Extract first word before '=' from non-empty lines + outputs = [parts[0] for line in output_data.split("\n") if (parts := line.split("=")[0].split())] log.debug(f"Found {len(outputs)} outputs in 
{self.main_nf}") self.outputs = outputs diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 59ec7a381b..d9398ead17 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -1,5 +1,4 @@ import logging -import os import shutil import tempfile from pathlib import Path @@ -99,7 +98,7 @@ def patch(self, component=None): style=nf_core.utils.nfcore_question_style, ).unsafe_ask() if remove: - os.remove(patch_path) + patch_path.unlink() else: return @@ -126,7 +125,9 @@ def patch(self, component=None): ) log.debug(f"Patch file wrote to a temporary directory {patch_temp_path}") except UserWarning: - raise UserWarning(f"{self.component_type[:-1]} '{component_fullname}' is unchanged. No patch to compute") + raise UserWarning( + f"{self.component_type[:-1]} '{component_fullname}' is unchanged. No patch to compute" + ) from None # Write changes to modules.json self.modules_json.add_patch_entry( @@ -213,9 +214,9 @@ def remove(self, component): try: for file in Path(temp_component_dir).glob("*"): file.rename(component_path.joinpath(file.name)) - os.rmdir(temp_component_dir) + Path(temp_component_dir).rmdir() except Exception as err: - raise UserWarning(f"There was a problem reverting the patched file: {err}") + raise UserWarning(f"There was a problem reverting the patched file: {err}") from err log.info(f"Patch for {component} reverted!") # Remove patch file if we could revert the patch diff --git a/nf_core/components/remove.py b/nf_core/components/remove.py index afe12c88a9..5d42690779 100644 --- a/nf_core/components/remove.py +++ b/nf_core/components/remove.py @@ -129,26 +129,23 @@ def remove(self, component, repo_url=None, repo_path=None, removed_by=None, remo ) ) # ask the user if they still want to remove the component, add it back otherwise - if not force: - if not questionary.confirm( + if not force and ( + not questionary.confirm( f"Do you still want to remove the {self.component_type[:-1]} '{component}'?", style=nf_core.utils.nfcore_question_style, - ).unsafe_ask(): - # add the component back to modules.json - if not ComponentInstall(self.directory, self.component_type, force=True).install( - component, silent=True - ): - log.warning( - f"Could not install the {self.component_type[:-1]} '{component}', please install it manually with 'nf-core {self.component_type} install {component}'." - ) - removed_components.append(component) - return removed + ).unsafe_ask() + ): + # add the component back to modules.json + if not ComponentInstall(self.directory, self.component_type, force=True).install( + component, silent=True + ): + log.warning( + f"Could not install the {self.component_type[:-1]} '{component}', please install it manually with 'nf-core {self.component_type} install {component}'." 
+ ) + removed_components.append(component) + return removed # Remove the component files of all entries removed from modules.json - removed = ( - True - if self.clear_component_dir(component, Path(self.directory, removed_component_dir)) or removed - else False - ) + removed = bool(self.clear_component_dir(component, Path(self.directory, removed_component_dir)) or removed) removed_components.append(component) if removed: diff --git a/nf_core/components/update.py b/nf_core/components/update.py index fcdb005e9f..bf85f21cfa 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -1,5 +1,4 @@ import logging -import os import shutil import tempfile from pathlib import Path @@ -190,13 +189,12 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr else: version = modules_repo.get_latest_component_version(component, self.component_type) - if current_version is not None and not self.force: - if current_version == version: - if self.sha or self.prompt: - log.info(f"'{component_fullname}' is already installed at {version}") - else: - log.info(f"'{component_fullname}' is already up to date") - continue + if current_version is not None and not self.force and current_version == version: + if self.sha or self.prompt: + log.info(f"'{component_fullname}' is already installed at {version}") + else: + log.info(f"'{component_fullname}' is already up to date") + continue # Download component files if not self.install_component_files(component, version, modules_repo, str(install_tmp_dir)): @@ -300,21 +298,20 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr updated.append(component) recursive_update = True modules_to_update, subworkflows_to_update = self.get_components_to_update(component) - if not silent and len(modules_to_update + subworkflows_to_update) > 0: - if not self.update_all: - log.warning( - f"All modules and subworkflows linked to the updated {self.component_type[:-1]} will be {'asked for update' if self.show_diff else 'automatically updated'}.\n" - "It is advised to keep all your modules and subworkflows up to date.\n" - "It is not guaranteed that a subworkflow will continue working as expected if all modules/subworkflows used in it are not up to date.\n" - ) - if self.update_deps: - recursive_update = True - else: - recursive_update = questionary.confirm( - "Would you like to continue updating all modules and subworkflows?", - default=True, - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() + if not silent and not self.update_all and len(modules_to_update + subworkflows_to_update) > 0: + log.warning( + f"All modules and subworkflows linked to the updated {self.component_type[:-1]} will be {'asked for update' if self.show_diff else 'automatically updated'}.\n" + "It is advised to keep all your modules and subworkflows up to date.\n" + "It is not guaranteed that a subworkflow will continue working as expected if all modules/subworkflows used in it are not up to date.\n" + ) + if self.update_deps: + recursive_update = True + else: + recursive_update = questionary.confirm( + "Would you like to continue updating all modules and subworkflows?", + default=True, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() if recursive_update and len(modules_to_update + subworkflows_to_update) > 0: # Update linked components self.update_linked_components(modules_to_update, subworkflows_to_update, updated) @@ -383,8 +380,8 @@ def get_single_component_info(self, component): # Get component installation 
directory try: install_dir = [dir for dir, m in components if component == m][0] - except IndexError: - raise UserWarning(f"{self.component_type[:-1].title()} '{component}' not found in 'modules.json'.") + except IndexError as e: + raise UserWarning(f"{self.component_type[:-1].title()} '{component}' not found in 'modules.json'.") from e # Check if component is installed before trying to update if component not in choices: @@ -403,12 +400,10 @@ def get_single_component_info(self, component): config_entry = None if self.update_config is not None: if any( - [ - entry.count("/") == 1 - and (entry.endswith("modules") or entry.endswith("subworkflows")) - and not (entry.endswith(".git") or entry.endswith(".git/")) - for entry in self.update_config.keys() - ] + entry.count("/") == 1 + and (entry.endswith("modules") or entry.endswith("subworkflows")) + and not (entry.endswith(".git") or entry.endswith(".git/")) + for entry in self.update_config ): raise UserWarning( "Your '.nf-core.yml' file format is outdated. " @@ -423,7 +418,7 @@ def get_single_component_info(self, component): config_entry = self.update_config[self.modules_repo.remote_url][install_dir].get(component) if config_entry is not None and config_entry is not True: if config_entry is False: - log.warn( + log.warning( f"{self.component_type[:-1].title()}'s update entry in '.nf-core.yml' for '{component}' is set to False" ) return (self.modules_repo, None, None, None) @@ -517,7 +512,7 @@ def get_all_components_info(self, branch=None): ] elif isinstance(self.update_config, dict) and isinstance(self.update_config[repo_name], dict): # If it is a dict, then there are entries for individual components or component directories - for component_dir in set([dir for dir, _ in components]): + for component_dir in {dir for dir, _ in components}: if isinstance(self.update_config[repo_name][component_dir], str): # If a string is given it is the commit SHA to which we should update to custom_sha = self.update_config[repo_name][component_dir] @@ -547,7 +542,7 @@ def get_all_components_info(self, branch=None): if self.sha is not None: overridden_repos.append(repo_name) elif self.update_config[repo_name][component_dir] is False: - for directory, component in components: + for directory, _component in components: if directory == component_dir: skipped_components.append(f"{component_dir}/{components}") elif isinstance(self.update_config[repo_name][component_dir], dict): @@ -664,7 +659,7 @@ def get_all_components_info(self, branch=None): # Loop through components_info and create on ModulesRepo object per remote and branch repos_and_branches = {} for repo_name, repo_content in components_info.items(): - for component_dir, comps in repo_content.items(): + for _component_dir, comps in repo_content.items(): for comp, sha, comp_branch in comps: if branch is not None: comp_branch = branch @@ -741,7 +736,7 @@ def setup_diff_file(self, check_diff_exist=True): # Check if filename already exists (questionary or cli) while self.save_diff_fn.exists(): if questionary.confirm(f"'{self.save_diff_fn}' exists. 
Remove file?").unsafe_ask(): - os.remove(self.save_diff_fn) + self.save_diff_fn.unlink() break self.save_diff_fn = questionary.path( "Enter a new filename: ", @@ -768,6 +763,7 @@ def move_files_from_tmp_dir(self, component: str, install_folder: str, repo_path if pipeline_path.exists(): pipeline_files = [f.name for f in pipeline_path.iterdir() if f.is_file()] # check if any *.config file exists in the pipeline + # TODO: Use f.suffix == ".config" instead of str(f).endswith(".config") config_files = [f for f in pipeline_files if str(f).endswith(".config")] for config_file in config_files: log.debug(f"Moving '{component}/{config_file}' to updated component") diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index d4f325376b..b5787c5941 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -3,6 +3,7 @@ or for a single module """ +import contextlib import logging import os import re @@ -313,10 +314,8 @@ def _print_results(self) -> None: # Find maximum module name length max_mod_name_len = 40 for m in [self.up_to_date, self.updated, self.failed]: - try: + with contextlib.suppress(Exception): max_mod_name_len = max(len(m[2]), max_mod_name_len) - except Exception: - pass def format_result(module_updates: list[tuple[str, str]], table: Table) -> Table: """ @@ -328,10 +327,7 @@ def format_result(module_updates: list[tuple[str, str]], table: Table) -> Table: row_style = None for module_update in module_updates: if last_modname and module_update[1] != last_modname: - if row_style: - row_style = None - else: - row_style = "magenta" + row_style = None if row_style else "magenta" last_modname = module_update[1] table.add_row( Markdown(f"{module_update[1]}"), diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index d4bc1ef974..3fe02551ed 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -375,7 +375,7 @@ def _sort_meta_yml(meta_yml: dict) -> dict: schema = self.load_meta_schema() schema_keys = list(schema["properties"].keys()) except (LintExceptionError, KeyError) as e: - raise UserWarning("Failed to load meta schema", e) + raise UserWarning("Failed to load meta schema", e) from e result: dict = {} @@ -385,7 +385,7 @@ def _sort_meta_yml(meta_yml: dict) -> dict: result[key] = meta_yml[key] # Then add any keys that aren't in the schema (to preserve custom keys) - for key in meta_yml.keys(): + for key in meta_yml: if key not in result: result[key] = meta_yml[key] @@ -413,7 +413,7 @@ def _sort_meta_yml(meta_yml: dict) -> dict: versions_entry = template_meta.get("topics", {}).get("versions", [[]])[0] if len(versions_entry) == 3: topic_metadata = [next(iter(item.values())) for item in versions_entry] - except Exception as e: + except (OSError, yaml.YAMLError, IndexError, StopIteration) as e: log.debug(f"Could not load topic template metadata: {e}") def _populate_channel_elements(io_type, correct_value, meta_value, mod_io_data, meta_yml_io, check_exists=True): @@ -488,7 +488,7 @@ def _process_element(element, index, is_output=False): # Only update structure when it differs from main.nf corrected_data = meta_yml_io.copy() if meta_yml_io else mod_io_data.copy() - for ch_name in mod_io_data.keys(): + for ch_name in mod_io_data: # Ensure channel exists in corrected_data if ch_name not in corrected_data: corrected_data[ch_name] = [] @@ -603,7 +603,7 @@ def _add_edam_ontologies(section, edam_formats, desc): if extension in edam_formats: 
expected_ontologies.append((edam_formats[extension][0], extension)) # remove duplicated entries - expected_ontologies = list({k: v for k, v in expected_ontologies}.items()) + expected_ontologies = list(dict(expected_ontologies).items()) if "ontologies" in section: for ontology in section["ontologies"]: try: @@ -639,7 +639,7 @@ def _add_edam_ontologies(section, edam_formats, desc): ) if "output" in meta_yml: - for ch_name in corrected_meta_yml["output"].keys(): + for ch_name in corrected_meta_yml["output"]: ch_content = corrected_meta_yml["output"][ch_name][0] if isinstance(ch_content, list): for i, element in enumerate(ch_content): @@ -667,7 +667,7 @@ def _add_edam_ontologies(section, edam_formats, desc): # Create YAML anchors for versions_* keys in output that match "versions" in topics # Since we now populate metadata for both output and topics, set up anchors to reference output from topics if "output" in corrected_meta_yml and "topics" in corrected_meta_yml: - versions_keys = [key for key in corrected_meta_yml["output"].keys() if key.startswith("versions_")] + versions_keys = [key for key in corrected_meta_yml["output"] if key.startswith("versions_")] if versions_keys and "versions" in corrected_meta_yml["topics"]: # Set topics["versions"] to reference output versions (now with populated metadata) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index fcd08b3b46..3f7251b634 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -65,15 +65,12 @@ def main_nf( with open(module.main_nf) as fh: lines = fh.readlines() module.passed.append(("main_nf", "main_nf_exists", "Module file exists", module.main_nf)) - except FileNotFoundError: + except FileNotFoundError as e: module.failed.append(("main_nf", "main_nf_exists", "Module file does not exist", module.main_nf)) - raise FileNotFoundError(f"Module file does not exist: {module.main_nf}") + raise FileNotFoundError(f"Module file does not exist: {module.main_nf}") from e deprecated_i = ["initOptions", "saveFiles", "getSoftwareName", "getProcessName", "publishDir"] - if len(lines) > 0: - lines_j = "\n".join(lines) - else: - lines_j = "" + lines_j = "\n".join(lines) if len(lines) > 0 else "" for i in deprecated_i: if i in lines_j: @@ -191,28 +188,27 @@ def main_nf( ) # Check whether 'meta' is emitted when given as input - if inputs: - if "meta" in inputs: - module.has_meta = True - if emits: - if "meta" in emits: - module.passed.append( - ( - "main_nf", - "main_nf_meta_output", - "'meta' map emitted in output channel(s)", - module.main_nf, - ) + if inputs and "meta" in inputs: + module.has_meta = True + if emits: + if "meta" in emits: + module.passed.append( + ( + "main_nf", + "main_nf_meta_output", + "'meta' map emitted in output channel(s)", + module.main_nf, ) - else: - module.failed.append( - ( - "main_nf", - "main_nf_meta_output", - "'meta' map not emitted in output channel(s)", - module.main_nf, - ) + ) + else: + module.failed.append( + ( + "main_nf", + "main_nf_meta_output", + "'meta' map not emitted in output channel(s)", + module.main_nf, ) + ) # Check that a software version is emitted if topics: @@ -327,7 +323,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): check_process_labels(self, lines) # Deprecated enable_conda - for i, raw_line in enumerate(lines): + for _i, raw_line in enumerate(lines): url = None line = raw_line.strip(" \n'\"}:?") @@ -408,7 +404,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): if url is 
None: continue try: - container_url = "https://" + urlunparse(url) if not url.scheme == "https" else urlunparse(url) + container_url = "https://" + urlunparse(url) if url.scheme != "https" else urlunparse(url) log.debug(f"Trying to connect to URL: {container_url}") response = requests.head( container_url, @@ -416,7 +412,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): allow_redirects=True, ) log.debug( - f"Connected to URL: {'https://' + urlunparse(url) if not url.scheme == 'https' else urlunparse(url)}, " + f"Connected to URL: {'https://' + urlunparse(url) if url.scheme != 'https' else urlunparse(url)}, " f"status_code: {response.status_code}" ) except (requests.exceptions.RequestException, sqlite3.InterfaceError) as e: @@ -615,7 +611,7 @@ def check_container_link_line(self, raw_line, registry): ( "main_nf", "container_links", - f"Too many double quotes found when specifying container: {line.lstrip('container ')}", + f"Too many double quotes found when specifying container: {line.removeprefix('container ')}", self.main_nf, ) ) @@ -624,7 +620,7 @@ def check_container_link_line(self, raw_line, registry): ( "main_nf", "container_links", - f"Correct number of double quotes found when specifying container: {line.lstrip('container ')}", + f"Correct number of double quotes found when specifying container: {line.removeprefix('container ')}", self.main_nf, ) ) diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index c663b2defa..6f2d249a0b 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -336,7 +336,7 @@ def obtain_outputs(_, outputs: dict | list) -> dict | list: if old_structure: outputs = {k: v for d in outputs for k, v in d.items()} assert isinstance(outputs, dict) # mypy - for channel_name in outputs.keys(): + for channel_name in outputs: output_channel = outputs[channel_name] channel_elements: list = [] for element in output_channel: @@ -367,7 +367,7 @@ def obtain_topics(_, topics: dict) -> dict: formatted_topics (dict): A dictionary containing the topics and their elements obtained from main.nf or meta.yml files. 
""" formatted_topics: dict = {} - for name in topics.keys(): + for name in topics: content = topics[name] t_elements: list = [] for element in content: diff --git a/nf_core/modules/lint/module_deprecations.py b/nf_core/modules/lint/module_deprecations.py index dc232a81c3..cdf2c636f3 100644 --- a/nf_core/modules/lint/module_deprecations.py +++ b/nf_core/modules/lint/module_deprecations.py @@ -1,5 +1,4 @@ import logging -import os log = logging.getLogger(__name__) @@ -9,7 +8,7 @@ def module_deprecations(_, module): Check that the modules are up to the latest nf-core standard """ module.wf_path = module.component_dir - if "functions.nf" in os.listdir(module.component_dir): + if (module.component_dir / "functions.nf").exists(): module.failed.append( ( "module_deprecations", diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index 16939a767d..114b87f892 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -120,7 +120,7 @@ def module_tests(_, module: NFCoreComponent, allow_missing: bool = False): with open(snap_file) as snap_fh: try: snap_content = json.load(snap_fh) - for test_name in snap_content.keys(): + for test_name in snap_content: if "d41d8cd98f00b204e9800998ecf8427e" in str(snap_content[test_name]): if "stub" not in test_name: module.failed.append( diff --git a/nf_core/modules/lint/module_todos.py b/nf_core/modules/lint/module_todos.py index ce8ec34976..15c4939f9d 100644 --- a/nf_core/modules/lint/module_todos.py +++ b/nf_core/modules/lint/module_todos.py @@ -36,5 +36,5 @@ def module_todos(_, module): mod_results = pipeline_todos(None, root_dir=module.component_dir) for i, warning in enumerate(mod_results["warned"]): module.warned.append(("module_todos", "module_todo", warning, mod_results["file_paths"][i])) - for i, passed in enumerate(mod_results["passed"]): + for _i, passed in enumerate(mod_results["passed"]): module.passed.append(("module_todos", "module_todo", passed, module.component_dir)) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 4c757f7b8a..348b852119 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -165,7 +165,7 @@ def get_pipeline_module_repositories( if repos is None: repos = {} # Check if there are any nf-core modules installed - if (directory / NF_CORE_MODULES_NAME).exists() and NF_CORE_MODULES_REMOTE not in repos.keys(): + if (directory / NF_CORE_MODULES_NAME).exists() and NF_CORE_MODULES_REMOTE not in repos: repos[NF_CORE_MODULES_REMOTE] = {} # The function might rename some directories, keep track of them renamed_dirs = {} @@ -253,7 +253,7 @@ def dir_tree_uncovered(self, components_directory, repos): repos_at_level = {Path(*repo.parts[:depth]): len(repo.parts) for repo in repos} for directory in fifo: rel_dir = directory.relative_to(components_directory) - if rel_dir in repos_at_level.keys(): + if rel_dir in repos_at_level: # Go the next depth if this directory is not one of the repos if depth < repos_at_level[rel_dir]: temp_queue.extend(directory.iterdir()) @@ -449,6 +449,7 @@ def unsynced_components(self) -> tuple[list[str], list[str], dict]: # Add all modules from modules.json to missing_installation missing_installation = copy.deepcopy(self.modules_json["repos"]) # Obtain the path of all installed modules + # TODO: Use Path.parts instead of str().startswith() to avoid unnecessary string conversion module_dirs = [ Path(dir_name).relative_to(self.modules_dir) for dir_name, _, file_names in 
os.walk(self.modules_dir) @@ -457,6 +458,7 @@ def unsynced_components(self) -> tuple[list[str], list[str], dict]: untracked_dirs_modules, missing_installation = self.parse_dirs(module_dirs, missing_installation, "modules") # Obtain the path of all installed subworkflows + # TODO: Use Path.parts instead of str().startswith() to avoid unnecessary string conversion subworkflow_dirs = [ Path(dir_name).relative_to(self.subworkflows_dir) for dir_name, _, file_names in os.walk(self.subworkflows_dir) @@ -492,12 +494,14 @@ def parse_dirs(self, dirs: list[Path], missing_installation: dict, component_typ git_url = "" for repo in missing_installation: - if component_type in missing_installation[repo]: - if install_dir in missing_installation[repo][component_type]: - if component in missing_installation[repo][component_type][install_dir]: - component_in_file = True - git_url = repo - break + if ( + component_type in missing_installation[repo] + and install_dir in missing_installation[repo][component_type] + and component in missing_installation[repo][component_type][install_dir] + ): + component_in_file = True + git_url = repo + break if not component_in_file: # If it is not, add it to the list of missing components untracked_dirs.append(component) @@ -618,7 +622,7 @@ def check_up_to_date(self): for _, repo_entry in self.modules_json.get("repos", {}).items(): for component_type in ["modules", "subworkflows"]: if component_type in repo_entry: - for install_dir, install_dir_entry in repo_entry[component_type].items(): + for _install_dir, install_dir_entry in repo_entry[component_type].items(): for _, component in install_dir_entry.items(): if "installed_by" in component and isinstance(component["installed_by"], str): log.debug(f"Updating {component} in modules.json") @@ -640,12 +644,10 @@ def check_up_to_date(self): # we try to reinstall them if len(missing_installation) > 0: if "subworkflows" in [ - c_type for _, repo_content in missing_installation.items() for c_type in repo_content.keys() + c_type for _, repo_content in missing_installation.items() for c_type in repo_content ]: self.resolve_missing_installation(missing_installation, "subworkflows") - if "modules" in [ - c_type for _, repo_content in missing_installation.items() for c_type in repo_content.keys() - ]: + if "modules" in [c_type for _, repo_content in missing_installation.items() for c_type in repo_content]: self.resolve_missing_installation(missing_installation, "modules") # If some modules/subworkflows didn't have an entry in the 'modules.json' file @@ -697,10 +699,10 @@ def load(self) -> None: try: self.modules_json = json.load(fh) except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{self.modules_json_path}' due to error {e}") + raise UserWarning(f"Unable to load JSON file '{self.modules_json_path}' due to error {e}") from e - except FileNotFoundError: - raise UserWarning("File 'modules.json' is missing") + except FileNotFoundError as e: + raise UserWarning("File 'modules.json' is missing") from e def update( self, @@ -889,7 +891,7 @@ def try_apply_patch_reverse(self, component_type, component, repo_name, patch_re except LookupError as e: raise LookupError( f"Failed to apply patch in reverse for {component_type[:-1]} '{component_fullname}' due to: {e}" - ) + ) from e # Write the patched files to a temporary directory log.debug("Writing patched files to tmpdir") @@ -1061,7 +1063,7 @@ def get_dependent_components( component_types = ["modules"] if component_type == "modules" else ["modules", "subworkflows"] # 
Find all components that have an entry of install by of a given component, recursively call this function for subworkflows for type in component_types: - for repo_url in self.modules_json["repos"].keys(): + for repo_url in self.modules_json["repos"]: modules_repo = ModulesRepo(repo_url) install_dir = modules_repo.repo_path try: @@ -1189,7 +1191,7 @@ def resolve_missing_from_modules_json(self, missing_from_modules_json, component ) assert self.modules_json is not None # mypy # Get the remotes we are missing - tracked_repos = {repo_url: (repo_entry) for repo_url, repo_entry in self.modules_json["repos"].items()} + tracked_repos = dict(self.modules_json["repos"].items()) repos, _ = self.get_pipeline_module_repositories(component_type, self.modules_dir, tracked_repos) # Get tuples of components that miss installation and their install directory @@ -1204,10 +1206,11 @@ def components_with_repos(): modules_repo.repo_path, ) for dir_name, _, _ in os.walk(repo_url_path): - if component_type == "modules": - if len(Path(directory).parts) > 1: # The module name is TOOL/SUBTOOL - paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) - pass + if ( + component_type == "modules" and len(Path(directory).parts) > 1 + ): # The module name is TOOL/SUBTOOL + paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) + pass paths_in_directory.append(Path(dir_name).parts[-1]) if directory in paths_in_directory: yield (modules_repo.repo_path, directory) diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index ec310c0001..3465414a13 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -61,8 +61,8 @@ def __init__( raise UserWarning(f"Could not find a configuration file in {self.local_repo_dir}") try: self.repo_path = repo_config.org_path - except KeyError: - raise UserWarning(f"'org_path' key not present in {config_fn.name}") + except KeyError as e: + raise UserWarning(f"'org_path' key not present in {config_fn.name}") from e # Verify that the repo seems to be correctly configured if self.repo_path != NF_CORE_MODULES_NAME or self.branch: @@ -94,7 +94,7 @@ def setup_local_repo(self, remote, branch, hide_progress=True, in_cache=False): """ self.local_repo_dir = Path(NFCORE_DIR if not in_cache else NFCORE_CACHE_DIR, self.fullname) try: - if not os.path.exists(self.local_repo_dir): + if not self.local_repo_dir.exists(): try: pbar = rich.progress.Progress( "[bold blue]{task.description}", @@ -110,8 +110,8 @@ def setup_local_repo(self, remote, branch, hide_progress=True, in_cache=False): progress=RemoteProgressbar(pbar, self.fullname, self.remote_url, "Cloning"), ) ModulesRepo.update_local_repo_status(self.fullname, True) - except GitCommandError: - raise LookupError(f"Failed to clone from the remote: `{remote}`") + except GitCommandError as e: + raise LookupError(f"Failed to clone from the remote: `{remote}`") from e # Verify that the requested branch exists by checking it out self.setup_branch(branch) else: @@ -150,4 +150,4 @@ def setup_local_repo(self, remote, branch, hide_progress=True, in_cache=False): shutil.rmtree(self.local_repo_dir) self.setup_local_repo(remote, branch, hide_progress) else: - raise LookupError("Exiting due to error with local modules git repo") + raise LookupError("Exiting due to error with local modules git repo") from e diff --git a/nf_core/modules/modules_utils.py b/nf_core/modules/modules_utils.py index 105b5af681..683bcda1cf 100644 --- a/nf_core/modules/modules_utils.py +++ b/nf_core/modules/modules_utils.py @@ 
-1,5 +1,4 @@ import logging -import os from pathlib import Path from urllib.parse import urlparse @@ -32,7 +31,7 @@ def repo_full_name_from_remote(remote_url: str) -> str: path = urlparse(remote_url).path # Remove the file extension from the path - path, _ = os.path.splitext(path) + path = str(Path(path).with_suffix("")) return path @@ -62,12 +61,11 @@ def get_installed_modules(directory: Path, repo_type="modules") -> tuple[list[st # Filter local modules if local_modules_dir.exists(): - local_modules = os.listdir(local_modules_dir) - local_modules = sorted([x for x in local_modules if x.endswith(".nf")]) + local_modules = sorted([x.name for x in local_modules_dir.iterdir() if x.suffix == ".nf"]) # Get nf-core modules if nfcore_modules_dir.exists(): - for m in sorted([m for m in nfcore_modules_dir.iterdir() if not m == "lib"]): + for m in sorted([m for m in nfcore_modules_dir.iterdir() if m != "lib"]): if not m.is_dir(): raise ModuleExceptionError( f"File found in '{nfcore_modules_dir}': '{m}'! This directory should only contain module directories." @@ -83,7 +81,7 @@ def get_installed_modules(directory: Path, repo_type="modules") -> tuple[list[st # Make full (relative) file paths and create NFCoreComponent objects if local_modules_dir: - local_modules = [os.path.join(local_modules_dir, m) for m in local_modules] + local_modules = [str(local_modules_dir / m) for m in local_modules] nfcore_modules = [ NFCoreComponent( @@ -110,13 +108,11 @@ def load_edam(): return edam_formats for line in response.content.splitlines(): fields = line.decode("utf-8").split("\t") - if fields[0].split("/")[-1].startswith("format"): - # We choose an already provided extension - if fields[14]: - extensions = fields[14].split("|") - for extension in extensions: - if extension not in edam_formats: - edam_formats[extension] = (fields[0], fields[1]) # URL, name + if fields[0].split("/")[-1].startswith("format") and fields[14]: # We choose an already provided extension + extensions = fields[14].split("|") + for extension in extensions: + if extension not in edam_formats: + edam_formats[extension] = (fields[0], fields[1]) # URL, name return edam_formats diff --git a/nf_core/pipelines/bump_version.py b/nf_core/pipelines/bump_version.py index b826e7fa51..604a85801d 100644 --- a/nf_core/pipelines/bump_version.py +++ b/nf_core/pipelines/bump_version.py @@ -336,7 +336,7 @@ def log_change(old_content: str, new_content: str): old_lines = old_content.splitlines() new_lines = new_content.splitlines() - for old_line, new_line in zip(old_lines, new_lines): + for old_line, new_line in zip(old_lines, new_lines, strict=False): if old_line != new_line: stderr.print(f" [red] - {old_line.strip()}", highlight=False) stderr.print(f" [green] + {new_line.strip()}", highlight=False) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index bfc8a97efb..c5ea0c6894 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -74,9 +74,7 @@ def __init__( self.config.outdir = outdir if outdir else Path().cwd() except (FileNotFoundError, UserWarning): log.debug("The '.nf-core.yml' configuration file was not found.") - elif (name and description and author) or ( - template_config and (isinstance(template_config, str) or isinstance(template_config, Path)) - ): + elif (name and description and author) or (template_config and (isinstance(template_config, (str, Path)))): # Obtain a CreateConfig object from the template yaml file self.config = self.check_template_yaml_info(template_config, 
name, description, author) self.update_config(organisation, version, force, outdir) @@ -153,8 +151,8 @@ def check_template_yaml_info(self, template_yaml, name, description, author): with open(template_yaml) as f: template_yaml = yaml.safe_load(f) config = CreateConfig(**template_yaml) - except FileNotFoundError: - raise UserWarning(f"Template YAML file '{template_yaml}' not found.") + except FileNotFoundError as e: + raise UserWarning(f"Template YAML file '{template_yaml}' not found.") from e # Check required fields missing_fields = [] @@ -221,7 +219,7 @@ def obtain_jinja_params_dict(self, features_to_skip: list[str], pipeline_dir: st skip_areas = [ t_area for section in self.template_features_yml.values() - for t_area in section["features"].keys() + for t_area in section["features"] if t_area in features_to_skip and section["features"][t_area]["skippable_paths"] # for t_area in section["features"][t_area]["skippable_paths"] ] @@ -249,13 +247,12 @@ def obtain_jinja_params_dict(self, features_to_skip: list[str], pipeline_dir: st jinja_params["logo_light"] = f"{jinja_params['name_noslash']}_logo_light.png" jinja_params["logo_dark"] = f"{jinja_params['name_noslash']}_logo_dark.png" jinja_params["default_branch"] = self.default_branch - if config_yml is not None: - if ( - hasattr(config_yml, "lint") - and hasattr(config_yml["lint"], "nextflow_config") - and hasattr(config_yml["lint"]["nextflow_config"], "manifest.name") - ): - return jinja_params, skip_areas + if config_yml is not None and ( + hasattr(config_yml, "lint") + and hasattr(config_yml["lint"], "nextflow_config") + and hasattr(config_yml["lint"]["nextflow_config"], "manifest.name") + ): + return jinja_params, skip_areas # Check that the pipeline name matches the requirements if not re.match(r"^[a-z]+$", jinja_params["short_name"]): @@ -331,12 +328,13 @@ def render_template(self) -> None: for template_fn_path in template_files: # Skip files that are in the self.skip_paths list for skip_path in self.skip_paths: + # TODO: Use Path.parts instead of str().startswith() to avoid unnecessary string conversion if str(template_fn_path.relative_to(template_dir)).startswith(skip_path): break else: if template_fn_path.is_dir(): continue - if any([s in str(template_fn_path) for s in ignore_strs]): + if any(s in str(template_fn_path) for s in ignore_strs): log.debug(f"Ignoring '{template_fn_path}' in jinja2 template creation") continue @@ -368,14 +366,13 @@ def render_template(self) -> None: log.debug(f"Copying file without Jinja: '{output_path}' - {e}") shutil.copy(template_fn_path, output_path) - # Something else went wrong - except Exception as e: + # Jinja rendering error or other template issues + except (jinja2.TemplateError, OSError) as e: log.error(f"Copying raw file as error rendering with Jinja: '{output_path}' - {e}") shutil.copy(template_fn_path, output_path) # Mirror file permissions - template_stat = os.stat(template_fn_path) - os.chmod(output_path, template_stat.st_mode) + output_path.chmod(template_fn_path.stat().st_mode) if self.config.is_nfcore: # Make a logo and save it, if it is a nf-core pipeline @@ -410,7 +407,7 @@ def fix_linting(self): lint_config = {} for area in set((self.config.skip_features or []) + self.skip_areas): try: - for section_name in self.template_features_yml.keys(): + for section_name in self.template_features_yml: if area in self.template_features_yml[section_name]["features"]: for lint_test in self.template_features_yml[section_name]["features"][area]["linting"]: try: @@ -502,7 +499,7 @@ def 
git_init_pipeline(self) -> None: else: raise UserWarning( "Branches 'TEMPLATE' and 'dev' already exist. Use --force to overwrite existing branches." - ) + ) from e if self.is_interactive: try: log.info(f"Pipeline created: ./{self.outdir.relative_to(Path.cwd())}") diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index dad81689a9..4d114dd7c7 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -1,5 +1,6 @@ """A Textual app to create a pipeline.""" +import contextlib from pathlib import Path from textwrap import dedent @@ -71,10 +72,8 @@ def on_button_pressed(self, event: Button.Pressed) -> None: text_input.query_one(".validation_msg").update("\n".join(validation_result.failure_descriptions)) else: text_input.query_one(".validation_msg").update("") - try: + with contextlib.suppress(ValueError): self.parent.TEMPLATE_CONFIG.__dict__.update(new_config) - except ValueError: - pass # Create the new pipeline self._create_pipeline() diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 4ddf0092e1..a9bfd56fac 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -1,3 +1,4 @@ +import contextlib import logging import os from pathlib import Path @@ -134,13 +135,13 @@ def on_button_pressed(self, event: Button.Pressed) -> None: org = None # Make sure that the authentication was successful try: - user.login - log.debug("GitHub authentication successful") - except GithubException: + user_name = user.login + log.debug(f"GitHub authentication successful for user '{user_name}'") + except GithubException as e: raise UserWarning( f"Could not authenticate to GitHub with user name '{github_variables['gh_username']}'." "Please make sure that the provided user name and token are correct." - ) + ) from e # Check if organisation exists # If the organisation is nf-core or it doesn't exist, the repo will be created in the user account @@ -151,7 +152,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: f"Repo will be created in the GitHub organisation account '{github_variables['repo_org']}'" ) except UnknownObjectException: - log.warn(f"Provided organisation '{github_variables['repo_org']}' not found. ") + log.warning(f"Provided organisation '{github_variables['repo_org']}' not found. 
") # Create the repo try: @@ -199,8 +200,8 @@ def _create_repo_and_push(self, org, repo_name, pipeline_repo, private): repo = org.get_repo(repo_name) # Check if it has a commit history try: - repo.get_commits().totalCount - raise UserWarning(f"GitHub repository '{repo_name}' already exists") + n = repo.get_commits().totalCount + raise UserWarning(f"GitHub repository '{repo_name}' already exists, with {n} commits") except GithubException: # Repo is empty repo_exists = True @@ -219,12 +220,9 @@ def _create_repo_and_push(self, org, repo_name, pipeline_repo, private): log.info(f"GitHub repository '{repo_name}' created successfully") remove_hide_class(self.parent, "close_app") - # Add the remote - try: + # Add the remote (if it doesn't already exist) + with contextlib.suppress(git.exc.GitCommandError): pipeline_repo.create_remote("origin", repo.clone_url) - except git.exc.GitCommandError: - # Remote already exists - pass # Push all branches pipeline_repo.remotes.origin.push(all=True).raise_if_error() @@ -239,8 +237,8 @@ def _get_github_credentials(self): gh_user = None gh_token = None # Use gh CLI config if installed - gh_cli_config_fn = os.path.expanduser("~/.config/gh/hosts.yml") - if os.path.exists(gh_cli_config_fn): + gh_cli_config_fn = Path.home() / ".config" / "gh" / "hosts.yml" + if gh_cli_config_fn.exists(): try: with open(gh_cli_config_fn) as fh: gh_cli_config = yaml.safe_load(fh) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index a0c280185d..5751ddbd77 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -19,7 +19,7 @@ from nf_core.utils import NFCoreTemplateConfig # Use ContextVar to define a context on the model initialization -_init_context_var: ContextVar = ContextVar("_init_context_var", default={}) +_init_context_var: ContextVar = ContextVar("_init_context_var", default=None) @contextmanager @@ -45,10 +45,11 @@ class CreateConfig(NFCoreTemplateConfig): def __init__(self, /, **data: Any) -> None: """Custom init method to allow using a context on the model initialization.""" + context = _init_context_var.get() self.__pydantic_validator__.validate_python( data, self_instance=self, - context=_init_context_var.get(), + context=context if context is not None else {}, ) @field_validator("name") diff --git a/nf_core/pipelines/create_logo.py b/nf_core/pipelines/create_logo.py index 85ae2cdf02..9218001005 100644 --- a/nf_core/pipelines/create_logo.py +++ b/nf_core/pipelines/create_logo.py @@ -40,14 +40,14 @@ def create_logo( svg = svg.replace("#050505", "#fafafa") # save the svg - logo_filename = f"nf-core-{text}_logo_{theme}.svg" if not filename else filename + logo_filename = filename if filename else f"nf-core-{text}_logo_{theme}.svg" logo_filename = f"{logo_filename}.svg" if not logo_filename.lower().endswith(".svg") else logo_filename logo_path = Path(directory, logo_filename) with open(logo_path, "w") as fh: fh.write(svg) else: - logo_filename = f"nf-core-{text}_logo_{theme}.png" if not filename else filename + logo_filename = filename if filename else f"nf-core-{text}_logo_{theme}.png" logo_filename = f"{logo_filename}.png" if not logo_filename.lower().endswith(".png") else logo_filename cache_name = f"nf-core-{text}_logo_{theme}_{width}.png" logo_path = Path(directory, logo_filename) diff --git a/nf_core/pipelines/download/container_fetcher.py b/nf_core/pipelines/download/container_fetcher.py index faa0ddf763..bfcda9ed54 100644 --- a/nf_core/pipelines/download/container_fetcher.py +++ 
b/nf_core/pipelines/download/container_fetcher.py @@ -274,7 +274,7 @@ def gather_registries(self, workflow_directory: Path) -> set[str]: """ pass - def gather_config_registries(self, workflow_directory: Path, registry_keys: list[str] = []) -> set[str]: + def gather_config_registries(self, workflow_directory: Path, registry_keys: list[str] | None = None) -> set[str]: """ Gather the registries from the pipeline config and store them in a set. @@ -286,6 +286,8 @@ def gather_config_registries(self, workflow_directory: Path, registry_keys: list set[str]: The set of registries defined in the pipeline config """ # Fetch the pipeline config + if registry_keys is None: + registry_keys = [] nf_config = nf_core.utils.fetch_wf_config(workflow_directory) config_registries = set() @@ -492,5 +494,6 @@ def copy_image(self, container: str, src_path: Path, dest_path: Path) -> None: def cleanup(self) -> None: """ Cleanup any temporary files or resources. + Default implementation logs completion; subclasses can override to add specific cleanup. """ - pass + log.debug(f"Container fetching complete. Images saved to {self.get_container_output_dir()}") diff --git a/nf_core/pipelines/download/docker.py b/nf_core/pipelines/download/docker.py index 6d7cae8a19..23062ace0c 100644 --- a/nf_core/pipelines/download/docker.py +++ b/nf_core/pipelines/download/docker.py @@ -82,10 +82,10 @@ def check_and_set_implementation(self) -> None: try: nf_core.utils.run_cmd(docker_binary, "info") - except RuntimeError: + except RuntimeError as e: raise OSError( "Docker daemon is required to pull images, but it is not running or unavailable to the docker client" - ) + ) from e self.implementation = "docker" @@ -154,7 +154,7 @@ def fetch_remote_containers(self, containers: list[tuple[str, Path]], parallel: future.result() # This will raise an exception if the pull or save failed except DockerError as e: log.error(f"Error while processing container {e.container}: {e.message}") - except Exception as e: + except OSError as e: log.error(f"Unexpected error: {e}") except KeyboardInterrupt: diff --git a/nf_core/pipelines/download/download.py b/nf_core/pipelines/download/download.py index 05c3ccea8d..66f8b11f49 100644 --- a/nf_core/pipelines/download/download.py +++ b/nf_core/pipelines/download/download.py @@ -107,7 +107,7 @@ def __init__( self.platform = platform self.fullname: str | None = None # downloading configs is not supported for Seqera Platform downloads. - self.include_configs = True if download_configuration == "yes" and not bool(platform) else False + self.include_configs = bool(download_configuration == "yes" and not bool(platform)) # Additional tags to add to the downloaded pipeline. This enables to mark particular commits or revisions with # additional tags, e.g. "stable", "testing", "validated", "production" etc. Since this requires a git-repo, it is only # available for the bare / Seqera Platform download. 
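# --- Illustrative sketch (not part of the patch above) -----------------------
# The gather_config_registries() hunk replaces a mutable default argument
# (`registry_keys: list[str] = []`) with a None sentinel that is expanded
# inside the function body. The standalone example below shows the pitfall the
# change avoids (ruff/bugbear rule B006); the `collect_*` names are
# hypothetical and do not exist in nf-core/tools.

def collect_bad(item, seen=[]):      # default list is created once and shared by every call
    seen.append(item)
    return seen

def collect_good(item, seen=None):   # None sentinel: a fresh list per call unless one is passed in
    if seen is None:
        seen = []
    seen.append(item)
    return seen

assert collect_bad("quay.io") == ["quay.io"]
assert collect_bad("docker.io") == ["quay.io", "docker.io"]   # state leaks between calls
assert collect_good("quay.io") == ["quay.io"]
assert collect_good("docker.io") == ["docker.io"]             # calls stay independent
# -----------------------------------------------------------------------------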
@@ -317,7 +317,9 @@ def download_workflow_static(self) -> None: # Download the pipeline files for each selected revision log.info("Downloading workflow files from GitHub") - for revision, wf_sha, download_url in zip(self.revision, self.wf_sha.values(), self.wf_download_url.values()): + for revision, wf_sha, download_url in zip( + self.revision, self.wf_sha.values(), self.wf_download_url.values(), strict=False + ): revision_dirname = self.download_wf_files(revision=revision, wf_sha=wf_sha, download_url=download_url) if self.include_configs: @@ -428,7 +430,7 @@ def get_revision_hash(self) -> None: for revision in self.revision: # revision is a list of strings, but may be of length 1 # Branch - if revision in self.wf_branches.keys(): + if revision in self.wf_branches: self.wf_sha = {**self.wf_sha, revision: self.wf_branches[revision]} else: @@ -530,7 +532,7 @@ def setup_container_fetcher(self) -> None: else: self.container_fetcher = None except OSError as e: - raise DownloadError(e) + raise DownloadError(e) from e def prompt_use_singularity(self, fail_message: str) -> None: use_singularity = questionary.confirm( @@ -686,11 +688,11 @@ def find_container_images( except RuntimeError as e: log.error("Running 'nextflow inspect' failed with the following error") - raise DownloadError(e) + raise DownloadError(e) from e except KeyError as e: log.error("Failed to parse output of 'nextflow inspect' to extract containers") - raise DownloadError(e) + raise DownloadError(e) from e def get_container_output_dir(self) -> Path: assert self.outdir is not None # mypy diff --git a/nf_core/pipelines/download/singularity.py b/nf_core/pipelines/download/singularity.py index 286a90bbe6..1399c7ad4a 100644 --- a/nf_core/pipelines/download/singularity.py +++ b/nf_core/pipelines/download/singularity.py @@ -113,10 +113,7 @@ def __init__( f"Proceeding without consideration of the remote ${SINGULARITY_CACHE_DIR_ENV_VAR} index." ) self.container_cache_index = None - if os.environ.get(SINGULARITY_CACHE_DIR_ENV_VAR): - container_cache_utilisation = "copy" # default to copy if possible, otherwise skip. - else: - container_cache_utilisation = None + container_cache_utilisation = "copy" if os.environ.get(SINGULARITY_CACHE_DIR_ENV_VAR) else None else: log.warning("[red]No remote cache index specified, skipping remote container download.[/]") @@ -372,7 +369,7 @@ def read_remote_singularity_containers(container_cache_index: Path) -> list[str] containers_remote.append(match.group(0)) if n_total_images == 0: raise LookupError("Could not find valid container names in the index file.") - containers_remote = sorted(list(set(containers_remote))) + containers_remote = sorted(set(containers_remote)) log.debug(containers_remote) return containers_remote @@ -487,7 +484,8 @@ def update_file_progress(input_params: tuple[str, Path], status: FileDownloader. container, output_path = input_params try: self.progress.advance_remote_fetch_task() - except Exception as e: + except RuntimeError as e: + # Rich progress may raise RuntimeError if called from wrong thread log.error(f"Error updating progress bar: {e}") if status == FileDownloader.Status.DONE: @@ -636,6 +634,12 @@ def _ensure_output_dir_exists(self) -> None: log.debug(f"Container output directory not found, creating: {self.get_container_output_dir()}") self.get_container_output_dir().mkdir(parents=True, exist_ok=True) + def cleanup(self) -> None: + """ + Cleanup any temporary files or resources. 
+ """ + super().cleanup() + # Distinct errors for the Singularity container download, required for acting on the exceptions class SingularityError(Exception): @@ -889,31 +893,34 @@ def download_file(self, remote_path: str, output_path: Path) -> None: # Set up download progress bar as a new task nice_name = self.nice_name(remote_path) - with self.progress.sub_task(nice_name, start=False, total=False, progress_type="download") as task: + with ( + self.progress.sub_task(nice_name, start=False, total=False, progress_type="download") as task, + intermediate_file(output_path) as fh, + ): # Open file handle and download # This temporary will be automatically renamed to the target if there are no errors - with intermediate_file(output_path) as fh: - # Disable caching as this breaks streamed downloads - with requests_cache.disabled(): - r = requests.get(remote_path, allow_redirects=True, stream=True, timeout=60 * 5) - filesize = r.headers.get("Content-length") - if filesize: - self.progress.update(task, total=int(filesize)) - self.progress.start_task(task) - - # Stream download - has_content = False - for data in r.iter_content(chunk_size=io.DEFAULT_BUFFER_SIZE): - # Check that the user didn't hit ctrl-c - if self.kill_with_fire: - raise KeyboardInterrupt - self.progress.update(task, advance=len(data)) - fh.write(data) - has_content = True - - # Check that we actually downloaded something - if not has_content: - raise DownloadError(f"Downloaded file '{remote_path}' is empty") - - # Set image file permissions to user=read,write,execute group/all=read,execute - os.chmod(fh.name, 0o755) + + # Disable caching as this breaks streamed downloads + with requests_cache.disabled(): + r = requests.get(remote_path, allow_redirects=True, stream=True, timeout=60 * 5) + filesize = r.headers.get("Content-length") + if filesize: + self.progress.update(task, total=int(filesize)) + self.progress.start_task(task) + + # Stream download + has_content = False + for data in r.iter_content(chunk_size=io.DEFAULT_BUFFER_SIZE): + # Check that the user didn't hit ctrl-c + if self.kill_with_fire: + raise KeyboardInterrupt + self.progress.update(task, advance=len(data)) + fh.write(data) + has_content = True + + # Check that we actually downloaded something + if not has_content: + raise DownloadError(f"Downloaded file '{remote_path}' is empty") + + # Set image file permissions to user=read,write,execute group/all=read,execute + Path(fh.name).chmod(0o755) diff --git a/nf_core/pipelines/download/utils.py b/nf_core/pipelines/download/utils.py index 17c3426d64..bde3845c09 100644 --- a/nf_core/pipelines/download/utils.py +++ b/nf_core/pipelines/download/utils.py @@ -21,9 +21,11 @@ def copy_container_load_scripts(container_system: str, dest_dir: Path, make_exec container_load_scripts_subpackage = "nf_core.pipelines.download.load_scripts" script_name = f"{container_system}-load.sh" dest_path = dest_dir / script_name - with importlib.resources.open_text(container_load_scripts_subpackage, script_name) as src: - with open(dest_path, "w") as dest: - shutil.copyfileobj(src, dest) + with ( + importlib.resources.open_text(container_load_scripts_subpackage, script_name) as src, + open(dest_path, "w") as dest, + ): + shutil.copyfileobj(src, dest) if make_exec: dest_path.chmod(0o775) return script_name, dest_path @@ -47,16 +49,16 @@ def intermediate_file(output_path: Path) -> Generator[tempfile._TemporaryFileWra if output_path.is_symlink(): raise DownloadError(f"Output path '{output_path}' is a symbolic link") - tmp = 
tempfile.NamedTemporaryFile(dir=output_path.parent, delete=False) - try: - yield tmp - tmp.close() - Path(tmp.name).rename(output_path) - except: - tmp_path = Path(tmp.name) - if tmp_path.exists(): - tmp_path.unlink() - raise + with tempfile.NamedTemporaryFile(dir=output_path.parent, delete=False) as tmp: + tmp_name = tmp.name + try: + yield tmp + except BaseException: + # File is closed automatically when exiting with block + Path(tmp_name).unlink(missing_ok=True) + raise + # File is now closed, safe to rename + Path(tmp_name).rename(output_path) @contextlib.contextmanager @@ -85,7 +87,7 @@ def intermediate_file_no_creation(output_path: Path) -> Generator[Path, None, No @contextlib.contextmanager -def intermediate_dir_with_cd(original_dir: Path, base_dir: Path = Path(".")): +def intermediate_dir_with_cd(original_dir: Path, base_dir: Path = Path()): """ Context manager to provide and change into a tempdir and ensure its removal and return to the original_dir upon exceptions. diff --git a/nf_core/pipelines/download/workflow_repo.py b/nf_core/pipelines/download/workflow_repo.py index 184555ac19..2c324a7cf7 100644 --- a/nf_core/pipelines/download/workflow_repo.py +++ b/nf_core/pipelines/download/workflow_repo.py @@ -150,8 +150,8 @@ def setup_local_repo(self, remote, location=None, in_cache=True): progress=RemoteProgressbar(pbar, self.fullname, self.remote_url, "Cloning"), ) super().update_local_repo_status(self.fullname, True) - except GitCommandError: - raise DownloadError(f"Failed to clone from the remote: `{remote}`") + except GitCommandError as e: + raise DownloadError(f"Failed to clone from the remote: `{remote}`") from e else: self.repo = git.Repo(self.local_repo_dir) diff --git a/nf_core/pipelines/launch.py b/nf_core/pipelines/launch.py index 364da1c113..8fce38e83a 100644 --- a/nf_core/pipelines/launch.py +++ b/nf_core/pipelines/launch.py @@ -45,7 +45,7 @@ def __init__( self.pipeline = pipeline self.pipeline_revision = revision self.schema_obj = None - self.use_params_file = False if command_only else True + self.use_params_file = not command_only self.params_in = params_in self.params_out = params_out if params_out else Path.cwd() / "nf-params.json" self.save_all = save_all @@ -117,17 +117,18 @@ def launch_pipeline(self): ).unsafe_ask() # Check if the output file exists already - if os.path.exists(self.params_out): + if self.params_out.exists(): # if params_in has the same name as params_out, don't ask to overwrite - if self.params_in and os.path.abspath(self.params_in) == os.path.abspath(self.params_out): + params_in_path = Path(self.params_in) if self.params_in else None + if params_in_path and params_in_path.exists() and params_in_path.samefile(self.params_out): log.warning( f"The parameter input file has the same name as the output file! {os.path.relpath(self.params_out)} will be overwritten." ) else: log.warning(f"Parameter output file already exists! {os.path.relpath(self.params_out)}") if Confirm.ask("[yellow]Do you want to overwrite this file?"): - if not (self.params_in and os.path.abspath(self.params_in) == os.path.abspath(self.params_out)): - os.remove(self.params_out) + if not (params_in_path and params_in_path.exists() and params_in_path.samefile(self.params_out)): + self.params_out.unlink() log.info(f"Deleted {self.params_out}\n") else: log.info("Exiting. 
Use --params-out to specify a custom output filename.") @@ -195,10 +196,10 @@ def get_pipeline_schema(self): self.schema_obj = nf_core.pipelines.schema.PipelineSchema() # Check if this is a local directory - localpath = os.path.abspath(os.path.expanduser(self.pipeline)) - if os.path.exists(localpath): + localpath = Path(self.pipeline).expanduser().resolve() + if localpath.exists(): # Set the nextflow launch command to use full paths - self.pipeline = localpath + self.pipeline = str(localpath) self.nextflow_cmd = f"nextflow run {localpath}" else: # Assume nf-core if no org given @@ -225,12 +226,11 @@ def get_pipeline_schema(self): except AssertionError: # No schema found # Check that this was actually a pipeline - if self.schema_obj.pipeline_dir is None or not os.path.exists(self.schema_obj.pipeline_dir): + pipeline_dir_path = Path(self.schema_obj.pipeline_dir) if self.schema_obj.pipeline_dir else None + if pipeline_dir_path is None or not pipeline_dir_path.exists(): log.error(f"Could not find pipeline: {self.pipeline} ({self.schema_obj.pipeline_dir})") return False - if not os.path.exists(os.path.join(self.schema_obj.pipeline_dir, "nextflow.config")) and not os.path.exists( - os.path.join(self.schema_obj.pipeline_dir, "main.nf") - ): + if not (pipeline_dir_path / "nextflow.config").exists() and not (pipeline_dir_path / "main.nf").exists(): log.error("Could not find a 'main.nf' or 'nextflow.config' file, are you sure this is a pipeline?") return False @@ -321,12 +321,12 @@ def launch_web_gui(self): raise AssertionError( f'web_response["status"] should be "recieved", but it is "{web_response["status"]}"' ) - except AssertionError: + except AssertionError as e: log.debug(f"Response content:\n{json.dumps(web_response, indent=4)}") raise AssertionError( f"Web launch response not recognised: {self.web_schema_launch_url}\n " "See verbose log for full response (nf-core -v launch)" - ) + ) from e else: self.web_schema_launch_web_url = web_response["web_url"] self.web_schema_launch_api_url = web_response["api_url"] @@ -363,10 +363,12 @@ def get_web_launch_response(self): # Sanitise form inputs, set proper variable types etc self.sanitise_web_response() except KeyError as e: - raise AssertionError(f"Missing return key from web API: {e}") + raise AssertionError(f"Missing return key from web API: {e}") from e except Exception as e: log.debug(web_response) - raise AssertionError(f"Unknown exception ({type(e).__name__}) - see verbose log for details. {e}") + raise AssertionError( + f"Unknown exception ({type(e).__name__}) - see verbose log for details. 
{e}" + ) from e return True else: log.debug(f"Response content:\n{json.dumps(web_response, indent=4)}") @@ -571,7 +573,7 @@ def single_param_to_questionary(self, param_id, param_obj, answers=None, print_h # Overwrite default with parsed schema, includes --params-in etc if self.schema_obj is not None and param_id in self.schema_obj.input_params: if param_obj["type"] == "boolean" and isinstance(self.schema_obj.input_params[param_id], str): - question["default"] = "true" == self.schema_obj.input_params[param_id].lower() + question["default"] = self.schema_obj.input_params[param_id].lower() == "true" else: question["default"] = self.schema_obj.input_params[param_id] @@ -725,7 +727,7 @@ def build_command(self): if isinstance(val, bool) and val: self.nextflow_cmd += f" --{param}" # No quotes for numbers - elif (isinstance(val, int) or isinstance(val, float)) and val: + elif (isinstance(val, (int, float))) and val: self.nextflow_cmd += " --{} {}".format(param, str(val).replace('"', '\\"')) # everything else else: diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index 13e3b7afe7..dc17996be2 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -209,7 +209,7 @@ def _lint_pipeline(self) -> None: log.info("Including --release mode tests") # Check that we recognise all --fix arguments - unrecognised_fixes = list(test for test in self.fix if test not in self.lint_tests) + unrecognised_fixes = [test for test in self.fix if test not in self.lint_tests] if len(unrecognised_fixes): raise AssertionError( "Unrecognised lint test{} for '--fix': '{}'".format( @@ -239,11 +239,11 @@ def _lint_pipeline(self) -> None: log.info("Attempting to automatically fix failing tests") try: repo = git.Repo(self.wf_path) - except InvalidGitRepositoryError: + except InvalidGitRepositoryError as e: raise AssertionError( f"'{self.wf_path}' does not appear to be a git repository, " "this is required when running with '--fix'" - ) + ) from e # Check that we have no uncommitted changes if repo.is_dirty(untracked_files=True): raise AssertionError( @@ -606,12 +606,8 @@ def run_linting( ) log.info("Only running tests: '{}'".format("', '".join(key))) - # Check if we were given any keys, and if they match any pipeline tests - if key: - pipeline_keys = list(set(key).intersection(set(PipelineLint._get_all_lint_tests(release_mode)))) - else: - # If no key is supplied, run all tests - pipeline_keys = None + # Check if we were given any keys, and if they match any pipeline tests. 
If no key is supplied, run all tests + pipeline_keys = list(set(key).intersection(set(PipelineLint._get_all_lint_tests(release_mode)))) if key else None # Create the lint object lint_obj = PipelineLint(pipeline_dir, release_mode, fix, pipeline_keys, fail_ignored, fail_warned, hide_progress) @@ -649,8 +645,8 @@ def run_linting( ) else: # If no key is supplied, run the default modules tests - module_lint_tests = list(("module_changes", "module_version")) - subworkflow_lint_tests = list(("subworkflow_changes", "subworkflow_version")) + module_lint_tests = ["module_changes", "module_version"] + subworkflow_lint_tests = ["subworkflow_changes", "subworkflow_version"] module_lint_obj.filter_tests_by_key(module_lint_tests) if subworkflow_lint_obj is not None: subworkflow_lint_obj.filter_tests_by_key(subworkflow_lint_tests) @@ -704,7 +700,6 @@ def run_linting( lint_obj._save_json_results(json_fn) # Reminder about --release mode flag if we had failures - if len(lint_obj.failed) > 0: - if release_mode: - log.info("Reminder: Lint tests were run in --release mode.") + if len(lint_obj.failed) > 0 and release_mode: + log.info("Reminder: Lint tests were run in --release mode.") return lint_obj, module_lint_obj, subworkflow_lint_obj diff --git a/nf_core/pipelines/lint/actions_awsfulltest.py b/nf_core/pipelines/lint/actions_awsfulltest.py index 5c6fdb2717..b740f49803 100644 --- a/nf_core/pipelines/lint/actions_awsfulltest.py +++ b/nf_core/pipelines/lint/actions_awsfulltest.py @@ -34,7 +34,7 @@ def actions_awsfulltest(self) -> dict[str, list[str]]: try: with open(fn) as fh: wf = yaml.safe_load(fh) - except Exception as e: + except (OSError, yaml.YAMLError) as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} aws_profile = "-profile test " @@ -42,11 +42,11 @@ def actions_awsfulltest(self) -> dict[str, list[str]]: # Check that the action is only turned on for published releases try: if wf[True]["pull_request_review"]["types"] != ["submitted"]: - raise AssertionError() + raise AssertionError if "workflow_dispatch" not in wf[True]: - raise AssertionError() + raise AssertionError if wf[True]["release"]["types"] != ["published"]: - raise AssertionError() + raise AssertionError except (AssertionError, KeyError, TypeError): failed.append("`.github/workflows/awsfulltest.yml` is not triggered correctly") else: @@ -55,8 +55,8 @@ def actions_awsfulltest(self) -> dict[str, list[str]]: # Warn if `-profile test` is still unchanged try: steps = wf["jobs"]["run-platform"]["steps"] - if not any(aws_profile in step["run"] for step in steps if "run" in step.keys()): - raise AssertionError() + if not any(aws_profile in step["run"] for step in steps if "run" in step): + raise AssertionError except (AssertionError, KeyError, TypeError): passed.append("`.github/workflows/awsfulltest.yml` does not use `-profile test`") else: diff --git a/nf_core/pipelines/lint/actions_awstest.py b/nf_core/pipelines/lint/actions_awstest.py index 7e4c0fc497..d61d39f966 100644 --- a/nf_core/pipelines/lint/actions_awstest.py +++ b/nf_core/pipelines/lint/actions_awstest.py @@ -29,15 +29,15 @@ def actions_awstest(self): try: with open(fn) as fh: wf = yaml.safe_load(fh) - except Exception as e: + except (OSError, yaml.YAMLError) as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} # Check that the action is only turned on for workflow_dispatch try: if "workflow_dispatch" not in wf[True]: - raise AssertionError() + raise AssertionError if "push" in wf[True] or "pull_request" in wf[True]: - raise AssertionError() + raise 
AssertionError except (AssertionError, KeyError, TypeError): return {"failed": ["'.github/workflows/awstest.yml' is not triggered correctly"]} else: diff --git a/nf_core/pipelines/lint/actions_nf_test.py b/nf_core/pipelines/lint/actions_nf_test.py index 42761c3323..312b806ebe 100644 --- a/nf_core/pipelines/lint/actions_nf_test.py +++ b/nf_core/pipelines/lint/actions_nf_test.py @@ -45,7 +45,7 @@ def actions_nf_test(self): try: with open(fn) as fh: ciwf = yaml.safe_load(fh) - except Exception as e: + except (OSError, yaml.YAMLError) as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} # Check that the action is turned on for the correct events @@ -53,9 +53,9 @@ def actions_nf_test(self): # NB: YAML dict key 'on' is evaluated to a Python dict key True pr_subtree = ciwf[True]["pull_request"] if pr_subtree is None: - raise AssertionError() + raise AssertionError if "published" not in ciwf[True]["release"]["types"]: - raise AssertionError() + raise AssertionError except (AssertionError, KeyError, TypeError): failed.append("'.github/workflows/nf-test.yml' is not triggered on expected events") else: @@ -65,7 +65,7 @@ def actions_nf_test(self): try: nxf_ver = ciwf["jobs"]["nf-test"]["strategy"]["matrix"]["NXF_VER"] if not any(i == self.minNextflowVersion for i in nxf_ver): - raise AssertionError() + raise AssertionError except (KeyError, TypeError): failed.append("'.github/workflows/nf-test.yml' does not check minimum NF version") except AssertionError: diff --git a/nf_core/pipelines/lint/actions_schema_validation.py b/nf_core/pipelines/lint/actions_schema_validation.py index cefbb009fa..e3d18e6efa 100644 --- a/nf_core/pipelines/lint/actions_schema_validation.py +++ b/nf_core/pipelines/lint/actions_schema_validation.py @@ -45,21 +45,21 @@ def actions_schema_validation(self) -> dict[str, list[str]]: try: with open(wf_path) as fh: wf_json = yaml.safe_load(fh) - except Exception as e: + except (OSError, yaml.YAMLError) as e: failed.append(f"Could not parse yaml file: {wf}, {e}") continue # yaml parses 'on' as True --> try to fix it before schema validation try: wf_json["on"] = wf_json.pop(True) - except Exception: + except KeyError: failed.append(f"Missing 'on' keyword in {wf}") # Validate the workflow try: jsonschema.validate(wf_json, schema) passed.append(f"Workflow validation passed: {wf}") - except Exception as e: + except jsonschema.ValidationError as e: failed.append(f"Workflow validation failed for {wf}: {e}") return {"passed": passed, "failed": failed, "warned": warned} diff --git a/nf_core/pipelines/lint/configs.py b/nf_core/pipelines/lint/configs.py index c90b5faec1..05b4bde59e 100644 --- a/nf_core/pipelines/lint/configs.py +++ b/nf_core/pipelines/lint/configs.py @@ -30,7 +30,7 @@ def lint_file(self, lint_name: str, file_path: Path) -> dict[str, list[str]]: try: with open(fn) as fh: config = fh.read() - except Exception as e: + except OSError as e: return {"failed": [f"Could not parse file: {fn}, {e}"]} # find sections with a withName: prefix diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py index e99b97c246..202906c184 100644 --- a/nf_core/pipelines/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -224,18 +224,18 @@ def pf(file_path: str | Path) -> Path: # Files that cause an error if they don't exist for files in files_fail: - if any([str(f) in ignore_files for f in files]): + if any(str(f) in ignore_files for f in files): continue - if any([pf(f).is_file() for f in files]): + if any(pf(f).is_file() for f in files): 
passed.append(f"File found: {self._wrap_quotes(files)}") else: failed.append(f"File not found: {self._wrap_quotes(files)}") # Files that cause a warning if they don't exist for files in files_warn: - if any([str(f) in ignore_files for f in files]): + if any(str(f) in ignore_files for f in files): continue - if any([pf(f).is_file() for f in files]): + if any(pf(f).is_file() for f in files): passed.append(f"File found: {self._wrap_quotes(files)}") else: hint = "" diff --git a/nf_core/pipelines/lint/files_unchanged.py b/nf_core/pipelines/lint/files_unchanged.py index 8ff61171d3..4f9fe3fcd2 100644 --- a/nf_core/pipelines/lint/files_unchanged.py +++ b/nf_core/pipelines/lint/files_unchanged.py @@ -1,6 +1,5 @@ import filecmp import logging -import os import re import shutil import tempfile @@ -156,11 +155,11 @@ def _tf(file_path: str | Path) -> Path: # Files that must be completely unchanged from template for files in files_exact: # Ignore if file specified in linting config - if any([str(f) in ignore_files for f in files]): + if any(str(f) in ignore_files for f in files): ignored.append(f"File ignored due to lint config: {self._wrap_quotes(files)}") # Ignore if we can't find the file - elif not any([_pf(f).is_file() for f in files]): + elif not any(_pf(f).is_file() for f in files): ignored.append(f"File does not exist: {self._wrap_quotes(files)}") # Check that the file has an identical match @@ -170,8 +169,8 @@ def _tf(file_path: str | Path) -> Path: if filecmp.cmp(_pf(f), _tf(f), shallow=True): passed.append(f"`{f}` matches the template") else: - if f.name.endswith(".png") and int(os.stat(_pf(f)).st_size / 500) == int( - os.stat(_tf(f)).st_size / 500 + if f.name.endswith(".png") and int(_pf(f).stat().st_size / 500) == int( + _tf(f).stat().st_size / 500 ): # almost the same file, good enough for the logo log.debug(f"Files are almost the same. 
Will pass: {f}") @@ -196,11 +195,11 @@ def _tf(file_path: str | Path) -> Path: # Files that can be added to, but that must contain the template contents for files in files_partial: # Ignore if file specified in linting config - if any([str(f) in ignore_files for f in files]): + if any(str(f) in ignore_files for f in files): ignored.append(f"File ignored due to lint config: {self._wrap_quotes(files)}") # Ignore if we can't find the file - elif not any([_pf(f).is_file() for f in files]): + elif not any(_pf(f).is_file() for f in files): ignored.append(f"File does not exist: {self._wrap_quotes(files)}") # Check that the file contains the template file contents diff --git a/nf_core/pipelines/lint/modules_json.py b/nf_core/pipelines/lint/modules_json.py index dc9faadf5d..17ad46b663 100644 --- a/nf_core/pipelines/lint/modules_json.py +++ b/nf_core/pipelines/lint/modules_json.py @@ -24,7 +24,7 @@ def modules_json(self) -> dict[str, list[str]]: if _modules_json and modules_json_dict is not None: all_modules_passed = True - for repo in modules_json_dict["repos"].keys(): + for repo in modules_json_dict["repos"]: # Check if the modules.json has been updated to keep the if "modules" not in modules_json_dict["repos"][repo] or not repo.startswith("http"): failed.append( @@ -33,7 +33,7 @@ def modules_json(self) -> dict[str, list[str]]: ) continue - for dir in modules_json_dict["repos"][repo]["modules"].keys(): + for dir in modules_json_dict["repos"][repo]["modules"]: for module, module_entry in modules_json_dict["repos"][repo]["modules"][dir].items(): if not Path(modules_dir, dir, module).exists(): failed.append( diff --git a/nf_core/pipelines/lint/multiqc_config.py b/nf_core/pipelines/lint/multiqc_config.py index 01b6b06dec..254caa925a 100644 --- a/nf_core/pipelines/lint/multiqc_config.py +++ b/nf_core/pipelines/lint/multiqc_config.py @@ -57,7 +57,7 @@ def multiqc_config(self) -> dict[str, list[str]]: try: with open(fn) as fh: mqc_yml = yaml.safe_load(fh) - except Exception as e: + except (OSError, yaml.YAMLError) as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} # check if required sections are present @@ -137,7 +137,7 @@ def multiqc_config(self) -> dict[str, list[str]]: # Check that export_plots is activated try: if not mqc_yml["export_plots"]: - raise AssertionError() + raise AssertionError except (AssertionError, KeyError, TypeError): failed.append("`assets/multiqc_config.yml` does not contain 'export_plots: true'.") else: diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index 0020c97749..f5913f5521 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -208,7 +208,7 @@ def nextflow_config(self) -> dict[str, list[str]]: if cf in ignore_configs: ignored.append(f"Config variable ignored: {self._wrap_quotes(cf)}") break - if cf in self.nf_config.keys(): + if cf in self.nf_config: passed.append(f"Config variable found: {self._wrap_quotes(cf)}") break else: @@ -218,7 +218,7 @@ def nextflow_config(self) -> dict[str, list[str]]: if cf in ignore_configs: ignored.append(f"Config variable ignored: {self._wrap_quotes(cf)}") break - if cf in self.nf_config.keys(): + if cf in self.nf_config: passed.append(f"Config variable found: {self._wrap_quotes(cf)}") break else: @@ -227,7 +227,7 @@ def nextflow_config(self) -> dict[str, list[str]]: if cf in ignore_configs: ignored.append(f"Config variable ignored: {self._wrap_quotes(cf)}") break - if cf not in self.nf_config.keys(): + if cf not in 
self.nf_config: passed.append(f"Config variable (correctly) not found: {self._wrap_quotes(cf)}") else: failed.append(f"Config variable (incorrectly) found: {self._wrap_quotes(cf)}") @@ -235,13 +235,7 @@ def nextflow_config(self) -> dict[str, list[str]]: # Check and warn if the process configuration is done with deprecated syntax process_with_deprecated_syntax = list( - set( - [ - match.group(1) - for ck in self.nf_config.keys() - if (match := re.match(r"^(process\.\$.*?)\.+.*$", ck)) is not None - ] - ) + {match.group(1) for ck in self.nf_config if (match := re.match(r"^(process\.\$.*?)\.+.*$", ck)) is not None} ) for pd in process_with_deprecated_syntax: warned.append(f"Process configuration is done with deprecated_syntax: {pd}") @@ -265,7 +259,7 @@ def nextflow_config(self) -> dict[str, list[str]]: try: manifest_name = self.nf_config.get("manifest.name", "").strip("'\"") if not manifest_name.startswith(f"{org_name}/"): - raise AssertionError() + raise AssertionError except (AssertionError, IndexError): failed.append(f"Config ``manifest.name`` did not begin with ``{org_name}/``:\n {manifest_name}") else: @@ -276,7 +270,7 @@ def nextflow_config(self) -> dict[str, list[str]]: try: manifest_homepage = self.nf_config.get("manifest.homePage", "").strip("'\"") if not manifest_homepage.startswith(f"https://github.com/{org_name}/"): - raise AssertionError() + raise AssertionError except (AssertionError, IndexError): failed.append( f"Config variable ``manifest.homePage`` did not begin with https://github.com/{org_name}/:\n {manifest_homepage}" @@ -415,11 +409,11 @@ def nextflow_config(self) -> dict[str, list[str]]: schema.load_schema() schema.get_schema_defaults() # Get default values from schema schema.get_schema_types() # Get types from schema - for param_name in schema.schema_defaults.keys(): + for param_name in schema.schema_defaults: param = "params." 
+ param_name if param in ignore_defaults: ignored.append(f"Config default ignored: {param}") - elif param in self.nf_config.keys(): + elif param in self.nf_config: config_default: str | float | int | None = None schema_default: str | float | int | None = None if schema.schema_types[param_name] == "boolean": diff --git a/nf_core/pipelines/lint/nf_test_content.py b/nf_core/pipelines/lint/nf_test_content.py index 045c27f540..c0cd92159a 100644 --- a/nf_core/pipelines/lint/nf_test_content.py +++ b/nf_core/pipelines/lint/nf_test_content.py @@ -106,7 +106,7 @@ def nf_test_content(self) -> dict[str, list[str]]: ignored.append(f"'{test_fn.relative_to(self.wf_path)}' checking ignored") continue - checks_passed = {check: False for check in test_checks} + checks_passed = dict.fromkeys(test_checks, False) with open(test_fn) as fh: for line in fh: for check_name, check_info in test_checks.items(): @@ -145,7 +145,7 @@ def nf_test_content(self) -> dict[str, list[str]]: failed.append(f"'{conf_fn.relative_to(self.wf_path)}' does not exist") else: if nf_test_content_conf is None or str(conf_fn.relative_to(self.wf_path)) not in nf_test_content_conf: - checks_passed = {check: False for check in config_checks} + checks_passed = dict.fromkeys(config_checks, False) with open(conf_fn) as fh: for line in fh: line = line.strip() @@ -187,7 +187,7 @@ def nf_test_content(self) -> dict[str, list[str]]: failed.append(f"'{nf_test_conf_fn.relative_to(self.wf_path)}' does not exist") else: if nf_test_content_conf is None or str(nf_test_conf_fn.relative_to(self.wf_path)) not in nf_test_content_conf: - checks_passed = {check: False for check in nf_test_checks} + checks_passed = dict.fromkeys(nf_test_checks, False) with open(nf_test_conf_fn) as fh: for line in fh: line = line.strip() diff --git a/nf_core/pipelines/lint/plugin_includes.py b/nf_core/pipelines/lint/plugin_includes.py index a774192769..b787405169 100644 --- a/nf_core/pipelines/lint/plugin_includes.py +++ b/nf_core/pipelines/lint/plugin_includes.py @@ -1,7 +1,7 @@ import ast -import glob import logging import re +from pathlib import Path log = logging.getLogger(__name__) @@ -22,20 +22,19 @@ def plugin_includes(self) -> dict[str, list[str]]: plugin_include_pattern = re.compile(r"^include\s*{[^}]+}\s*from\s*[\"']plugin/([^\"']+)[\"']\s*$", re.MULTILINE) workflow_files = [ - file for file in glob.glob(f"{self.wf_path}/**/*.nf", recursive=True) if not file.startswith("./modules/") + file for file in Path(self.wf_path).rglob("*.nf") if file.relative_to(self.wf_path).parts[0] != "modules" ] test_passed = True for file in workflow_files: - with open(file) as of: - plugin_includes = re.findall(plugin_include_pattern, of.read()) - for include in plugin_includes: - if include not in ["nf-validation", "nf-schema"]: - continue - if include != validation_plugin: - test_passed = False - failed.append( - f"Found a `{include}` plugin import in `{file[2:]}`, but `{validation_plugin}` was used in `nextflow.config`" - ) + plugin_includes = re.findall(plugin_include_pattern, file.read_text()) + for include in plugin_includes: + if include not in ["nf-validation", "nf-schema"]: + continue + if include != validation_plugin: + test_passed = False + failed.append( + f"Found a `{include}` plugin import in `{file.relative_to(self.wf_path)}`, but `{validation_plugin}` was used in `nextflow.config`" + ) if test_passed: passed.append("No wrong validation plugin imports have been found") diff --git a/nf_core/pipelines/lint/readme.py b/nf_core/pipelines/lint/readme.py index 
d16125021c..ad7179ed22 100644 --- a/nf_core/pipelines/lint/readme.py +++ b/nf_core/pipelines/lint/readme.py @@ -61,7 +61,7 @@ def readme(self): nf_badge_version = match.group(1).strip("'\"") try: if nf_badge_version != self.minNextflowVersion: - raise AssertionError() + raise AssertionError except (AssertionError, KeyError): failed.append( f"README Nextflow minimum version badge does not match config. Badge: `{nf_badge_version}`, " diff --git a/nf_core/pipelines/lint/schema_description.py b/nf_core/pipelines/lint/schema_description.py index b586cc5242..8571f7832d 100644 --- a/nf_core/pipelines/lint/schema_description.py +++ b/nf_core/pipelines/lint/schema_description.py @@ -27,7 +27,7 @@ def schema_description(self): ignore_params = self.lint_config.get("schema_description", []) if self.lint_config is not None else [] # Get ungrouped params - if "properties" in self.schema_obj.schema.keys(): + if "properties" in self.schema_obj.schema: ungrouped_params = self.schema_obj.schema["properties"].keys() for up in ungrouped_params: if up in ignore_params: @@ -37,13 +37,13 @@ def schema_description(self): # Iterate over groups and add warning for parameters without a description defs_notation = self.schema_obj.defs_notation - for group_key in self.schema_obj.schema[defs_notation].keys(): + for group_key in self.schema_obj.schema[defs_notation]: group = self.schema_obj.schema[defs_notation][group_key] for param_key, param in group["properties"].items(): if param_key in ignore_params: ignored.append(f"Ignoring description check for param in schema: `{param_key}`") continue - if "description" not in param.keys(): + if "description" not in param: warned.append(f"No description provided in schema for parameter: `{param_key}`") for ip in ignore_params: diff --git a/nf_core/pipelines/lint/system_exit.py b/nf_core/pipelines/lint/system_exit.py index 435a2452d0..b891fea7a0 100644 --- a/nf_core/pipelines/lint/system_exit.py +++ b/nf_core/pipelines/lint/system_exit.py @@ -18,8 +18,8 @@ def system_exit(self): root_dir = Path(self.wf_path) # Get all groovy and nf files - groovy_files = [f for f in root_dir.rglob("*.groovy")] - nf_files = [f for f in root_dir.rglob("*.nf")] + groovy_files = list(root_dir.rglob("*.groovy")) + nf_files = list(root_dir.rglob("*.nf")) to_check = nf_files + groovy_files for file in to_check: diff --git a/nf_core/pipelines/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py index 0cb669e553..01f4ea5f2b 100644 --- a/nf_core/pipelines/lint/template_strings.py +++ b/nf_core/pipelines/lint/template_strings.py @@ -50,17 +50,16 @@ def template_strings(self): # Skip binary files binary_ftypes = ["image", "application/java-archive"] (ftype, encoding) = mimetypes.guess_type(fn) - if encoding is not None or (ftype is not None and any([ftype.startswith(ft) for ft in binary_ftypes])): + if encoding is not None or (ftype is not None and any(ftype.startswith(ft) for ft in binary_ftypes)): continue with open(fn, encoding="latin1") as fh: - lnum = 0 - for line in fh: - lnum += 1 + for i, line in enumerate(fh): + i += 1 cc_matches = re.findall(r"[^$]{{[^:}]*}}", line) if len(cc_matches) > 0: for cc_match in cc_matches: - failed.append(f"Found a Jinja template string in `{fn}` L{lnum}: {cc_match}") + failed.append(f"Found a Jinja template string in `{fn}` L{i}: {cc_match}") num_matches += 1 if num_matches == 0: passed.append(f"Did not find any Jinja template strings ({len(self.files)} files)") diff --git a/nf_core/pipelines/lint/version_consistency.py 
b/nf_core/pipelines/lint/version_consistency.py index e344d25e5d..c939938a46 100644 --- a/nf_core/pipelines/lint/version_consistency.py +++ b/nf_core/pipelines/lint/version_consistency.py @@ -47,7 +47,7 @@ def version_consistency(self): os.environ.get("GITHUB_REF", "").startswith("refs/tags/") and os.environ.get("GITHUB_REPOSITORY", "") != "nf-core/tools" ): - versions["GITHUB_REF"] = os.path.basename(os.environ["GITHUB_REF"].strip(" '\"")) + versions["GITHUB_REF"] = Path(os.environ["GITHUB_REF"].strip(" '\"")).name # Get version from the .nf-core.yml template yaml = YAML() diff --git a/nf_core/pipelines/lint_utils.py b/nf_core/pipelines/lint_utils.py index b9ff9533d3..24b3b4dfb9 100644 --- a/nf_core/pipelines/lint_utils.py +++ b/nf_core/pipelines/lint_utils.py @@ -165,7 +165,7 @@ def run_prettier_on_file(file: Path | str | list[str]) -> None: if ": SyntaxError: " in e.stdout.decode(): log.critical(f"Can't format {file} because it has a syntax error.\n{e.stdout.decode()}") elif "files were modified by this hook" in e.stdout.decode(): - all_lines = [line for line in e.stdout.decode().split("\n")] + all_lines = list(e.stdout.decode().split("\n")) files = "\n".join(all_lines[3:]) log.debug(f"The following files were modified by prettier:\n {files}") else: diff --git a/nf_core/pipelines/list.py b/nf_core/pipelines/list.py index 5822557d64..96ca9da59e 100644 --- a/nf_core/pipelines/list.py +++ b/nf_core/pipelines/list.py @@ -22,6 +22,26 @@ nf_core.utils.setup_requests_cachedir() +def get_nextflow_assets_dir(workflow_name: str | None = None) -> Path: + """ + Get the Nextflow assets directory path. + + Args: + workflow_name: Optional workflow name to append to the path (e.g., "nf-core/rnaseq") + + Returns: + Path to the Nextflow assets directory, optionally with workflow_name appended + """ + if nxf_assets := os.getenv("NXF_ASSETS"): + base_dir = Path(nxf_assets) + elif nxf_home := os.getenv("NXF_HOME"): + base_dir = Path(nxf_home) / "assets" + else: + base_dir = Path(os.getenv("HOME") or "", ".nextflow", "assets") + + return base_dir / workflow_name if workflow_name else base_dir + + def list_workflows(filter_by=None, sort_by="release", as_json=False, show_archived=False): """Prints out a list of all nf-core workflows. 
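# --- Illustrative sketch (not part of the patch above) -----------------------
# The new get_nextflow_assets_dir() helper centralises the lookup order that
# was previously duplicated in this module: $NXF_ASSETS, then $NXF_HOME/assets,
# then ~/.nextflow/assets. A minimal standalone equivalent is shown below, with
# throwaway environments to demonstrate the resolution order; the
# resolve_assets_dir name and the example paths are hypothetical.
import os
from pathlib import Path

def resolve_assets_dir(workflow_name: str | None = None, env=os.environ) -> Path:
    if env.get("NXF_ASSETS"):
        base_dir = Path(env["NXF_ASSETS"])
    elif env.get("NXF_HOME"):
        base_dir = Path(env["NXF_HOME"]) / "assets"
    else:
        base_dir = Path(env.get("HOME") or "", ".nextflow", "assets")
    return base_dir / workflow_name if workflow_name else base_dir

assert resolve_assets_dir("nf-core/rnaseq", {"NXF_ASSETS": "/opt/assets"}) == Path("/opt/assets/nf-core/rnaseq")
assert resolve_assets_dir(env={"NXF_HOME": "/opt/nextflow"}) == Path("/opt/nextflow/assets")
assert resolve_assets_dir(env={"HOME": "/home/user"}) == Path("/home/user/.nextflow/assets")
# -----------------------------------------------------------------------------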
@@ -53,12 +73,12 @@ def autocomplete_pipelines(ctx, param, incomplete: str): matches = [CompletionItem(wor) for wor in available_workflows if wor.startswith(incomplete)] return matches - except Exception as e: + except (OSError, AttributeError) as e: print(f"[ERROR] Autocomplete failed: {e}", file=sys.stderr) return [] -def get_local_wf(workflow: str | Path, revision=None) -> str | None: +def get_local_wf(workflow: str | Path, revision=None) -> Path | None: """ Check if this workflow has a local copy and use nextflow to pull it if not """ @@ -69,16 +89,17 @@ def get_local_wf(workflow: str | Path, revision=None) -> str | None: wfs = Workflows() wfs.get_local_nf_workflows() for wf in wfs.local_workflows: - if workflow == wf.full_name: - if revision is None or revision == wf.commit_sha or revision == wf.branch or revision == wf.active_tag: - if wf.active_tag: - print_revision = f"v{wf.active_tag}" - elif wf.branch: - print_revision = f"{wf.branch} - {wf.commit_sha[:7]}" - else: - print_revision = wf.commit_sha - log.info(f"Using local workflow: {workflow} ({print_revision})") - return wf.local_path + if workflow == wf.full_name and ( + revision is None or revision == wf.commit_sha or revision == wf.branch or revision == wf.active_tag + ): + if wf.active_tag: + print_revision = f"v{wf.active_tag}" + elif wf.branch: + print_revision = f"{wf.branch} - {wf.commit_sha[:7]}" + else: + print_revision = wf.commit_sha + log.info(f"Using local workflow: {workflow} ({print_revision})") + return wf.local_path # Wasn't local, fetch it log.info(f"Downloading workflow: {workflow} ({revision})") @@ -131,17 +152,16 @@ def get_local_nf_workflows(self): Local workflows are stored in :attr:`self.local_workflows` list. """ # Try to guess the local cache directory (much faster than calling nextflow) - if len(os.environ.get("NXF_ASSETS", "")) > 0: - nextflow_wfdir = os.environ.get("NXF_ASSETS") - elif len(os.environ.get("NXF_HOME", "")) > 0: - nextflow_wfdir = os.path.join(os.environ.get("NXF_HOME"), "assets") - else: - nextflow_wfdir = os.path.join(os.getenv("HOME"), ".nextflow", "assets") - if os.path.isdir(nextflow_wfdir): + nextflow_wfdir = get_nextflow_assets_dir() + if nextflow_wfdir.is_dir(): log.debug("Guessed nextflow assets directory - pulling pipeline dirnames") - for org_name in os.listdir(nextflow_wfdir): - for wf_name in os.listdir(os.path.join(nextflow_wfdir, org_name)): - self.local_workflows.append(LocalWorkflow(f"{org_name}/{wf_name}")) + self.local_workflows.extend( + LocalWorkflow(f"{org_dir.name}/{wf_dir.name}") + for org_dir in nextflow_wfdir.iterdir() + if org_dir.is_dir() + for wf_dir in org_dir.iterdir() + if wf_dir.is_dir() + ) # Fetch details about local cached pipelines with `nextflow list` else: @@ -223,7 +243,8 @@ def print_summary(self): def sort_pulled_date(wf): try: return wf.local_wf.last_pull * -1 - except Exception: + except AttributeError: + # local_wf is None or doesn't have last_pull attribute return 0 filtered_workflows.sort(key=sort_pulled_date) @@ -258,10 +279,7 @@ def sort_pulled_date(wf): revision = f"{wf.local_wf.branch} - {wf.local_wf.commit_sha[:7]}" else: revision = wf.local_wf.commit_sha - if wf.local_is_latest: - is_latest = f"[green]Yes ({revision})" - else: - is_latest = f"[red]No ({revision})" + is_latest = f"[green]Yes ({revision})" if wf.local_is_latest else f"[red]No ({revision})" else: is_latest = "[dim]-" @@ -350,13 +368,8 @@ def get_local_nf_workflow_details(self): if self.local_path is None: # Try to guess the local cache directory - if 
len(os.environ.get("NXF_ASSETS", "")) > 0: - nf_wfdir = os.path.join(os.environ.get("NXF_ASSETS"), self.full_name) - elif len(os.environ.get("NXF_HOME", "")) > 0: - nf_wfdir = os.path.join(os.environ.get("NXF_HOME"), "assets", self.full_name) - else: - nf_wfdir = os.path.join(os.getenv("HOME"), ".nextflow", "assets", self.full_name) - if os.path.isdir(nf_wfdir): + nf_wfdir = get_nextflow_assets_dir(self.full_name) + if nf_wfdir.is_dir(): log.debug(f"Guessed nextflow assets workflow directory: {nf_wfdir}") self.local_path = nf_wfdir @@ -378,7 +391,7 @@ def get_local_nf_workflow_details(self): repo = git.Repo(self.local_path) self.commit_sha = str(repo.head.commit.hexsha) self.remote_url = str(repo.remotes.origin.url) - self.last_pull = os.stat(os.path.join(self.local_path, ".git", "FETCH_HEAD")).st_mtime + self.last_pull = (Path(self.local_path) / ".git" / "FETCH_HEAD").stat().st_mtime self.last_pull_date = datetime.fromtimestamp(self.last_pull).strftime("%Y-%m-%d %H:%M:%S") self.last_pull_pretty = pretty_date(self.last_pull) @@ -415,10 +428,7 @@ def pretty_date(time): """ now = datetime.now() - if isinstance(time, datetime): - diff = now - time - else: - diff = now - datetime.fromtimestamp(time) + diff = now - time if isinstance(time, datetime) else now - datetime.fromtimestamp(time) second_diff = diff.seconds day_diff = diff.days diff --git a/nf_core/pipelines/params_file.py b/nf_core/pipelines/params_file.py index a4f7bff321..50b4cb8a32 100644 --- a/nf_core/pipelines/params_file.py +++ b/nf_core/pipelines/params_file.py @@ -171,7 +171,7 @@ def format_group(self, definition, show_hidden=False) -> str: return out def format_param( - self, name: str, properties: dict, required_properties: list[str] = [], show_hidden: bool = False + self, name: str, properties: dict, required_properties: list[str] | None = None, show_hidden: bool = False ) -> str | None: """ Format a single parameter of the schema as commented YAML @@ -186,6 +186,8 @@ def format_param( str: Section of a params-file.yml for given parameter None: If the parameter is skipped because it is hidden and show_hidden is not set """ + if required_properties is None: + required_properties = [] out = "" hidden = properties.get("hidden", False) diff --git a/nf_core/pipelines/refgenie.py b/nf_core/pipelines/refgenie.py index 46197e9cc8..882f432587 100644 --- a/nf_core/pipelines/refgenie.py +++ b/nf_core/pipelines/refgenie.py @@ -52,12 +52,12 @@ def _print_nf_config(rgc): for asset in asset_list: try: pth = rgc.seek(genome, asset) - # Catch general exception instead of refgencof exception --> no refgenconf import needed - except Exception: + # Catch refgenconf exceptions without importing refgenconf + except (OSError, RuntimeError): log.warning(f"{genome}/{asset} is incomplete, ignoring...") else: # Translate an alias name to the alias used in the pipeline - if asset in alias_translations.keys(): + if asset in alias_translations: log.info(f"Translating refgenie asset alias {asset} to {alias_translations[asset]}.") asset = alias_translations[asset] genomes_str += f' {asset.ljust(20, " ")} = "{pth}"\n' @@ -72,23 +72,24 @@ def _update_nextflow_home_config(refgenie_genomes_config_file, nxf_home): for the 'refgenie_genomes_config_file' if not already defined """ # Check if NXF_HOME/config exists and has a + refgenie_config_path = Path(refgenie_genomes_config_file).resolve() include_config_string = dedent( f""" ///// >>> nf-core + RefGenie >>> ///// // !! Contents within this block are managed by 'nf-core/tools' !! 
// Includes auto-generated config file with RefGenie genome assets - includeConfig '{os.path.abspath(refgenie_genomes_config_file)}' + includeConfig '{refgenie_config_path}' ///// <<< nf-core + RefGenie <<< ///// """ ) nxf_home_config = Path(nxf_home) / "config" - if os.path.exists(nxf_home_config): + if nxf_home_config.exists(): # look for include statement in config has_include_statement = False with open(nxf_home_config) as fh: lines = fh.readlines() for line in lines: - if re.match(rf"\s*includeConfig\s*'{os.path.abspath(refgenie_genomes_config_file)}'", line): + if re.match(rf"\s*includeConfig\s*'{re.escape(str(refgenie_config_path))}'", line): has_include_statement = True break @@ -164,9 +165,9 @@ def update_config(rgc): if not nxf_home: try: nxf_home = Path.home() / ".nextflow" - if not os.path.exists(nxf_home): + if not nxf_home.exists(): log.info(f"Creating NXF_HOME directory at {nxf_home}") - os.makedirs(nxf_home, exist_ok=True) + nxf_home.mkdir(parents=True, exist_ok=True) except RuntimeError: nxf_home = False diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index 199110e7a3..c7ca451ad9 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -94,23 +94,22 @@ def create_rocrate(self, json_path: None | Path = None, zip_path: None | Path = """ # Check that the checkout pipeline version is the same as the requested version - if self.version != "": - if self.version != self.pipeline_obj.nf_config.get("manifest.version"): - # using git checkout to get the requested version - log.info(f"Checking out pipeline version {self.version}") - if self.pipeline_obj.repo is None: - log.error(f"Pipeline repository not found in {self.pipeline_dir}") - sys.exit(1) - try: - self.pipeline_obj.repo.git.checkout(self.version) - self.pipeline_obj = Pipeline(self.pipeline_dir) - self.pipeline_obj._load() - except InvalidGitRepositoryError: - log.error(f"Could not find a git repository in {self.pipeline_dir}") - sys.exit(1) - except GitCommandError: - log.error(f"Could not checkout version {self.version}") - sys.exit(1) + if self.version != "" and self.version != self.pipeline_obj.nf_config.get("manifest.version"): + # using git checkout to get the requested version + log.info(f"Checking out pipeline version {self.version}") + if self.pipeline_obj.repo is None: + log.error(f"Pipeline repository not found in {self.pipeline_dir}") + sys.exit(1) + try: + self.pipeline_obj.repo.git.checkout(self.version) + self.pipeline_obj = Pipeline(self.pipeline_dir) + self.pipeline_obj._load() + except InvalidGitRepositoryError: + log.error(f"Could not find a git repository in {self.pipeline_dir}") + sys.exit(1) + except GitCommandError: + log.error(f"Could not checkout version {self.version}") + sys.exit(1) self.version = self.pipeline_obj.nf_config.get("manifest.version", "") self.make_workflow_rocrate() @@ -206,10 +205,7 @@ def set_main_entity(self, main_entity_filename: str): "dateModified", str(datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")), compact=True ) self.crate.mainEntity.append_to("sdPublisher", {"@id": "https://nf-co.re/"}, compact=True) - if self.version.endswith("dev"): - url = "dev" - else: - url = self.version + url = "dev" if self.version.endswith("dev") else self.version self.crate.mainEntity.append_to( "url", f"https://nf-co.re/{self.crate.name.replace('nf-core/', '')}/{url}/", compact=True ) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index e9c1689e73..9086a96ecc 100644 --- a/nf_core/pipelines/schema.py +++ 
b/nf_core/pipelines/schema.py @@ -169,11 +169,11 @@ def load_lint_schema(self): except json.decoder.JSONDecodeError as e: error_msg = f"[bold red]Could not parse schema JSON:[/] {e}" log.error(error_msg) - raise AssertionError(error_msg) + raise AssertionError(error_msg) from e except AssertionError as e: error_msg = f"[red][✗] Pipeline schema does not follow nf-core specs:\n {e}" log.error(error_msg) - raise AssertionError(error_msg) + raise AssertionError(error_msg) from e def load_schema(self): """Load a pipeline schema from a file""" @@ -279,10 +279,10 @@ def load_input_params(self, params_path): try: params = json.load(fh) except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{params_path}' due to error {e}") + raise UserWarning(f"Unable to load JSON file '{params_path}' due to error {e}") from e self.input_params.update(params) log.debug(f"Loaded JSON input params: {params_path}") - except Exception as json_e: + except (OSError, UserWarning) as json_e: log.debug(f"Could not load input params as JSON: {json_e}") # This failed, try to load as YAML try: @@ -293,7 +293,7 @@ def load_input_params(self, params_path): except Exception as yaml_e: error_msg = f"Could not load params file as either JSON or YAML:\n JSON: {json_e}\n YAML: {yaml_e}" log.error(error_msg) - raise AssertionError(error_msg) + raise AssertionError(error_msg) from yaml_e def validate_params(self): """Check given parameters against a schema and validate""" @@ -322,7 +322,7 @@ def validate_default_params(self): jsonschema.validate(self.schema_defaults, strip_required(self.schema)) except jsonschema.exceptions.ValidationError as e: log.debug(f"Complete error message:\n{e}") - raise AssertionError(f"Default parameters are invalid: {e.message}") + raise AssertionError(f"Default parameters are invalid: {e.message}") from e for param, default in self.schema_defaults.items(): if default in ("null", "", None, "None") or default is False: log.warning( @@ -335,7 +335,7 @@ def validate_default_params(self): self.get_wf_params() # Go over group keys - for group_key, group in self.schema.get(self.defs_notation, {}).items(): + for _group_key, group in self.schema.get(self.defs_notation, {}).items(): group_properties = group.get("properties") for param in group_properties: if param in self.ignored_params: @@ -369,7 +369,7 @@ def validate_config_default_parameter(self, param, schema_param, config_default) """ # If we have a default in the schema, check it matches the config - if "default" in schema_param and ( + if "default" in schema_param and ( # noqa SIM102 (schema_param["type"] == "boolean" and str(config_default).lower() != str(schema_param["default"]).lower()) and (str(schema_param["default"]) != str(config_default).strip("'\"")) ): @@ -385,16 +385,12 @@ def validate_config_default_parameter(self, param, schema_param, config_default) return # Check variable types in nextflow.config - if schema_param["type"] == "string": - if str(config_default) in ["false", "true", "''"]: - self.invalid_nextflow_config_default_parameters[param] = ( - f"String should not be set to `{config_default}`" - ) - if schema_param["type"] == "boolean": - if str(config_default) not in ["false", "true"]: - self.invalid_nextflow_config_default_parameters[param] = ( - f"Booleans should only be true or false, not `{config_default}`" - ) + if schema_param["type"] == "string" and str(config_default) in ["false", "true", "''"]: + self.invalid_nextflow_config_default_parameters[param] = f"String should not be set to `{config_default}`" + 
if schema_param["type"] == "boolean" and str(config_default) not in ["false", "true"]: + self.invalid_nextflow_config_default_parameters[param] = ( + f"Booleans should only be true or false, not `{config_default}`" + ) if schema_param["type"] == "integer": try: int(config_default) @@ -429,13 +425,13 @@ def validate_schema(self, schema=None): jsonschema.Draft7Validator.check_schema(schema) log.debug("JSON Schema Draft7 validated") except jsonschema.exceptions.SchemaError as e: - raise AssertionError(f"Schema does not validate as Draft 7 JSON Schema:\n {e}") + raise AssertionError(f"Schema does not validate as Draft 7 JSON Schema:\n {e}") from e elif self.schema_draft == "https://json-schema.org/draft/2020-12/schema": try: jsonschema.Draft202012Validator.check_schema(schema) log.debug("JSON Schema Draft2020-12 validated") except jsonschema.exceptions.SchemaError as e: - raise AssertionError(f"Schema does not validate as Draft 2020-12 JSON Schema:\n {e}") + raise AssertionError(f"Schema does not validate as Draft 2020-12 JSON Schema:\n {e}") from e else: raise AssertionError( f"Schema `$schema` should be `https://json-schema.org/draft/2020-12/schema` or `https://json-schema.org/draft-07/schema` \n Found `{schema_draft}`" @@ -727,24 +723,23 @@ def build_schema(self, pipeline_dir, no_prompts, web_only, url): self.save_schema() # If running interactively, send to the web for customisation - if not self.no_prompts: - if Confirm.ask(":rocket: Launch web builder for customisation and editing?"): - try: - self.launch_web_builder() - except AssertionError as e: - log.error(e.args[0]) - # Extra help for people running offline - if "Could not connect" in e.args[0]: - log.info( - f"If you're working offline, now copy your schema ({self.schema_filename}) and paste at https://nf-co.re/pipeline_schema_builder" - ) - log.info("When you're finished, you can paste the edited schema back into the same file") - if self.web_schema_build_web_url: - log.info( - "To save your work, open {}\n" - f"Click the blue 'Finished' button, copy the schema and paste into this file: {self.web_schema_build_web_url, self.schema_filename}" - ) - return False + if not self.no_prompts and Confirm.ask(":rocket: Launch web builder for customisation and editing?"): + try: + self.launch_web_builder() + except AssertionError as e: + log.error(e.args[0]) + # Extra help for people running offline + if "Could not connect" in e.args[0]: + log.info( + f"If you're working offline, now copy your schema ({self.schema_filename}) and paste at https://nf-co.re/pipeline_schema_builder" + ) + log.info("When you're finished, you can paste the edited schema back into the same file") + if self.web_schema_build_web_url: + log.info( + "To save your work, open {}\n" + f"Click the blue 'Finished' button, copy the schema and paste into this file: {self.web_schema_build_web_url, self.schema_filename}" + ) + return False def get_wf_params(self): """ @@ -833,7 +828,7 @@ def remove_schema_notfound_configs_single_schema(self, schema): schema = copy.deepcopy(schema) params_removed = [] # Use iterator so that we can delete the key whilst iterating - for p_key in [k for k in schema.get("properties", {}).keys()]: + for p_key in list(schema.get("properties", {})): if self.prompt_remove_schema_notfound_config(p_key): del schema["properties"][p_key] # Remove required flag if set @@ -908,14 +903,16 @@ def add_schema_found_configs(self): and (p_key not in self.schema_defaults) and (p_key not in self.ignored_params) and (p_def := 
self.build_schema_param(p_val).get("default")) - ): - if self.no_prompts or Confirm.ask( + ) and ( + self.no_prompts + or Confirm.ask( f":sparkles: Default for [bold]'params.{p_key}'[/] is not in schema (def='{p_def}'). " "[blue]Update pipeline schema?" - ): - s_key_def = s_key + ("default",) - nf_core.utils.nested_setitem(self.schema, s_key_def, p_def) - log.debug(f"Updating '{p_key}' default to '{p_def}' in pipeline schema") + ) + ): + s_key_def = s_key + ("default",) + nf_core.utils.nested_setitem(self.schema, s_key_def, p_def) + log.debug(f"Updating '{p_key}' default to '{p_def}' in pipeline schema") return params_added def build_schema_param(self, p_val): @@ -973,12 +970,12 @@ def launch_web_builder(self): raise AssertionError( f'web_response["status"] should be "recieved", but it is "{web_response["status"]}"' ) - except AssertionError: + except AssertionError as e: log.debug(f"Response content:\n{json.dumps(web_response, indent=4)}") raise AssertionError( f"Pipeline schema builder response not recognised: {self.web_schema_build_url}\n" " See verbose log for full response (nf-core -v schema)" - ) + ) from e else: self.web_schema_build_web_url = web_response["web_url"] self.web_schema_build_api_url = web_response["api_url"] @@ -1004,7 +1001,7 @@ def get_web_builder_response(self): self.remove_schema_empty_definitions() self.validate_schema() except AssertionError as e: - raise AssertionError(f"Response from schema builder did not pass validation:\n {e}") + raise AssertionError(f"Response from schema builder did not pass validation:\n {e}") from e else: self.save_schema() return True diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 7ce358cd07..248c01ddbb 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -166,7 +166,7 @@ def sync(self) -> None: self.make_pull_request() except PullRequestExceptionError as e: self.reset_target_dir() - raise PullRequestExceptionError(e) + raise PullRequestExceptionError(e) from e self.reset_target_dir() @@ -184,8 +184,8 @@ def inspect_sync_dir(self): # Check that the pipeline_dir is a git repo try: self.repo = git.Repo(self.pipeline_dir) - except InvalidGitRepositoryError: - raise SyncExceptionError(f"'{self.pipeline_dir}' does not appear to be a git repository") + except InvalidGitRepositoryError as e: + raise SyncExceptionError(f"'{self.pipeline_dir}' does not appear to be a git repository") from e # get current branch so we can switch back later self.original_branch = self.repo.active_branch.name @@ -211,8 +211,8 @@ def get_wf_config(self): if self.from_branch and self.repo.active_branch.name != self.from_branch: log.info(f"Checking out workflow branch '{self.from_branch}'") self.repo.git.checkout(self.from_branch) - except GitCommandError: - raise SyncExceptionError(f"Branch `{self.from_branch}` not found!") + except GitCommandError as e: + raise SyncExceptionError(f"Branch `{self.from_branch}` not found!") from e # If not specified, get the name of the active branch if not self.from_branch: @@ -242,8 +242,8 @@ def checkout_template_branch(self): # Try to check out an existing local branch called TEMPLATE try: self.repo.git.checkout("TEMPLATE") - except GitCommandError: - raise SyncExceptionError("Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'") + except GitCommandError as e: + raise SyncExceptionError("Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'") from e def delete_tracked_template_branch_files(self): """ @@ -264,7 +264,7 @@ def _delete_tracked_files(self): try: 
file_path.unlink() except Exception as e: - raise SyncExceptionError(e) + raise SyncExceptionError(e) from e def _clean_up_empty_dirs(self): """ @@ -280,14 +280,14 @@ def _clean_up_empty_dirs(self): if curr_dir == str(self.pipeline_dir): continue - subdir_set = set(Path(curr_dir) / d for d in sub_dirs) + subdir_set = {Path(curr_dir) / d for d in sub_dirs} currdir_is_empty = (len(subdir_set - deleted) == 0) and (len(files) == 0) if currdir_is_empty: log.debug(f"Deleting empty directory {curr_dir}") try: Path(curr_dir).rmdir() except Exception as e: - raise SyncExceptionError(e) + raise SyncExceptionError(e) from e deleted.add(Path(curr_dir)) def make_template_pipeline(self): @@ -332,7 +332,7 @@ def make_template_pipeline(self): except Exception as err: # Reset to where you were to prevent git getting messed up. self.repo.git.reset("--hard") - raise SyncExceptionError(f"Failed to rebuild pipeline from template with error:\n{err}") + raise SyncExceptionError(f"Failed to rebuild pipeline from template with error:\n{err}") from err def commit_template_changes(self): """If we have any changes with the new template files, make a git commit""" @@ -350,7 +350,7 @@ def commit_template_changes(self): self.made_changes = True log.info("Committed changes to 'TEMPLATE' branch") except Exception as e: - raise SyncExceptionError(f"Could not commit changes to TEMPLATE:\n{e}") + raise SyncExceptionError(f"Could not commit changes to TEMPLATE:\n{e}") from e return True def push_template_branch(self): @@ -358,11 +358,11 @@ def push_template_branch(self): and try to make a PR. If we don't have the auth token, try to figure out a URL for the PR and print this to the console. """ - log.info(f"Pushing TEMPLATE branch to remote: '{os.path.basename(self.pipeline_dir)}'") + log.info(f"Pushing TEMPLATE branch to remote: '{self.pipeline_dir.name}'") try: self.repo.git.push() except GitCommandError as e: - raise PullRequestExceptionError(f"Could not push TEMPLATE branch:\n {e}") + raise PullRequestExceptionError(f"Could not push TEMPLATE branch:\n {e}") from e def create_merge_base_branch(self): """Create a new branch from the updated TEMPLATE branch @@ -389,7 +389,7 @@ def create_merge_base_branch(self): try: self.repo.create_head(self.merge_branch) except GitCommandError as e: - raise SyncExceptionError(f"Could not create new branch '{self.merge_branch}'\n{e}") + raise SyncExceptionError(f"Could not create new branch '{self.merge_branch}'\n{e}") from e def push_merge_branch(self): """Push the newly created merge branch to the remote repository""" @@ -398,7 +398,7 @@ def push_merge_branch(self): origin = self.repo.remote() origin.push(self.merge_branch) except GitCommandError as e: - raise PullRequestExceptionError(f"Could not push branch '{self.merge_branch}':\n {e}") + raise PullRequestExceptionError(f"Could not push branch '{self.merge_branch}':\n {e}") from e def make_pull_request(self): """Create a pull request to a base branch (default: dev), @@ -443,7 +443,7 @@ def make_pull_request(self): ) except Exception as e: stderr.print_exception() - raise PullRequestExceptionError(f"Something went badly wrong - {e}") + raise PullRequestExceptionError(f"Something went badly wrong - {e}") from e else: self.gh_pr_returned_data = r.json() self.pr_url = self.gh_pr_returned_data["html_url"] @@ -463,7 +463,8 @@ def _parse_json_response(response) -> tuple[Any, str]: try: json_data = json.loads(response.content) return json_data, json.dumps(json_data, indent=4) - except Exception: + except json.JSONDecodeError: + # Response 
content is not valid JSON return response.content, str(response.content) def reset_target_dir(self): @@ -474,7 +475,7 @@ def reset_target_dir(self): try: self.repo.git.checkout(self.original_branch) except GitCommandError as e: - raise SyncExceptionError(f"Could not reset to original branch `{self.original_branch}`:\n{e}") + raise SyncExceptionError(f"Could not reset to original branch `{self.original_branch}`:\n{e}") from e def _get_ignored_files(self) -> list[str]: """ diff --git a/nf_core/subworkflows/lint/subworkflow_if_empty_null.py b/nf_core/subworkflows/lint/subworkflow_if_empty_null.py index 481e31e3ed..bce4c168e2 100644 --- a/nf_core/subworkflows/lint/subworkflow_if_empty_null.py +++ b/nf_core/subworkflows/lint/subworkflow_if_empty_null.py @@ -22,7 +22,7 @@ def subworkflow_if_empty_null(_, subworkflow): subworkflow.warned.append( ("subworkflow_if_empty_null", "subworkflow_if_empty_null", warning, swf_results["file_paths"][i]) ) - for i, passed in enumerate(swf_results["passed"]): + for _i, passed in enumerate(swf_results["passed"]): subworkflow.passed.append( ("subworkflow_if_empty_null", "subworkflow_if_empty_null", passed, subworkflow.component_dir) ) diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index 168fac9fc2..804903626d 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -139,7 +139,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent, allow_missing: bool = Fal with open(snap_file) as snap_fh: try: snap_content = json.load(snap_fh) - for test_name in snap_content.keys(): + for test_name in snap_content: if "d41d8cd98f00b204e9800998ecf8427e" in str(snap_content[test_name]): if "stub" not in test_name: subworkflow.failed.append( diff --git a/nf_core/subworkflows/lint/subworkflow_todos.py b/nf_core/subworkflows/lint/subworkflow_todos.py index 05286bf11c..cec6bb2c45 100644 --- a/nf_core/subworkflows/lint/subworkflow_todos.py +++ b/nf_core/subworkflows/lint/subworkflow_todos.py @@ -36,5 +36,5 @@ def subworkflow_todos(_, subworkflow): swf_results = pipeline_todos(None, root_dir=subworkflow.component_dir) for i, warning in enumerate(swf_results["warned"]): subworkflow.warned.append(("subworkflow_todos", "subworkflow_todo", warning, swf_results["file_paths"][i])) - for i, passed in enumerate(swf_results["passed"]): + for _i, passed in enumerate(swf_results["passed"]): subworkflow.passed.append(("subworkflow_todos", "subworkflow_todo", passed, subworkflow.component_dir)) diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index eb5e406bfa..18356cfdfb 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -92,8 +92,8 @@ def get_remote_branches(remote_url): """ try: unparsed_branches = git.Git().ls_remote(remote_url) - except git.GitCommandError: - raise LookupError(f"Was unable to fetch branches from '{remote_url}'") + except git.GitCommandError as e: + raise LookupError(f"Was unable to fetch branches from '{remote_url}'") from e else: branches = {} for branch_info in unparsed_branches.split("\n"): @@ -133,8 +133,8 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa if config_fn is not None and repo_config is not None: try: self.repo_path = repo_config.org_path - except KeyError: - raise UserWarning(f"'org_path' key not present in {config_fn.name}") + except KeyError as e: + raise UserWarning(f"'org_path' key not present in {config_fn.name}") from e # Verify that the repo seems to be correctly 
configured if self.repo_path != NF_CORE_MODULES_NAME or self.branch: @@ -165,10 +165,9 @@ def verify_sha(self, prompt, sha): log.error("Cannot use '--sha' and '--prompt' at the same time!") return False - if sha: - if not self.sha_exists_on_branch(sha): - log.error(f"Commit SHA '{sha}' doesn't exist in '{self.remote_url}'") - return False + if sha and not self.sha_exists_on_branch(sha): + log.error(f"Commit SHA '{sha}' doesn't exist in '{self.remote_url}'") + return False return True @@ -206,17 +205,19 @@ def branch_exists(self): """ try: self.checkout_branch() - except GitCommandError: - raise LookupError(f"Branch '{self.branch}' not found in '{self.remote_url}'") + except GitCommandError as e: + raise LookupError(f"Branch '{self.branch}' not found in '{self.remote_url}'") from e def verify_branch(self): """ Verifies the active branch conforms to the correct directory structure """ - dir_names = os.listdir(self.local_repo_dir) - if "modules" not in dir_names: + assert self.local_repo_dir is not None, "Repository must be initialized before verifying branch" + local_repo_path = Path(self.local_repo_dir) + + if not Path(local_repo_path, "modules").exists(): err_str = f"Repository '{self.remote_url}' ({self.branch}) does not contain the 'modules/' directory" - if "software" in dir_names: + if Path(local_repo_path, "software").exists(): err_str += ( ".\nAs of nf-core/tools version 2.0, the 'software/' directory should be renamed to 'modules/'" ) @@ -336,7 +337,7 @@ def component_files_identical(self, component_name, base_path, commit, component else: self.checkout(commit) component_files = ["main.nf", "meta.yml"] - files_identical = {file: True for file in component_files} + files_identical = dict.fromkeys(component_files, True) component_dir = self.get_component_dir(component_name, component_type) for file in component_files: try: @@ -399,7 +400,7 @@ def get_component_git_log( "To solve this, you can try to remove the cloned rempository and run the command again.\n" f"This repository is typically found at `{self.local_repo_dir}`" ) - raise UserWarning + raise UserWarning from None commits = iter(commits_new + commits_old) return commits @@ -413,7 +414,7 @@ def get_latest_component_version(self, component_name, component_type): if not git_logs: return None return git_logs[0]["git_sha"] - except Exception as e: + except (git.exc.GitError, KeyError) as e: log.debug(f"Could not get latest version of {component_name}: {e}") return None diff --git a/nf_core/test_datasets/list.py b/nf_core/test_datasets/list.py index 3531eef1ce..f773598431 100644 --- a/nf_core/test_datasets/list.py +++ b/nf_core/test_datasets/list.py @@ -56,7 +56,7 @@ def list_datasets( tree = list_files_by_branch(branch, all_branches, ignored_file_prefixes) out = [] - for b in tree.keys(): + for b in tree: files = sorted(tree[b]) for f in files: if generate_nf_path: diff --git a/nf_core/test_datasets/test_datasets_utils.py b/nf_core/test_datasets/test_datasets_utils.py index 14719c6d8c..86491bfce3 100644 --- a/nf_core/test_datasets/test_datasets_utils.py +++ b/nf_core/test_datasets/test_datasets_utils.py @@ -82,11 +82,15 @@ def get_remote_branch_names() -> list[str]: return branches -def get_remote_tree_for_branch(branch: str, only_files: bool = True, ignored_prefixes: list[str] = []) -> list[str]: +def get_remote_tree_for_branch( + branch: str, only_files: bool = True, ignored_prefixes: list[str] | None = None +) -> list[str]: """ For a given branch name, return the file tree by querying the github API at the endpoint at 
`/repos/nf-core/test-datasets/git/trees/` """ + if ignored_prefixes is None: + ignored_prefixes = [] gh_filetree_file_value = "blob" # value in nodes used to refer to "files" gh_response_filetree_key = "tree" # key in response to refer to the filetree gh_filetree_type_key = "type" # key in filetree nodes used to refer to their type @@ -127,20 +131,18 @@ def get_remote_tree_for_branch(branch: str, only_files: bool = True, ignored_pre def list_files_by_branch( branch: str = "", - branches: list[str] = [], - ignored_file_prefixes: list[str] = [ - ".", - "CITATION", - "LICENSE", - "README", - "docs", - ], + branches: list[str] | None = None, + ignored_file_prefixes: list[str] | None = None, ) -> dict[str, list[str]]: """ Lists files for all branches in the test-datasets github repo. Returns dictionary with branchnames as keys and file-lists as values """ + if ignored_file_prefixes is None: + ignored_file_prefixes = [".", "CITATION", "LICENSE", "README", "docs"] + if branches is None: + branches = [] if len(branches) == 0: log.debug("Fetching list of remote branch names") branches = get_remote_branch_names() @@ -151,7 +153,7 @@ def list_files_by_branch( log.error(f"No branches matching '{branch}'") log.debug("Fetching remote trees") - tree = dict() + tree = {} for b in branches: tree[b] = get_remote_tree_for_branch(b, only_files=True, ignored_prefixes=ignored_file_prefixes) @@ -235,7 +237,7 @@ def get_or_prompt_file_selection(files: list[str], query: str | None) -> str: "File:", choices=files, style=nfcore_question_style, default=query, qmark=AUTOCOMPLETION_HINT ).unsafe_ask() - file_selected = any([selection == file for file in files]) + file_selected = any(selection == file for file in files) if not file_selected: stdout.print("Please select a file.") diff --git a/nf_core/utils.py b/nf_core/utils.py index 86de5a5fd1..2e16d78255 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -20,7 +20,7 @@ import sys import time from collections.abc import Callable, Generator -from contextlib import contextmanager +from contextlib import contextmanager, suppress from pathlib import Path from typing import TYPE_CHECKING, Any, Literal @@ -91,7 +91,10 @@ os.environ.get("XDG_CACHE_HOME", Path(os.getenv("HOME") or "", ".cache")), "nfcore", ) -NFCORE_DIR = Path(os.environ.get("XDG_CONFIG_HOME", os.path.join(os.getenv("HOME") or "", ".config")), "nfcore") +NFCORE_DIR = Path( + os.environ.get("XDG_CONFIG_HOME") or Path(os.getenv("HOME") or "") / ".config", + "nfcore", +) def unquote(s: str) -> str: @@ -152,11 +155,10 @@ def check_if_outdated( with concurrent.futures.ThreadPoolExecutor() as executor: future = executor.submit(fetch_remote_version, source_url) remote_version = future.result() - except Exception as e: + except requests.exceptions.RequestException as e: log.debug(f"Could not check for nf-core updates: {e}") - if remote_version is not None: - if Version(remote_version) > Version(current_version): - is_outdated = True + if remote_version is not None and Version(remote_version) > Version(current_version): + is_outdated = True return (is_outdated, current_version, remote_version) @@ -204,7 +206,7 @@ def __init__(self, wf_path: Path) -> None: try: self.repo = git.Repo(self.wf_path) self.git_sha = self.repo.head.object.hexsha - except Exception as e: + except git.exc.GitError as e: log.debug(f"Could not find git hash for pipeline: {self.wf_path}. 
{e}") # Overwrite if we have the last commit from the PR - otherwise we get a merge commit hash @@ -285,8 +287,7 @@ def is_pipeline_directory(wf_path): UserWarning: If one of the files are missing """ for fn in ["main.nf", "nextflow.config"]: - path = os.path.join(wf_path, fn) - if not os.path.isfile(path): + if not Path(wf_path, fn).is_file(): if wf_path == ".": warning = f"Current directory is not a pipeline - '{fn}' is missing." else: @@ -332,7 +333,7 @@ def get_nf_version() -> tuple[int, int, int, bool] | None: is_edge, ) return parsed_version_tuple - except Exception as e: + except (subprocess.CalledProcessError, IndexError, ValueError) as e: log.warning(f"Error getting Nextflow version: {e}") return None @@ -417,10 +418,7 @@ def fetch_wf_config(wf_path: Path, cache_config: bool = True) -> dict: # Log warning but don't raise - just regenerate the cache log.warning(f"Unable to load cached JSON file '{cache_path}' due to error: {e}") log.debug("Removing corrupted cache file and regenerating...") - try: - cache_path.unlink() - except OSError: - pass # If we can't delete it, just continue + cache_path.unlink(missing_ok=True) log.debug("No config cache found") # Call `nextflow config` @@ -481,7 +479,7 @@ def run_cmd(executable: str, cmd: str) -> tuple[bytes, bytes] | None: if e.errno == errno.ENOENT: raise RuntimeError( f"It looks like {executable} is not installed. Please ensure it is available in your PATH." - ) + ) from e else: return None except subprocess.CalledProcessError as e: @@ -491,7 +489,7 @@ def run_cmd(executable: str, cmd: str) -> tuple[bytes, bytes] | None: else: raise RuntimeError( f"Command '{full_cmd}' returned non-zero error code '{e.returncode}':\n[red]> {e.stderr.decode()}{e.stdout.decode()}" - ) + ) from e def setup_nfcore_dir() -> bool: @@ -534,7 +532,7 @@ def setup_nfcore_cachedir(cache_fn: str | Path) -> Path: if not Path(cachedir).exists(): Path(cachedir).mkdir(parents=True) except PermissionError: - log.warn(f"Could not create cache directory: {cachedir}") + log.warning(f"Could not create cache directory: {cachedir}") return cachedir @@ -559,8 +557,8 @@ def wait_cli_function(poll_func: Callable[[], bool], refresh_per_second: int = 2 if poll_func(): break time.sleep(2) - except KeyboardInterrupt: - raise AssertionError("Cancelled!") + except KeyboardInterrupt as e: + raise AssertionError("Cancelled!") from e def poll_nfcore_web_api(api_url: str, post_data: dict | None = None) -> dict: @@ -579,10 +577,10 @@ def poll_nfcore_web_api(api_url: str, post_data: dict | None = None) -> dict: else: log.debug(f"requesting {api_url} with {post_data}") response = requests.post(url=api_url, data=post_data) - except requests.exceptions.Timeout: - raise AssertionError(f"URL timed out: {api_url}") - except requests.exceptions.ConnectionError: - raise AssertionError(f"Could not connect to URL: {api_url}") + except requests.exceptions.Timeout as e: + raise AssertionError(f"URL timed out: {api_url}") from e + except requests.exceptions.ConnectionError as e: + raise AssertionError(f"Could not connect to URL: {api_url}") from e else: if response.status_code != 200 and response.status_code != 301: response_content = response.content @@ -598,8 +596,8 @@ def poll_nfcore_web_api(api_url: str, post_data: dict | None = None) -> dict: try: web_response = json.loads(response.content) if "status" not in web_response: - raise AssertionError() - except (json.decoder.JSONDecodeError, AssertionError, TypeError): + raise AssertionError + except (json.decoder.JSONDecodeError, AssertionError, 
TypeError) as e: response_content = response.content if isinstance(response_content, bytes): response_content = response_content.decode() @@ -607,7 +605,7 @@ def poll_nfcore_web_api(api_url: str, post_data: dict | None = None) -> dict: raise AssertionError( f"nf-core website API results response not recognised: {api_url}\n " "See verbose log for full response" - ) + ) from e else: return web_response @@ -657,8 +655,8 @@ def __call__(self, r): return r # Default auth if we're running and the gh CLI tool is installed - gh_cli_config_fn = os.path.expanduser("~/.config/gh/hosts.yml") - if self.auth is None and os.path.exists(gh_cli_config_fn): + gh_cli_config_fn = Path.home() / ".config" / "gh" / "hosts.yml" + if self.auth is None and gh_cli_config_fn.exists(): try: with open(gh_cli_config_fn) as fh: gh_cli_config = yaml.safe_load(fh) @@ -667,7 +665,7 @@ def __call__(self, r): gh_cli_config["github.com"]["oauth_token"], ) self.auth_mode = f"gh CLI config: {gh_cli_config['github.com']['user']}" - except Exception: + except (OSError, KeyError, yaml.YAMLError): ex_type, ex_value, _ = sys.exc_info() if ex_type is not None: output = rich.markup.escape(f"{ex_type.__name__}: {ex_value}") @@ -696,7 +694,7 @@ def log_content_headers(self, request, post_data=None): log.debug(json.dumps(dict(request.headers), indent=4)) log.debug(json.dumps(request.json(), indent=4)) log.debug(json.dumps(post_data, indent=4)) - except Exception as e: + except (json.JSONDecodeError, TypeError) as e: log.debug(f"Could not parse JSON response from GitHub API! {e}") log.debug(request.headers) log.debug(request.content) @@ -812,10 +810,10 @@ def anaconda_package(dep, dep_channels=None): anaconda_api_url = f"https://api.anaconda.org/package/{ch}/{depname}" try: response = requests.get(anaconda_api_url, timeout=10) - except requests.exceptions.Timeout: - raise LookupError(f"Anaconda API timed out: {anaconda_api_url}") - except requests.exceptions.ConnectionError: - raise LookupError("Could not connect to Anaconda API") + except requests.exceptions.Timeout as e: + raise LookupError(f"Anaconda API timed out: {anaconda_api_url}") from e + except requests.exceptions.ConnectionError as e: + raise LookupError("Could not connect to Anaconda API") from e else: if response.status_code == 200: return response.json() @@ -840,10 +838,8 @@ def parse_anaconda_licence(anaconda_response, version=None): # Licence for each version for f in anaconda_response["files"]: if not version or version == f.get("version"): - try: + with suppress(KeyError): licences.add(f["attrs"]["license"]) - except KeyError: - pass # Main licence field if len(list(licences)) == 0 and isinstance(anaconda_response["license"], str): licences.add(anaconda_response["license"]) @@ -881,10 +877,10 @@ def pip_package(dep): pip_api_url = f"https://pypi.python.org/pypi/{pip_depname}/json" try: response = requests.get(pip_api_url, timeout=10) - except requests.exceptions.Timeout: - raise LookupError(f"PyPI API timed out: {pip_api_url}") - except requests.exceptions.ConnectionError: - raise LookupError(f"PyPI API Connection error: {pip_api_url}") + except requests.exceptions.Timeout as e: + raise LookupError(f"PyPI API timed out: {pip_api_url}") from e + except requests.exceptions.ConnectionError as e: + raise LookupError(f"PyPI API Connection error: {pip_api_url}") from e else: if response.status_code == 200: return response.json() @@ -916,8 +912,8 @@ def get_tag_date(tag_date): try: response = requests.get(biocontainers_api_url) - except requests.exceptions.ConnectionError: - 
raise LookupError("Could not connect to biocontainers.pro API") + except requests.exceptions.ConnectionError as e: + raise LookupError("Could not connect to biocontainers.pro API") from e else: if response.status_code == 200: try: @@ -959,8 +955,8 @@ def get_tag_date(tag_date): if singularity_image is None: raise LookupError(f"Could not find singularity container for {package}") return docker_image_name, singularity_image["image_name"] - except TypeError: - raise LookupError(f"Could not find docker or singularity container for {package}") + except TypeError as e: + raise LookupError(f"Could not find docker or singularity container for {package}") from e elif response.status_code != 404: raise LookupError(f"Unexpected response code `{response.status_code}` for {biocontainers_api_url}") elif response.status_code == 404: @@ -1005,7 +1001,7 @@ def is_file_binary(path): binary_extensions = [".jpeg", ".jpg", ".png", ".zip", ".gz", ".jar", ".tar"] # Check common file extensions - _, file_extension = os.path.splitext(path) + file_extension = Path(path).suffix if file_extension in binary_extensions: return True @@ -1043,7 +1039,7 @@ def prompt_remote_pipeline_name(wfs): if pipeline.count("/") == 1: try: gh_api.safe_get(f"https://api.github.com/repos/{pipeline}") - except Exception: + except requests.exceptions.RequestException: # No repo found - pass and raise error at the end pass else: @@ -1072,7 +1068,7 @@ def prompt_pipeline_release_branch( # Releases if len(wf_releases) > 0: - for tag in map(lambda release: release.get("tag_name"), wf_releases): + for tag in (release.get("tag_name") for release in wf_releases): tag_display = [ ("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]"), @@ -1081,7 +1077,7 @@ def prompt_pipeline_release_branch( tag_set.append(str(tag)) # Branches - for branch in wf_branches.keys(): + for branch in wf_branches: branch_display = [ ("fg:ansiyellow", f"{branch} "), ("class:choice-default", "[branch]"), @@ -1112,7 +1108,7 @@ class SingularityCacheFilePathValidator(questionary.Validator): def validate(self, value): if len(value.text): - if os.path.isfile(value.text): + if Path(value.text).is_file(): return True else: raise questionary.ValidationError( @@ -1147,12 +1143,10 @@ def get_repo_releases_branches(pipeline, wfs): pipeline = wf.full_name # Store releases and stop loop - wf_releases = list( - sorted( - wf.releases, - key=lambda k: k.get("published_at_timestamp", 0), - reverse=True, - ) + wf_releases = sorted( + wf.releases, + key=lambda k: k.get("published_at_timestamp", 0), + reverse=True, ) break @@ -1173,12 +1167,10 @@ def get_repo_releases_branches(pipeline, wfs): raise AssertionError(f"Not able to find pipeline '{pipeline}'") except AttributeError: # Success! 
We have a list, which doesn't work with .get() which is looking for a dict key - wf_releases = list( - sorted( - rel_r.json(), - key=lambda k: k.get("published_at_timestamp", 0), - reverse=True, - ) + wf_releases = sorted( + rel_r.json(), + key=lambda k: k.get("published_at_timestamp", 0), + reverse=True, ) # Get release tag commit hashes @@ -1465,7 +1457,7 @@ def load_tools_config(directory: str | Path = ".") -> tuple[Path | None, NFCoreY error_message = f"Config file '{config_fn}' is invalid" for error in e.errors(): error_message += f"\n{error['loc'][0]}: {error['msg']}\ninput: {error['input']}" - raise AssertionError(error_message) + raise AssertionError(error_message) from e wf_config = fetch_wf_config(Path(directory)) if nf_core_yaml_config["repository_type"] == "pipeline" and wf_config: diff --git a/pyproject.toml b/pyproject.toml index 125d35a6a6..d2c0483e4c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -124,7 +124,20 @@ target-version = "py310" cache-dir = "~/.cache/ruff" [tool.ruff.lint] -select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"] +select = [ + "I", # isort + "E1", "E4", "E7", "E9", # pycodestyle errors (partial) + "F", # pyflakes + "UP", # pyupgrade + "N", # pep8-naming + "B", # flake8-bugbear - catches common bugs and design problems + "RSE", # flake8-raise - unnecessary parentheses on raised exceptions + "PTH", # flake8-use-pathlib - prefer pathlib over os.path + "SIM", # flake8-simplify - simplifies code patterns + "C4", # flake8-comprehensions - better list/dict/set comprehensions + "BLE", # flake8-blind-except - catches blind except clauses +] +ignore = ["PTH123"] [tool.ruff.lint.isort] known-first-party = ["nf_core"] diff --git a/tests/components/test_components_utils.py b/tests/components/test_components_utils.py index 03f69a10c0..da677143f2 100644 --- a/tests/components/test_components_utils.py +++ b/tests/components/test_components_utils.py @@ -71,11 +71,11 @@ def test_environment_variables_override(self): try: with mock.patch.dict(os.environ, mock_env): importlib.reload(nf_core.components.constants) - assert nf_core.components.constants.NF_CORE_MODULES_NAME == mock_env["NF_CORE_MODULES_NAME"] - assert nf_core.components.constants.NF_CORE_MODULES_REMOTE == mock_env["NF_CORE_MODULES_REMOTE"] + assert mock_env["NF_CORE_MODULES_NAME"] == nf_core.components.constants.NF_CORE_MODULES_NAME + assert mock_env["NF_CORE_MODULES_REMOTE"] == nf_core.components.constants.NF_CORE_MODULES_REMOTE assert ( - nf_core.components.constants.NF_CORE_MODULES_DEFAULT_BRANCH - == mock_env["NF_CORE_MODULES_DEFAULT_BRANCH"] + mock_env["NF_CORE_MODULES_DEFAULT_BRANCH"] + == nf_core.components.constants.NF_CORE_MODULES_DEFAULT_BRANCH ) finally: importlib.reload(nf_core.components.constants) diff --git a/tests/conftest.py b/tests/conftest.py index fbaf10dfbd..126a2954e1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,6 @@ """Global pytest configuration for nf-core tests setting up worker-specific cache directories to avoid git lock issues.""" +import contextlib import os import shutil import tempfile @@ -42,12 +43,8 @@ def pytest_unconfigure(config): # Clean up temporary directories if hasattr(config, "_temp_cache_dir"): - try: + with contextlib.suppress(OSError, FileNotFoundError): shutil.rmtree(config._temp_cache_dir) - except (OSError, FileNotFoundError): - pass if hasattr(config, "_temp_config_dir"): - try: + with contextlib.suppress(OSError, FileNotFoundError): shutil.rmtree(config._temp_config_dir) - except (OSError, FileNotFoundError): - pass diff --git 
a/tests/modules/lint/test_environment_yml.py b/tests/modules/lint/test_environment_yml.py index f6886d2ea8..21a44226b3 100644 --- a/tests/modules/lint/test_environment_yml.py +++ b/tests/modules/lint/test_environment_yml.py @@ -179,7 +179,7 @@ def test_environment_yml_invalid_files(tmp_path, invalid_content, filename): """Test that invalid YAML files raise exceptions""" test_file, module, lint = setup_test_environment(tmp_path, invalid_content, filename) - with pytest.raises(Exception): + with pytest.raises(ruamel.yaml.YAMLError): environment_yml(lint, module) diff --git a/tests/modules/lint/test_main_nf.py b/tests/modules/lint/test_main_nf.py index df4da94ec7..b793602e19 100644 --- a/tests/modules/lint/test_main_nf.py +++ b/tests/modules/lint/test_main_nf.py @@ -419,13 +419,13 @@ def test_get_outputs_with_hidden_attribute(tmp_path): # Check that the path pattern doesn't include "hidden: true" path_key = list(prof_output[0][1].keys())[0] - assert '"*.{prof,pidx}*"' == path_key, f"Expected '\"*.{{prof,pidx}}*\"', got '{path_key}'" + assert path_key == '"*.{prof,pidx}*"', f"Expected '\"*.{{prof,pidx}}*\"', got '{path_key}'" assert "hidden" not in path_key, f"Pattern should not contain 'hidden': {path_key}" # Check the data output also doesn't include "hidden: true" data_output = component.outputs["data"] data_path_key = list(data_output[0].keys())[0] - assert '"data.csv"' == data_path_key, f"Expected '\"data.csv\"', got '{data_path_key}'" + assert data_path_key == '"data.csv"', f"Expected '\"data.csv\"', got '{data_path_key}'" assert "hidden" not in data_path_key, f"Pattern should not contain 'hidden': {data_path_key}" diff --git a/tests/modules/test_bump_versions.py b/tests/modules/test_bump_versions.py index f3b377be33..63e6eb9c2c 100644 --- a/tests/modules/test_bump_versions.py +++ b/tests/modules/test_bump_versions.py @@ -1,4 +1,3 @@ -import os import re import tempfile from pathlib import Path @@ -18,7 +17,7 @@ class TestModulesBumpVersions(TestModules): def test_modules_bump_versions_single_module(self): """Test updating a single module""" # Change the bpipe/test version to an older version - env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") + env_yml_path = Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") with open(env_yml_path) as fh: content = fh.read() new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content) @@ -98,7 +97,7 @@ def test_modules_bump_versions_fail(self): def test_modules_bump_versions_fail_unknown_version(self): """Fail because of an unknown version""" # Change the bpipe/test version to an older version - env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") + env_yml_path = Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") with open(env_yml_path) as fh: content = fh.read() new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content) diff --git a/tests/modules/test_create.py b/tests/modules/test_create.py index cf0efc6b04..6a5420db8e 100644 --- a/tests/modules/test_create.py +++ b/tests/modules/test_create.py @@ -1,4 +1,3 @@ -import os import shutil from pathlib import Path from unittest import mock @@ -32,7 +31,7 @@ def test_modules_create_succeed(self): ) with requests_cache.disabled(): module_create.create() - assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "trimgalore/main.nf")) + assert 
Path(self.pipeline_dir, "modules", "local", "trimgalore/main.nf").exists() def test_modules_create_fail_exists(self): """Fail at creating the same module twice""" @@ -44,9 +43,8 @@ def test_modules_create_fail_exists(self): ) with requests_cache.disabled(): module_create.create() - with pytest.raises(UserWarning) as excinfo: - with requests_cache.disabled(): - module_create.create() + with pytest.raises(UserWarning) as excinfo, requests_cache.disabled(): + module_create.create() assert "module directory exists:" in str(excinfo.value) def test_modules_create_nfcore_modules(self): @@ -59,10 +57,8 @@ def test_modules_create_nfcore_modules(self): ) with requests_cache.disabled(): module_create.create() - assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "main.nf")) - assert os.path.exists( - os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "tests", "main.nf.test") - ) + assert Path(self.nfcore_modules, "modules", "nf-core", "fastqc", "main.nf").exists() + assert Path(self.nfcore_modules, "modules", "nf-core", "fastqc", "tests", "main.nf.test").exists() def test_modules_create_nfcore_modules_subtool(self): """Create a tool/subtool module in a nf-core/modules clone""" @@ -74,10 +70,8 @@ def test_modules_create_nfcore_modules_subtool(self): ) with requests_cache.disabled(): module_create.create() - assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "main.nf")) - assert os.path.exists( - os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "tests", "main.nf.test") - ) + assert Path(self.nfcore_modules, "modules", "nf-core", "star", "index", "main.nf").exists() + assert Path(self.nfcore_modules, "modules", "nf-core", "star", "index", "tests", "main.nf.test").exists() @mock.patch("rich.prompt.Confirm.ask") def test_modules_migrate(self, mock_rich_ask): @@ -115,7 +109,7 @@ def test_modules_migrate(self, mock_rich_ask): # Check that pytest_modules.yml is updated with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: modules_yml = yaml.safe_load(fh) - assert "samtools/sort" not in modules_yml.keys() + assert "samtools/sort" not in modules_yml @mock.patch("rich.prompt.Confirm.ask") def test_modules_migrate_no_delete(self, mock_rich_ask): @@ -138,7 +132,7 @@ def test_modules_migrate_no_delete(self, mock_rich_ask): # Check that pytest_modules.yml is updated with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: modules_yml = yaml.safe_load(fh) - assert "samtools/sort" not in modules_yml.keys() + assert "samtools/sort" not in modules_yml @mock.patch("rich.prompt.Confirm.ask") def test_modules_migrate_symlink(self, mock_rich_ask): diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index 029eb32ccd..db0e522dba 100644 --- a/tests/modules/test_modules_json.py +++ b/tests/modules/test_modules_json.py @@ -23,7 +23,7 @@ def test_get_modules_json(self): try: mod_json_sb = json.load(fh) except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") + raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") from e mod_json_obj = ModulesJson(self.pipeline_dir) mod_json = mod_json_obj.get_modules_json() @@ -40,10 +40,10 @@ def test_mod_json_update(self): mod_json = mod_json_obj.get_modules_json() assert "MODULE_NAME" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] assert "git_sha" in 
mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"] - assert "GIT_SHA" == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["git_sha"] + assert mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["git_sha"] == "GIT_SHA" assert ( - NF_CORE_MODULES_DEFAULT_BRANCH - == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["branch"] + mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["branch"] + == NF_CORE_MODULES_DEFAULT_BRANCH ) assert ( "modules" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["installed_by"] @@ -214,7 +214,7 @@ def test_mod_json_dump(self): try: mod_json_new = json.load(f) except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") + raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") from e assert mod_json == mod_json_new def test_mod_json_with_empty_modules_value(self): diff --git a/tests/modules/test_modules_utils.py b/tests/modules/test_modules_utils.py index 9a19f06159..8ef8d9d404 100644 --- a/tests/modules/test_modules_utils.py +++ b/tests/modules/test_modules_utils.py @@ -45,7 +45,7 @@ def test_filter_modules_by_name_tool_family(self): # Test filtering by tool family (super-tool) filtered = nf_core.modules.modules_utils.filter_modules_by_name(nfcore_modules, "samtools") - assert set(m.component_name for m in filtered) == {"samtools/view", "samtools/sort", "samtools/index"} + assert {m.component_name for m in filtered} == {"samtools/view", "samtools/sort", "samtools/index"} def test_filter_modules_by_name_exact_match_preferred(self): """Test that exact matches are preferred over prefix matches""" diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index f608278618..ff881b41ad 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -1,4 +1,3 @@ -import os import tempfile from pathlib import Path from unittest import mock @@ -324,11 +323,13 @@ def test_create_patch_update_fail(self): ).install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, temp_dir) temp_module_dir = temp_dir / BISMARK_ALIGN - for file in os.listdir(temp_module_dir): - assert file in os.listdir(module_path) - with open(module_path / file) as fh: + temp_files = {f.name for f in temp_module_dir.iterdir()} + module_files = {f.name for f in module_path.iterdir()} + for file_name in temp_files: + assert file_name in module_files + with open(module_path / file_name) as fh: installed = fh.read() - with open(temp_module_dir / file) as fh: + with open(temp_module_dir / file_name) as fh: shouldbe = fh.read() assert installed == shouldbe diff --git a/tests/modules/test_remove.py b/tests/modules/test_remove.py index 2caece7ce5..c63b3d4377 100644 --- a/tests/modules/test_remove.py +++ b/tests/modules/test_remove.py @@ -1,4 +1,3 @@ -import os from pathlib import Path from ..test_modules import TestModules @@ -11,7 +10,7 @@ def test_modules_remove_trimgalore(self): assert self.mods_install.directory is not None module_path = Path(self.mods_install.directory, "modules", "nf-core", "modules", "trimgalore") assert self.mods_remove.remove("trimgalore") - assert os.path.exists(module_path) is False + assert not module_path.exists() def test_modules_remove_trimgalore_uninstalled(self): """Test removing TrimGalore! 
module without installing it""" @@ -23,4 +22,4 @@ def test_modules_remove_multiqc_from_gitlab(self): assert self.mods_install.directory is not None module_path = Path(self.mods_install_gitlab.directory, "modules", "nf-core-test", "multiqc") assert self.mods_remove_gitlab.remove("multiqc", force=True) - assert os.path.exists(module_path) is False + assert not module_path.exists() diff --git a/tests/modules/test_update.py b/tests/modules/test_update.py index 807f67cb81..be3df24256 100644 --- a/tests/modules/test_update.py +++ b/tests/modules/test_update.py @@ -167,7 +167,7 @@ def test_update_with_config_fixed_version(self): # Fix the trimgalore version in the .nf-core.yml to an old version update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": OLD_TRIMGALORE_SHA}}} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - setattr(tools_config, "update", update_config) + tools_config.update = update_config assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: yaml.dump(tools_config.model_dump(), f) @@ -192,7 +192,7 @@ def test_update_with_config_dont_update(self): # Set the trimgalore field to no update in the .nf-core.yml update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": False}}} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - setattr(tools_config, "update", update_config) + tools_config.update = update_config assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: yaml.dump(tools_config.model_dump(), f) @@ -221,7 +221,7 @@ def test_update_with_config_fix_all(self): # Fix the version of all nf-core modules in the .nf-core.yml to an old version update_config = {GITLAB_URL: OLD_TRIMGALORE_SHA} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - setattr(tools_config, "update", update_config) + tools_config.update = update_config assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: yaml.dump(tools_config.model_dump(), f) @@ -245,7 +245,7 @@ def test_update_with_config_no_updates(self): # Fix the version of all nf-core modules in the .nf-core.yml to an old version update_config = {GITLAB_URL: False} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - setattr(tools_config, "update", update_config) + tools_config.update = update_config assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: yaml.dump(tools_config.model_dump(), f) diff --git a/tests/pipelines/download/test_container.py b/tests/pipelines/download/test_container.py index 63fa7b9c07..a699eaa73f 100644 --- a/tests/pipelines/download/test_container.py +++ b/tests/pipelines/download/test_container.py @@ -60,13 +60,12 @@ def test_download_progress_sub_task(self): assert progress.tasks == [] # Add another sub-task, this time that raises an exception - with pytest.raises(ValueError): - with progress.sub_task("Sub-task", total=28) as sub_task_id: - assert sub_task_id == 1 - assert len(progress.tasks) == 1 - assert progress.task_ids[0] == sub_task_id - assert progress.tasks[0].total == 28 - raise ValueError("This is a test error") + with pytest.raises(ValueError), progress.sub_task("Sub-task", total=28) as sub_task_id: + assert sub_task_id == 1 + assert len(progress.tasks) == 1 + assert progress.task_ids[0] == sub_task_id + assert progress.tasks[0].total == 28 + 
raise ValueError("This is a test error") # The sub-task should also be gone now assert progress.tasks == [] diff --git a/tests/pipelines/download/test_singularity.py b/tests/pipelines/download/test_singularity.py index c7a05dbca3..eb6bfe1bbb 100644 --- a/tests/pipelines/download/test_singularity.py +++ b/tests/pipelines/download/test_singularity.py @@ -658,7 +658,7 @@ def test_file_download(self, outdir): output_path = outdir / Path(src_url).name downloader.download_file(src_url, output_path) assert (output_path).exists() - assert os.path.getsize(output_path) == 27 + assert output_path.stat().st_size == 27 assert ( "nf_core.pipelines.download.singularity", logging.DEBUG, @@ -704,7 +704,7 @@ def test_file_download(self, outdir): # Fire in the hole ! The download will be aborted and no output file will be created src_url = "https://github.com/nf-core/test-datasets/raw/refs/heads/modules/data/genomics/sarscov2/genome/genome.fasta.fai" output_path = outdir / Path(src_url).name - os.unlink(output_path) + output_path.unlink() downloader.kill_with_fire = True with pytest.raises(KeyboardInterrupt): downloader.download_file(src_url, output_path) diff --git a/tests/pipelines/download/test_utils.py b/tests/pipelines/download/test_utils.py index f3d35b333f..a04a65de15 100644 --- a/tests/pipelines/download/test_utils.py +++ b/tests/pipelines/download/test_utils.py @@ -1,4 +1,3 @@ -import os import subprocess import unittest from pathlib import Path @@ -33,7 +32,7 @@ def test_intermediate_file(self, outdir): tmp.write(b"Hello, World!") assert output_path.exists() - assert os.path.getsize(output_path) == 13 + assert output_path.stat().st_size == 13 assert not tmp_path.exists() # Run an external command as in pull_image @@ -43,40 +42,36 @@ def test_intermediate_file(self, outdir): subprocess.check_call([f"echo 'Hello, World!' > {tmp_path}"], shell=True) assert (output_path).exists() - assert os.path.getsize(output_path) == 14 # Extra \n ! + assert output_path.stat().st_size == 14 # Extra \n ! assert not (tmp_path).exists() # Code that fails. The file shall not exist # Directly write to the file and raise an exception output_path = outdir / "testfile3" - with pytest.raises(ValueError): - with intermediate_file(output_path) as tmp: - tmp_path = Path(tmp.name) - tmp.write(b"Hello, World!") - raise ValueError("This is a test error") + with pytest.raises(ValueError), intermediate_file(output_path) as tmp: + tmp_path = Path(tmp.name) + tmp.write(b"Hello, World!") + raise ValueError("This is a test error") assert not (output_path).exists() assert not (tmp_path).exists() # Run an external command and raise an exception output_path = outdir / "testfile4" - with pytest.raises(subprocess.CalledProcessError): - with intermediate_file(output_path) as tmp: - tmp_path = Path(tmp.name) - subprocess.check_call([f"echo 'Hello, World!' > {tmp_path}"], shell=True) - subprocess.check_call(["ls", "/dummy"]) + with pytest.raises(subprocess.CalledProcessError), intermediate_file(output_path) as tmp: + tmp_path = Path(tmp.name) + subprocess.check_call([f"echo 'Hello, World!' 
> {tmp_path}"], shell=True) + subprocess.check_call(["ls", "/dummy"]) assert not (output_path).exists() assert not (tmp_path).exists() # Test for invalid output paths - with pytest.raises(DownloadError): - with intermediate_file(outdir) as tmp: - pass + with pytest.raises(DownloadError), intermediate_file(outdir) as tmp: + pass output_path = outdir / "testfile5" - os.symlink("/dummy", output_path) - with pytest.raises(DownloadError): - with intermediate_file(output_path) as tmp: - pass + output_path.symlink_to("/dummy") + with pytest.raises(DownloadError), intermediate_file(output_path) as tmp: + pass diff --git a/tests/pipelines/lint/test_configs.py b/tests/pipelines/lint/test_configs.py index 7bb6329b5b..3b179c9f84 100644 --- a/tests/pipelines/lint/test_configs.py +++ b/tests/pipelines/lint/test_configs.py @@ -21,7 +21,7 @@ def test_withname_in_modules_config(self): result = lint_obj.modules_config() assert len(result["failed"]) == 0 assert any( - ["`FASTQC` found in `conf/modules.config` and Nextflow scripts." in passed for passed in result["passed"]] + "`FASTQC` found in `conf/modules.config` and Nextflow scripts." in passed for passed in result["passed"] ) def test_superfluous_withname_in_modules_config_fails(self): diff --git a/tests/pipelines/lint/test_merge_markers.py b/tests/pipelines/lint/test_merge_markers.py index 3094d8f8d1..d724eb4e43 100644 --- a/tests/pipelines/lint/test_merge_markers.py +++ b/tests/pipelines/lint/test_merge_markers.py @@ -1,4 +1,4 @@ -import os +from pathlib import Path import nf_core.pipelines.lint @@ -10,10 +10,10 @@ def test_merge_markers_found(self): """Missing 'jobs' field should result in failure""" new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, "main.nf")) as fh: + with open(Path(new_pipeline, "main.nf")) as fh: main_nf_content = fh.read() main_nf_content = ">>>>>>>\n" + main_nf_content - with open(os.path.join(new_pipeline, "main.nf"), "w") as fh: + with open(Path(new_pipeline, "main.nf"), "w") as fh: fh.write(main_nf_content) lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) diff --git a/tests/pipelines/lint/test_nextflow_config.py b/tests/pipelines/lint/test_nextflow_config.py index f8c3c1f31f..d71b25280b 100644 --- a/tests/pipelines/lint/test_nextflow_config.py +++ b/tests/pipelines/lint/test_nextflow_config.py @@ -1,4 +1,3 @@ -import os import re from pathlib import Path @@ -55,7 +54,7 @@ def test_nextflow_config_dev_in_release_mode_failed(self): def test_nextflow_config_missing_test_profile_failed(self): """Test failure if config file does not contain `test` profile.""" # Change the name of the test profile so there is no such profile - nf_conf_file = os.path.join(self.new_pipeline, "nextflow.config") + nf_conf_file = Path(self.new_pipeline, "nextflow.config") with open(nf_conf_file) as f: content = f.read() fail_content = re.sub(r"\btest\b", "testfail", content) diff --git a/tests/pipelines/lint/test_rocrate_readme_sync.py b/tests/pipelines/lint/test_rocrate_readme_sync.py index cd600481e2..6ac0068e9c 100644 --- a/tests/pipelines/lint/test_rocrate_readme_sync.py +++ b/tests/pipelines/lint/test_rocrate_readme_sync.py @@ -19,7 +19,7 @@ def test_rocrate_readme_sync_fixed(self): try: rocrate = json.load(f) except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{json_path}' due to error {e}") + raise UserWarning(f"Unable to load JSON file '{json_path}' due to error {e}") from e rocrate["@graph"][0]["description"] = "This is a test script" with open(json_path, "w") as f: 
             json.dump(rocrate, f, indent=4)
diff --git a/tests/pipelines/test_create.py b/tests/pipelines/test_create.py
index d2ee4dd246..ccd125d17e 100644
--- a/tests/pipelines/test_create.py
+++ b/tests/pipelines/test_create.py
@@ -72,13 +72,13 @@ def test_pipeline_creation_initiation_with_yml(self, tmp_path):
             default_branch=self.default_branch,
         )
         pipeline.init_pipeline()
-        assert os.path.isdir(os.path.join(pipeline.outdir, ".git"))
+        assert Path(pipeline.outdir, ".git").is_dir()
         assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch()
 
         # Check pipeline template yml has been dumped to `.nf-core.yml` and matches input
-        assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml"))
-        assert os.path.exists(os.path.join(pipeline.outdir, ".nf-core.yml"))
-        with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh:
+        assert not Path(pipeline.outdir, "pipeline_template.yml").exists()
+        assert Path(pipeline.outdir, ".nf-core.yml").exists()
+        with open(Path(pipeline.outdir, ".nf-core.yml")) as fh:
             nfcore_yml = yaml.safe_load(fh)
         assert "template" in nfcore_yml
         assert yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()).items() <= nfcore_yml["template"].items()
@@ -89,13 +89,13 @@ def test_pipeline_creation_initiation_customize_template(self, tmp_path):
             outdir=tmp_path, template_config=PIPELINE_TEMPLATE_YML, default_branch=self.default_branch
         )
         pipeline.init_pipeline()
-        assert os.path.isdir(os.path.join(pipeline.outdir, ".git"))
+        assert Path(pipeline.outdir, ".git").is_dir()
         assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch()
 
         # Check pipeline template yml has been dumped to `.nf-core.yml` and matches input
-        assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml"))
-        assert os.path.exists(os.path.join(pipeline.outdir, ".nf-core.yml"))
-        with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh:
+        assert not Path(pipeline.outdir, "pipeline_template.yml").exists()
+        assert Path(pipeline.outdir, ".nf-core.yml").exists()
+        with open(Path(pipeline.outdir, ".nf-core.yml")) as fh:
             nfcore_yml = yaml.safe_load(fh)
         assert "template" in nfcore_yml
         assert yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()).items() <= nfcore_yml["template"].items()
@@ -151,7 +151,7 @@ def test_template_customisation_all_files_grouping(self):
         ]
         all_skipped_files = []
         for section in template_features_yml.values():
-            for feature in section["features"].keys():
+            for feature in section["features"]:
                 if section["features"][feature]["skippable_paths"]:
                     all_skipped_files.extend(section["features"][feature]["skippable_paths"])
 
diff --git a/tests/pipelines/test_launch.py b/tests/pipelines/test_launch.py
index ed23872f66..e97f2f10b6 100644
--- a/tests/pipelines/test_launch.py
+++ b/tests/pipelines/test_launch.py
@@ -325,7 +325,7 @@ def test_build_command_params(self):
             try:
                 saved_json = json.load(fh)
             except json.JSONDecodeError as e:
-                raise UserWarning(f"Unable to load JSON file '{self.nf_params_fn}' due to error {e}")
+                raise UserWarning(f"Unable to load JSON file '{self.nf_params_fn}' due to error {e}") from e
 
         assert saved_json == {"input": "custom_input"}
 
diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py
index f8e029beeb..2d77d34d73 100644
--- a/tests/pipelines/test_lint.py
+++ b/tests/pipelines/test_lint.py
@@ -59,14 +59,14 @@ def test_load_lint_config_ignore_all_tests(self):
         lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
 
         # Make a config file listing all test names
-        config_dict = {"repository_type": "pipeline", "lint": {test_name: False for test_name in lint_obj.lint_tests}}
+        config_dict = {"repository_type": "pipeline", "lint": dict.fromkeys(lint_obj.lint_tests, False)}
         with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh:
             yaml.dump(config_dict, fh)
 
         # Load the new lint config file and check
         lint_obj._load_lint_config()
         assert lint_obj.lint_config is not None
-        assert sorted(list(lint_obj.lint_config.model_dump(exclude_none=True))) == sorted(lint_obj.lint_tests)
+        assert sorted(lint_obj.lint_config.model_dump(exclude_none=True)) == sorted(lint_obj.lint_tests)
 
         # Try running linting and make sure that all tests are ignored
         lint_obj._lint_pipeline()
@@ -113,7 +113,7 @@ def test_json_output(self, tmp_dir):
             try:
                 saved_json = json.load(fh)
             except json.JSONDecodeError as e:
-                raise UserWarning(f"Unable to load JSON file '{json_fn}' due to error {e}")
+                raise UserWarning(f"Unable to load JSON file '{json_fn}' due to error {e}") from e
         assert saved_json["num_tests_pass"] > 0
         assert saved_json["num_tests_warned"] > 0
         assert saved_json["num_tests_ignored"] == 0
diff --git a/tests/pipelines/test_list.py b/tests/pipelines/test_list.py
index aacc3805e8..d1227a4727 100644
--- a/tests/pipelines/test_list.py
+++ b/tests/pipelines/test_list.py
@@ -106,14 +106,14 @@ def test_local_workflows_compare_and_fail_silently(self):
         rwf_ex.releases = None
 
-    @mock.patch("nf_core.pipelines.list.LocalWorkflow")
-    def test_parse_local_workflow_and_succeed(self, mock_local_wf):
+    def test_parse_local_workflow_and_succeed(self):
         test_path = self.tmp_nxf / "nf-core"
-        if not os.path.isdir(test_path):
-            os.makedirs(test_path)
+        if not test_path.is_dir():
+            test_path.mkdir(parents=True)
+        # Create a dummy workflow directory (not just a file)
+        dummy_wf_path = self.tmp_nxf / "nf-core" / "dummy-wf"
+        dummy_wf_path.mkdir(parents=True, exist_ok=True)
         assert os.environ["NXF_ASSETS"] == self.tmp_nxf_str
-        with open(self.tmp_nxf / "nf-core/dummy-wf", "w") as f:
-            f.write("dummy")
         workflows_obj = nf_core.pipelines.list.Workflows()
         workflows_obj.get_local_nf_workflows()
         assert len(workflows_obj.local_workflows) == 1
 
@@ -122,21 +122,24 @@ def test_parse_local_workflow_and_succeed(self, mock_local_wf):
     @mock.patch("subprocess.check_output")
     def test_parse_local_workflow_home(self, mock_local_wf, mock_subprocess):
         test_path = self.tmp_nxf / "nf-core"
-        if not os.path.isdir(test_path):
-            os.makedirs(test_path)
+        if not test_path.is_dir():
+            test_path.mkdir(parents=True)
         assert os.environ["NXF_ASSETS"] == self.tmp_nxf_str
         with open(self.tmp_nxf / "nf-core/dummy-wf", "w") as f:
             f.write("dummy")
         workflows_obj = nf_core.pipelines.list.Workflows()
         workflows_obj.get_local_nf_workflows()
 
-    @mock.patch("os.stat")
     @mock.patch("git.Repo")
-    def test_local_workflow_investigation(self, mock_repo, mock_stat):
+    def test_local_workflow_investigation(self, mock_repo):
         local_wf = nf_core.pipelines.list.LocalWorkflow("dummy")
         local_wf.local_path = self.tmp_nxf.parent
-        mock_repo.head.commit.hexsha = "h00r4y"
-        mock_stat.st_mode = 1
+        # Create the .git/FETCH_HEAD file that the code expects
+        git_dir = self.tmp_nxf.parent / ".git"
+        git_dir.mkdir(parents=True, exist_ok=True)
+        (git_dir / "FETCH_HEAD").touch()
+        mock_repo.return_value.head.commit.hexsha = "h00r4y"
+        mock_repo.return_value.remotes.origin.url = "https://github.com/nf-core/dummy"
         local_wf.get_local_nf_workflow_details()
 
     def test_worflow_filter(self):
diff --git a/tests/pipelines/test_refgenie.py b/tests/pipelines/test_refgenie.py
index 734a2368bd..2b824f599e 100644
--- a/tests/pipelines/test_refgenie.py
+++ b/tests/pipelines/test_refgenie.py
@@ -24,7 +24,7 @@ def setUp(self):
         # avoids adding includeConfig statement to config file outside the current tmpdir
         try:
             self.NXF_HOME_ORIGINAL = os.environ["NXF_HOME"]
-        except Exception:
+        except KeyError:
             self.NXF_HOME_ORIGINAL = None
         os.environ["NXF_HOME"] = str(self.NXF_HOME)
 
diff --git a/tests/pipelines/test_rocrate.py b/tests/pipelines/test_rocrate.py
index a073beae31..47b914593a 100644
--- a/tests/pipelines/test_rocrate.py
+++ b/tests/pipelines/test_rocrate.py
@@ -41,7 +41,6 @@ def test_rocrate_creation(self):
         """Run the nf-core rocrate command"""
 
         # Run the command
-        self.rocrate_obj
         assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir)
 
         # Check that the crate was created
diff --git a/tests/pipelines/test_schema.py b/tests/pipelines/test_schema.py
index efc2798969..364fd994e7 100644
--- a/tests/pipelines/test_schema.py
+++ b/tests/pipelines/test_schema.py
@@ -1,7 +1,6 @@
 """Tests covering the pipeline schema code."""
 
 import json
-import os
 import shutil
 import tempfile
 import unittest
@@ -27,20 +26,20 @@ def setUp(self):
         self.schema_obj.schema_draft = "https://json-schema.org/draft/2020-12/schema"
         self.schema_obj.defs_notation = "$defs"
         self.schema_obj.validation_plugin = "nf-schema"
-        self.root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+        self.root_repo_dir = Path(__file__).resolve().parent.parent
 
         # Create a test pipeline in temp directory
         self.tmp_dir = tempfile.mkdtemp()
-        self.template_dir = os.path.join(self.tmp_dir, "wf")
+        self.template_dir = Path(self.tmp_dir, "wf")
         create_obj = nf_core.pipelines.create.create.PipelineCreate(
             "testpipeline", "a description", "Me", outdir=self.template_dir, no_git=True
         )
         create_obj.init_pipeline()
 
-        self.template_schema = os.path.join(self.template_dir, "nextflow_schema.json")
+        self.template_schema = self.template_dir / "nextflow_schema.json"
 
     def tearDown(self):
-        if os.path.exists(self.tmp_dir):
+        if Path(self.tmp_dir).exists():
             shutil.rmtree(self.tmp_dir)
 
     def test_load_lint_schema(self):
@@ -55,7 +54,7 @@ def test_load_lint_schema_nofile(self):
 
     def test_load_lint_schema_notjson(self):
         """Check that linting raises properly if a non-JSON file is given"""
-        self.schema_obj.get_schema_path(os.path.join(self.template_dir, "nextflow.config"))
+        self.schema_obj.get_schema_path(self.template_dir / "nextflow.config")
         with pytest.raises(AssertionError):
             self.schema_obj.load_lint_schema()
diff --git a/tests/pipelines/test_sync.py b/tests/pipelines/test_sync.py
index 746b2db76a..4e6f6f80db 100644
--- a/tests/pipelines/test_sync.py
+++ b/tests/pipelines/test_sync.py
@@ -118,7 +118,7 @@ def test_inspect_sync_dir_dirty(self):
             psync.inspect_sync_dir()
             assert exc_info.value.args[0].startswith("Uncommitted changes found in pipeline directory!")
         finally:
-            os.remove(test_fn)
+            test_fn.unlink()
 
     def test_inspect_sync_ignored_files(self):
         """
@@ -180,7 +180,8 @@ def test_delete_tracked_template_branch_files(self):
         psync.checkout_template_branch()
         psync.delete_tracked_template_branch_files()
         top_level_ignored = self._get_top_level_ignored(psync)
-        assert set(os.listdir(self.pipeline_dir)) == set([".git"]).union(top_level_ignored)
+        pipeline_contents = {f.name for f in Path(self.pipeline_dir).iterdir()}
+        assert pipeline_contents == {".git"}.union(top_level_ignored)
 
     def test_delete_tracked_template_branch_files_unlink_throws_error(self):
         """Test that SyncExceptionError is raised when Path.unlink throws an exception"""
@@ -299,11 +300,13 @@ def test_create_template_pipeline(self):
         psync.checkout_template_branch()
         psync.delete_tracked_template_branch_files()
         top_level_ignored = self._get_top_level_ignored(psync)
-        assert set(os.listdir(self.pipeline_dir)) == set([".git"]).union(top_level_ignored)
+        pipeline_contents = {f.name for f in Path(self.pipeline_dir).iterdir()}
+        assert pipeline_contents == {".git"}.union(top_level_ignored)
 
         # Now create the new template
         psync.make_template_pipeline()
-        assert "main.nf" in os.listdir(self.pipeline_dir)
-        assert "nextflow.config" in os.listdir(self.pipeline_dir)
+        pipeline_path = Path(self.pipeline_dir)
+        assert (pipeline_path / "main.nf").exists()
+        assert (pipeline_path / "nextflow.config").exists()
 
     def test_commit_template_changes_nochanges(self):
         """Try to commit the TEMPLATE branch, but no changes were made"""
diff --git a/tests/subworkflows/test_create.py b/tests/subworkflows/test_create.py
index 704a23772e..ec93d8311d 100644
--- a/tests/subworkflows/test_create.py
+++ b/tests/subworkflows/test_create.py
@@ -81,7 +81,7 @@ def test_subworkflows_migrate(self, mock_rich_ask):
         # Check that pytest_modules.yml is updated
         with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh:
             modules_yml = yaml.safe_load(fh)
-        assert "subworkflows/bam_stats_samtools" not in modules_yml.keys()
+        assert "subworkflows/bam_stats_samtools" not in modules_yml
 
     @mock.patch("rich.prompt.Confirm.ask")
     def test_subworkflows_migrate_no_delete(self, mock_rich_ask):
@@ -106,4 +106,4 @@ def test_subworkflows_migrate_no_delete(self, mock_rich_ask):
         # Check that pytest_modules.yml is updated
         with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh:
             modules_yml = yaml.safe_load(fh)
-        assert "subworkflows/bam_stats_samtools" not in modules_yml.keys()
+        assert "subworkflows/bam_stats_samtools" not in modules_yml
diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py
index 80aeb1d113..f64a616bbf 100644
--- a/tests/subworkflows/test_lint.py
+++ b/tests/subworkflows/test_lint.py
@@ -230,16 +230,13 @@ def test_subworkflows_lint_include_multiple_alias(self):
         assert len(subworkflow_lint.passed) > 0
         assert len(subworkflow_lint.warned) == 6
         assert any(
-            [x.message == "Included component 'SAMTOOLS_STATS_1' used in main.nf" for x in subworkflow_lint.passed]
+            x.message == "Included component 'SAMTOOLS_STATS_1' used in main.nf" for x in subworkflow_lint.passed
         )
         assert any(
-            [x.message == "Included component 'SAMTOOLS_STATS_2' not used in main.nf" for x in subworkflow_lint.warned]
+            x.message == "Included component 'SAMTOOLS_STATS_2' not used in main.nf" for x in subworkflow_lint.warned
        )
         assert any(
-            [
-                x.message.endswith("Can be ignored if the module is using topic channels")
-                for x in subworkflow_lint.warned
-            ]
+            x.message.endswith("Can be ignored if the module is using topic channels") for x in subworkflow_lint.warned
         )
 
         # cleanup
@@ -276,7 +273,7 @@ def test_subworkflows_lint_capitalization_fail(self):
         assert len(subworkflow_lint.failed) >= 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}"
         assert len(subworkflow_lint.passed) > 0
         assert len(subworkflow_lint.warned) >= 0
-        assert any([x.lint_test == "workflow_capitals" for x in subworkflow_lint.failed])
+        assert any(x.lint_test == "workflow_capitals" for x in subworkflow_lint.failed)
 
         # cleanup
         self.subworkflow_remove.remove("bam_stats_samtools", force=True)
@@ -302,7 +299,7 @@ def test_subworkflows_absent_version(self):
         assert len(subworkflow_lint.failed) == 0
         assert len(subworkflow_lint.passed) > 0
         assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}"
-        assert any([x.lint_test == "test_snap_versions" for x in subworkflow_lint.warned])
+        assert any(x.lint_test == "test_snap_versions" for x in subworkflow_lint.warned)
 
         # cleanup
         with open(snap_file, "w") as fh:
@@ -319,7 +316,7 @@ def test_subworkflows_missing_test_dir(self):
         assert len(subworkflow_lint.failed) == 1
         assert len(subworkflow_lint.passed) > 0
         assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}"
-        assert any([x.lint_test == "test_dir_exists" for x in subworkflow_lint.failed])
+        assert any(x.lint_test == "test_dir_exists" for x in subworkflow_lint.failed)
 
         # cleanup
         shutil.copytree(test_dir_copy, test_dir)
@@ -507,10 +504,7 @@ def test_lint_clean_patch(self):
         # TODO: Add count test back, once we have finished topic conversion
         # assert len(subworkflow_lint.warned) == 1, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}"
         assert any(
-            [
-                x.message.endswith("Can be ignored if the module is using topic channels")
-                for x in subworkflow_lint.warned
-            ]
+            x.message.endswith("Can be ignored if the module is using topic channels") for x in subworkflow_lint.warned
         )
 
     def test_lint_broken_patch(self):
@@ -532,8 +526,5 @@ def test_lint_broken_patch(self):
         # TODO: Add count test back, once we have finished topic conversion
         # assert len(subworkflow_lint.warned) == 1, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}"
         assert any(
-            [
-                x.message.endswith("Can be ignored if the module is using topic channels")
-                for x in subworkflow_lint.warned
-            ]
+            x.message.endswith("Can be ignored if the module is using topic channels") for x in subworkflow_lint.warned
         )
diff --git a/tests/subworkflows/test_patch.py b/tests/subworkflows/test_patch.py
index 659ae2b1c4..b186bc1807 100644
--- a/tests/subworkflows/test_patch.py
+++ b/tests/subworkflows/test_patch.py
@@ -1,4 +1,3 @@
-import os
 import tempfile
 from pathlib import Path
 from unittest import mock
@@ -274,11 +273,13 @@ def test_create_patch_update_fail(self):
             ).install_component_files("bam_sort_stats_samtools", FAIL_SHA, update_obj.modules_repo, temp_dir)
 
         temp_module_dir = temp_dir / "bam_sort_stats_samtools"
-        for file in os.listdir(temp_module_dir):
-            assert file in os.listdir(swf_path)
-            with open(swf_path / file) as fh:
+        temp_files = {f.name for f in temp_module_dir.iterdir()}
+        swf_files = {f.name for f in swf_path.iterdir()}
+        for file_name in temp_files:
+            assert file_name in swf_files
+            with open(swf_path / file_name) as fh:
                 installed = fh.read()
-            with open(temp_module_dir / file) as fh:
+            with open(temp_module_dir / file_name) as fh:
                 shouldbe = fh.read()
             assert installed == shouldbe
 
diff --git a/tests/subworkflows/test_remove.py b/tests/subworkflows/test_remove.py
index 94cd64d0c1..88ea58acf8 100644
--- a/tests/subworkflows/test_remove.py
+++ b/tests/subworkflows/test_remove.py
@@ -30,11 +30,9 @@ def test_subworkflows_remove_subworkflow(self):
         # assert subworkflows key is removed from modules.json
         assert (
             "bam_sort_stats_samtools"
-            not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys()
-        )
-        assert (
-            "samtools/index" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"].keys()
+            not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]
         )
+        assert "samtools/index" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]
 
     def test_subworkflows_remove_subworkflow_keep_installed_module(self):
         """Test removing subworkflow and all it's dependencies after installing it, except for a separately installed module"""
@@ -57,11 +55,10 @@ def test_subworkflows_remove_subworkflow_keep_installed_module(self):
         # assert subworkflows key is removed from modules.json
         assert (
             "bam_sort_stats_samtools"
-            not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys()
+            not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]
         )
         assert (
-            "samtools/index"
-            in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"].keys()
+            "samtools/index" in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]
         )
 
     def test_subworkflows_remove_one_of_two_subworkflow(self):
@@ -120,11 +117,6 @@ def test_subworkflows_remove_subworkflow_keep_installed_cross_org_module(self):
         assert Path.exists(nfcore_fastqc_path) is True
         assert mod_json_before != mod_json_after
         # assert subworkflows key is removed from modules.json
-        assert CROSS_ORGANIZATION_URL not in mod_json_after["repos"].keys()
-        assert (
-            "fastqc" in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"].keys()
-        )
-        assert (
-            "fastp"
-            not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"].keys()
-        )
+        assert CROSS_ORGANIZATION_URL not in mod_json_after["repos"]
+        assert "fastqc" in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]
+        assert "fastp" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]
diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py
index 7d2f78f041..936b61c782 100644
--- a/tests/subworkflows/test_update.py
+++ b/tests/subworkflows/test_update.py
@@ -157,7 +157,7 @@ def test_update_with_config_fixed_version(self):
         # Fix the subworkflow version in the .nf-core.yml to an old version
         update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": OLD_SUBWORKFLOWS_SHA}}}
         config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir)
-        setattr(tools_config, "update", update_config)
+        tools_config.update = update_config
         assert config_fn is not None and tools_config is not None  # mypy
         with open(Path(self.pipeline_dir, config_fn), "w") as f:
             yaml.dump(tools_config.model_dump(), f)
@@ -188,7 +188,7 @@ def test_update_with_config_dont_update(self):
         # Set the fastq_align_bowtie2 field to no update in the .nf-core.yml
         update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": False}}}
         config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir)
-        setattr(tools_config, "update", update_config)
+        tools_config.update = update_config
         assert config_fn is not None and tools_config is not None  # mypy
         with open(Path(self.pipeline_dir, config_fn), "w") as f:
             yaml.dump(tools_config.model_dump(), f)
@@ -219,7 +219,7 @@ def test_update_with_config_fix_all(self):
         # Fix the version of all nf-core subworkflows in the .nf-core.yml to an old version
         update_config = {NF_CORE_MODULES_REMOTE: OLD_SUBWORKFLOWS_SHA}
         config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir)
-        setattr(tools_config, "update", update_config)
+        tools_config.update = update_config
         assert config_fn is not None and tools_config is not None  # mypy
         with open(Path(self.pipeline_dir, config_fn), "w") as f:
             yaml.dump(tools_config.model_dump(), f)
@@ -250,7 +250,7 @@ def test_update_with_config_no_updates(self):
         # Set all repository updates to False
         update_config = {NF_CORE_MODULES_REMOTE: False}
         config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir)
-        setattr(tools_config, "update", update_config)
+        tools_config.update = update_config
         assert config_fn is not None and tools_config is not None  # mypy
         with open(Path(self.pipeline_dir, config_fn), "w") as f:
             yaml.dump(tools_config.model_dump(), f)
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 10727c3828..31f6c691aa 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -101,35 +101,35 @@ def test_cli_launch(self, mock_launcher):
         """Test nf-core pipeline is launched and cli parameters are passed on."""
         mock_launcher.return_value.launch_pipeline.return_value = True
 
-        temp_params_in = tempfile.NamedTemporaryFile()
-        params = {
-            "revision": "abcdef",
-            "id": "idgui",
-            "command-only": None,
-            "params-out": "/path/params/out",
-            "params-in": temp_params_in.name,
-            "save-all": None,
-            "show-hidden": None,
-            "url": "builder_url",
-        }
-        cmd = ["pipelines", "launch"] + self.assemble_params(params) + ["pipeline_name"]
-        result = self.invoke_cli(cmd)
+        with tempfile.NamedTemporaryFile() as temp_params_in:
+            params = {
+                "revision": "abcdef",
+                "id": "idgui",
+                "command-only": None,
+                "params-out": "/path/params/out",
+                "params-in": temp_params_in.name,
+                "save-all": None,
+                "show-hidden": None,
+                "url": "builder_url",
+            }
+            cmd = ["pipelines", "launch"] + self.assemble_params(params) + ["pipeline_name"]
+            result = self.invoke_cli(cmd)
 
-        assert result.exit_code == 0
+            assert result.exit_code == 0
 
-        mock_launcher.assert_called_once_with(
-            cmd[-1],
-            params["revision"],
-            "command-only" in params,
-            params["params-in"],
-            params["params-out"],
-            "save-all" in params,
-            "show-hidden" in params,
-            params["url"],
-            params["id"],
-        )
+            mock_launcher.assert_called_once_with(
+                cmd[-1],
+                params["revision"],
+                "command-only" in params,
+                params["params-in"],
+                params["params-out"],
+                "save-all" in params,
+                "show-hidden" in params,
+                params["url"],
+                params["id"],
+            )
 
-        mock_launcher.return_value.launch_pipeline.assert_called_once()
+            mock_launcher.return_value.launch_pipeline.assert_called_once()
 
     @mock.patch("nf_core.pipelines.launch.Launch")
     def test_cli_launch_no_params_in(self, mock_launcher):
@@ -267,37 +267,37 @@ def test_lint(self, mock_lint, mock_is_pipeline):
         mock_lint_results[2].failed = []
         mock_lint.return_value = mock_lint_results
 
-        temp_pipeline_dir = tempfile.NamedTemporaryFile()
-        params = {
-            "dir": temp_pipeline_dir.name,
-            "release": None,
-            "fix": "fix test",
-            "key": "key test",
-            "show-passed": None,
-            "fail-ignored": None,
-            "fail-warned": None,
-            "markdown": "output_file.md",
-            "json": "output_file.json",
-        }
-
-        cmd = ["pipelines", "lint"] + self.assemble_params(params)
-        result = self.invoke_cli(cmd)
+        with tempfile.NamedTemporaryFile() as temp_pipeline_dir:
+            params = {
+                "dir": temp_pipeline_dir.name,
+                "release": None,
+                "fix": "fix test",
+                "key": "key test",
+                "show-passed": None,
+                "fail-ignored": None,
+                "fail-warned": None,
+                "markdown": "output_file.md",
+                "json": "output_file.json",
+            }
+
+            cmd = ["pipelines", "lint"] + self.assemble_params(params)
+            result = self.invoke_cli(cmd)
 
-        assert result.exit_code == 0
-        mock_lint.assert_called_once_with(
-            params["dir"],
-            "release" in params,
-            (params["fix"],),
-            (params["key"],),
"show-passed" in params, - "fail-ignored" in params, - "fail-warned" in params, - "test", - params["markdown"], - params["json"], - "hide-progress" in params, - False, # plain_text - ) + assert result.exit_code == 0 + mock_lint.assert_called_once_with( + params["dir"], + "release" in params, + (params["fix"],), + (params["key"],), + "show-passed" in params, + "fail-ignored" in params, + "fail-warned" in params, + "test", + params["markdown"], + params["json"], + "hide-progress" in params, + False, # plain_text + ) def test_lint_no_dir(self): """Test nf-core pipelines lint fails if --dir does not exist""" diff --git a/tests/test_utils.py b/tests/test_utils.py index f72de515c1..c7f5f37ed9 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -144,7 +144,7 @@ def test_get_repo_releases_branches_nf_core(self): break else: raise AssertionError("Release 1.6 not found") - assert "dev" in wf_branches.keys() + assert "dev" in wf_branches def test_get_repo_releases_branches_not_nf_core(self): wfs = nf_core.pipelines.list.Workflows() @@ -155,7 +155,7 @@ def test_get_repo_releases_branches_not_nf_core(self): break else: raise AssertionError("MultiQC release v1.32 not found") - assert "main" in wf_branches.keys() + assert "main" in wf_branches def test_get_repo_releases_branches_not_exists(self): wfs = nf_core.pipelines.list.Workflows() @@ -212,9 +212,8 @@ def test_set_wd(self): def test_set_wd_revert_on_raise(self): wd_before_context = Path().resolve() - with pytest.raises(Exception): - with nf_core.utils.set_wd(self.tmp_dir): - raise Exception + with pytest.raises(RuntimeError), nf_core.utils.set_wd(self.tmp_dir): + raise RuntimeError assert wd_before_context == Path().resolve() @mock.patch("nf_core.utils.run_cmd")