From 9825d89ebb7b0616d100b70f09bf2962d055f3f3 Mon Sep 17 00:00:00 2001
From: jvfe
Date: Fri, 21 Jun 2024 15:42:18 -0300
Subject: [PATCH 01/60] tests: Add test case for cross-organization subwf

Strangely, it is passing at the moment: the install command seems to
return a passing exit code regardless of whether all modules were
installed.

- Addresses sanger-tol/nf-core-tools#2
---
 tests/subworkflows/install.py | 9 +++++++++
 tests/test_subworkflows.py    | 1 +
 tests/utils.py                | 1 +
 3 files changed, 11 insertions(+)

diff --git a/tests/subworkflows/install.py b/tests/subworkflows/install.py
index dfe71686fb..477818b261 100644
--- a/tests/subworkflows/install.py
+++ b/tests/subworkflows/install.py
@@ -6,7 +6,9 @@
 from nf_core.subworkflows.install import SubworkflowInstall
 
 from ..utils import (
+    CROSS_ORGANIZATION_URL,
     GITLAB_BRANCH_TEST_BRANCH,
+    GITLAB_DEFAULT_BRANCH,
     GITLAB_REPO,
     GITLAB_SUBWORKFLOWS_BRANCH,
     GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH,
@@ -79,6 +81,13 @@ def test_subworkflows_install_different_branch_fail(self):
         assert install_obj.install("bam_stats_samtools") is False
 
 
+def test_subworkflows_install_across_organizations(self):
+    """Test installing a subworkflow with modules from different organizations"""
+    install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=CROSS_ORGANIZATION_URL, branch=GITLAB_DEFAULT_BRANCH)
+    # The hic_bwamem2 subworkflow contains modules from different organizations
+    assert install_obj.install("hic_bwamem2") is True
+
+
 def test_subworkflows_install_tracking(self):
     """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json"""
     self.subworkflow_install.install("bam_sort_stats_samtools")
diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py
index 0a9224002a..eeee62bda1 100644
--- a/tests/test_subworkflows.py
+++ b/tests/test_subworkflows.py
@@ -120,6 +120,7 @@ def tearDown(self):
 )
 from .subworkflows.install import (  # type: ignore[misc]
     test_subworkflow_install_nopipeline,
+    test_subworkflows_install_across_organizations,
     test_subworkflows_install_alternate_remote,
     test_subworkflows_install_bam_sort_stats_samtools,
     test_subworkflows_install_bam_sort_stats_samtools_twice,
diff --git a/tests/utils.py b/tests/utils.py
index 89c1328818..34c899ccf3 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -15,6 +15,7 @@
 OLD_TRIMGALORE_SHA = "9b7a3bdefeaad5d42324aa7dd50f87bea1b04386"
 OLD_TRIMGALORE_BRANCH = "mimic-old-trimgalore"
 GITLAB_URL = "https://gitlab.com/nf-core/modules-test.git"
+CROSS_ORGANIZATION_URL = "https://github.com/jvfe/test-subworkflow-remote.git"
 GITLAB_REPO = "nf-core-test"
 GITLAB_DEFAULT_BRANCH = "main"
 GITLAB_SUBWORKFLOWS_BRANCH = "subworkflows"

From 568363a4b025d6bdfbce031a12e9898df3ca0137 Mon Sep 17 00:00:00 2001
From: jvfe
Date: Fri, 28 Jun 2024 15:33:56 -0300
Subject: [PATCH 02/60] tests: Check module in modules list instead

---
 tests/subworkflows/install.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/tests/subworkflows/install.py b/tests/subworkflows/install.py
index 477818b261..44856ad033 100644
--- a/tests/subworkflows/install.py
+++ b/tests/subworkflows/install.py
@@ -85,7 +85,13 @@ def test_subworkflows_install_across_organizations(self):
     """Test installing a subworkflow with modules from different organizations"""
     install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=CROSS_ORGANIZATION_URL, branch=GITLAB_DEFAULT_BRANCH)
     # The hic_bwamem2 subworkflow contains modules from different organizations
-    assert install_obj.install("hic_bwamem2") is True
install_obj.install("hic_bwamem2") + # Verify that the installed_by entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert mod_json["repos"][CROSS_ORGANIZATION_URL]["modules"]["jvfe"]["samtools/merge"]["installed_by"] == [ + "hic_bwamem2" + ] def test_subworkflows_install_tracking(self): From 534da92b483587a4287ab9fa75331c57e06c87de Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 1 Jul 2024 08:41:07 -0300 Subject: [PATCH 03/60] chore: Dummy commit to test action --- tests/subworkflows/install.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/subworkflows/install.py b/tests/subworkflows/install.py index 44856ad033..b654921674 100644 --- a/tests/subworkflows/install.py +++ b/tests/subworkflows/install.py @@ -89,6 +89,7 @@ def test_subworkflows_install_across_organizations(self): # Verify that the installed_by entry was added correctly modules_json = ModulesJson(self.pipeline_dir) mod_json = modules_json.get_modules_json() + # Dummy comment assert mod_json["repos"][CROSS_ORGANIZATION_URL]["modules"]["jvfe"]["samtools/merge"]["installed_by"] == [ "hic_bwamem2" ] From 286ab86263be1cb2d31c0093c58554f7c05c1e8f Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 1 Jul 2024 08:45:13 -0300 Subject: [PATCH 04/60] Revert "chore: Dummy commit to test action" This reverts commit 534da92b483587a4287ab9fa75331c57e06c87de. --- tests/subworkflows/install.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/subworkflows/install.py b/tests/subworkflows/install.py index b654921674..44856ad033 100644 --- a/tests/subworkflows/install.py +++ b/tests/subworkflows/install.py @@ -89,7 +89,6 @@ def test_subworkflows_install_across_organizations(self): # Verify that the installed_by entry was added correctly modules_json = ModulesJson(self.pipeline_dir) mod_json = modules_json.get_modules_json() - # Dummy comment assert mod_json["repos"][CROSS_ORGANIZATION_URL]["modules"]["jvfe"]["samtools/merge"]["installed_by"] == [ "hic_bwamem2" ] From 423517ef1a268da1186f707d84d82de466d50df6 Mon Sep 17 00:00:00 2001 From: jvfe Date: Tue, 2 Jul 2024 18:46:48 -0300 Subject: [PATCH 05/60] tests: Change installed subworkflow --- tests/subworkflows/install.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/subworkflows/install.py b/tests/subworkflows/install.py index 44856ad033..f872b1f7fb 100644 --- a/tests/subworkflows/install.py +++ b/tests/subworkflows/install.py @@ -85,12 +85,12 @@ def test_subworkflows_install_across_organizations(self): """Test installing a subworkflow with modules from different organizations""" install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=CROSS_ORGANIZATION_URL, branch=GITLAB_DEFAULT_BRANCH) # The hic_bwamem2 subworkflow contains modules from different organizations - install_obj.install("hic_bwamem2") + install_obj.install("get_genome_annotation") # Verify that the installed_by entry was added correctly modules_json = ModulesJson(self.pipeline_dir) mod_json = modules_json.get_modules_json() - assert mod_json["repos"][CROSS_ORGANIZATION_URL]["modules"]["jvfe"]["samtools/merge"]["installed_by"] == [ - "hic_bwamem2" + assert mod_json["repos"][CROSS_ORGANIZATION_URL]["modules"]["jvfe"]["prokka"]["installed_by"] == [ + "get_genome_annotation" ] From af816fd5a3a0ecf5e7317869eb5f3f9f86c5662c Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 8 Jul 2024 15:45:34 -0300 Subject: [PATCH 06/60] feat: Read from meta.yml in get_components_to_install --- nf_core/components/components_utils.py | 10 
++++++---- nf_core/components/install.py | 7 ++++++- nf_core/modules/modules_json.py | 5 +++++ 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 01650a643d..1d433512ac 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -5,6 +5,7 @@ import questionary import rich.prompt +import yaml import nf_core.utils from nf_core.modules.modules_repo import ModulesRepo @@ -146,9 +147,10 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str match = regex.match(line) if match and len(match.groups()) == 2: name, link = match.groups() - if link.startswith("../../../"): - name_split = name.lower().split("_") - modules.append("/".join(name_split)) - elif link.startswith("../"): + if link.startswith("../"): subworkflows.append(name.lower()) + with open(Path(subworkflow_dir, "meta.yml")) as fh: + meta = yaml.safe_load(fh) + components = meta.get("components") + modules.extend(components) return modules, subworkflows diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 6385ee4092..624bd072e3 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -14,7 +14,7 @@ prompt_component_version_sha, ) from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME +from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, ModulesRepo log = logging.getLogger(__name__) @@ -53,6 +53,11 @@ def install(self, component, silent=False): # Check modules directory structure self.check_modules_structure() + if isinstance(component, dict): + component_name = list(component.keys())[0] + self.modules_repo = ModulesRepo(component[component_name]["git_remote"]) + component = component_name + # Verify that 'modules.json' is consistent with the installed modules and subworkflows modules_json = ModulesJson(self.dir) if not silent: diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 7d78268e92..dc47843601 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1186,6 +1186,11 @@ def recreate_dependencies(self, repo, org, subworkflow): dep_mods, dep_subwfs = get_components_to_install(sw_path) for dep_mod in dep_mods: + if isinstance(dep_mod, dict): + component_name = list(dep_mod.keys())[0] + repo = dep_mod[component_name]["git_remote"] + org = dep_mod[component_name]["org_path"] + dep_mod = component_name installed_by = self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] if installed_by == ["modules"]: self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] = [] From d0e6031298f76b3e90b8f8e64bbca9892969b077 Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 15 Jul 2024 09:52:07 -0300 Subject: [PATCH 07/60] refact: Change module return type to always be dict --- nf_core/components/components_utils.py | 18 ++++++++++-- nf_core/components/install.py | 39 +++++++++++++------------- nf_core/modules/modules_json.py | 8 +++--- 3 files changed, 39 insertions(+), 26 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 1d433512ac..0697fcd15f 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -133,7 +133,7 @@ def prompt_component_version_sha( return git_sha -def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str]]: +def 
get_components_to_install(subworkflow_dir: str) -> Tuple[List[dict], List[str]]: """ Parse the subworkflow main.nf file to retrieve all imported modules and subworkflows. """ @@ -147,10 +147,22 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str match = regex.match(line) if match and len(match.groups()) == 2: name, link = match.groups() - if link.startswith("../"): + if link.startswith("../") and not link.startswith("../../"): subworkflows.append(name.lower()) with open(Path(subworkflow_dir, "meta.yml")) as fh: meta = yaml.safe_load(fh) components = meta.get("components") - modules.extend(components) + component_list = [] + for component in components: + if isinstance(component, str): + comp_dict = {"name": component, "org_path": None, "git_remote": None} + else: + name = list(component.keys())[0] + comp_dict = { + "name": name, + "org_path": component[name]["org_path"], + "git_remote": component[name]["git_remote"], + } + component_list.append(comp_dict) + modules.extend(component_list) return modules, subworkflows diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 624bd072e3..d76a65bf45 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -53,10 +53,13 @@ def install(self, component, silent=False): # Check modules directory structure self.check_modules_structure() + repo_path = self.modules_repo.repo_path + remote_url = self.modules_repo.remote_url if isinstance(component, dict): - component_name = list(component.keys())[0] - self.modules_repo = ModulesRepo(component[component_name]["git_remote"]) - component = component_name + if component["git_remote"] is not None: + repo_path = component["org_path"] + remote_url = component["git_remote"] + component = component["name"] # Verify that 'modules.json' is consistent with the installed modules and subworkflows modules_json = ModulesJson(self.dir) @@ -70,61 +73,59 @@ def install(self, component, silent=False): return False # Verify SHA - if not self.modules_repo.verify_sha(self.prompt, self.sha): + if not ModulesRepo(remote_url).verify_sha(self.prompt, self.sha): return False # Check and verify component name - component = self.collect_and_verify_name(component, self.modules_repo) + component = self.collect_and_verify_name(component, ModulesRepo(remote_url)) if not component: return False # Get current version - current_version = modules_json.get_component_version( - self.component_type, component, self.modules_repo.remote_url, self.modules_repo.repo_path - ) + current_version = modules_json.get_component_version(self.component_type, component, remote_url, repo_path) # Set the install folder based on the repository name - install_folder = Path(self.dir, self.component_type, self.modules_repo.repo_path) + install_folder = Path(self.dir, self.component_type, repo_path) # Compute the component directory component_dir = Path(install_folder, component) # Check that the component is not already installed component_not_installed = self.check_component_installed( - component, current_version, component_dir, self.modules_repo, self.force, self.prompt, silent + component, current_version, component_dir, ModulesRepo(remote_url), self.force, self.prompt, silent ) if not component_not_installed: log.debug( f"{self.component_type[:-1].title()} is already installed and force is not set.\nAdding the new installation source {self.installed_by} for {self.component_type[:-1]} {component} to 'modules.json' without installing the {self.component_type}." 
) modules_json.load() - modules_json.update(self.component_type, self.modules_repo, component, current_version, self.installed_by) + modules_json.update( + self.component_type, ModulesRepo(remote_url), component, current_version, self.installed_by + ) return False - version = self.get_version(component, self.sha, self.prompt, current_version, self.modules_repo) + version = self.get_version(component, self.sha, self.prompt, current_version, ModulesRepo(remote_url)) if not version: return False # Remove component if force is set and component is installed install_track = None if self.force: - log.debug(f"Removing installed version of '{self.modules_repo.repo_path}/{component}'") + log.debug(f"Removing installed version of '{repo_path}/{component}'") self.clear_component_dir(component, component_dir) - install_track = self.clean_modules_json(component, self.modules_repo, modules_json) + install_track = self.clean_modules_json(component, ModulesRepo(remote_url), modules_json) if not silent: log.info(f"{'Rei' if self.force else 'I'}nstalling '{component}'") - log.debug( - f"Installing {self.component_type} '{component}' at modules hash {version} from {self.modules_repo.remote_url}" - ) + log.debug(f"Installing {self.component_type} '{component}' at modules hash {version} from {remote_url}") # Download component files - if not self.install_component_files(component, version, self.modules_repo, install_folder): + if not self.install_component_files(component, version, ModulesRepo(remote_url), install_folder): return False # Update module.json with newly installed subworkflow modules_json.load() modules_json.update( - self.component_type, self.modules_repo, component, version, self.installed_by, install_track + self.component_type, ModulesRepo(remote_url), component, version, self.installed_by, install_track ) if self.component_type == "subworkflows": diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index dc47843601..bab977006f 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1187,10 +1187,10 @@ def recreate_dependencies(self, repo, org, subworkflow): for dep_mod in dep_mods: if isinstance(dep_mod, dict): - component_name = list(dep_mod.keys())[0] - repo = dep_mod[component_name]["git_remote"] - org = dep_mod[component_name]["org_path"] - dep_mod = component_name + if dep_mod["git_remote"] is not None: + repo = dep_mod["git_remote"] + org = dep_mod["org_path"] + dep_mod = dep_mod["name"] installed_by = self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] if installed_by == ["modules"]: self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] = [] From 1737c88e1f73e82229329fdd0f0a972e6467d746 Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 15 Jul 2024 09:59:18 -0300 Subject: [PATCH 08/60] fix: Add correct return type --- nf_core/components/components_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 0697fcd15f..440e81ebd9 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -1,7 +1,7 @@ import logging import re from pathlib import Path -from typing import List, Optional, Tuple +from typing import Dict, List, Optional, Tuple import questionary import rich.prompt @@ -133,7 +133,7 @@ def prompt_component_version_sha( return git_sha -def get_components_to_install(subworkflow_dir: str) -> Tuple[List[dict], List[str]]: +def 
get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, str | None]], List[str]]: """ Parse the subworkflow main.nf file to retrieve all imported modules and subworkflows. """ From f7aaea2a9b9b21af239f03eb7fa16b147a76e1b4 Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 15 Jul 2024 10:03:59 -0300 Subject: [PATCH 09/60] fix: Use any for values --- nf_core/components/components_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 440e81ebd9..6de6e2069b 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -1,7 +1,7 @@ import logging import re from pathlib import Path -from typing import Dict, List, Optional, Tuple +from typing import Any, Dict, List, Optional, Tuple import questionary import rich.prompt @@ -133,7 +133,7 @@ def prompt_component_version_sha( return git_sha -def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, str | None]], List[str]]: +def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Any]], List[str]]: """ Parse the subworkflow main.nf file to retrieve all imported modules and subworkflows. """ From d1c490d864f45ed423e927fbc1a700263d97e902 Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 15 Jul 2024 10:22:44 -0300 Subject: [PATCH 10/60] test: Fix module key in across orgs --- tests/subworkflows/install.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/subworkflows/install.py b/tests/subworkflows/install.py index f872b1f7fb..614ff33845 100644 --- a/tests/subworkflows/install.py +++ b/tests/subworkflows/install.py @@ -8,7 +8,6 @@ from ..utils import ( CROSS_ORGANIZATION_URL, GITLAB_BRANCH_TEST_BRANCH, - GITLAB_DEFAULT_BRANCH, GITLAB_REPO, GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, @@ -83,13 +82,13 @@ def test_subworkflows_install_different_branch_fail(self): def test_subworkflows_install_across_organizations(self): """Test installing a subworkflow with modules from different organizations""" - install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=CROSS_ORGANIZATION_URL, branch=GITLAB_DEFAULT_BRANCH) + install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=CROSS_ORGANIZATION_URL) # The hic_bwamem2 subworkflow contains modules from different organizations install_obj.install("get_genome_annotation") # Verify that the installed_by entry was added correctly modules_json = ModulesJson(self.pipeline_dir) mod_json = modules_json.get_modules_json() - assert mod_json["repos"][CROSS_ORGANIZATION_URL]["modules"]["jvfe"]["prokka"]["installed_by"] == [ + assert mod_json["repos"][CROSS_ORGANIZATION_URL]["modules"]["jvfe"]["wget"]["installed_by"] == [ "get_genome_annotation" ] From 6fa5cf49722f203755e30ede69e58594d0370316 Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 15 Jul 2024 10:53:50 -0300 Subject: [PATCH 11/60] refact: Reset modules repo when git_remote not defined --- nf_core/components/install.py | 36 ++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index d76a65bf45..1d70ab5139 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -53,12 +53,12 @@ def install(self, component, silent=False): # Check modules directory structure self.check_modules_structure() - repo_path = self.modules_repo.repo_path - remote_url = self.modules_repo.remote_url if isinstance(component, 
dict): if component["git_remote"] is not None: - repo_path = component["org_path"] remote_url = component["git_remote"] + self.modules_repo = ModulesRepo(remote_url) + else: + self.modules_repo = ModulesRepo() component = component["name"] # Verify that 'modules.json' is consistent with the installed modules and subworkflows @@ -73,59 +73,61 @@ def install(self, component, silent=False): return False # Verify SHA - if not ModulesRepo(remote_url).verify_sha(self.prompt, self.sha): + if not self.modules_repo.verify_sha(self.prompt, self.sha): return False # Check and verify component name - component = self.collect_and_verify_name(component, ModulesRepo(remote_url)) + component = self.collect_and_verify_name(component, self.modules_repo) if not component: return False # Get current version - current_version = modules_json.get_component_version(self.component_type, component, remote_url, repo_path) + current_version = modules_json.get_component_version( + self.component_type, component, self.modules_repo.remote_url, self.modules_repo.repo_path + ) # Set the install folder based on the repository name - install_folder = Path(self.dir, self.component_type, repo_path) + install_folder = Path(self.dir, self.component_type, self.modules_repo.repo_path) # Compute the component directory component_dir = Path(install_folder, component) # Check that the component is not already installed component_not_installed = self.check_component_installed( - component, current_version, component_dir, ModulesRepo(remote_url), self.force, self.prompt, silent + component, current_version, component_dir, self.modules_repo, self.force, self.prompt, silent ) if not component_not_installed: log.debug( f"{self.component_type[:-1].title()} is already installed and force is not set.\nAdding the new installation source {self.installed_by} for {self.component_type[:-1]} {component} to 'modules.json' without installing the {self.component_type}." 
) modules_json.load() - modules_json.update( - self.component_type, ModulesRepo(remote_url), component, current_version, self.installed_by - ) + modules_json.update(self.component_type, self.modules_repo, component, current_version, self.installed_by) return False - version = self.get_version(component, self.sha, self.prompt, current_version, ModulesRepo(remote_url)) + version = self.get_version(component, self.sha, self.prompt, current_version, self.modules_repo) if not version: return False # Remove component if force is set and component is installed install_track = None if self.force: - log.debug(f"Removing installed version of '{repo_path}/{component}'") + log.debug(f"Removing installed version of '{self.modules_repo.repo_path}/{component}'") self.clear_component_dir(component, component_dir) - install_track = self.clean_modules_json(component, ModulesRepo(remote_url), modules_json) + install_track = self.clean_modules_json(component, self.modules_repo, modules_json) if not silent: log.info(f"{'Rei' if self.force else 'I'}nstalling '{component}'") - log.debug(f"Installing {self.component_type} '{component}' at modules hash {version} from {remote_url}") + log.debug( + f"Installing {self.component_type} '{component}' at modules hash {version} from {self.modules_repo.remote_url}" + ) # Download component files - if not self.install_component_files(component, version, ModulesRepo(remote_url), install_folder): + if not self.install_component_files(component, version, self.modules_repo, install_folder): return False # Update module.json with newly installed subworkflow modules_json.load() modules_json.update( - self.component_type, ModulesRepo(remote_url), component, version, self.installed_by, install_track + self.component_type, self.modules_repo, component, version, self.installed_by, install_track ) if self.component_type == "subworkflows": From d9032820a2b2fc9a09757a8686ba32bd33fe1e28 Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 15 Jul 2024 11:40:29 -0300 Subject: [PATCH 12/60] refact: Copy parent attribute --- nf_core/components/install.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 1d70ab5139..3acde5f49f 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -33,6 +33,7 @@ def __init__( installed_by=False, ): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) + self.current_remote = remote_url self.force = force self.prompt = prompt self.sha = sha @@ -58,7 +59,7 @@ def install(self, component, silent=False): remote_url = component["git_remote"] self.modules_repo = ModulesRepo(remote_url) else: - self.modules_repo = ModulesRepo() + self.modules_repo = ModulesRepo(self.current_remote) component = component["name"] # Verify that 'modules.json' is consistent with the installed modules and subworkflows From 7f095fac8c40ad203fe6bcc803f34d46c15515a9 Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 15 Jul 2024 14:32:13 -0300 Subject: [PATCH 13/60] refact: Keep old strategy as fallback --- nf_core/components/components_utils.py | 41 +++++++++++++++----------- 1 file changed, 24 insertions(+), 17 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 6de6e2069b..5dec79ef0f 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -147,22 +147,29 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Any] match = regex.match(line) 
if match and len(match.groups()) == 2: name, link = match.groups() - if link.startswith("../") and not link.startswith("../../"): + if link.startswith("../../../"): + name_split = name.lower().split("_") + modules.append({"name": "/".join(name_split), "org_path": None, "git_remote": None}) + elif link.startswith("../"): subworkflows.append(name.lower()) - with open(Path(subworkflow_dir, "meta.yml")) as fh: - meta = yaml.safe_load(fh) - components = meta.get("components") - component_list = [] - for component in components: - if isinstance(component, str): - comp_dict = {"name": component, "org_path": None, "git_remote": None} - else: - name = list(component.keys())[0] - comp_dict = { - "name": name, - "org_path": component[name]["org_path"], - "git_remote": component[name]["git_remote"], - } - component_list.append(comp_dict) - modules.extend(component_list) + + if Path(subworkflow_dir, "meta.yml").exists(): + with open(Path(subworkflow_dir, "meta.yml")) as fh: + meta = yaml.safe_load(fh) + if "components" not in meta: + return modules, subworkflows + components = meta.get("components") + component_list = [] + for component in components: + if isinstance(component, str): + comp_dict = {"name": component, "org_path": None, "git_remote": None} + else: + name = list(component.keys())[0] + comp_dict = { + "name": name, + "org_path": component[name]["org_path"], + "git_remote": component[name]["git_remote"], + } + component_list.append(comp_dict) + modules.extend(component_list) return modules, subworkflows From 012e9d6d0e618dd3eef7aa9195c6663fd7482d6e Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 15 Jul 2024 15:46:08 -0300 Subject: [PATCH 14/60] refact: Check component not in subwf list --- nf_core/components/components_utils.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 5dec79ef0f..ef2fd5b16f 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -161,15 +161,16 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Any] components = meta.get("components") component_list = [] for component in components: - if isinstance(component, str): - comp_dict = {"name": component, "org_path": None, "git_remote": None} - else: - name = list(component.keys())[0] - comp_dict = { - "name": name, - "org_path": component[name]["org_path"], - "git_remote": component[name]["git_remote"], - } - component_list.append(comp_dict) + if component not in subworkflows: + if isinstance(component, str): + comp_dict = {"name": component, "org_path": None, "git_remote": None} + else: + name = list(component.keys())[0] + comp_dict = { + "name": name, + "org_path": component[name]["org_path"], + "git_remote": component[name]["git_remote"], + } + component_list.append(comp_dict) modules.extend(component_list) return modules, subworkflows From d77fccb85ae7f6d49224b96a75648425a2518bd9 Mon Sep 17 00:00:00 2001 From: jvfe Date: Wed, 24 Jul 2024 10:13:23 -0300 Subject: [PATCH 15/60] refact: Change return type to optional --- nf_core/components/components_utils.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index ef2fd5b16f..1fbce9f03d 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -1,7 +1,7 @@ import logging import re from pathlib import Path -from typing import Any, Dict, 
List, Optional, Tuple +from typing import Dict, List, Optional, Tuple import questionary import rich.prompt @@ -133,7 +133,7 @@ def prompt_component_version_sha( return git_sha -def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Any]], List[str]]: +def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Optional[str]]], List[str]]: """ Parse the subworkflow main.nf file to retrieve all imported modules and subworkflows. """ @@ -154,6 +154,7 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Any] subworkflows.append(name.lower()) if Path(subworkflow_dir, "meta.yml").exists(): + modules = [] with open(Path(subworkflow_dir, "meta.yml")) as fh: meta = yaml.safe_load(fh) if "components" not in meta: From faa7faf3051d0b5d9ce4fc8f543dba30d5a8c3dd Mon Sep 17 00:00:00 2001 From: jvfe Date: Wed, 24 Jul 2024 10:33:58 -0300 Subject: [PATCH 16/60] refact: Change way of handling dicts in modulesjson --- nf_core/modules/modules_json.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index bab977006f..c68cecf9db 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1187,11 +1187,17 @@ def recreate_dependencies(self, repo, org, subworkflow): for dep_mod in dep_mods: if isinstance(dep_mod, dict): + name = dep_mod["name"] if dep_mod["git_remote"] is not None: - repo = dep_mod["git_remote"] - org = dep_mod["org_path"] - dep_mod = dep_mod["name"] - installed_by = self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] + current_repo = dep_mod["git_remote"] + current_org = dep_mod["org_path"] + installed_by = self.modules_json["repos"][current_repo]["modules"][current_org][name][ + "installed_by" + ] + else: + installed_by = self.modules_json["repos"][repo]["modules"][org][name]["installed_by"] + else: + installed_by = self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] if installed_by == ["modules"]: self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] = [] if subworkflow not in installed_by: From 0f08bbe73b5ad4ce0629490bed660a29cee1f669 Mon Sep 17 00:00:00 2001 From: jvfe Date: Wed, 24 Jul 2024 11:13:39 -0300 Subject: [PATCH 17/60] refact: Handle dicts in meta_yml lint --- nf_core/subworkflows/lint/meta_yml.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py index 24e75eddbf..9dbe517ec2 100644 --- a/nf_core/subworkflows/lint/meta_yml.py +++ b/nf_core/subworkflows/lint/meta_yml.py @@ -93,6 +93,9 @@ def meta_yml(subworkflow_lint_object, subworkflow): included_components = ( included_components[0] + included_components[1] ) # join included modules and included subworkflows in a single list + included_components = [ + component["name"] if isinstance(component, dict) else component for component in included_components + ] if "components" in meta_yaml: meta_components = [x for x in meta_yaml["components"]] for component in set(included_components): From 6d052de0cae90cd0b3c086e3e6042dc4894a15f5 Mon Sep 17 00:00:00 2001 From: jvfe Date: Sun, 28 Jul 2024 11:23:43 -0300 Subject: [PATCH 18/60] fix: Pass branch in install too --- nf_core/components/install.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 3acde5f49f..6fbc9d0a9d 100644 --- a/nf_core/components/install.py +++ 
b/nf_core/components/install.py @@ -34,6 +34,7 @@ def __init__( ): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.current_remote = remote_url + self.branch = branch self.force = force self.prompt = prompt self.sha = sha @@ -57,9 +58,9 @@ def install(self, component, silent=False): if isinstance(component, dict): if component["git_remote"] is not None: remote_url = component["git_remote"] - self.modules_repo = ModulesRepo(remote_url) + self.modules_repo = ModulesRepo(remote_url, self.branch) else: - self.modules_repo = ModulesRepo(self.current_remote) + self.modules_repo = ModulesRepo(self.current_remote, self.branch) component = component["name"] # Verify that 'modules.json' is consistent with the installed modules and subworkflows From c6ce67f00bc5a45493ec435f53d82e1bdc475adf Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 5 Aug 2024 10:13:22 -0300 Subject: [PATCH 19/60] fix: Check if module in meta.yml is imported --- nf_core/components/components_utils.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 1fbce9f03d..3c62e0ab24 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -154,7 +154,6 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Opti subworkflows.append(name.lower()) if Path(subworkflow_dir, "meta.yml").exists(): - modules = [] with open(Path(subworkflow_dir, "meta.yml")) as fh: meta = yaml.safe_load(fh) if "components" not in meta: @@ -162,7 +161,7 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Opti components = meta.get("components") component_list = [] for component in components: - if component not in subworkflows: + if component not in subworkflows and component in [d["name"] for d in modules]: if isinstance(component, str): comp_dict = {"name": component, "org_path": None, "git_remote": None} else: @@ -173,5 +172,5 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Opti "git_remote": component[name]["git_remote"], } component_list.append(comp_dict) - modules.extend(component_list) + modules = component_list return modules, subworkflows From fca6f279ba61fa6a441017cef21621c8bc8ff771 Mon Sep 17 00:00:00 2001 From: jvfe Date: Sat, 10 Aug 2024 14:45:22 -0300 Subject: [PATCH 20/60] refact: Define org path based on git remote --- nf_core/components/components_utils.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 3c62e0ab24..892ddb53db 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -166,10 +166,14 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Opti comp_dict = {"name": component, "org_path": None, "git_remote": None} else: name = list(component.keys())[0] + git_remote = component[name]["git_remote"] + match_name = re.match(r"(?:https://|git@)[\w\.]+[:/](.*?)/", git_remote) + if match_name is not None: + org_path = match_name.group(1) comp_dict = { "name": name, - "org_path": component[name]["org_path"], - "git_remote": component[name]["git_remote"], + "org_path": org_path, + "git_remote": git_remote, } component_list.append(comp_dict) modules = component_list From 092bf912d33642b061b23b5328909d7a98657aef Mon Sep 17 00:00:00 2001 From: jvfe Date: Sat, 10 Aug 2024 15:08:38 -0300 Subject: [PATCH 21/60] feat: 
Allow defining branches in meta.yml --- nf_core/components/components_utils.py | 1 + nf_core/components/install.py | 3 ++- tests/subworkflows/install.py | 5 ++++- tests/utils.py | 1 + 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 892ddb53db..d24291fd95 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -174,6 +174,7 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Opti "name": name, "org_path": org_path, "git_remote": git_remote, + "branch": component[name].get("branch", "master"), } component_list.append(comp_dict) modules = component_list diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 6fbc9d0a9d..2417eddfe3 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -58,7 +58,8 @@ def install(self, component, silent=False): if isinstance(component, dict): if component["git_remote"] is not None: remote_url = component["git_remote"] - self.modules_repo = ModulesRepo(remote_url, self.branch) + branch = component["branch"] + self.modules_repo = ModulesRepo(remote_url, branch) else: self.modules_repo = ModulesRepo(self.current_remote, self.branch) component = component["name"] diff --git a/tests/subworkflows/install.py b/tests/subworkflows/install.py index 614ff33845..45179e9e57 100644 --- a/tests/subworkflows/install.py +++ b/tests/subworkflows/install.py @@ -6,6 +6,7 @@ from nf_core.subworkflows.install import SubworkflowInstall from ..utils import ( + CROSS_ORGANIZATION_BRANCH, CROSS_ORGANIZATION_URL, GITLAB_BRANCH_TEST_BRANCH, GITLAB_REPO, @@ -82,7 +83,9 @@ def test_subworkflows_install_different_branch_fail(self): def test_subworkflows_install_across_organizations(self): """Test installing a subworkflow with modules from different organizations""" - install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=CROSS_ORGANIZATION_URL) + install_obj = SubworkflowInstall( + self.pipeline_dir, remote_url=CROSS_ORGANIZATION_URL, branch=CROSS_ORGANIZATION_BRANCH + ) # The hic_bwamem2 subworkflow contains modules from different organizations install_obj.install("get_genome_annotation") # Verify that the installed_by entry was added correctly diff --git a/tests/utils.py b/tests/utils.py index 30dd1ab224..6cd7ed535f 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -17,6 +17,7 @@ OLD_TRIMGALORE_BRANCH = "mimic-old-trimgalore" GITLAB_URL = "https://gitlab.com/nf-core/modules-test.git" CROSS_ORGANIZATION_URL = "https://github.com/jvfe/test-subworkflow-remote.git" +CROSS_ORGANIZATION_BRANCH = "main" GITLAB_REPO = "nf-core-test" GITLAB_DEFAULT_BRANCH = "main" GITLAB_SUBWORKFLOWS_BRANCH = "subworkflows" From 208d796c686cd2f944d7cfc7d368b17ed0cf37b4 Mon Sep 17 00:00:00 2001 From: jvfe Date: Sat, 10 Aug 2024 15:15:43 -0300 Subject: [PATCH 22/60] fix: Add empty branch in other dicts --- nf_core/components/components_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index d24291fd95..4a0f41b6b6 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -149,7 +149,7 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Opti name, link = match.groups() if link.startswith("../../../"): name_split = name.lower().split("_") - modules.append({"name": "/".join(name_split), "org_path": None, "git_remote": 
None}) + modules.append({"name": "/".join(name_split), "org_path": None, "git_remote": None, "branch": None}) elif link.startswith("../"): subworkflows.append(name.lower()) @@ -163,7 +163,7 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Opti for component in components: if component not in subworkflows and component in [d["name"] for d in modules]: if isinstance(component, str): - comp_dict = {"name": component, "org_path": None, "git_remote": None} + comp_dict = {"name": component, "org_path": None, "git_remote": None, "branch": None} else: name = list(component.keys())[0] git_remote = component[name]["git_remote"] From 71c43bee6625428ebe98bf60f24239233fc0cea7 Mon Sep 17 00:00:00 2001 From: jvfe Date: Sun, 11 Aug 2024 21:52:11 -0300 Subject: [PATCH 23/60] refact: Rework logic to use subwfs as well --- nf_core/components/components_utils.py | 62 ++++++++++++++++---------- 1 file changed, 38 insertions(+), 24 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 4a0f41b6b6..411c18afe8 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -133,7 +133,9 @@ def prompt_component_version_sha( return git_sha -def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Optional[str]]], List[str]]: +def get_components_to_install( + subworkflow_dir: str, +) -> Tuple[List[Dict[str, Optional[str]]], List[Dict[str, Optional[str]]]]: """ Parse the subworkflow main.nf file to retrieve all imported modules and subworkflows. """ @@ -149,33 +151,45 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[Dict[str, Opti name, link = match.groups() if link.startswith("../../../"): name_split = name.lower().split("_") - modules.append({"name": "/".join(name_split), "org_path": None, "git_remote": None, "branch": None}) + component_dict = { + "name": "/".join(name_split), + "org_path": None, + "git_remote": None, + "branch": None, + } + modules.append(component_dict) elif link.startswith("../"): - subworkflows.append(name.lower()) + component_dict = {"name": name.lower(), "org_path": None, "git_remote": None, "branch": None} + subworkflows.append(component_dict) if Path(subworkflow_dir, "meta.yml").exists(): with open(Path(subworkflow_dir, "meta.yml")) as fh: meta = yaml.safe_load(fh) - if "components" not in meta: - return modules, subworkflows - components = meta.get("components") - component_list = [] + components = meta.get("components", []) + new_module_list = [] + new_subwf_list = [] + for component in components: - if component not in subworkflows and component in [d["name"] for d in modules]: - if isinstance(component, str): - comp_dict = {"name": component, "org_path": None, "git_remote": None, "branch": None} - else: - name = list(component.keys())[0] - git_remote = component[name]["git_remote"] - match_name = re.match(r"(?:https://|git@)[\w\.]+[:/](.*?)/", git_remote) - if match_name is not None: - org_path = match_name.group(1) - comp_dict = { - "name": name, - "org_path": org_path, - "git_remote": git_remote, - "branch": component[name].get("branch", "master"), - } - component_list.append(comp_dict) - modules = component_list + if isinstance(component, str): + component_dict = {"name": component, "org_path": None, "git_remote": None, "branch": None} + else: + name = list(component.keys())[0] + git_remote = component[name]["git_remote"] + org_path_match = re.search(r"(?:https://|git@)[\w\.]+[:/](.*?)/", git_remote) + org_path = 
org_path_match.group(1) if org_path_match else None + component_dict = { + "name": name.lower(), + "org_path": org_path, + "git_remote": git_remote, + "branch": component[name].get("branch", "master"), + } + + if component_dict["name"] in [sw["name"] for sw in subworkflows]: + new_subwf_list.append(component_dict) + else: + new_module_list.append(component_dict) + + modules = new_module_list + subworkflows = new_subwf_list + return modules, subworkflows From 8bcf7374a8015a7aee0cc77fcd45c056b452b203 Mon Sep 17 00:00:00 2001 From: jvfe Date: Sun, 11 Aug 2024 22:14:00 -0300 Subject: [PATCH 24/60] refact: Support subwf dict in recreate deps --- nf_core/modules/modules_json.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 4436fab605..ba36d80632 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1205,7 +1205,16 @@ def recreate_dependencies(self, repo, org, subworkflow): self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"].append(subworkflow) for dep_subwf in dep_subwfs: - installed_by = self.modules_json["repos"][repo]["subworkflows"][org][dep_subwf]["installed_by"] + if isinstance(dep_subwf, dict): + name = dep_subwf["name"] + if dep_subwf["git_remote"] is not None: + current_repo = dep_subwf["git_remote"] + current_org = dep_subwf["org_path"] + installed_by = self.modules_json["repos"][current_repo]["modules"][current_org][name][ + "installed_by" + ] + else: + installed_by = self.modules_json["repos"][repo]["modules"][org][name]["installed_by"] if installed_by == ["subworkflows"]: self.modules_json["repos"][repo]["subworkflows"][org][dep_subwf]["installed_by"] = [] if subworkflow not in installed_by: From efde7b76822a2393bddae0a92c389f96382223bf Mon Sep 17 00:00:00 2001 From: jvfe Date: Sun, 11 Aug 2024 22:26:10 -0300 Subject: [PATCH 25/60] fix: Change modules to subwfs --- nf_core/modules/modules_json.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index ba36d80632..7c40d0f86c 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1210,11 +1210,11 @@ def recreate_dependencies(self, repo, org, subworkflow): if dep_subwf["git_remote"] is not None: current_repo = dep_subwf["git_remote"] current_org = dep_subwf["org_path"] - installed_by = self.modules_json["repos"][current_repo]["modules"][current_org][name][ + installed_by = self.modules_json["repos"][current_repo]["subworkflows"][current_org][name][ "installed_by" ] else: - installed_by = self.modules_json["repos"][repo]["modules"][org][name]["installed_by"] + installed_by = self.modules_json["repos"][repo]["subworkflows"][org][name]["installed_by"] if installed_by == ["subworkflows"]: self.modules_json["repos"][repo]["subworkflows"][org][dep_subwf]["installed_by"] = [] if subworkflow not in installed_by: From 8e255875a6802f3800d52809221d29acbcb87a0a Mon Sep 17 00:00:00 2001 From: jvfe Date: Sun, 11 Aug 2024 22:43:48 -0300 Subject: [PATCH 26/60] fix: Use name value in recreate deps --- nf_core/modules/modules_json.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 7c40d0f86c..57ea70d3bb 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1183,7 +1183,7 @@ def recreate_dependencies(self, repo, org, subworkflow): i.e., no module or subworkflow 
has been installed by the user in the meantime """ - sw_path = Path(self.subworkflows_dir, org, subworkflow) + sw_path = Path(self.subworkflows_dir, org, subworkflow["name"]) dep_mods, dep_subwfs = get_components_to_install(sw_path) for dep_mod in dep_mods: From 494791368b4cc6acafb81062d73ff61f9116e150 Mon Sep 17 00:00:00 2001 From: jvfe Date: Sun, 11 Aug 2024 22:48:48 -0300 Subject: [PATCH 27/60] Revert "fix: Use name value in recreate deps" This reverts commit 8e255875a6802f3800d52809221d29acbcb87a0a. --- nf_core/modules/modules_json.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 57ea70d3bb..7c40d0f86c 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1183,7 +1183,7 @@ def recreate_dependencies(self, repo, org, subworkflow): i.e., no module or subworkflow has been installed by the user in the meantime """ - sw_path = Path(self.subworkflows_dir, org, subworkflow["name"]) + sw_path = Path(self.subworkflows_dir, org, subworkflow) dep_mods, dep_subwfs = get_components_to_install(sw_path) for dep_mod in dep_mods: From 4888d3251982337ba31a4b73de960cac7a3d8f3b Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 12 Aug 2024 08:12:22 -0300 Subject: [PATCH 28/60] fix: Use subworkflow name in recreate deps --- nf_core/modules/modules_json.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 7c40d0f86c..fce8c630c0 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1183,7 +1183,8 @@ def recreate_dependencies(self, repo, org, subworkflow): i.e., no module or subworkflow has been installed by the user in the meantime """ - sw_path = Path(self.subworkflows_dir, org, subworkflow) + sw_name = subworkflow["name"] if isinstance(subworkflow, dict) else subworkflow + sw_path = Path(self.subworkflows_dir, org, sw_name) dep_mods, dep_subwfs = get_components_to_install(sw_path) for dep_mod in dep_mods: From aa265d8fa0626b135b72be1db6b1cde7b3bbbeea Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 12 Aug 2024 08:27:12 -0300 Subject: [PATCH 29/60] fix: Use sw_name in appends too --- nf_core/modules/modules_json.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index fce8c630c0..9e8e975fe5 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1202,8 +1202,8 @@ def recreate_dependencies(self, repo, org, subworkflow): installed_by = self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] if installed_by == ["modules"]: self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] = [] - if subworkflow not in installed_by: - self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"].append(subworkflow) + if sw_name not in installed_by: + self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"].append(sw_name) for dep_subwf in dep_subwfs: if isinstance(dep_subwf, dict): @@ -1218,6 +1218,6 @@ def recreate_dependencies(self, repo, org, subworkflow): installed_by = self.modules_json["repos"][repo]["subworkflows"][org][name]["installed_by"] if installed_by == ["subworkflows"]: self.modules_json["repos"][repo]["subworkflows"][org][dep_subwf]["installed_by"] = [] - if subworkflow not in installed_by: - 
self.modules_json["repos"][repo]["subworkflows"][org][dep_subwf]["installed_by"].append(subworkflow) + if sw_name not in installed_by: + self.modules_json["repos"][repo]["subworkflows"][org][dep_subwf]["installed_by"].append(sw_name) self.recreate_dependencies(repo, org, dep_subwf) From a764c76092fa7311378491d4f868a0e7fc290e6e Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 12 Aug 2024 20:53:35 -0300 Subject: [PATCH 30/60] fix: Only add module if it's in main.nf too --- nf_core/components/components_utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 411c18afe8..4faa9f149f 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -187,7 +187,8 @@ def get_components_to_install( if component_dict["name"] in [sw["name"] for sw in subworkflows]: new_subwf_list.append(component_dict) else: - new_module_list.append(component_dict) + if component_dict["name"] in [m["name"] for m in modules]: + new_module_list.append(component_dict) modules = new_module_list subworkflows = new_subwf_list From 869d8165be959be82fcefe808ccfe75cfe09f8e2 Mon Sep 17 00:00:00 2001 From: jvfe Date: Tue, 13 Aug 2024 09:05:54 -0300 Subject: [PATCH 31/60] fix: Handle incomplete meta.yml --- nf_core/components/components_utils.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 4faa9f149f..eab9390321 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -165,7 +165,9 @@ def get_components_to_install( if Path(subworkflow_dir, "meta.yml").exists(): with open(Path(subworkflow_dir, "meta.yml")) as fh: meta = yaml.safe_load(fh) - components = meta.get("components", []) + if "components" not in meta: + return modules, subworkflows + components = meta.get("components") new_module_list = [] new_subwf_list = [] From c3ac1b8ab5da6c4c98fee9804575b15ea6b18554 Mon Sep 17 00:00:00 2001 From: jvfe Date: Tue, 13 Aug 2024 11:23:43 -0300 Subject: [PATCH 32/60] refact: Remove isinstance check in lint/meta.yml --- nf_core/subworkflows/lint/meta_yml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py index 9dbe517ec2..9b42b19957 100644 --- a/nf_core/subworkflows/lint/meta_yml.py +++ b/nf_core/subworkflows/lint/meta_yml.py @@ -94,7 +94,7 @@ def meta_yml(subworkflow_lint_object, subworkflow): included_components[0] + included_components[1] ) # join included modules and included subworkflows in a single list included_components = [ - component["name"] if isinstance(component, dict) else component for component in included_components + component["name"] for component in included_components ] if "components" in meta_yaml: meta_components = [x for x in meta_yaml["components"]] From 4eb95e6c6ce3eeba0bf519e4a49ef3174c0b3d7f Mon Sep 17 00:00:00 2001 From: jvfe Date: Tue, 13 Aug 2024 11:28:21 -0300 Subject: [PATCH 33/60] style: Format meta_yml.py --- nf_core/subworkflows/lint/meta_yml.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py index 9b42b19957..9c96df7563 100644 --- a/nf_core/subworkflows/lint/meta_yml.py +++ b/nf_core/subworkflows/lint/meta_yml.py @@ -93,9 +93,7 @@ def meta_yml(subworkflow_lint_object, subworkflow): included_components = ( 
included_components[0] + included_components[1] ) # join included modules and included subworkflows in a single list - included_components = [ - component["name"] for component in included_components - ] + included_components = [component["name"] for component in included_components] if "components" in meta_yaml: meta_components = [x for x in meta_yaml["components"]] for component in set(included_components): From e9bf23830492483e4ef9b438603dd60cf5ac8af0 Mon Sep 17 00:00:00 2001 From: jvfe Date: Tue, 13 Aug 2024 11:33:04 -0300 Subject: [PATCH 34/60] refact: Remove isinstance check in components/install.py --- nf_core/components/install.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 2417eddfe3..b151ab427d 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -44,8 +44,10 @@ def __init__( self.installed_by = self.component_type def install(self, component, silent=False): + component_name = component["name"] + if self.repo_type == "modules": - log.error(f"You cannot install a {component} in a clone of nf-core/modules") + log.error(f"You cannot install a {component_name} in a clone of nf-core/modules") return False # Check whether pipelines is valid if not self.has_valid_directory(): @@ -55,14 +57,12 @@ def install(self, component, silent=False): # Check modules directory structure self.check_modules_structure() - if isinstance(component, dict): - if component["git_remote"] is not None: - remote_url = component["git_remote"] - branch = component["branch"] - self.modules_repo = ModulesRepo(remote_url, branch) - else: - self.modules_repo = ModulesRepo(self.current_remote, self.branch) - component = component["name"] + if component["git_remote"] is not None: + remote_url = component["git_remote"] + branch = component["branch"] + self.modules_repo = ModulesRepo(remote_url, branch) + else: + self.modules_repo = ModulesRepo(self.current_remote, self.branch) # Verify that 'modules.json' is consistent with the installed modules and subworkflows modules_json = ModulesJson(self.dir) @@ -80,7 +80,7 @@ def install(self, component, silent=False): return False # Check and verify component name - component = self.collect_and_verify_name(component, self.modules_repo) + component = self.collect_and_verify_name(component_name, self.modules_repo) if not component: return False From c8b8a83f40106ad76a1699a5f3f50880f8a9aec3 Mon Sep 17 00:00:00 2001 From: jvfe Date: Tue, 13 Aug 2024 12:46:16 -0300 Subject: [PATCH 35/60] Revert "refact: Remove isinstance check in components/install.py" This reverts commit e9bf23830492483e4ef9b438603dd60cf5ac8af0. 
--- nf_core/components/install.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index b151ab427d..2417eddfe3 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -44,10 +44,8 @@ def __init__( self.installed_by = self.component_type def install(self, component, silent=False): - component_name = component["name"] - if self.repo_type == "modules": - log.error(f"You cannot install a {component_name} in a clone of nf-core/modules") + log.error(f"You cannot install a {component} in a clone of nf-core/modules") return False # Check whether pipelines is valid if not self.has_valid_directory(): @@ -57,12 +55,14 @@ def install(self, component, silent=False): # Check modules directory structure self.check_modules_structure() - if component["git_remote"] is not None: - remote_url = component["git_remote"] - branch = component["branch"] - self.modules_repo = ModulesRepo(remote_url, branch) - else: - self.modules_repo = ModulesRepo(self.current_remote, self.branch) + if isinstance(component, dict): + if component["git_remote"] is not None: + remote_url = component["git_remote"] + branch = component["branch"] + self.modules_repo = ModulesRepo(remote_url, branch) + else: + self.modules_repo = ModulesRepo(self.current_remote, self.branch) + component = component["name"] # Verify that 'modules.json' is consistent with the installed modules and subworkflows modules_json = ModulesJson(self.dir) @@ -80,7 +80,7 @@ def install(self, component, silent=False): return False # Check and verify component name - component = self.collect_and_verify_name(component_name, self.modules_repo) + component = self.collect_and_verify_name(component, self.modules_repo) if not component: return False From eba839167edfb4de42dd172cb872893b700855d0 Mon Sep 17 00:00:00 2001 From: jvfe Date: Tue, 13 Aug 2024 16:06:09 -0300 Subject: [PATCH 36/60] refact: Remove isinstance check in recreate_deps --- nf_core/modules/modules_json.py | 36 ++++++++++++++------------------- 1 file changed, 15 insertions(+), 21 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 9e8e975fe5..b2c2afe897 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1188,34 +1188,28 @@ def recreate_dependencies(self, repo, org, subworkflow): dep_mods, dep_subwfs = get_components_to_install(sw_path) for dep_mod in dep_mods: - if isinstance(dep_mod, dict): - name = dep_mod["name"] - if dep_mod["git_remote"] is not None: - current_repo = dep_mod["git_remote"] - current_org = dep_mod["org_path"] - installed_by = self.modules_json["repos"][current_repo]["modules"][current_org][name][ - "installed_by" - ] - else: - installed_by = self.modules_json["repos"][repo]["modules"][org][name]["installed_by"] + name = dep_mod["name"] + if dep_mod["git_remote"] is not None: + current_repo = dep_mod["git_remote"] + current_org = dep_mod["org_path"] + installed_by = self.modules_json["repos"][current_repo]["modules"][current_org][name]["installed_by"] else: - installed_by = self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] + installed_by = self.modules_json["repos"][repo]["modules"][org][name]["installed_by"] if installed_by == ["modules"]: self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] = [] if sw_name not in installed_by: self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"].append(sw_name) for dep_subwf in 
dep_subwfs: - if isinstance(dep_subwf, dict): - name = dep_subwf["name"] - if dep_subwf["git_remote"] is not None: - current_repo = dep_subwf["git_remote"] - current_org = dep_subwf["org_path"] - installed_by = self.modules_json["repos"][current_repo]["subworkflows"][current_org][name][ - "installed_by" - ] - else: - installed_by = self.modules_json["repos"][repo]["subworkflows"][org][name]["installed_by"] + name = dep_subwf["name"] + if dep_subwf["git_remote"] is not None: + current_repo = dep_subwf["git_remote"] + current_org = dep_subwf["org_path"] + installed_by = self.modules_json["repos"][current_repo]["subworkflows"][current_org][name][ + "installed_by" + ] + else: + installed_by = self.modules_json["repos"][repo]["subworkflows"][org][name]["installed_by"] if installed_by == ["subworkflows"]: self.modules_json["repos"][repo]["subworkflows"][org][dep_subwf]["installed_by"] = [] if sw_name not in installed_by: From c72b94a3e6db1e06b1330c5e2da016382bada85b Mon Sep 17 00:00:00 2001 From: jvfe Date: Thu, 15 Aug 2024 14:06:16 -0300 Subject: [PATCH 37/60] refact: Change function structure to use dicts not lists --- nf_core/components/components_utils.py | 65 +++++++++++--------------- 1 file changed, 28 insertions(+), 37 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index d327d28af3..edd560b616 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -149,8 +149,8 @@ def get_components_to_install( """ Parse the subworkflow main.nf file to retrieve all imported modules and subworkflows. """ - modules = [] - subworkflows = [] + modules = {} + subworkflows = {} with open(Path(subworkflow_dir, "main.nf")) as fh: for line in fh: regex = re.compile( @@ -161,48 +161,39 @@ def get_components_to_install( name, link = match.groups() if link.startswith("../../../"): name_split = name.lower().split("_") + component_name = "/".join(name_split) component_dict = { - "name": "/".join(name_split), + "name": component_name, "org_path": None, "git_remote": None, "branch": None, } - modules.append(component_dict) + modules[component_dict[component_name]] = component_dict elif link.startswith("../"): - component_dict = {"name": name.lower(), "org_path": None, "git_remote": None, "branch": None} - subworkflows.append(component_dict) + component_name = name.lower() + component_dict = {"name": component_name, "org_path": None, "git_remote": None, "branch": None} + subworkflows[component_dict[component_name]] = component_dict if Path(subworkflow_dir, "meta.yml").exists(): with open(Path(subworkflow_dir, "meta.yml")) as fh: meta = yaml.safe_load(fh) - if "components" not in meta: - return modules, subworkflows - components = meta.get("components") - new_module_list = [] - new_subwf_list = [] - - for component in components: - if isinstance(component, str): - component_dict = {"name": component, "org_path": None, "git_remote": None, "branch": None} - else: - name = list(component.keys())[0] - git_remote = component[name]["git_remote"] - org_path_match = re.search(r"(?:https://|git@)[\w\.]+[:/](.*?)/", git_remote) - org_path = org_path_match.group(1) if org_path_match else None - component_dict = { - "name": name.lower(), - "org_path": org_path, - "git_remote": git_remote, - "branch": component[name].get("branch", "master"), - } - - if component_dict["name"] in [sw["name"] for sw in subworkflows]: - new_subwf_list.append(component_dict) - else: - if component_dict["name"] in [m["name"] for m in modules]: - 
new_module_list.append(component_dict) - - modules = new_module_list - subworkflows = new_subwf_list - - return modules, subworkflows + if "components" in meta: + components = meta.get("components") + for component in components: + if isinstance(component, dict): + component_name = list(component.keys())[0].lower() + git_remote = component[component_name]["git_remote"] + org_path_match = re.search(r"(?:https://|git@)[\w\.]+[:/](.*?)/", git_remote) + org_path = org_path_match.group(1) if org_path_match else None + current_comp_dict = subworkflows if component_name in subworkflows else modules + + component_dict = { + "name": component_name, + "org_path": org_path, + "git_remote": git_remote, + "branch": component[component_name].get("branch", "master"), + } + + current_comp_dict[component_dict[component_name]].update(component_dict) + + return list(modules.values()), list(subworkflows.values()) From bb31d78fd4a120d1df06d5933bd69b7d771de803 Mon Sep 17 00:00:00 2001 From: jvfe Date: Thu, 15 Aug 2024 14:46:33 -0300 Subject: [PATCH 38/60] fix: Change access key in inner dicts --- nf_core/components/components_utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index edd560b616..0f02305077 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -168,11 +168,11 @@ def get_components_to_install( "git_remote": None, "branch": None, } - modules[component_dict[component_name]] = component_dict + modules[component_dict["name"]] = component_dict elif link.startswith("../"): component_name = name.lower() component_dict = {"name": component_name, "org_path": None, "git_remote": None, "branch": None} - subworkflows[component_dict[component_name]] = component_dict + subworkflows[component_dict["name"]] = component_dict if Path(subworkflow_dir, "meta.yml").exists(): with open(Path(subworkflow_dir, "meta.yml")) as fh: @@ -194,6 +194,6 @@ def get_components_to_install( "branch": component[component_name].get("branch", "master"), } - current_comp_dict[component_dict[component_name]].update(component_dict) + current_comp_dict[component_dict["name"]].update(component_dict) return list(modules.values()), list(subworkflows.values()) From 7f4cce83bb06c7de29edf603c0c9323e0a87a251 Mon Sep 17 00:00:00 2001 From: jvfe Date: Fri, 16 Aug 2024 11:19:52 -0300 Subject: [PATCH 39/60] refact: Use component_name every time --- nf_core/components/components_utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 0f02305077..13c1fe1e0c 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -168,11 +168,11 @@ def get_components_to_install( "git_remote": None, "branch": None, } - modules[component_dict["name"]] = component_dict + modules[component_name] = component_dict elif link.startswith("../"): component_name = name.lower() component_dict = {"name": component_name, "org_path": None, "git_remote": None, "branch": None} - subworkflows[component_dict["name"]] = component_dict + subworkflows[component_name] = component_dict if Path(subworkflow_dir, "meta.yml").exists(): with open(Path(subworkflow_dir, "meta.yml")) as fh: @@ -194,6 +194,6 @@ def get_components_to_install( "branch": component[component_name].get("branch", "master"), } - current_comp_dict[component_dict["name"]].update(component_dict) + 
current_comp_dict[component_name].update(component_dict) return list(modules.values()), list(subworkflows.values()) From e342678e99b020ab3a9b4f31c61b1c05a1858568 Mon Sep 17 00:00:00 2001 From: jvfe Date: Fri, 16 Aug 2024 11:20:09 -0300 Subject: [PATCH 40/60] refact: Don't default to master --- nf_core/components/components_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 13c1fe1e0c..c8abe2d785 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -191,7 +191,7 @@ def get_components_to_install( "name": component_name, "org_path": org_path, "git_remote": git_remote, - "branch": component[component_name].get("branch", "master"), + "branch": component[component_name].get("branch", None), } current_comp_dict[component_name].update(component_dict) From 50216c45590be387eda69b77ea8a09e499e1a91b Mon Sep 17 00:00:00 2001 From: jvfe Date: Tue, 20 Aug 2024 08:01:28 -0300 Subject: [PATCH 41/60] refact: Move instance check up --- nf_core/components/install.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index ca746098dc..b3ad7749ed 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -50,6 +50,15 @@ def __init__( self.installed_by = [self.component_type] def install(self, component: str, silent: bool = False) -> bool: + if isinstance(component, dict): + if component["git_remote"] is not None: + remote_url = component["git_remote"] + branch = component["branch"] + self.modules_repo = ModulesRepo(remote_url, branch) + else: + self.modules_repo = ModulesRepo(self.current_remote, self.branch) + component = component["name"] + if self.repo_type == "modules": log.error(f"You cannot install a {component} in a clone of nf-core/modules") return False @@ -61,15 +70,6 @@ def install(self, component: str, silent: bool = False) -> bool: # Check modules directory structure self.check_modules_structure() - if isinstance(component, dict): - if component["git_remote"] is not None: - remote_url = component["git_remote"] - branch = component["branch"] - self.modules_repo = ModulesRepo(remote_url, branch) - else: - self.modules_repo = ModulesRepo(self.current_remote, self.branch) - component = component["name"] - # Verify that 'modules.json' is consistent with the installed modules and subworkflows modules_json = ModulesJson(self.directory) if not silent: From 0e83d9afbbf461a20abc65b1350cb95e9dfd8bfd Mon Sep 17 00:00:00 2001 From: Matthieu Muffato Date: Tue, 20 Aug 2024 13:23:56 +0100 Subject: [PATCH 42/60] Refactoring of component_utils.py --- nf_core/components/components_utils.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index c8abe2d785..e2d47fe65a 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -162,17 +162,12 @@ def get_components_to_install( if link.startswith("../../../"): name_split = name.lower().split("_") component_name = "/".join(name_split) - component_dict = { - "name": component_name, - "org_path": None, - "git_remote": None, - "branch": None, - } - modules[component_name] = component_dict + current_comp_dict = modules elif link.startswith("../"): component_name = name.lower() - component_dict = {"name": component_name, "org_path": None, "git_remote": None, 
"branch": None} - subworkflows[component_name] = component_dict + current_comp_dict = subworkflows + component_dict = {"name": component_name, "org_path": None, "git_remote": None, "branch": None} + current_comp_dict[component_name] = component_dict if Path(subworkflow_dir, "meta.yml").exists(): with open(Path(subworkflow_dir, "meta.yml")) as fh: From f8c8251c44fd469a4d305459b7b4cc0c38ffe24e Mon Sep 17 00:00:00 2001 From: jvfe Date: Tue, 20 Aug 2024 20:53:15 -0300 Subject: [PATCH 43/60] Revert "Refactoring of component_utils.py" This reverts commit 0e83d9afbbf461a20abc65b1350cb95e9dfd8bfd. --- nf_core/components/components_utils.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index e2d47fe65a..c8abe2d785 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -162,12 +162,17 @@ def get_components_to_install( if link.startswith("../../../"): name_split = name.lower().split("_") component_name = "/".join(name_split) - current_comp_dict = modules + component_dict = { + "name": component_name, + "org_path": None, + "git_remote": None, + "branch": None, + } + modules[component_name] = component_dict elif link.startswith("../"): component_name = name.lower() - current_comp_dict = subworkflows - component_dict = {"name": component_name, "org_path": None, "git_remote": None, "branch": None} - current_comp_dict[component_name] = component_dict + component_dict = {"name": component_name, "org_path": None, "git_remote": None, "branch": None} + subworkflows[component_name] = component_dict if Path(subworkflow_dir, "meta.yml").exists(): with open(Path(subworkflow_dir, "meta.yml")) as fh: From 313853ec737a484b6d05b1ae31ad1f04bd7ff980 Mon Sep 17 00:00:00 2001 From: jvfe Date: Wed, 21 Aug 2024 09:47:46 -0300 Subject: [PATCH 44/60] refact: Use get in components/install.py --- nf_core/components/install.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index b3ad7749ed..be3d8a40f7 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -51,13 +51,13 @@ def __init__( def install(self, component: str, silent: bool = False) -> bool: if isinstance(component, dict): - if component["git_remote"] is not None: - remote_url = component["git_remote"] - branch = component["branch"] + if component.get("git_remote") is not None: + remote_url = component.get("git_remote") + branch = component.get("branch") self.modules_repo = ModulesRepo(remote_url, branch) else: self.modules_repo = ModulesRepo(self.current_remote, self.branch) - component = component["name"] + component = component.get("name") if self.repo_type == "modules": log.error(f"You cannot install a {component} in a clone of nf-core/modules") From e8e4da0ee0223b05324b8cefc9365e0e31be9ee9 Mon Sep 17 00:00:00 2001 From: jvfe Date: Wed, 21 Aug 2024 11:30:16 -0300 Subject: [PATCH 45/60] refact: Avoid redefining keys when possible --- nf_core/components/components_utils.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index c8abe2d785..78c1116085 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -149,8 +149,9 @@ def get_components_to_install( """ Parse the subworkflow main.nf file to retrieve all imported modules and subworkflows. 
""" - modules = {} - subworkflows = {} + modules: Dict[str, Dict[str, Optional[str]]] = {} + subworkflows: Dict[str, Dict[str, Optional[str]]] = {} + with open(Path(subworkflow_dir, "main.nf")) as fh: for line in fh: regex = re.compile( @@ -162,16 +163,13 @@ def get_components_to_install( if link.startswith("../../../"): name_split = name.lower().split("_") component_name = "/".join(name_split) - component_dict = { + component_dict: Dict[str, Optional[str]] = { "name": component_name, - "org_path": None, - "git_remote": None, - "branch": None, } modules[component_name] = component_dict elif link.startswith("../"): component_name = name.lower() - component_dict = {"name": component_name, "org_path": None, "git_remote": None, "branch": None} + component_dict = {"name": component_name} subworkflows[component_name] = component_dict if Path(subworkflow_dir, "meta.yml").exists(): @@ -188,7 +186,6 @@ def get_components_to_install( current_comp_dict = subworkflows if component_name in subworkflows else modules component_dict = { - "name": component_name, "org_path": org_path, "git_remote": git_remote, "branch": component[component_name].get("branch", None), From f783d58866f8cbdfdfb795d94d259cbb41096264 Mon Sep 17 00:00:00 2001 From: jvfe Date: Wed, 21 Aug 2024 12:41:24 -0300 Subject: [PATCH 46/60] refact: Use get in modules_json --- nf_core/modules/modules_json.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index b94052c7f0..7c063b572c 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1255,9 +1255,9 @@ def recreate_dependencies(self, repo, org, subworkflow): assert self.modules_json is not None # mypy for dep_mod in dep_mods: name = dep_mod["name"] - if dep_mod["git_remote"] is not None: - current_repo = dep_mod["git_remote"] - current_org = dep_mod["org_path"] + if dep_mod.get("git_remote") is not None: + current_repo = dep_mod.get("git_remote", repo) + current_org = dep_mod.get("org_path", org) installed_by = self.modules_json["repos"][current_repo]["modules"][current_org][name]["installed_by"] else: installed_by = self.modules_json["repos"][repo]["modules"][org][name]["installed_by"] @@ -1268,9 +1268,9 @@ def recreate_dependencies(self, repo, org, subworkflow): for dep_subwf in dep_subwfs: name = dep_subwf["name"] - if dep_subwf["git_remote"] is not None: - current_repo = dep_subwf["git_remote"] - current_org = dep_subwf["org_path"] + if dep_subwf.get("git_remote") is not None: + current_repo = dep_subwf.get("git_remote", repo) + current_org = dep_subwf.get("org_path", org) installed_by = self.modules_json["repos"][current_repo]["subworkflows"][current_org][name][ "installed_by" ] From eb593089ea395635f178d5487402f3c8595cfa92 Mon Sep 17 00:00:00 2001 From: jvfe Date: Wed, 21 Aug 2024 13:00:04 -0300 Subject: [PATCH 47/60] refact: Use dictionary input in check_up_to_date --- nf_core/modules/modules_json.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 7c063b572c..6afa9b6ec4 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -674,7 +674,7 @@ def check_up_to_date(self): dump_modules_json = True for repo, subworkflows in subworkflows_dict.items(): for org, subworkflow in subworkflows: - self.recreate_dependencies(repo, org, subworkflow) + self.recreate_dependencies(repo, org, {"name": subworkflow}) self.pipeline_components = 
original_pipeline_components if dump_modules_json: @@ -1249,7 +1249,7 @@ def recreate_dependencies(self, repo, org, subworkflow): i.e., no module or subworkflow has been installed by the user in the meantime """ - sw_name = subworkflow["name"] if isinstance(subworkflow, dict) else subworkflow + sw_name = subworkflow["name"] sw_path = Path(self.subworkflows_dir, org, sw_name) dep_mods, dep_subwfs = get_components_to_install(sw_path) assert self.modules_json is not None # mypy From be4d78f807fbaf5ec0e1aed999220ad542c7f745 Mon Sep 17 00:00:00 2001 From: Matthieu Muffato Date: Wed, 21 Aug 2024 22:03:13 +0100 Subject: [PATCH 48/60] Use the power of get to skip if tests --- nf_core/components/components_utils.py | 4 ++-- nf_core/components/install.py | 11 ++++------- nf_core/modules/modules_json.py | 20 ++++++-------------- 3 files changed, 12 insertions(+), 23 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 78c1116085..44c1b7bec7 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -176,7 +176,7 @@ def get_components_to_install( with open(Path(subworkflow_dir, "meta.yml")) as fh: meta = yaml.safe_load(fh) if "components" in meta: - components = meta.get("components") + components = meta["components"] for component in components: if isinstance(component, dict): component_name = list(component.keys())[0].lower() @@ -188,7 +188,7 @@ def get_components_to_install( component_dict = { "org_path": org_path, "git_remote": git_remote, - "branch": component[component_name].get("branch", None), + "branch": component[component_name].get("branch"), } current_comp_dict[component_name].update(component_dict) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index be3d8a40f7..865e681a44 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -51,13 +51,10 @@ def __init__( def install(self, component: str, silent: bool = False) -> bool: if isinstance(component, dict): - if component.get("git_remote") is not None: - remote_url = component.get("git_remote") - branch = component.get("branch") - self.modules_repo = ModulesRepo(remote_url, branch) - else: - self.modules_repo = ModulesRepo(self.current_remote, self.branch) - component = component.get("name") + remote_url = component.get("git_remote", self.current_remote) + branch = component.get("branch", self.branch) + self.modules_repo = ModulesRepo(remote_url, branch) + component = component["name"] if self.repo_type == "modules": log.error(f"You cannot install a {component} in a clone of nf-core/modules") diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 6afa9b6ec4..08e117b1ad 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1255,12 +1255,9 @@ def recreate_dependencies(self, repo, org, subworkflow): assert self.modules_json is not None # mypy for dep_mod in dep_mods: name = dep_mod["name"] - if dep_mod.get("git_remote") is not None: - current_repo = dep_mod.get("git_remote", repo) - current_org = dep_mod.get("org_path", org) - installed_by = self.modules_json["repos"][current_repo]["modules"][current_org][name]["installed_by"] - else: - installed_by = self.modules_json["repos"][repo]["modules"][org][name]["installed_by"] + current_repo = dep_mod.get("git_remote", repo) + current_org = dep_mod.get("org_path", org) + installed_by = self.modules_json["repos"][current_repo]["modules"][current_org][name]["installed_by"] if installed_by 
== ["modules"]: self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] = [] if sw_name not in installed_by: @@ -1268,14 +1265,9 @@ def recreate_dependencies(self, repo, org, subworkflow): for dep_subwf in dep_subwfs: name = dep_subwf["name"] - if dep_subwf.get("git_remote") is not None: - current_repo = dep_subwf.get("git_remote", repo) - current_org = dep_subwf.get("org_path", org) - installed_by = self.modules_json["repos"][current_repo]["subworkflows"][current_org][name][ - "installed_by" - ] - else: - installed_by = self.modules_json["repos"][repo]["subworkflows"][org][name]["installed_by"] + current_repo = dep_subwf.get("git_remote", repo) + current_org = dep_subwf.get("org_path", org) + installed_by = self.modules_json["repos"][current_repo]["subworkflows"][current_org][name]["installed_by"] if installed_by == ["subworkflows"]: self.modules_json["repos"][repo]["subworkflows"][org][dep_subwf]["installed_by"] = [] if sw_name not in installed_by: From b13a40688100eb3782ab01a03e5dfe4dabb66fb6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Cavalcante?= Date: Thu, 22 Aug 2024 14:28:46 -0300 Subject: [PATCH 49/60] refact: Raise error if org_path not found --- nf_core/components/components_utils.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 44c1b7bec7..f6bc436de9 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -182,7 +182,10 @@ def get_components_to_install( component_name = list(component.keys())[0].lower() git_remote = component[component_name]["git_remote"] org_path_match = re.search(r"(?:https://|git@)[\w\.]+[:/](.*?)/", git_remote) - org_path = org_path_match.group(1) if org_path_match else None + if org_path_match: + org_path = org_path_match.group(1) + else: + raise UserWarning(f"Organisation path could not be established for {component_name}") current_comp_dict = subworkflows if component_name in subworkflows else modules component_dict = { From e96341e8a3097158d38a222b8559069a7388fcb0 Mon Sep 17 00:00:00 2001 From: jvfe Date: Thu, 22 Aug 2024 14:36:25 -0300 Subject: [PATCH 50/60] style: Run ruff format --- nf_core/components/components_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index f6bc436de9..2190605ab8 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -185,7 +185,7 @@ def get_components_to_install( if org_path_match: org_path = org_path_match.group(1) else: - raise UserWarning(f"Organisation path could not be established for {component_name}") + raise UserWarning(f"Organisation path could not be established for {component_name}") current_comp_dict = subworkflows if component_name in subworkflows else modules component_dict = { From 2b9a573c4b12d6e7961969615283ee0c9aa9bf96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Cavalcante?= Date: Thu, 22 Aug 2024 17:31:43 -0300 Subject: [PATCH 51/60] refact: Change UserWarning message Co-authored-by: Matthieu Muffato --- nf_core/components/components_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 2190605ab8..174002cfff 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -185,7 +185,7 @@ def get_components_to_install( if 
org_path_match: org_path = org_path_match.group(1) else: - raise UserWarning(f"Organisation path could not be established for {component_name}") + raise UserWarning(f"The organisation path of {component_name} could not be established from '{git_remote}'") current_comp_dict = subworkflows if component_name in subworkflows else modules component_dict = { From e55c74dcc9aaa650ae6160bbf9454401100c203c Mon Sep 17 00:00:00 2001 From: jvfe Date: Thu, 22 Aug 2024 17:36:11 -0300 Subject: [PATCH 52/60] style: Run ruff format --- nf_core/components/components_utils.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 174002cfff..1b3c1fd182 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -185,7 +185,9 @@ def get_components_to_install( if org_path_match: org_path = org_path_match.group(1) else: - raise UserWarning(f"The organisation path of {component_name} could not be established from '{git_remote}'") + raise UserWarning( + f"The organisation path of {component_name} could not be established from '{git_remote}'" + ) current_comp_dict = subworkflows if component_name in subworkflows else modules component_dict = { From 2491207e1863275d409f7172863df6e27312b3d4 Mon Sep 17 00:00:00 2001 From: jvfe Date: Fri, 6 Sep 2024 14:16:16 -0300 Subject: [PATCH 53/60] refact: Use walrus operator in meta.yml check --- nf_core/components/components_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index ddf1301dbd..8a00e758cb 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -172,8 +172,8 @@ def get_components_to_install( component_dict = {"name": component_name} subworkflows[component_name] = component_dict - if Path(subworkflow_dir, "meta.yml").exists(): - with open(Path(subworkflow_dir, "meta.yml")) as fh: + if (sw_meta := Path(subworkflow_dir, "meta.yml")).exists(): + with open(sw_meta) as fh: meta = yaml.safe_load(fh) if "components" in meta: components = meta["components"] From a870787e04afa12dcd322da174dc9c4a7e3375f7 Mon Sep 17 00:00:00 2001 From: jvfe Date: Fri, 6 Sep 2024 14:27:30 -0300 Subject: [PATCH 54/60] refact: Use a union type for component arg --- nf_core/components/install.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 865e681a44..c9b04b10d6 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -1,7 +1,7 @@ import logging import os from pathlib import Path -from typing import List, Optional, Union +from typing import Dict, List, Optional, Union import questionary from rich import print @@ -49,7 +49,7 @@ def __init__( else: self.installed_by = [self.component_type] - def install(self, component: str, silent: bool = False) -> bool: + def install(self, component: Union[str, Dict[str, str]], silent: bool = False) -> bool: if isinstance(component, dict): remote_url = component.get("git_remote", self.current_remote) branch = component.get("branch", self.branch) From 1a8c3bebd9d6a4dcf9ba54f9b85bc9102f554a1b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Cavalcante?= Date: Fri, 13 Sep 2024 10:15:04 -0300 Subject: [PATCH 55/60] docs: Add comment for clarification about the feature MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 
Co-authored-by: Júlia Mir Pedrol --- nf_core/components/install.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index c9b04b10d6..03f419e623 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -51,6 +51,7 @@ def __init__( def install(self, component: Union[str, Dict[str, str]], silent: bool = False) -> bool: if isinstance(component, dict): + # Override modules_repo when the component to install is a dependency from a subworkflow. remote_url = component.get("git_remote", self.current_remote) branch = component.get("branch", self.branch) self.modules_repo = ModulesRepo(remote_url, branch) From 864eb7f0d08626fc06fca9739a329e8488274798 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Cavalcante?= Date: Wed, 4 Dec 2024 12:58:26 -0300 Subject: [PATCH 56/60] refact: Add suggestions from the third round of reviews (#15) * test: Add test for installing a module already installed from different org * fix: Remove unnecessary install command * test: Add test for updating cross-org subwfs * fix: Change hash to sha * fix: Add remote url * refact: Create shared instance of subworkflowinstall * test: to keep same named cross mods post subwf rm * refact: Don't use sha for the alternative repo * fix: Add else to sha check * fix: Move check to cross-org stuff * refact: Reverse check order * test: Add update test (#19) * wip: Add bug two * refact: Try defaulting to a dict * fix: Dont allow remote to be unbound * refact: Check if not equal to component type * refact: Re-add isinstance dict * fix: Remove mention of component_name * refact: Add special case for strings following dicts * test: Try removing branch from test case * refact: Remove isinstance for str * refact: Use dict only with get_comps out * Revert "refact: Use dict only with get_comps out" This reverts commit 7ebc6dced3b69563a54761a559e24854db7dd1b4. * refact: Check if content has subwfs * refact: Always set current remote * refact: Check sha before resetting current_repo * refact: Do the negative test * refact: Use another variable for remote * refact: Simplify check * refact: Expand check once again * refact: Roll back to previous check * fix: self.sha must not be none to reset to none * Revert "fix: self.sha must not be none to reset to none" This reverts commit 65aa1c4a2e4b96a1548a3131acc2b0eb525e9259. * Try removing the section entirely * refact: Try moving check to reset * refact: Remove unnecessary current_sha * refact: Change check in get_comps * refact: Set git remote beforehand * refact: Change indent so previous check is the same * refact: Remove current repo check * Revert "refact: Remove current repo check" This reverts commit a956e0432cf1f5581c785727129050e0d81a1f80. 
* refact: Try using name list * fix: Remove break in loop * refactor: Always set current_repo * fix: Check if name is in updated * refactor: Remove unused sections and logs * test: Use the same subworkflow in all test cases get_genome_annotation is not necessary, given we're using another subworkflow for the other tests * refact: Update cross_org remote to nf-core-test * fix: Change remote name in install test * refact: Remove none check from all_subworkflows Co-authored-by: Matthieu Muffato * docs: Add hash comment * refact: Use mod/subwf_name in both sections * test: Check updated subwf content --------- Co-authored-by: Matthieu Muffato --- nf_core/components/install.py | 28 ++++++++---- nf_core/components/update.py | 70 +++++++++++++++++++++++------- tests/subworkflows/test_install.py | 28 ++++++++---- tests/subworkflows/test_remove.py | 25 +++++++++++ tests/subworkflows/test_update.py | 31 ++++++++++++- tests/test_subworkflows.py | 9 ++++ tests/utils.py | 2 +- 7 files changed, 159 insertions(+), 34 deletions(-) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 03f419e623..f40b0712bf 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -44,6 +44,7 @@ def __init__( self.force = force self.prompt = prompt self.sha = sha + self.current_sha = sha if installed_by is not None: self.installed_by = installed_by else: @@ -57,6 +58,14 @@ def install(self, component: Union[str, Dict[str, str]], silent: bool = False) - self.modules_repo = ModulesRepo(remote_url, branch) component = component["name"] + if self.current_remote is None: + self.current_remote = self.modules_repo.remote_url + + if self.current_remote == self.modules_repo.remote_url and self.sha is not None: + self.current_sha = self.sha + else: + self.current_sha = None + if self.repo_type == "modules": log.error(f"You cannot install a {component} in a clone of nf-core/modules") return False @@ -80,8 +89,8 @@ def install(self, component: Union[str, Dict[str, str]], silent: bool = False) - return False # Verify SHA - if not self.modules_repo.verify_sha(self.prompt, self.sha): - err_msg = f"SHA '{self.sha}' is not a valid commit SHA for the repository '{self.modules_repo.remote_url}'" + if not self.modules_repo.verify_sha(self.prompt, self.current_sha): + err_msg = f"SHA '{self.current_sha}' is not a valid commit SHA for the repository '{self.modules_repo.remote_url}'" log.error(err_msg) return False @@ -124,7 +133,7 @@ def install(self, component: Union[str, Dict[str, str]], silent: bool = False) - modules_json.update(self.component_type, self.modules_repo, component, current_version, self.installed_by) return False try: - version = self.get_version(component, self.sha, self.prompt, current_version, self.modules_repo) + version = self.get_version(component, self.current_sha, self.prompt, current_version, self.modules_repo) except UserWarning as e: log.error(e) return False @@ -209,7 +218,7 @@ def collect_and_verify_name( if component is None: component = questionary.autocomplete( f"{'Tool' if self.component_type == 'modules' else 'Subworkflow'} name:", - choices=sorted(modules_repo.get_avail_components(self.component_type, commit=self.sha)), + choices=sorted(modules_repo.get_avail_components(self.component_type, commit=self.current_sha)), style=nf_core.utils.nfcore_question_style, ).unsafe_ask() @@ -217,7 +226,9 @@ def collect_and_verify_name( return "" # Check that the supplied name is an available module/subworkflow - if component and component not in 
modules_repo.get_avail_components(self.component_type, commit=self.sha): + if component and component not in modules_repo.get_avail_components( + self.component_type, commit=self.current_sha + ): log.error(f"{self.component_type[:-1].title()} '{component}' not found in available {self.component_type}") print( Panel( @@ -233,9 +244,10 @@ def collect_and_verify_name( raise ValueError - if not modules_repo.component_exists(component, self.component_type, commit=self.sha): - warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote '{modules_repo.remote_url}' ({modules_repo.branch})" - log.warning(warn_msg) + if self.current_remote == modules_repo.remote_url: + if not modules_repo.component_exists(component, self.component_type, commit=self.current_sha): + warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote '{modules_repo.remote_url}' ({modules_repo.branch})" + log.warning(warn_msg) return component diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 3e4694adc8..97aba9aa20 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -1,5 +1,6 @@ import logging import os +import re import shutil import tempfile from pathlib import Path @@ -41,6 +42,8 @@ def __init__( limit_output=False, ): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) + self.current_remote = remote_url + self.branch = branch self.force = force self.prompt = prompt self.sha = sha @@ -92,6 +95,13 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr Returns: (bool): True if the update was successful, False otherwise. """ + if isinstance(component, dict): + # Override modules_repo when the component to install is a dependency from a subworkflow. 
+ remote_url = component.get("git_remote", self.current_remote) + branch = component.get("branch", self.branch) + self.modules_repo = ModulesRepo(remote_url, branch) + component = component["name"] + self.component = component if updated is None: updated = [] @@ -868,7 +878,17 @@ def get_components_to_update(self, component): if self.component_type == "modules": # All subworkflow names in the installed_by section of a module are subworkflows using this module # We need to update them too - subworkflows_to_update = [subworkflow for subworkflow in installed_by if subworkflow != self.component_type] + git_remote = self.current_remote + for subworkflow in installed_by: + if subworkflow != component: + for remote_url, content in mods_json["repos"].items(): + if all_subworkflows := content.get("subworkflows"): + for _, details in all_subworkflows.items(): + if subworkflow in details: + git_remote = remote_url + if subworkflow != self.component_type: + subworkflows_to_update.append({"name": subworkflow, "git_remote": git_remote}) + elif self.component_type == "subworkflows": for repo, repo_content in mods_json["repos"].items(): for component_type, dir_content in repo_content.items(): @@ -879,9 +899,9 @@ def get_components_to_update(self, component): # We need to update it too if component in comp_content["installed_by"]: if component_type == "modules": - modules_to_update.append(comp) + modules_to_update.append({"name": comp, "git_remote": repo, "org_path": dir}) elif component_type == "subworkflows": - subworkflows_to_update.append(comp) + subworkflows_to_update.append({"name": comp, "git_remote": repo, "org_path": dir}) return modules_to_update, subworkflows_to_update @@ -896,7 +916,7 @@ def update_linked_components( Update modules and subworkflows linked to the component being updated. 
""" for s_update in subworkflows_to_update: - if s_update in updated: + if s_update["name"] in updated: continue original_component_type, original_update_all = self._change_component_type("subworkflows") self.update( @@ -908,7 +928,7 @@ def update_linked_components( self._reset_component_type(original_component_type, original_update_all) for m_update in modules_to_update: - if m_update in updated: + if m_update["name"] in updated: continue original_component_type, original_update_all = self._change_component_type("modules") try: @@ -931,28 +951,44 @@ def update_linked_components( def manage_changes_in_linked_components(self, component, modules_to_update, subworkflows_to_update): """Check for linked components added or removed in the new subworkflow version""" if self.component_type == "subworkflows": - subworkflow_directory = Path(self.directory, self.component_type, self.modules_repo.repo_path, component) + org_path_match = re.search(r"(?:https://|git@)[\w\.]+[:/](.*?)/", self.current_remote) + if org_path_match: + org_path = org_path_match.group(1) + + subworkflow_directory = Path(self.directory, self.component_type, org_path, component) included_modules, included_subworkflows = get_components_to_install(subworkflow_directory) # If a module/subworkflow has been removed from the subworkflow for module in modules_to_update: - if module not in included_modules: - log.info(f"Removing module '{module}' which is not included in '{component}' anymore.") + module_name = module["name"] + included_modules_names = [m["name"] for m in included_modules] + if module_name not in included_modules_names: + log.info(f"Removing module '{module_name}' which is not included in '{component}' anymore.") remove_module_object = ComponentRemove("modules", self.directory) - remove_module_object.remove(module, removed_by=component) + remove_module_object.remove(module_name, removed_by=component) for subworkflow in subworkflows_to_update: - if subworkflow not in included_subworkflows: - log.info(f"Removing subworkflow '{subworkflow}' which is not included in '{component}' anymore.") + subworkflow_name = subworkflow["name"] + included_subworkflow_names = [m["name"] for m in included_subworkflows] + if subworkflow_name not in included_subworkflow_names: + log.info( + f"Removing subworkflow '{subworkflow_name}' which is not included in '{component}' anymore." 
+ ) remove_subworkflow_object = ComponentRemove("subworkflows", self.directory) - remove_subworkflow_object.remove(subworkflow, removed_by=component) + remove_subworkflow_object.remove(subworkflow_name, removed_by=component) # If a new module/subworkflow is included in the subworklfow and wasn't included before for module in included_modules: - if module not in modules_to_update: - log.info(f"Installing newly included module '{module}' for '{component}'") + module_name = module["name"] + module["git_remote"] = module.get("git_remote", self.current_remote) + module["branch"] = module.get("branch", self.branch) + if module_name not in modules_to_update: + log.info(f"Installing newly included module '{module_name}' for '{component}'") install_module_object = ComponentInstall(self.directory, "modules", installed_by=component) install_module_object.install(module, silent=True) for subworkflow in included_subworkflows: - if subworkflow not in subworkflows_to_update: - log.info(f"Installing newly included subworkflow '{subworkflow}' for '{component}'") + subworkflow_name = subworkflow["name"] + subworkflow["git_remote"] = subworkflow.get("git_remote", self.current_remote) + subworkflow["branch"] = subworkflow.get("branch", self.branch) + if subworkflow_name not in subworkflows_to_update: + log.info(f"Installing newly included subworkflow '{subworkflow_name}' for '{component}'") install_subworkflow_object = ComponentInstall( self.directory, "subworkflows", installed_by=component ) @@ -971,3 +1007,5 @@ def _reset_component_type(self, original_component_type, original_update_all): self.component_type = original_component_type self.modules_json.pipeline_components = None self.update_all = original_update_all + if self.current_remote is None: + self.current_remote = self.modules_repo.remote_url diff --git a/tests/subworkflows/test_install.py b/tests/subworkflows/test_install.py index d6116b5096..b5010d49f7 100644 --- a/tests/subworkflows/test_install.py +++ b/tests/subworkflows/test_install.py @@ -7,7 +7,6 @@ from ..test_subworkflows import TestSubworkflows from ..utils import ( - CROSS_ORGANIZATION_BRANCH, CROSS_ORGANIZATION_URL, GITLAB_BRANCH_TEST_BRANCH, GITLAB_REPO, @@ -87,16 +86,29 @@ def test_subworkflows_install_different_branch_fail(self): def test_subworkflows_install_across_organizations(self): """Test installing a subworkflow with modules from different organizations""" - install_obj = SubworkflowInstall( - self.pipeline_dir, remote_url=CROSS_ORGANIZATION_URL, branch=CROSS_ORGANIZATION_BRANCH - ) - # The hic_bwamem2 subworkflow contains modules from different organizations - install_obj.install("get_genome_annotation") + # The fastq_trim_fastp_fastqc subworkflow contains modules from different organizations + self.subworkflow_install_cross_org.install("fastq_trim_fastp_fastqc") + # Verify that the installed_by entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert mod_json["repos"][CROSS_ORGANIZATION_URL]["modules"]["nf-core-test"]["fastqc"]["installed_by"] == [ + "fastq_trim_fastp_fastqc" + ] + + def test_subworkflow_install_with_same_module(self): + """Test installing a subworkflow with a module from a different organization that is already installed from another org""" + # The fastq_trim_fastp_fastqc subworkflow contains the cross-org fastqc module, not the nf-core one + self.subworkflow_install_cross_org.install("fastq_trim_fastp_fastqc") # Verify that the installed_by entry was added correctly modules_json = 
ModulesJson(self.pipeline_dir) mod_json = modules_json.get_modules_json() - assert mod_json["repos"][CROSS_ORGANIZATION_URL]["modules"]["jvfe"]["wget"]["installed_by"] == [ - "get_genome_annotation" + + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["fastqc"][ + "installed_by" + ] == ["modules"] + + assert mod_json["repos"][CROSS_ORGANIZATION_URL]["modules"]["nf-core-test"]["fastqc"]["installed_by"] == [ + "fastq_trim_fastp_fastqc" ] def test_subworkflows_install_tracking(self): diff --git a/tests/subworkflows/test_remove.py b/tests/subworkflows/test_remove.py index bad5a2ddbb..a4ff6a76ee 100644 --- a/tests/subworkflows/test_remove.py +++ b/tests/subworkflows/test_remove.py @@ -1,6 +1,7 @@ from pathlib import Path from nf_core.modules.modules_json import ModulesJson +from tests.utils import CROSS_ORGANIZATION_URL from ..test_subworkflows import TestSubworkflows @@ -99,3 +100,27 @@ def test_subworkflows_remove_included_subworkflow(self): assert Path.exists(samtools_index_path) is True assert Path.exists(samtools_stats_path) is True self.subworkflow_remove.remove("bam_sort_stats_samtools") + + def test_subworkflows_remove_subworkflow_keep_installed_cross_org_module(self): + """Test removing subworkflow and all it's dependencies after installing it, except for a separately installed module from another organisation""" + self.subworkflow_install_cross_org.install("fastq_trim_fastp_fastqc") + self.mods_install.install("fastqc") + + subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "jvfe") + fastq_trim_fastp_fastqc_path = Path(subworkflow_path, "fastq_trim_fastp_fastqc") + fastqc_path = Path(self.subworkflow_install.directory, "modules", "jvfe", "fastqc") + nfcore_fastqc_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "fastqc") + + mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() + assert self.subworkflow_remove_cross_org.remove("fastq_trim_fastp_fastqc") + mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() + + assert Path.exists(fastq_trim_fastp_fastqc_path) is False + assert Path.exists(fastqc_path) is False + assert Path.exists(nfcore_fastqc_path) is True + assert mod_json_before != mod_json_after + # assert subworkflows key is removed from modules.json + assert CROSS_ORGANIZATION_URL not in mod_json_after["repos"].keys() + assert ( + "fastqc" in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"].keys() + ) diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py index 153038cd1d..423eb516bc 100644 --- a/tests/subworkflows/test_update.py +++ b/tests/subworkflows/test_update.py @@ -11,10 +11,11 @@ from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.modules.modules_json import ModulesJson from nf_core.modules.update import ModuleUpdate +from nf_core.subworkflows.install import SubworkflowInstall from nf_core.subworkflows.update import SubworkflowUpdate from ..test_subworkflows import TestSubworkflows -from ..utils import OLD_SUBWORKFLOWS_SHA, cmp_component +from ..utils import CROSS_ORGANIZATION_URL, OLD_SUBWORKFLOWS_SHA, cmp_component class TestSubworkflowsUpdate(TestSubworkflows): @@ -372,3 +373,31 @@ def test_update_change_of_included_modules(self): assert "ensemblvep" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] assert "ensemblvep/vep" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] 
assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "ensemblvep/vep").is_dir() + + def test_update_subworkflow_across_orgs(self): + """Install and update a subworkflow with modules from different organizations""" + install_obj = SubworkflowInstall( + self.pipeline_dir, + remote_url=CROSS_ORGANIZATION_URL, + # Hash for an old version of fastq_trim_fastp_fastqc + # A dummy code change was made in order to have a commit to compare with + sha="9627f4367b11527194ef14473019d0e1a181b741", + ) + # The fastq_trim_fastp_fastqc subworkflow contains the cross-org fastqc module, not the nf-core one + install_obj.install("fastq_trim_fastp_fastqc") + + patch_path = Path(self.pipeline_dir, "fastq_trim_fastp_fastqc.patch") + update_obj = SubworkflowUpdate( + self.pipeline_dir, + remote_url=CROSS_ORGANIZATION_URL, + save_diff_fn=patch_path, + update_all=False, + update_deps=True, + show_diff=False, + ) + assert update_obj.update("fastq_trim_fastp_fastqc") is True + + with open(patch_path) as fh: + content = fh.read() + assert "- fastqc_raw_html = FASTQC_RAW.out.html" in content + assert "+ ch_fastqc_raw_html = FASTQC_RAW.out.html" in content diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 7c18ab0a2d..446d4aedff 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -12,6 +12,8 @@ import nf_core.subworkflows from .utils import ( + CROSS_ORGANIZATION_BRANCH, + CROSS_ORGANIZATION_URL, GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL, @@ -103,10 +105,17 @@ def setUp(self): force=False, sha="8c343b3c8a0925949783dc547666007c245c235b", ) + self.subworkflow_install_cross_org = nf_core.subworkflows.SubworkflowInstall( + self.pipeline_dir, remote_url=CROSS_ORGANIZATION_URL, branch=CROSS_ORGANIZATION_BRANCH + ) + self.mods_install = nf_core.modules.install.ModuleInstall(self.pipeline_dir, prompt=False, force=True) # Set up remove objects self.subworkflow_remove = nf_core.subworkflows.SubworkflowRemove(self.pipeline_dir) + self.subworkflow_remove_cross_org = nf_core.subworkflows.SubworkflowRemove( + self.pipeline_dir, remote_url=CROSS_ORGANIZATION_URL, branch=CROSS_ORGANIZATION_BRANCH + ) @pytest.fixture(autouse=True) def _use_caplog(self, caplog): diff --git a/tests/utils.py b/tests/utils.py index b6127ee056..9a661c5927 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -20,7 +20,7 @@ OLD_TRIMGALORE_SHA = "9b7a3bdefeaad5d42324aa7dd50f87bea1b04386" OLD_TRIMGALORE_BRANCH = "mimic-old-trimgalore" GITLAB_URL = "https://gitlab.com/nf-core/modules-test.git" -CROSS_ORGANIZATION_URL = "https://github.com/jvfe/test-subworkflow-remote.git" +CROSS_ORGANIZATION_URL = "https://github.com/nf-core-test/modules.git" CROSS_ORGANIZATION_BRANCH = "main" GITLAB_REPO = "nf-core-test" GITLAB_DEFAULT_BRANCH = "main" From c0b51d7295c93df2ed866a47d93988f9a7ce0bc6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Cavalcante?= Date: Wed, 4 Dec 2024 15:01:29 -0300 Subject: [PATCH 57/60] Merge dev third rev (#20) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * write only relevant fields in `.nf-core.yml` * switch from docstrings to Field descsriptions * [automated] Update CHANGELOG.md * update test to new error * second attempt to add ro crates * fix import * first running version * restructure code and add tests * add missing dep * add recommendations from comments * add git contributors * fix filename for CI * better help hint message * fix class name * add correct parent directory to faked crate * add empty 
ro-crate after sync * fix tests * fix ci test * use github to guess author name, set names as ids if no orcid * add bioschemas, datecreated and datemodified (only set to current time atm) * follow `ComputationalWorkflow` schema for main entitty. * fix date created field * add "about" field to workflow diagram * add input as separate entity * add version to main entity * fix incorrect type * switch to add_workflow method to add main entity and add components as datasets with descriptions * ro_crate -> rocrate * allow mixed list and dict in lint config * nested too deeply * [automated] Update CHANGELOG.md * use repo2crate to generate the main structure of the crate * chore(deps): update dependency prompt_toolkit to <=3.0.48 * chore(deps): update pre-commit hook astral-sh/ruff-pre-commit to v0.7.0 * chore(deps): update pre-commit hook pre-commit/mirrors-mypy to v1.12.0 * [automated] Update CHANGELOG.md * [automated] Update CHANGELOG.md * Fix typos * Fix typos (second round) * Undo accidental typos fixes * Fix typos (third round) * Update textual snapshots * chore(deps): update python:3.12-slim docker digest to 032c526 * [automated] Update CHANGELOG.md * handle new nf-core.yml structure * update documentation for `multiqc_config` linting * parse yaml correctly * found a better way to handle the ignore file being None * handle new lint config structure * add tests with different valid yaml structures * use correct profile options for sfw test command * [automated] Update CHANGELOG.md * remove last traces of LintConfigType * fix incorrect type * more type fixes * Update CHANGELOG.md * add all lint tests to config * switch all defaults to None and drop them on dump * Update nf_core/utils.py Co-authored-by: Júlia Mir Pedrol * Update nf_core/utils.py * drop None values when checking for test names * [automated] Fix code linting * no need for Field value * fix test_lint tests * fix worklfow_dispatch trigger and parse more review comments * [automated] Update CHANGELOG.md * Update pre-commit hook pre-commit/mirrors-mypy to v1.13.0 * chore(deps): update github actions * [automated] Update CHANGELOG.md * remove outdated file * Revert "Add new command `nf-core rocrate` to create a Research Object (RO) crate for a pipeline" * move test in correct directory * fix tests * Add seqera containers example * bump ci * Modify test_find_container_images_modules() for Seqera Containers. * Enable Seqera Containers download. * run tests without commiting ro-crate * add ro-crate creation to pipelines create command * fix command import * add rocrate to skip features * remove schema loading, because it is not needed anymore * update snapshots * `modules update`: add a panel around diffs * add `--dir/-d` option to schema commands * [automated] Update CHANGELOG.md * [automated] Update CHANGELOG.md * try to fix coverage report generation * fix test_cli * nf-core download: Fix forgotten symlinks in cache. * try specifying the coverage output file * try naming the files directly * fix variable name * add hiddnen files for upload-artifact action v4 * group diffs by module * Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.1 * [automated] Update CHANGELOG.md * Update GitHub Actions * Ensure that prioritize_direct_download() retains Seqera Singularity Containers and write additional test. 
* make group border blue * make module names white * Apply suggestions from code review Co-authored-by: Matthias Hörtenhuber * Add resource limits to Gitpod profile * Update changelog * Fix typos for infrastickers * Fix typo * Update changelog * Update Gitpod to use Miniforge instead of Miniconda * Update Changelog * Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.3 * [automated] Update CHANGELOG.md * handle new schema structure in create-params-file * add tests * [automated] Update CHANGELOG.md * add hint to solve git errors with a synced repo * run pre-commit when testing linting the template pipeline * [automated] Update CHANGELOG.md * [automated] Update CHANGELOG.md * fix template trailing spaces * write proper path and raise a UserWarning to avoid printing the error trace * Apply suggestions from code review Co-authored-by: Matthias Hörtenhuber * remove logging git error from wrong place * add option to skip .vscode from the template * update textual snapshots * update and fix swf patch tests * Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.4 * [automated] Update CHANGELOG.md * Update codecov/codecov-action action to v5 (#3283) * Update codecov/codecov-action action to v5 * [automated] Update CHANGELOG.md --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: nf-core-bot * Update python:3.12-slim Docker digest to 2a6386a (#3284) * Update python:3.12-slim Docker digest to 2a6386a * [automated] Update CHANGELOG.md --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: nf-core-bot * apply patch in reverse when linting a patched subworkflow * update get_patch_fn to work with subworkflows * move modules_differ.py to components_differ.py * add subworkflows patch missing tests * fix subworkflows update test * update changelog * add help text for --remove flag * apply code review suggestions to patch tests * Update tests/modules/test_patch.py * apply suggestions by @mashehu * remove def from nextflow.config and add trace_report_suffix param * [automated] Update CHANGELOG.md * Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 * [automated] Update CHANGELOG.md * add validation.monochromeLogs to config * ignore trace_report_suffix default check * update snapshots * add more tests * enable zip output * Move modules config import after base.config * Update nf_core/pipelines/create/template_features.yml * Update documentation * Update CHANGELOG * Update CHANGELOG.md * Fix linting * Update CHANGELOG * Download: Need to deduplicate Seqera Container matches as well, otherwise a race condition emerges. * Add new function to handle Seqera Container Oras URIs. * Ensure that oras:// containers are correctly handled. * Download: Add test data for oras:// modules. * Test the new container elimination routine. * Download: Update the tests. * Add dedicated ORAS image format error. * Include oras:// regex in download to recognize the paths. * Changelog.
* Update dawidd6/action-download-artifact action to v7 (#3306) * Update dawidd6/action-download-artifact action to v7 * [automated] Update CHANGELOG.md --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: nf-core-bot * Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.1 * Update gitpod/workspace-base Docker digest to 12853f7 (#3309) * Update gitpod/workspace-base Docker digest to 12853f7 * [automated] Update CHANGELOG.md --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: nf-core-bot * update modules and subworkflows * [automated] Update CHANGELOG.md * also add monochrome_logs if nf-schema is used * add manifest.contributors to nextflow.config * [automated] Update CHANGELOG.md * fix contributors map * Update nf_core/pipeline-template/.github/CONTRIBUTING.md Co-authored-by: Phil Ewels * Typo in error message. Co-authored-by: Matthias Hörtenhuber * test also the main sync function itself * combine json parsing code * loop over list of authors to supply contributors * remove broken test * fix type error * Apply suggestions from code review Co-authored-by: Júlia Mir Pedrol --------- Co-authored-by: mashehu Co-authored-by: nf-core-bot Co-authored-by: Matthias Hörtenhuber Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: Marcel Ribeiro-Dantas Co-authored-by: Júlia Mir Pedrol Co-authored-by: Jonathan Manning Co-authored-by: Matthias Zepper Co-authored-by: Matthias Zepper <6963520+MatthiasZepper@users.noreply.github.com> Co-authored-by: Mahesh Binzer-Panchal Co-authored-by: James A. Fellows Yates Co-authored-by: Simon Pearce <24893913+SPPearce@users.noreply.github.com> Co-authored-by: Phil Ewels Co-authored-by: Louis LE NEZET <58640615+LouisLeNezet@users.noreply.github.com> Co-authored-by: Louis Le Nezet Co-authored-by: LouisLeNezet --- .github/.coveragerc | 1 - .github/PULL_REQUEST_TEMPLATE.md | 2 +- .github/RELEASE_CHECKLIST.md | 14 +- .github/actions/create-lint-wf/action.yml | 14 +- .github/workflows/branch.yml | 16 +- .github/workflows/changelog.yml | 6 +- .github/workflows/create-lint-wf.yml | 11 +- .../create-test-lint-wf-template.yml | 24 +- .github/workflows/create-test-wf.yml | 14 +- .github/workflows/deploy-pypi.yml | 4 +- .github/workflows/fix-linting.yml | 4 +- .github/workflows/lint-code.yml | 4 +- .github/workflows/push_dockerhub_dev.yml | 2 +- .github/workflows/push_dockerhub_release.yml | 2 +- .github/workflows/pytest.yml | 36 +- .github/workflows/rich-codex.yml | 39 - .github/workflows/sync.yml | 26 +- .../workflows/update-textual-snapshots.yml | 6 +- .../workflows/update_components_template.yml | 4 +- .gitignore | 1 - .pre-commit-config.yaml | 6 +- CHANGELOG.md | 155 +- CITATION.cff | 2 +- CONTRIBUTING.md | 2 +- Dockerfile | 2 +- README.md | 10 +- docs/api/_src/api/index.md | 2 +- docs/api/_src/api/utils.md | 9 + docs/api/_src/conf.py | 12 +- .../pipeline_lint_tests/included_configs.md | 5 + docs/api/_src/pipeline_lint_tests/index.md | 2 + docs/api/requirements.txt | 1 + nf_core/__main__.py | 173 +- nf_core/commands_modules.py | 3 +- nf_core/commands_pipelines.py | 34 +- nf_core/commands_subworkflows.py | 3 +- .../components_differ.py} | 152 +- nf_core/components/components_utils.py | 27 + nf_core/components/create.py | 6 +- nf_core/components/info.py | 80 +- nf_core/components/lint/__init__.py | 6 +- nf_core/components/list.py | 2 +- nf_core/components/nfcore_component.py | 125 +- nf_core/components/patch.py | 20 +-
nf_core/components/remove.py | 2 +- nf_core/components/update.py | 34 +- nf_core/gitpod/gitpod.Dockerfile | 8 +- nf_core/module-template/meta.yml | 67 +- nf_core/modules/lint/__init__.py | 118 +- nf_core/modules/lint/main_nf.py | 8 +- nf_core/modules/lint/meta_yml.py | 277 +- nf_core/modules/lint/module_changes.py | 5 +- nf_core/modules/lint/module_patch.py | 17 +- nf_core/modules/modules_json.py | 74 +- nf_core/pipeline-template/.editorconfig | 4 +- .../pipeline-template/.github/CONTRIBUTING.md | 20 +- .../.github/workflows/awsfulltest.yml | 7 +- .../.github/workflows/ci.yml | 67 +- .../.github/workflows/download_pipeline.yml | 51 +- .../.github/workflows/fix-linting.yml | 4 +- .../.github/workflows/linting.yml | 14 +- .../.github/workflows/linting_comment.yml | 2 +- .../workflows/release-announcements.yml | 4 +- .../workflows/template_version_comment.yml | 23 +- nf_core/pipeline-template/.gitignore | 1 + .../pipeline-template/.pre-commit-config.yaml | 2 +- nf_core/pipeline-template/.prettierignore | 2 +- .../pipeline-template/.vscode/settings.json | 3 + nf_core/pipeline-template/CITATIONS.md | 11 +- nf_core/pipeline-template/README.md | 6 +- .../assets/multiqc_config.yml | 4 +- nf_core/pipeline-template/conf/base.config | 36 +- .../conf/igenomes_ignored.config | 9 + nf_core/pipeline-template/conf/modules.config | 4 +- nf_core/pipeline-template/conf/test.config | 13 +- nf_core/pipeline-template/docs/output.md | 18 +- nf_core/pipeline-template/docs/usage.md | 32 +- nf_core/pipeline-template/main.nf | 9 +- nf_core/pipeline-template/modules.json | 10 +- .../modules/nf-core/fastqc/environment.yml | 1 - .../modules/nf-core/fastqc/main.nf | 7 +- .../modules/nf-core/fastqc/meta.yml | 58 +- .../modules/nf-core/fastqc/tests/main.nf.test | 225 +- .../nf-core/fastqc/tests/main.nf.test.snap | 370 +- .../modules/nf-core/multiqc/environment.yml | 3 +- .../modules/nf-core/multiqc/main.nf | 16 +- .../modules/nf-core/multiqc/meta.yml | 78 +- .../nf-core/multiqc/tests/main.nf.test | 8 + .../nf-core/multiqc/tests/main.nf.test.snap | 24 +- .../nf-core/multiqc/tests/nextflow.config | 5 + nf_core/pipeline-template/nextflow.config | 169 +- .../pipeline-template/nextflow_schema.json | 48 +- .../utils_nfcore_pipeline_pipeline/main.nf | 52 +- .../nf-core/utils_nextflow_pipeline/main.nf | 62 +- .../tests/main.workflow.nf.test | 10 +- .../nf-core/utils_nfcore_pipeline/main.nf | 324 +- .../tests/main.function.nf.test | 52 - .../tests/main.function.nf.test.snap | 30 - .../utils_nfschema_plugin/tests/main.nf.test | 4 +- .../pipeline-template/workflows/pipeline.nf | 29 +- nf_core/pipelines/bump_version.py | 182 +- nf_core/pipelines/create/__init__.py | 19 +- nf_core/pipelines/create/basicdetails.py | 2 +- nf_core/pipelines/create/create.py | 30 +- nf_core/pipelines/create/custompipeline.py | 18 +- nf_core/pipelines/create/finaldetails.py | 2 +- nf_core/pipelines/create/githubrepo.py | 2 +- .../pipelines/create/template_features.yml | 33 +- nf_core/pipelines/download.py | 120 +- nf_core/pipelines/launch.py | 8 +- nf_core/pipelines/lint/__init__.py | 18 +- nf_core/pipelines/lint/files_exist.py | 15 +- nf_core/pipelines/lint/included_configs.py | 36 + nf_core/pipelines/lint/multiqc_config.py | 9 + nf_core/pipelines/lint/nextflow_config.py | 33 +- nf_core/pipelines/lint/nfcore_yml.py | 31 +- nf_core/pipelines/lint/readme.py | 17 +- nf_core/pipelines/lint/template_strings.py | 4 +- nf_core/pipelines/lint_utils.py | 30 +- nf_core/pipelines/params_file.py | 43 +- nf_core/pipelines/refgenie.py | 4 +- 
nf_core/pipelines/rocrate.py | 362 ++ nf_core/pipelines/schema.py | 22 +- nf_core/pipelines/sync.py | 54 +- nf_core/subworkflows/__init__.py | 1 + nf_core/subworkflows/lint/__init__.py | 62 +- nf_core/subworkflows/lint/meta_yml.py | 7 + .../subworkflows/lint/subworkflow_changes.py | 27 +- nf_core/subworkflows/patch.py | 10 + nf_core/synced_repo.py | 12 +- nf_core/utils.py | 297 +- requirements-dev.txt | 4 +- requirements.txt | 5 +- setup.py | 2 +- .../modules/mock_seqera_container_http.nf | 11 + .../modules/mock_seqera_container_oras.nf | 11 + .../mock_seqera_container_oras_mulled.nf | 11 + tests/modules/test_lint.py | 39 +- tests/modules/test_modules_json.py | 15 +- tests/modules/test_patch.py | 50 +- .../__snapshots__/test_create_app.ambr | 3321 ----------------- .../test_basic_details_custom.svg | 271 ++ .../test_basic_details_nfcore.svg | 274 ++ .../test_create_app/test_choose_type.svg | 269 ++ .../test_customisation_help.svg | 275 ++ .../test_create_app/test_final_details.svg | 269 ++ .../test_create_app/test_github_details.svg | 276 ++ .../test_github_exit_message.svg | 272 ++ .../test_create_app/test_github_question.svg | 265 ++ .../test_create_app/test_type_custom.svg | 274 ++ .../test_create_app/test_type_nfcore.svg | 272 ++ .../test_type_nfcore_validation.svg | 273 ++ .../test_create_app/test_welcome.svg | 271 ++ tests/pipelines/lint/test_files_exist.py | 85 +- tests/pipelines/lint/test_nextflow_config.py | 56 +- tests/pipelines/lint/test_nfcore_yml.py | 116 +- tests/pipelines/lint/test_template_strings.py | 28 +- tests/pipelines/test_bump_version.py | 19 +- tests/pipelines/test_download.py | 291 +- tests/pipelines/test_launch.py | 34 +- tests/pipelines/test_lint.py | 6 +- tests/pipelines/test_params_file.py | 90 +- tests/pipelines/test_rocrate.py | 127 + tests/pipelines/test_schema.py | 2 +- tests/pipelines/test_sync.py | 32 + tests/subworkflows/test_lint.py | 8 + tests/subworkflows/test_patch.py | 307 ++ tests/subworkflows/test_update.py | 2 +- tests/test_cli.py | 8 +- tests/test_modules.py | 15 +- tests/test_pipelines.py | 6 + tests/test_utils.py | 2 +- tests/utils.py | 13 +- 173 files changed, 8075 insertions(+), 5022 deletions(-) delete mode 100644 .github/workflows/rich-codex.yml create mode 100644 docs/api/_src/api/utils.md create mode 100644 docs/api/_src/pipeline_lint_tests/included_configs.md rename nf_core/{modules/modules_differ.py => components/components_differ.py} (76%) create mode 100644 nf_core/pipeline-template/.vscode/settings.json create mode 100644 nf_core/pipeline-template/conf/igenomes_ignored.config create mode 100644 nf_core/pipeline-template/modules/nf-core/multiqc/tests/nextflow.config create mode 100644 nf_core/pipelines/lint/included_configs.py create mode 100644 nf_core/pipelines/rocrate.py create mode 100644 nf_core/subworkflows/patch.py create mode 100644 tests/data/mock_module_containers/modules/mock_seqera_container_http.nf create mode 100644 tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf create mode 100644 tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf delete mode 100644 tests/pipelines/__snapshots__/test_create_app.ambr create mode 100644 tests/pipelines/__snapshots__/test_create_app/test_basic_details_custom.svg create mode 100644 tests/pipelines/__snapshots__/test_create_app/test_basic_details_nfcore.svg create mode 100644 tests/pipelines/__snapshots__/test_create_app/test_choose_type.svg create mode 100644 tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg create 
mode 100644 tests/pipelines/__snapshots__/test_create_app/test_final_details.svg create mode 100644 tests/pipelines/__snapshots__/test_create_app/test_github_details.svg create mode 100644 tests/pipelines/__snapshots__/test_create_app/test_github_exit_message.svg create mode 100644 tests/pipelines/__snapshots__/test_create_app/test_github_question.svg create mode 100644 tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg create mode 100644 tests/pipelines/__snapshots__/test_create_app/test_type_nfcore.svg create mode 100644 tests/pipelines/__snapshots__/test_create_app/test_type_nfcore_validation.svg create mode 100644 tests/pipelines/__snapshots__/test_create_app/test_welcome.svg create mode 100644 tests/pipelines/test_rocrate.py create mode 100644 tests/subworkflows/test_patch.py diff --git a/.github/.coveragerc b/.github/.coveragerc index 24a419ae07..cbdcccdac0 100644 --- a/.github/.coveragerc +++ b/.github/.coveragerc @@ -2,4 +2,3 @@ omit = nf_core/*-template/* source = nf_core relative_files = True - diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 71411be1b9..9dbd7a1f6b 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -6,7 +6,7 @@ These are the most common things requested on pull requests (PRs). Remember that PRs should be made against the dev branch, unless you're preparing a release. -Learn more about contributing: https://github.com/nf-core/tools/tree/master/.github/CONTRIBUTING.md +Learn more about contributing: https://github.com/nf-core/tools/tree/main/.github/CONTRIBUTING.md --> ## PR checklist diff --git a/.github/RELEASE_CHECKLIST.md b/.github/RELEASE_CHECKLIST.md index 9a1905c7a0..d64799382f 100644 --- a/.github/RELEASE_CHECKLIST.md +++ b/.github/RELEASE_CHECKLIST.md @@ -6,14 +6,13 @@ 4. Check that modules/subworkflows in template are up to date with the latest releases 5. Create a PR to `dev` to bump the version in `CHANGELOG.md` and `setup.py` and change the gitpod container to `nfcore/gitpod:latest`. 6. Make sure all CI tests are passing! -7. Create a PR from `dev` to `master` -8. Make sure all CI tests are passing again (additional tests are run on PRs to `master`) +7. Create a PR from `dev` to `main` +8. Make sure all CI tests are passing again (additional tests are run on PRs to `main`) 9. Request review (2 approvals required) -10. Run `rich-codex` to regenerate docs screengrabs (actions `workflow_dispatch` button) -11. Merge the PR into `master` -12. Wait for CI tests on the commit to passed -13. (Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. -14. Create a new release copying the `CHANGELOG` for that release into the description section. +10. Merge the PR into `main` +11. Wait for CI tests on the commit to pass +12. (Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. +13. Create a new release copying the `CHANGELOG` for that release into the description section. ## After release @@ -21,3 +20,4 @@ 2. Check that the automatic `PyPi` deployment has worked: [pypi.org/project/nf-core](https://pypi.org/project/nf-core/) 3. Check `BioConda` has an automated PR to bump the version, and merge. eg. [bioconda/bioconda-recipes #20065](https://github.com/bioconda/bioconda-recipes/pull/20065) 4. 
Create a tools PR to `dev` to bump back to the next development version in `CHANGELOG.md` and `setup.py` and change the gitpod container to `nfcore/gitpod:dev`. +5. Run `rich-codex` on the [tools/website repo](https://github.com/nf-core/website/actions/workflows/rich-codex.yml) to regenerate docs screengrabs (actions `workflow_dispatch` button) diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index 8760901db1..3ef0760513 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -53,12 +53,24 @@ runs: run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + shell: bash + run: find nf-core-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; + working-directory: create-lint-wf + # Replace zenodo.XXXXXX to pass readme linting - name: replace zenodo.XXXXXX shell: bash run: find nf-core-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; working-directory: create-lint-wf + # Add empty ro-crate file + - name: add empty ro-crate file + shell: bash + run: touch nf-core-testpipeline/ro-crate-metadata.json + working-directory: create-lint-wf + # Run nf-core pipelines linting - name: nf-core pipelines lint shell: bash @@ -77,7 +89,7 @@ runs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 with: name: nf-core-log-file-${{ matrix.NXF_VER }} path: create-lint-wf/log.txt diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml index bbac1cc6ff..9775c70e10 100644 --- a/.github/workflows/branch.yml +++ b/.github/workflows/branch.yml @@ -1,15 +1,15 @@ name: nf-core branch protection -# This workflow is triggered on PRs to master branch on the repository -# It fails when someone tries to make a PR against the nf-core `master` branch instead of `dev` +# This workflow is triggered on PRs to main branch on the repository +# It fails when someone tries to make a PR against the nf-core `main` branch instead of `dev` on: pull_request_target: - branches: [master] + branches: [main] jobs: test: runs-on: ubuntu-latest steps: - # PRs to the nf-core repo master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches + # PRs to the nf-core repo main branch are only ok if coming from the nf-core repo `dev` or any `patch` branches - name: Check PRs if: github.repository == 'nf-core/tools' run: | @@ -21,7 +21,7 @@ jobs: uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 with: message: | - ## This PR is against the `master` branch :x: + ## This PR is against the `main` branch :x: * Do not close this PR * Click _Edit_ and change the `base` to `dev` @@ -31,9 +31,9 @@ jobs: Hi @${{ github.event.pull_request.user.login }}, - It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `master` branch. - The `master` branch on nf-core repositories should always contain code from the latest release. - Because of this, PRs to `master` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. 
+ It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `main` branch. + The `main` branch on nf-core repositories should always contain code from the latest release. + Because of this, PRs to `main` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page. Note that even after this, the test will continue to show as failing until you push a new commit. diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml index cebcc854bc..d4fe34c25a 100644 --- a/.github/workflows/changelog.yml +++ b/.github/workflows/changelog.yml @@ -19,7 +19,7 @@ jobs: ) steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} @@ -36,7 +36,7 @@ jobs: fi gh pr checkout $PR_NUMBER - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" @@ -65,7 +65,7 @@ jobs: echo "File changed: ${{ env.changed }}" - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" cache: "pip" diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index f07b31e9de..37ab71bc3b 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -38,7 +38,7 @@ jobs: strategy: matrix: NXF_VER: - - "23.10.0" + - "24.04.2" - "latest-everything" steps: - name: go to subdirectory and change nextflow workdir @@ -48,12 +48,12 @@ jobs: export NXF_WORK=$(pwd) # Get the repo code - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out source-code repository # Set up nf-core/tools - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" cache: pip @@ -78,6 +78,11 @@ jobs: run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + run: find nf-core-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; + working-directory: create-lint-wf + # Run the other nf-core commands - name: nf-core pipelines list run: nf-core --log-file log.txt pipelines list diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 1fb521b4bb..fffa9ffe7a 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -40,7 +40,7 @@ jobs: - name: 🏗 Set up yq uses: frenck/action-setup-yq@v1 - name: checkout - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Create Matrix id: create_matrix run: 
| @@ -77,11 +77,11 @@ jobs: cd create-lint-wf-template export NXF_WORK=$(pwd) - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out source-code repository - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" @@ -137,16 +137,32 @@ jobs: run: find my-prefix-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-test-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + run: find my-prefix-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; + working-directory: create-test-lint-wf + # Replace zenodo.XXXXXX to pass readme linting - name: replace zenodo.XXXXXX run: find my-prefix-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; working-directory: create-test-lint-wf + # Add empty ro-crate file + - name: add empty ro-crate file + run: touch my-prefix-testpipeline/ro-crate-metadata.json + working-directory: create-test-lint-wf + # Run nf-core linting - name: nf-core pipelines lint run: nf-core --log-file log.txt --hide-progress pipelines lint --dir my-prefix-testpipeline --fail-warned working-directory: create-test-lint-wf + # Run code style linting + - name: run pre-commit + shell: bash + run: pre-commit run --all-files + working-directory: create-test-lint-wf + # Run bump-version - name: nf-core pipelines bump-version run: nf-core --log-file log.txt pipelines bump-version --dir my-prefix-testpipeline/ 1.1 @@ -163,7 +179,7 @@ jobs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 with: name: nf-core-log-file-${{ matrix.TEMPLATE }} path: create-test-lint-wf/artifact_files.tar diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 56c6c822a9..93581b9153 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -39,7 +39,7 @@ jobs: strategy: matrix: NXF_VER: - - "23.10.0" + - "24.04.2" - "latest-everything" steps: - name: go to working directory @@ -48,11 +48,11 @@ jobs: cd create-test-wf export NXF_WORK=$(pwd) - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out source-code repository - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" @@ -71,15 +71,11 @@ jobs: mkdir create-test-wf && cd create-test-wf export NXF_WORK=$(pwd) nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" - # echo current directory - pwd - # echo content of current directory - ls -la - nextflow run nf-core-testpipeline -profile test,self_hosted_runner --outdir ./results + nextflow run nf-core-testpipeline -profile self_hosted_runner,test --outdir ./results - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 with: name: nf-core-log-file-${{ 
matrix.NXF_VER }} path: create-test-wf/log.txt diff --git a/.github/workflows/deploy-pypi.yml b/.github/workflows/deploy-pypi.yml index 1202891e4d..d2b631f15f 100644 --- a/.github/workflows/deploy-pypi.yml +++ b/.github/workflows/deploy-pypi.yml @@ -13,11 +13,11 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out source-code repository - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml index 4334871c4c..6d749b0d98 100644 --- a/.github/workflows/fix-linting.yml +++ b/.github/workflows/fix-linting.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: # Use the @nf-core-bot token to check out so we can push later - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: token: ${{ secrets.nf_core_bot_auth_token }} @@ -32,7 +32,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} # Install and run pre-commit - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml index 3bddd42d49..7ffd6e9df9 100644 --- a/.github/workflows/lint-code.yml +++ b/.github/workflows/lint-code.yml @@ -18,10 +18,10 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" cache: "pip" diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml index c613e13a2d..6409335ac7 100644 --- a/.github/workflows/push_dockerhub_dev.yml +++ b/.github/workflows/push_dockerhub_dev.yml @@ -23,7 +23,7 @@ jobs: fail-fast: false steps: - name: Check out code - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Build nfcore/tools:dev docker image run: docker build --no-cache . -t nfcore/tools:dev diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml index 5a076f6d3b..d29b03b687 100644 --- a/.github/workflows/push_dockerhub_release.yml +++ b/.github/workflows/push_dockerhub_release.yml @@ -23,7 +23,7 @@ jobs: fail-fast: false steps: - name: Check out code - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Build nfcore/tools:latest docker image run: docker build --no-cache . 
-t nfcore/tools:latest diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index dc88031886..76d5d710c0 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -49,7 +49,7 @@ jobs: steps: - name: Check conditions id: conditions - run: echo "run-tests=${{ github.ref == 'refs/heads/master' || (matrix.runner == 'ubuntu-20.04' && matrix.python-version == '3.8') }}" >> "$GITHUB_OUTPUT" + run: echo "run-tests=${{ github.ref == 'refs/heads/main' || (matrix.runner == 'ubuntu-20.04' && matrix.python-version == '3.8') }}" >> "$GITHUB_OUTPUT" outputs: python-version: ${{ matrix.python-version }} @@ -61,7 +61,7 @@ jobs: name: Get test file matrix runs-on: "ubuntu-latest" steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out source-code repository - name: List tests @@ -87,11 +87,11 @@ jobs: cd pytest export NXF_WORK=$(pwd) - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out source-code repository - name: Set up Python ${{ needs.setup.outputs.python-version }} - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: ${{ needs.setup.outputs.python-version }} cache: "pip" @@ -132,7 +132,7 @@ jobs: - name: Test with pytest run: | - python3 -m pytest tests/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0|| exit_code=$? + python3 -m pytest tests/${{matrix.test}} --color=yes --cov --cov-config=.coveragerc --durations=0 && exit_code=0|| exit_code=$? # don't fail if no tests were collected, e.g. for test_licence.py if [ "${exit_code}" -eq 5 ]; then echo "No tests were collected" @@ -142,21 +142,23 @@ jobs: exit 1 fi - - name: Store snapshot report - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 - if: always() - with: - name: Snapshot Report ${{ matrix.test }} - path: ./snapshot_report.html - - name: remove slashes from test name run: | test=$(echo ${{ matrix.test }} | sed 's/\//__/g') echo "test=${test}" >> $GITHUB_ENV + - name: Store snapshot report + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 + if: always() + with: + include-hidden-files: true + name: Snapshot Report ${{ env.test }} + path: ./snapshot_report.html + - name: Upload coverage - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 with: + include-hidden-files: true name: coverage_${{ env.test }} path: .coverage @@ -170,9 +172,9 @@ jobs: mkdir -p pytest cd pytest - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 env: AGENT_TOOLSDIRECTORY: /opt/actions-runner/_work/tools/tools/ with: @@ -189,14 +191,14 @@ jobs: mv .github/.coveragerc . 
- name: Download all artifacts - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4 - name: Run coverage run: | coverage combine --keep coverage*/.coverage* coverage report coverage xml - - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be # v4 + - uses: codecov/codecov-action@5c47607acb93fed5485fdbf7232e8a31425f672a # v5 with: files: coverage.xml env: diff --git a/.github/workflows/rich-codex.yml b/.github/workflows/rich-codex.yml deleted file mode 100644 index cd12b139d3..0000000000 --- a/.github/workflows/rich-codex.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Generate images for docs -on: - workflow_dispatch: -jobs: - rich_codex: - runs-on: ubuntu-latest - steps: - - name: Check out the repo - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 - - name: Set up Python - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 - with: - python-version: 3.x - cache: pip - cache-dependency-path: setup.py - - - name: Install Nextflow - uses: nf-core/setup-nextflow@v2 - - - name: Install nf-test - uses: nf-core/setup-nf-test@v1 - - - name: Install nf-core/tools - run: pip install git+https://github.com/nf-core/tools.git@dev - - - name: Generate terminal images with rich-codex - uses: ewels/rich-codex@8ce988cc253c240a3027ba58e33e47640935dd8b # v1 - env: - COLUMNS: 100 - HIDE_PROGRESS: "true" - with: - commit_changes: "true" - clean_img_paths: docs/images/*.svg - terminal_width: 100 - before_command: > - which nextflow && - which nf-core && - nextflow -version && - nf-core --version diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index 55880e8130..625f00d247 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -1,7 +1,7 @@ name: Sync template on: - release: - types: [published] + # release: + # types: [published] workflow_dispatch: inputs: testpipeline: @@ -19,6 +19,10 @@ on: description: "Force a PR to be created" type: boolean default: false + pipeline: + description: "Pipeline to sync" + type: string + default: "all" # Cancel if a newer run is started concurrency: @@ -35,6 +39,14 @@ jobs: run: | if [ "${{ github.event.inputs.testpipeline }}" == "true" ]; then echo '{"pipeline":["testpipeline"]}' > pipeline_names.json + elif [ "${{ github.event.inputs.pipeline }}" != "all" ] && [ "${{ github.event.inputs.pipeline }}" != "" ]; then + curl -O https://nf-co.re/pipeline_names.json + # check if the pipeline exists + if ! 
grep -q "\"${{ github.event.inputs.pipeline }}\"" pipeline_names.json; then + echo "Pipeline ${{ github.event.inputs.pipeline }} does not exist" + exit 1 + fi + echo '{"pipeline":["${{ github.event.inputs.pipeline }}"]}' > pipeline_names.json else curl -O https://nf-co.re/pipeline_names.json fi @@ -48,10 +60,10 @@ jobs: matrix: ${{fromJson(needs.get-pipelines.outputs.matrix)}} fail-fast: false steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out nf-core/tools - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out nf-core/${{ matrix.pipeline }} with: repository: nf-core/${{ matrix.pipeline }} @@ -61,7 +73,7 @@ jobs: fetch-depth: "0" - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" @@ -82,7 +94,7 @@ jobs: run: | git config --global user.email "core@nf-co.re" git config --global user.name "nf-core-bot" - nf-core --log-file sync_log_${{ matrix.pipeline }}.txt sync -d nf-core/${{ matrix.pipeline }} \ + nf-core --log-file sync_log_${{ matrix.pipeline }}.txt pipelines sync -d nf-core/${{ matrix.pipeline }} \ --from-branch dev \ --pull-request \ --username nf-core-bot \ @@ -90,7 +102,7 @@ jobs: - name: Upload sync log file artifact if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 with: name: sync_log_${{ matrix.pipeline }} path: sync_log_${{ matrix.pipeline }}.txt diff --git a/.github/workflows/update-textual-snapshots.yml b/.github/workflows/update-textual-snapshots.yml index fb936762f8..9706ef55c0 100644 --- a/.github/workflows/update-textual-snapshots.yml +++ b/.github/workflows/update-textual-snapshots.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: # Use the @nf-core-bot token to check out so we can push later - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: token: ${{ secrets.nf_core_bot_auth_token }} @@ -33,7 +33,7 @@ jobs: # Install dependencies and run pytest - name: Set up Python - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" cache: "pip" @@ -46,7 +46,7 @@ jobs: - name: Run pytest to update snapshots id: pytest run: | - python3 -m pytest tests/test_create_app.py --snapshot-update --color=yes --durations=0 + python3 -m pytest tests/pipelines/test_create_app.py --snapshot-update --color=yes --durations=0 continue-on-error: true # indication that the run has finished diff --git a/.github/workflows/update_components_template.yml b/.github/workflows/update_components_template.yml index e2ecebfcb4..575065d383 100644 --- a/.github/workflows/update_components_template.yml +++ b/.github/workflows/update_components_template.yml @@ -11,10 +11,10 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Set up Python - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: 
actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.x" diff --git a/.gitignore b/.gitignore index a3721da86e..7fe467abc9 100644 --- a/.gitignore +++ b/.gitignore @@ -113,7 +113,6 @@ ENV/ # Jetbrains IDEs .idea pip-wheel-metadata -.vscode .*.sw? # Textual diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4f08d8419d..1494f58182 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.0 + rev: v0.8.1 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix @@ -13,13 +13,13 @@ repos: - prettier@3.3.3 - repo: https://github.com/editorconfig-checker/editorconfig-checker.python - rev: "2.7.3" + rev: "3.0.3" hooks: - id: editorconfig-checker alias: ec - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.11.1" + rev: "v1.13.0" hooks: - id: mypy additional_dependencies: diff --git a/CHANGELOG.md b/CHANGELOG.md index 1bcc937852..da5f72c357 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,18 +1,103 @@ # nf-core/tools: Changelog -## v3.0.0dev +## v3.0.3dev + +### Template + +- Keep pipeline name in version.yml file ([#3223](https://github.com/nf-core/tools/pull/3223)) +- Fix Manifest DOI text ([#3224](https://github.com/nf-core/tools/pull/3224)) +- Do not assume pipeline name is url ([#3225](https://github.com/nf-core/tools/pull/3225)) +- fix workflow_dispatch trigger and parse more review comments in awsfulltest ([#3235](https://github.com/nf-core/tools/pull/3235)) +- Add resource limits to Gitpod profile([#3255](https://github.com/nf-core/tools/pull/3255)) +- Fix a typo ([#3268](https://github.com/nf-core/tools/pull/3268)) +- Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) +- Move `includeConfig 'conf/modules.config'` next to `includeConfig 'conf/base.config'` to not overwrite tests profiles configurations ([#3301](https://github.com/nf-core/tools/pull/3301)) +- Use `params.monochrome_logs` in the template and update nf-core components ([#3310](https://github.com/nf-core/tools/pull/3310)) +- Fix some typos and improve writing in `usage.md` and `CONTRIBUTING.md` ([#3302](https://github.com/nf-core/tools/pull/3302)) +- Add `manifest.contributors` to `nextflow.config` ([#3311](https://github.com/nf-core/tools/pull/3311)) + +### Download + +- First steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Modify `prioritize_direct_download()` to retain Seqera Singularity https:// Container URIs and hardcode Seqera Containers into `gather_registries()` ([#3244](https://github.com/nf-core/tools/pull/3244)). +- Further steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Enable limited support for `oras://` container paths (_only absolute URIs, no flexible registries like with Docker_) and prevent unnecessary image downloads for Seqera Container modules with `reconcile_seqera_container_uris()` ([#3293](https://github.com/nf-core/tools/pull/3293)). 
+- Update dawidd6/action-download-artifact action to v7 ([#3306](https://github.com/nf-core/tools/pull/3306)) + +### Linting + +- allow mixed `str` and `dict` entries in lint config ([#3228](https://github.com/nf-core/tools/pull/3228)) + +### Modules + +- add a panel around diff previews when updating ([#3246](https://github.com/nf-core/tools/pull/3246)) + +### Subworkflows + +- Add `nf-core subworkflows patch` command ([#2861](https://github.com/nf-core/tools/pull/2861)) + +### General + +- Include .nf-core.yml in `nf-core pipelines bump-version` ([#3220](https://github.com/nf-core/tools/pull/3220)) +- create: add shortcut to toggle all switches ([#3226](https://github.com/nf-core/tools/pull/3226)) +- Remove unrelated values when saving `.nf-core` file ([#3227](https://github.com/nf-core/tools/pull/3227)) +- chore(deps): update pre-commit hook pre-commit/mirrors-mypy to v1.12.0 ([#3230](https://github.com/nf-core/tools/pull/3230)) +- chore(deps): update pre-commit hook astral-sh/ruff-pre-commit to v0.7.0 ([#3229](https://github.com/nf-core/tools/pull/3229)) +- Update python:3.12-slim Docker digest to 032c526 ([#3232](https://github.com/nf-core/tools/pull/3232)) +- use correct `--profile` options for `nf-core subworkflows test` ([#3233](https://github.com/nf-core/tools/pull/3233)) +- Update GitHub Actions ([#3237](https://github.com/nf-core/tools/pull/3237)) +- add `--dir/-d` option to schema commands ([#3247](https://github.com/nf-core/tools/pull/3247)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.1 ([#3250](https://github.com/nf-core/tools/pull/3250)) +- handle new schema structure in `nf-core pipelines create-params-file` ([#3276](https://github.com/nf-core/tools/pull/3276)) +- Update Gitpod image to use Miniforge instead of Miniconda([#3274](https://github.com/nf-core/tools/pull/3274)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.3 ([#3275](https://github.com/nf-core/tools/pull/3275)) +- Add hint to solve git errors with a synced repo ([#3279](https://github.com/nf-core/tools/pull/3279)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.4 ([#3282](https://github.com/nf-core/tools/pull/3282)) +- Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) +- Update python:3.12-slim Docker digest to 2a6386a ([#3284](https://github.com/nf-core/tools/pull/3284)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 ([#3299](https://github.com/nf-core/tools/pull/3299)) +- Update gitpod/workspace-base Docker digest to 12853f7 ([#3309](https://github.com/nf-core/tools/pull/3309)) +- Run pre-commit when testing linting the template pipeline ([#3280](https://github.com/nf-core/tools/pull/3280)) + +## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] + +### Template + +- Add null/ to .gitignore ([#3191](https://github.com/nf-core/tools/pull/3191)) +- Parallelize pipeline GHA tests over docker/conda/singularity ([#3214](https://github.com/nf-core/tools/pull/3214)) +- Fix `template_version_comment.yml` github action ([#3212](https://github.com/nf-core/tools/pull/3212)) +- Fix pre-commit linting on pipeline template ([#3218](https://github.com/nf-core/tools/pull/3218)) + +### Linting + +- Fix bug when linting schema params and when using `defaultIgnoreParams` ([#3213](https://github.com/nf-core/tools/pull/3213)) + +### General + +- Use updated pipeline commands in docstrings ([#3215](https://github.com/nf-core/tools/pull/3215)) +- Disable automatic sync on release, 
fix handling empty pipeline input ([#3217](https://github.com/nf-core/tools/pull/3217)) + +## [v3.0.1 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.1) - [2024-10-09] + +### Template + +- Fixed an issue where the linting CI action didn't read the correct file ([#3202](https://github.com/nf-core/tools/pull/3202)) +- Fixed condition for `awsfulltest` to run ([#3203](https://github.com/nf-core/tools/pull/3203)) +- Fix too many empty lines added by jinja ([#3204](https://github.com/nf-core/tools/pull/3204) and [#3206](https://github.com/nf-core/tools/pull/3206)) +- Fix header blocks in local subworkflow including git merge marker-like strings ([#3201](https://github.com/nf-core/tools/pull/3201)) +- Update included subworkflows and modules ([#3208](https://github.com/nf-core/tools/pull/3208)) + +## [v3.0.0 - Titanium Tapir](https://github.com/nf-core/tools/releases/tag/3.0.0) - [2024-10-08] **Highlights** -- Pipeline commands are renamed from `nf-core <command>` to `nf-core pipelines <command>` to follow the same command structure as modules and subworkflows commands. +- Pipeline commands are renamed from `nf-core <command>` to `nf-core pipelines <command>` to follow the same command structure as modules and subworkflows commands. - More customisation for pipeline templates. The template has been divided into features which can be skipped, e.g. you can create a new pipeline without any traces of FastQC in it. - A new Text User Interface app when running `nf-core pipelines create` to help us guide you through the process better (no worries, you can still use the cli if you give all values as parameters) - We replaced nf-validation with nf-schema in the pipeline template - CI tests now lint with the nf-core tools version matching the template version of the pipeline, to minimise errors in opened PRs with every new tools release. -- New command `nf-core pipelines ro-crate` to create a [Research Object (RO) crate](https://www.researchobject.org/ro-crate/) for a pipeline - `nf-core licences` command is deprecated. -- The structure of nf-core/tools pytests has been updated -- The structure of the API docs has been updated +- Changed default branch to `main`. +- The structure of nf-core/tools pytests has been updated. +- The structure of the API docs has been updated.
### Template @@ -23,21 +108,27 @@ - Remove release announcement for non nf-core pipelines ([#3072](https://github.com/nf-core/tools/pull/3072)) - handle template features with a yaml file ([#3108](https://github.com/nf-core/tools/pull/3108), [#3112](https://github.com/nf-core/tools/pull/3112)) - add option to exclude code linters for custom pipeline template ([#3084](https://github.com/nf-core/tools/pull/3084)) -- add option to exclude citations for custom pipeline template ([#3101](https://github.com/nf-core/tools/pull/3101)) +- add option to exclude citations for custom pipeline template ([#3101](https://github.com/nf-core/tools/pull/3101) and [#3169](https://github.com/nf-core/tools/pull/3169)) - add option to exclude gitpod for custom pipeline template ([#3100](https://github.com/nf-core/tools/pull/3100)) - add option to exclude codespaces from pipeline template ([#3105](https://github.com/nf-core/tools/pull/3105)) - add option to exclude multiqc from pipeline template ([#3103](https://github.com/nf-core/tools/pull/3103)) - add option to exclude changelog from custom pipeline template ([#3104](https://github.com/nf-core/tools/pull/3104)) - add option to exclude license from pipeline template ([#3125](https://github.com/nf-core/tools/pull/3125)) - add option to exclude email from pipeline template ([#3126](https://github.com/nf-core/tools/pull/3126)) -- Use nf-schema instead of nf-validation ([#3116](https://github.com/nf-core/tools/pull/3116)) - add option to exclude nf-schema from the template ([#3116](https://github.com/nf-core/tools/pull/3116)) - add option to exclude fastqc from pipeline template ([#3129](https://github.com/nf-core/tools/pull/3129)) - add option to exclude documentation from pipeline template ([#3130](https://github.com/nf-core/tools/pull/3130)) - add option to exclude test configs from pipeline template ([#3133](https://github.com/nf-core/tools/pull/3133)) - add option to exclude tower.yml from pipeline template ([#3134](https://github.com/nf-core/tools/pull/3134)) +- Use nf-schema instead of nf-validation ([#3116](https://github.com/nf-core/tools/pull/3116)) +- test pipeline with conda and singularity on PRs to master ([#3149](https://github.com/nf-core/tools/pull/3149)) - run nf-core lint `--release` on PRs to master ([#3148](https://github.com/nf-core/tools/pull/3148)) - Add tests to ensure all files are part of a template customisation group and all groups are tested ([#3099](https://github.com/nf-core/tools/pull/3099)) +- Update the syntax of `utils_nfcore_pipeline_pipeline` local subworkflow ([#3166](https://github.com/nf-core/tools/pull/3166)) +- Remove if/else block to include `igenomes.config` ([#3168](https://github.com/nf-core/tools/pull/3168)) +- Fixed release announcement hashtags for Mastodon ([#3176](https://github.com/nf-core/tools/pull/3176)) +- Remove try/catch blocks from `nextflow.config` ([#3167](https://github.com/nf-core/tools/pull/3167)) +- Extend `download_pipeline.yml` to count pre-downloaded container images.
([#3182](https://github.com/nf-core/tools/pull/3182)) ### Linting @@ -55,22 +146,35 @@ ### Pipeline create command -- Create: allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) -- Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) -- Create app: display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) -- Pipelines: allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) +- Allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) +- Mock git credentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) +- Display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) +- Allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) ### Components - The `modules_nfcore` tag in the `main.nf.test` file of modules/subworkflows now displays the organization name in custom modules repositories ([#3005](https://github.com/nf-core/tools/pull/3005)) - Add `--migrate_pytest` option to `nf-core test` command ([#3085](https://github.com/nf-core/tools/pull/3085)) -- Components: allow spaces at the beginning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) +- Allow spaces at the beginning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) +- Add option `--fix` to update the `meta.yml` file of subworkflows ([#3077](https://github.com/nf-core/tools/pull/3077)) + +### Download + +- Fully removed already deprecated `-t` / `--tower` flag. +- Refactored the CLI for consistency (short flag is usually second word, e.g.
also for `--container-library` etc.): + +| Old parameter | New parameter | +| --------------------------------- | --------------------------------- | +| `-d` / `--download-configuration` | `-c` / `--download-configuration` | +| `-p` / `--parallel-downloads` | `-d` / `--parallel-downloads` | +| new parameter | `-p` / (`--platform`) | ### General +- Change default branch to `main` for the nf-core/tools repository - Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) +- Remove `rich-codex.yml` action, images are now generated on the website repo ([#2989](https://github.com/nf-core/tools/pull/2989)) - Add no clobber and put bash options on their own line ([#2991](https://github.com/nf-core/tools/pull/2991)) -- update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) - move pipeline subcommands for v3.0 ([#2983](https://github.com/nf-core/tools/pull/2983)) - return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003)) - Remove nf-core licences command ([#3012](https://github.com/nf-core/tools/pull/3012)) @@ -94,6 +198,7 @@ - Update python:3.12-slim Docker digest to 59c7332 ([#3124](https://github.com/nf-core/tools/pull/3124)) - Update pre-commit hook pre-commit/mirrors-mypy to v1.11.1 ([#3091](https://github.com/nf-core/tools/pull/3091)) - Update to pytest v8 and move it to dev dependencies ([#3058](https://github.com/nf-core/tools/pull/3058)) +- Update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] @@ -137,7 +242,7 @@ ### Download -- Replace `--tower` with `--platform`. The former will remain for backwards compatability for now but will be removed in a future release. ([#2853](https://github.com/nf-core/tools/pull/2853)) +- Replace `--tower` with `--platform`. The former will remain for backwards compatibility for now but will be removed in a future release. ([#2853](https://github.com/nf-core/tools/pull/2853)) - Better error message when GITHUB_TOKEN exists but is wrong/outdated - New `--tag` argument to add custom tags during a pipeline download ([#2938](https://github.com/nf-core/tools/pull/2938)) @@ -446,7 +551,7 @@ - Refactored the CLI parameters related to container images. Although downloading other images than those of the Singularity/Apptainer container system is not supported for the time being, a generic name for the parameters seemed preferable. So the new parameter `--singularity-cache-index` introduced in [#2247](https://github.com/nf-core/tools/pull/2247) has been renamed to `--container-cache-index` prior to release ([#2336](https://github.com/nf-core/tools/pull/2336)). - To address issue [#2311](https://github.com/nf-core/tools/issues/2311), a new parameter `--container-library` was created allowing to specify the container library (registry) from which container images in OCI format (Docker) should be pulled ([#2336](https://github.com/nf-core/tools/pull/2336)). - Container detection in configs was improved. This allows for DSL2-like container definitions inside the container parameter value provided to process scopes [#2346](https://github.com/nf-core/tools/pull/2346). -- Add apptainer to the list of false positve container strings ([#2353](https://github.com/nf-core/tools/pull/2353)). 
+- Add apptainer to the list of false positive container strings ([#2353](https://github.com/nf-core/tools/pull/2353)). #### Updated CLI parameters @@ -480,7 +585,7 @@ _In addition, `-r` / `--revision` has been changed to a parameter that can be pr - GitPod base image: Always self-update to the latest version of Nextflow. Add [pre-commit](https://pre-commit.com/) dependency. - GitPod configs: Update Nextflow as an init task, init pre-commit in pipeline config. - Refgenie: Create `nxf_home/nf-core/refgenie_genomes.config` path if it doesn't exist ([#2312](https://github.com/nf-core/tools/pull/2312)) -- Add CI tests to test running a pipeline whe it's created from a template skipping different areas +- Add CI tests to test running a pipeline when it's created from a template skipping different areas ## [v2.8 - Ruthenium Monkey](https://github.com/nf-core/tools/releases/tag/2.8) - [2023-04-27] @@ -516,7 +621,7 @@ _In addition, `-r` / `--revision` has been changed to a parameter that can be pr - Add an `--empty-template` option to create a module without TODO statements or examples ([#2175](https://github.com/nf-core/tools/pull/2175) & [#2177](https://github.com/nf-core/tools/pull/2177)) - Removed the `nf-core modules mulled` command and all its code dependencies ([2199](https://github.com/nf-core/tools/pull/2199)). -- Take into accout the provided `--git_remote` URL when linting all modules ([2243](https://github.com/nf-core/tools/pull/2243)). +- Take into account the provided `--git_remote` URL when linting all modules ([2243](https://github.com/nf-core/tools/pull/2243)). ### Subworkflows @@ -925,7 +1030,7 @@ Please note that there are many excellent integrations for Prettier available, f - `input:` / `output:` not being specified in module - Allow for containers from other biocontainers resource as defined [here](https://github.com/nf-core/modules/blob/cde237e7cec07798e5754b72aeca44efe89fc6db/modules/cat/fastq/main.nf#L7-L8) - Fixed traceback when using `stageAs` syntax as defined [here](https://github.com/nf-core/modules/blob/cde237e7cec07798e5754b72aeca44efe89fc6db/modules/cat/fastq/main.nf#L11) -- Added `nf-core schema docs` command to output pipline parameter documentation in Markdown format for inclusion in GitHub and other documentation systems ([#741](https://github.com/nf-core/tools/issues/741)) +- Added `nf-core schema docs` command to output pipeline parameter documentation in Markdown format for inclusion in GitHub and other documentation systems ([#741](https://github.com/nf-core/tools/issues/741)) - Allow conditional process execution from the configuration file ([#1393](https://github.com/nf-core/tools/pull/1393)) - Add linting for when condition([#1397](https://github.com/nf-core/tools/pull/1397)) - Added modules ignored table to `nf-core modules bump-versions`. ([#1234](https://github.com/nf-core/tools/issues/1234)) @@ -944,7 +1049,7 @@ Please note that there are many excellent integrations for Prettier available, f - Update repo logos to utilize [GitHub's `#gh-light/dark-mode-only`](https://docs.github.com/en/github/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#specifying-the-theme-an-image-is-shown-to), to switch between logos optimized for light or dark themes. The old repo logos have to be removed (in `docs/images` and `assets/`). 
- Deal with authentication with private repositories -- Bump minimun Nextflow version to 21.10.3 +- Bump minimum Nextflow version to 21.10.3 - Convert pipeline template to updated Nextflow DSL2 syntax - Solve circular import when importing `nf_core.modules.lint` - Disable cache in `nf_core.utils.fetch_wf_config` while performing `test_wf_use_local_configs`. @@ -966,15 +1071,15 @@ Please note that there are many excellent integrations for Prettier available, f - Defaults in `nextflow.config` must now match the variable _type_ specified in the schema - If you want the parameter to not have a default value, use `null` - Strings set to `false` or an empty string in `nextflow.config` will now fail linting -- Bump minimun Nextflow version to 21.10.3 -- Changed `questionary` `ask()` to `unsafe_ask()` to not catch `KeyboardInterupts` ([#1237](https://github.com/nf-core/tools/issues/1237)) +- Bump minimum Nextflow version to 21.10.3 +- Changed `questionary` `ask()` to `unsafe_ask()` to not catch `KeyboardInterrupts` ([#1237](https://github.com/nf-core/tools/issues/1237)) - Fixed bug in `nf-core launch` due to revisions specified with `-r` not being added to nextflow command. ([#1246](https://github.com/nf-core/tools/issues/1246)) - Update regex in `readme` test of `nf-core lint` to agree with the pipeline template ([#1260](https://github.com/nf-core/tools/issues/1260)) - Update 'fix' message in `nf-core lint` to conform to the current command line options. ([#1259](https://github.com/nf-core/tools/issues/1259)) - Fixed bug in `nf-core list` when `NXF_HOME` is set - Run CI test used to create and lint/run the pipeline template with minimum and latest edge release of NF ([#1304](https://github.com/nf-core/tools/issues/1304)) - New YAML issue templates for tools bug reports and feature requests, with a much richer interface ([#1165](https://github.com/nf-core/tools/pull/1165)) -- Handle synax errors in Nextflow config nicely when running `nf-core schema build` ([#1267](https://github.com/nf-core/tools/pull/1267)) +- Handle syntax errors in Nextflow config nicely when running `nf-core schema build` ([#1267](https://github.com/nf-core/tools/pull/1267)) - Erase temporary files and folders while performing Python tests (pytest) - Remove base `Dockerfile` used for DSL1 pipeline container builds - Run tests with Python 3.10 @@ -1080,7 +1185,7 @@ This marks the first Nextflow DSL2-centric release of `tools` which means that s - Updated `nf-core modules install` and `modules.json` to work with new directory structure ([#1159](https://github.com/nf-core/tools/issues/1159)) - Updated `nf-core modules remove` to work with new directory structure [[#1159](https://github.com/nf-core/tools/issues/1159)] - Restructured code and removed old table style in `nf-core modules list` -- Fixed bug causing `modules.json` creation to loop indefinitly +- Fixed bug causing `modules.json` creation to loop indefinitely - Added `--all` flag to `nf-core modules install` - Added `remote` and `local` subcommands to `nf-core modules list` - Fix bug due to restructuring in modules template @@ -1161,7 +1266,7 @@ This marks the first Nextflow DSL2-centric release of `tools` which means that s ## [v1.13.2 - Copper Crocodile CPR :crocodile: :face_with_head_bandage:](https://github.com/nf-core/tools/releases/tag/1.13.2) - [2021-03-23] - Make module template pass the EC linter [[#953](https://github.com/nf-core/tools/pull/953)] -- Added better logging message if a user doesn't specificy the directory correctly with `nf-core modules` commands 
[[#942](https://github.com/nf-core/tools/pull/942)]
- Fixed parameter validation bug caused by JSONObject [[#937](https://github.com/nf-core/tools/issues/937)]
- Fixed template creation error regarding file permissions [[#932](https://github.com/nf-core/tools/issues/932)]
- Split the `create-lint-wf` tests up into separate steps in GitHub Actions to make the CI results easier to read

@@ -1401,7 +1506,7 @@ making a pull-request. See [`.github/CONTRIBUTING.md`](.github/CONTRIBUTING.md)

### Linting

- Refactored PR branch tests to be a little clearer.
-- Linting error docs explain how to add an additional branch protecton rule to the `branch.yml` GitHub Actions workflow.
+- Linting error docs explain how to add an additional branch protection rule to the `branch.yml` GitHub Actions workflow.
- Adapted linting docs to the new PR branch tests.
- Failure for missing the readme bioconda badge is now a warn, in case this badge is not relevant
- Added test for template `{{ cookiecutter.var }}` placeholders
diff --git a/CITATION.cff b/CITATION.cff
index 017666c018..d1246b69d7 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -24,7 +24,7 @@ version: 2.4.1
doi: 10.1038/s41587-020-0439-x
date-released: 2022-05-16
url: https://github.com/nf-core/tools
-prefered-citation:
+preferred-citation:
  type: article
  authors:
    - family-names: Ewels
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index f9773296c1..ce36354331 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -153,7 +153,7 @@ Optionally followed by the description that you want to add to the changelog.

- Update Textual snapshots:

-If the Textual snapshots (run by `tests/test_crate_app.py`) fail, an HTML report is generated and uploaded as an artifact.
+If the Textual snapshots (run by `tests/pipelines/test_crate_app.py`) fail, an HTML report is generated and uploaded as an artifact.
If you are sure that these changes are correct, you can automatically update the snapshots from the PR by posting a comment with the magic words:

```
diff --git a/Dockerfile b/Dockerfile
index fb1a867937..dc9948ea4b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.12-slim@sha256:59c7332a4a24373861c4a5f0eec2c92b87e3efeb8ddef011744ef9a751b1d11c
+FROM python:3.12-slim@sha256:2a6386ad2db20e7f55073f69a98d6da2cf9f168e05e7487d2670baeb9b7601c5
LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \
      description="Docker image containing requirements for nf-core/tools"
diff --git a/README.md b/README.md
index 58fb708a0d..8a3e7d05e6 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,12 @@

-  [logo `<picture>`/`<img>` markup, alt text "nf-core/tools"; HTML tags stripped in extraction]
+  [logo `<picture>`/`<img>` markup, alt text "nf-core/tools"; HTML tags stripped in extraction]

-[![Python tests](https://github.com/nf-core/tools/workflows/Python%20tests/badge.svg?branch=master&event=push)](https://github.com/nf-core/tools/actions?query=workflow%3A%22Python+tests%22+branch%3Amaster) -[![codecov](https://codecov.io/gh/nf-core/tools/branch/master/graph/badge.svg)](https://codecov.io/gh/nf-core/tools) +[![Python tests](https://github.com/nf-core/tools/workflows/Python%20tests/badge.svg?branch=main&event=push)](https://github.com/nf-core/tools/actions?query=workflow%3A%22Python+tests%22+branch%3Amain) +[![codecov](https://codecov.io/gh/nf-core/tools/branch/main/graph/badge.svg)](https://codecov.io/gh/nf-core/tools) [![code style: prettier](https://img.shields.io/badge/code%20style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) [![code style: Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v1.json)](https://github.com/charliermarsh/ruff) @@ -21,7 +21,7 @@ For documentation of the internal Python functions, please refer to the [Tools P ## Installation -For full installation instructions, please see the [nf-core documentation](https://nf-co.re/docs/usage/tools). +For full installation instructions, please see the [nf-core documentation](https://nf-co.re/docs/nf-core-tools/installation). Below is a quick-start for those who know what they're doing: ### Bioconda diff --git a/docs/api/_src/api/index.md b/docs/api/_src/api/index.md index 035a896888..f25e166a90 100644 --- a/docs/api/_src/api/index.md +++ b/docs/api/_src/api/index.md @@ -8,4 +8,4 @@ This API documentation is for the [`nf-core/tools`](https://github.com/nf-core/t - [Module commands](./module_lint_tests/) (run by `nf-core modules lint`) - [Subworkflow commands](./subworkflow_lint_tests/) (run by `nf-core subworkflows lint`) - [nf-core/tools Python package API reference](./api/) - - [nf-core/tools pipeline commands API referece](./api/pipelines/) + - [nf-core/tools pipeline commands API reference](./api/pipelines/) diff --git a/docs/api/_src/api/utils.md b/docs/api/_src/api/utils.md new file mode 100644 index 0000000000..1353f97ef5 --- /dev/null +++ b/docs/api/_src/api/utils.md @@ -0,0 +1,9 @@ +# nf_core.utils + +```{eval-rst} +.. automodule:: nf_core.utils + :members: + :undoc-members: + :show-inheritance: + :private-members: +``` diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index bfdbd7888d..5a45483d9c 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -40,7 +40,7 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ["myst_parser", "sphinx.ext.autodoc", "sphinx.ext.napoleon"] +extensions = ["myst_parser", "sphinx.ext.autodoc", "sphinx.ext.napoleon", "sphinxcontrib.autodoc_pydantic"] # Add any paths that contain templates here, relative to this directory. templates_path = ["./_templates"] @@ -51,8 +51,8 @@ # source_suffix = ['.rst', '.md'] source_suffix = ".rst" -# The master toctree document. -master_doc = "index" +# The main toctree document. +main_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -133,7 +133,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ - (master_doc, "nf-core.tex", "nf-core tools API documentation", "Phil Ewels, Sven Fillinger", "manual"), + (main_doc, "nf-core.tex", "nf-core tools API documentation", "Phil Ewels, Sven Fillinger", "manual"), ] @@ -141,7 +141,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [(master_doc, "nf-core", "nf-core tools API documentation", [author], 1)] +man_pages = [(main_doc, "nf-core", "nf-core tools API documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- @@ -151,7 +151,7 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + main_doc, "nf-core", "nf-core tools API documentation", author, diff --git a/docs/api/_src/pipeline_lint_tests/included_configs.md b/docs/api/_src/pipeline_lint_tests/included_configs.md new file mode 100644 index 0000000000..f68f7da25e --- /dev/null +++ b/docs/api/_src/pipeline_lint_tests/included_configs.md @@ -0,0 +1,5 @@ +# included_configs + + ```{eval-rst} + .. automethod:: nf_core.pipelines.lint.PipelineLint.included_configs + ``` diff --git a/docs/api/_src/pipeline_lint_tests/index.md b/docs/api/_src/pipeline_lint_tests/index.md index 3575c08db4..4dd93442d2 100644 --- a/docs/api/_src/pipeline_lint_tests/index.md +++ b/docs/api/_src/pipeline_lint_tests/index.md @@ -7,6 +7,7 @@ - [base_config](./base_config/) - [files_exist](./files_exist/) - [files_unchanged](./files_unchanged/) + - [included_configs](./included_configs/) - [merge_markers](./merge_markers/) - [modules_config](./modules_config/) - [modules_json](./modules_json/) @@ -16,6 +17,7 @@ - [nfcore_yml](./nfcore_yml/) - [pipeline_name_conventions](./pipeline_name_conventions/) - [pipeline_todos](./pipeline_todos/) + - [plugin_includes](./plugin_includes/) - [readme](./readme/) - [schema_description](./schema_description/) - [schema_lint](./schema_lint/) diff --git a/docs/api/requirements.txt b/docs/api/requirements.txt index abffe30740..1d23f0b27d 100644 --- a/docs/api/requirements.txt +++ b/docs/api/requirements.txt @@ -1,3 +1,4 @@ +autodoc_pydantic Sphinx>=3.3.1 sphinxcontrib-napoleon sphinx-markdown-builder diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 0efea13ec9..9f16188e95 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -4,6 +4,7 @@ import logging import os import sys +from pathlib import Path import rich import rich.console @@ -35,6 +36,7 @@ pipelines_launch, pipelines_lint, pipelines_list, + pipelines_rocrate, pipelines_schema_build, pipelines_schema_docs, pipelines_schema_lint, @@ -85,7 +87,7 @@ }, { "name": "For developers", - "commands": ["create", "lint", "bump-version", "sync", "schema", "create-logo"], + "commands": ["create", "lint", "bump-version", "sync", "schema", "rocrate", "create-logo"], }, ], "nf-core modules": [ @@ -366,26 +368,18 @@ def command_pipelines_lint( help="Archive compression type", ) @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") -# TODO: Remove this in a future release. Deprecated in March 2024. -@click.option( - "-t", - "--tower", - is_flag=True, - default=False, - hidden=True, - help="Download for Seqera Platform. 
DEPRECATED: Please use `--platform` instead.", -) @click.option( + "-p", "--platform", is_flag=True, default=False, help="Download for Seqera Platform (formerly Nextflow Tower)", ) @click.option( - "-d", + "-c", "--download-configuration", - is_flag=True, - default=False, + type=click.Choice(["yes", "no"]), + default="no", help="Include configuration profiles in download. Not available with `--platform`", ) @click.option( @@ -420,7 +414,7 @@ def command_pipelines_lint( help="List of images already available in a remote `singularity.cacheDir`.", ) @click.option( - "-p", + "-d", "--parallel-downloads", type=int, default=4, @@ -434,7 +428,6 @@ def command_pipelines_download( outdir, compress, force, - tower, platform, download_configuration, tag, @@ -454,7 +447,6 @@ def command_pipelines_download( outdir, compress, force, - tower, platform, download_configuration, tag, @@ -579,6 +571,44 @@ def command_pipelines_list(ctx, keywords, sort, json, show_archived): pipelines_list(ctx, keywords, sort, json, show_archived) +# nf-core pipelines rocrate +@pipelines.command("rocrate") +@click.argument( + "pipeline_dir", + type=click.Path(exists=True), + default=Path.cwd(), + required=True, + metavar="", +) +@click.option( + "-j", + "--json_path", + default=Path.cwd(), + type=str, + help="Path to save RO Crate metadata json file to", +) +@click.option("-z", "--zip_path", type=str, help="Path to save RO Crate zip file to") +@click.option( + "-pv", + "--pipeline_version", + type=str, + help="Version of pipeline to use for RO Crate", + default="", +) +@click.pass_context +def rocrate( + ctx, + pipeline_dir: str, + json_path: str, + zip_path: str, + pipeline_version: str, +): + """ + Make an Research Object Crate + """ + pipelines_rocrate(ctx, pipeline_dir, json_path, zip_path, pipeline_version) + + # nf-core pipelines sync @pipelines.command("sync") @click.pass_context @@ -706,12 +736,24 @@ def pipeline_schema(): # nf-core pipelines schema validate @pipeline_schema.command("validate") +@click.option( + "-d", + "--dir", + "directory", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) @click.argument("pipeline", required=True, metavar="") @click.argument("params", type=click.Path(exists=True), required=True, metavar="") -def command_pipelines_schema_validate(pipeline, params): +def command_pipelines_schema_validate(directory, pipeline, params): """ Validate a set of parameters against a pipeline schema. """ + if Path(directory, pipeline).exists(): + # this is a local pipeline + pipeline = Path(directory, pipeline) + pipelines_schema_validate(pipeline, params) @@ -750,23 +792,39 @@ def command_pipelines_schema_build(directory, no_prompts, web_only, url): # nf-core pipelines schema lint @pipeline_schema.command("lint") +@click.option( + "-d", + "--dir", + "directory", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) @click.argument( - "schema_path", + "schema_file", type=click.Path(exists=True), default="nextflow_schema.json", metavar="", ) -def command_pipelines_schema_lint(schema_path): +def command_pipelines_schema_lint(directory, schema_file): """ Check that a given pipeline schema is valid. 
""" - pipelines_schema_lint(schema_path) + pipelines_schema_lint(Path(directory, schema_file)) # nf-core pipelines schema docs @pipeline_schema.command("docs") +@click.option( + "-d", + "--dir", + "directory", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) @click.argument( - "schema_path", + "schema_file", type=click.Path(exists=True), default="nextflow_schema.json", required=False, @@ -795,11 +853,11 @@ def command_pipelines_schema_lint(schema_path): help="CSV list of columns to include in the parameter tables (parameter,description,type,default,required,hidden)", default="parameter,description,type,default,required,hidden", ) -def command_pipelines_schema_docs(schema_path, output, format, force, columns): +def command_pipelines_schema_docs(directory, schema_file, output, format, force, columns): """ Outputs parameter documentation for a pipeline schema. """ - pipelines_schema_docs(schema_path, output, format, force, columns) + pipelines_schema_docs(Path(directory, schema_file), output, format, force, columns) # nf-core modules subcommands @@ -1005,7 +1063,7 @@ def command_modules_update( default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -@click.option("-r", "--remove", is_flag=True, default=False) +@click.option("-r", "--remove", is_flag=True, default=False, help="Remove an existent patch file and regenerate it.") def command_modules_patch(ctx, tool, directory, remove): """ Create a patch file for minor changes in a module @@ -1231,11 +1289,14 @@ def command_modules_test(ctx, tool, directory, no_prompts, update, once, profile is_flag=True, help="Fix the module version if a newer version is available", ) -def command_modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +@click.option("--fix", is_flag=True, help="Fix all linting tests if possible.") +def command_modules_lint( + ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix +): """ Lint one or more modules in a directory. """ - modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version) + modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix) # nf-core modules info @@ -1378,7 +1439,7 @@ def command_subworkflows_create(ctx, subworkflow, directory, author, force, migr ) @click.option( "--profile", - type=click.Choice(["none", "singularity"]), + type=click.Choice(["docker", "singularity", "conda"]), default=None, help="Run tests with a specific profile", ) @@ -1476,11 +1537,14 @@ def command_subworkflows_list_local(ctx, keywords, json, directory): # pylint: help="Sort lint output by subworkflow or test name.", show_default=True, ) -def command_subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by): +@click.option("--fix", is_flag=True, help="Fix all linting tests if possible.") +def command_subworkflows_lint( + ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by, fix +): """ Lint one or more subworkflows in a directory. 
""" - subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by) + subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by, fix) # nf-core subworkflows info @@ -1542,6 +1606,43 @@ def command_subworkflows_install(ctx, subworkflow, directory, prompt, force, sha subworkflows_install(ctx, subworkflow, directory, prompt, force, sha) +# nf-core subworkflows patch +@subworkflows.command("patch") +@click.pass_context +@click.argument("tool", type=str, required=False, metavar=" or ") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) +@click.option("-r", "--remove", is_flag=True, default=False, help="Remove an existent patch file and regenerate it.") +def subworkflows_patch(ctx, tool, dir, remove): + """ + Create a patch file for minor changes in a subworkflow + + Checks if a subworkflow has been modified locally and creates a patch file + describing how the module has changed from the remote version + """ + from nf_core.subworkflows import SubworkflowPatch + + try: + subworkflow_patch = SubworkflowPatch( + dir, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + if remove: + subworkflow_patch.remove(tool) + else: + subworkflow_patch.patch(tool) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + # nf-core subworkflows remove @subworkflows.command("remove") @click.pass_context @@ -1594,7 +1695,7 @@ def command_subworkflows_remove(ctx, directory, subworkflow): "limit_output", is_flag=True, default=False, - help="Limit ouput to only the difference in main.nf", + help="Limit output to only the difference in main.nf", ) @click.option( "-a", @@ -1696,7 +1797,7 @@ def command_schema_validate(pipeline, params): @click.option( "--url", type=str, - default="https://nf-co.re/pipeline_schema_builder", + default="https://oldsite.nf-co.re/pipeline_schema_builder", help="Customise the builder URL (for development work)", ) def command_schema_build(directory, no_prompts, web_only, url): @@ -1812,13 +1913,14 @@ def command_create_logo(logo_text, directory, name, theme, width, format, force) Use `nf-core pipelines create-logo` instead. """ log.warning( - "The `[magenta]nf-core create-logo[/]` command is deprecated. Use `[magenta]nf-core pipelines screate-logo[/]` instead." + "The `[magenta]nf-core create-logo[/]` command is deprecated. Use `[magenta]nf-core pipeliness create-logo[/]` instead." ) pipelines_create_logo(logo_text, directory, name, theme, width, format, force) # nf-core sync (deprecated) @nf_core_cli.command("sync", hidden=True, deprecated=True) +@click.pass_context @click.option( "-d", "--dir", @@ -1849,14 +1951,14 @@ def command_create_logo(logo_text, directory, name, theme, width, format, force) @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def command_sync(directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def command_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ Use `nf-core pipelines sync` instead. """ log.warning( "The `[magenta]nf-core sync[/]` command is deprecated. 
Use `[magenta]nf-core pipelines sync[/]` instead." ) - pipelines_sync(directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) + pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) # nf-core bump-version (deprecated) @@ -2114,8 +2216,7 @@ def command_download( outdir, compress, force, - tower, - platform, + platform or tower, download_configuration, tag, container_system, diff --git a/nf_core/commands_modules.py b/nf_core/commands_modules.py index 57c8e9777c..33b1f75160 100644 --- a/nf_core/commands_modules.py +++ b/nf_core/commands_modules.py @@ -261,7 +261,7 @@ def modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrat sys.exit(1) -def modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +def modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix): """ Lint one or more modules in a directory. @@ -278,6 +278,7 @@ def modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, p module_lint = ModuleLint( directory, fail_warned=fail_warned, + fix=fix, registry=ctx.params["registry"], remote_url=ctx.obj["modules_repo_url"], branch=ctx.obj["modules_repo_branch"], diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index 23affb1d27..3b28f4979c 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -2,6 +2,7 @@ import os import sys from pathlib import Path +from typing import Optional, Union import rich @@ -167,7 +168,6 @@ def pipelines_download( outdir, compress, force, - tower, platform, download_configuration, tag, @@ -185,16 +185,13 @@ def pipelines_download( """ from nf_core.pipelines.download import DownloadWorkflow - if tower: - log.warning("[red]The `-t` / `--tower` flag is deprecated. 
Please use `--platform` instead.[/]") - dl = DownloadWorkflow( pipeline, revision, outdir, compress, force, - tower or platform, # True if either specified + platform, download_configuration, tag, container_system, @@ -281,6 +278,33 @@ def pipelines_list(ctx, keywords, sort, json, show_archived): stdout.print(list_workflows(keywords, sort, json, show_archived)) +# nf-core pipelines rocrate +def pipelines_rocrate( + ctx, + pipeline_dir: Union[str, Path], + json_path: Optional[Union[str, Path]], + zip_path: Optional[Union[str, Path]], + pipeline_version: str, +) -> None: + from nf_core.pipelines.rocrate import ROCrate + + if json_path is None and zip_path is None: + log.error("Either `--json_path` or `--zip_path` must be specified.") + sys.exit(1) + else: + pipeline_dir = Path(pipeline_dir) + if json_path is not None: + json_path = Path(json_path) + if zip_path is not None: + zip_path = Path(zip_path) + try: + rocrate_obj = ROCrate(pipeline_dir, pipeline_version) + rocrate_obj.create_rocrate(json_path=json_path, zip_path=zip_path) + except (UserWarning, LookupError, FileNotFoundError) as e: + log.error(e) + sys.exit(1) + + # nf-core pipelines sync def pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ diff --git a/nf_core/commands_subworkflows.py b/nf_core/commands_subworkflows.py index a32f8d5c3e..8e90a8116b 100644 --- a/nf_core/commands_subworkflows.py +++ b/nf_core/commands_subworkflows.py @@ -104,7 +104,7 @@ def subworkflows_list_local(ctx, keywords, json, directory): # pylint: disable= sys.exit(1) -def subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by): +def subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by, fix): """ Lint one or more subworkflows in a directory. @@ -121,6 +121,7 @@ def subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warn subworkflow_lint = SubworkflowLint( directory, fail_warned=fail_warned, + fix=fix, registry=ctx.params["registry"], remote_url=ctx.obj["modules_repo_url"], branch=ctx.obj["modules_repo_branch"], diff --git a/nf_core/modules/modules_differ.py b/nf_core/components/components_differ.py similarity index 76% rename from nf_core/modules/modules_differ.py rename to nf_core/components/components_differ.py index b6d7f0d0fa..db51c1910d 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/components/components_differ.py @@ -6,7 +6,9 @@ from pathlib import Path from typing import Dict, List, Union -from rich.console import Console +from rich import box +from rich.console import Console, Group, RenderableType +from rich.panel import Panel from rich.syntax import Syntax import nf_core.utils @@ -14,10 +16,10 @@ log = logging.getLogger(__name__) -class ModulesDiffer: +class ComponentsDiffer: """ Static class that provides functionality for computing diffs between - different instances of a module + different instances of a module or subworkflow """ class DiffEnum(enum.Enum): @@ -32,15 +34,15 @@ class DiffEnum(enum.Enum): REMOVED = enum.auto() @staticmethod - def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_dir=None): + def get_component_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_dir=None): """ - Compute the diff between the current module version + Compute the diff between the current component version and the new version. 
Args: - from_dir (strOrPath): The folder containing the old module files - to_dir (strOrPath): The folder containing the new module files - path_in_diff (strOrPath): The directory displayed containing the module + from_dir (strOrPath): The folder containing the old component files + to_dir (strOrPath): The folder containing the new component files + path_in_diff (strOrPath): The directory displayed containing the component file in the diff. Added so that temporary dirs are not shown for_git (bool): indicates whether the diff file is to be @@ -50,7 +52,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d dsp_to_dir (str | Path): The to directory to display in the diff Returns: - dict[str, (ModulesDiffer.DiffEnum, str)]: A dictionary containing + dict[str, (ComponentsDiffer.DiffEnum, str)]: A dictionary containing the diff type and the diff string (empty if no diff) """ if for_git: @@ -70,7 +72,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d ) files = list(files) - # Loop through all the module files and compute their diffs if needed + # Loop through all the component files and compute their diffs if needed for file in files: temp_path = Path(to_dir, file) curr_path = Path(from_dir, file) @@ -82,7 +84,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d if new_lines == old_lines: # The files are identical - diffs[file] = (ModulesDiffer.DiffEnum.UNCHANGED, ()) + diffs[file] = (ComponentsDiffer.DiffEnum.UNCHANGED, ()) else: # Compute the diff diff = difflib.unified_diff( @@ -91,7 +93,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path(dsp_from_dir, file)), tofile=str(Path(dsp_to_dir, file)), ) - diffs[file] = (ModulesDiffer.DiffEnum.CHANGED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.CHANGED, diff) elif temp_path.exists(): with open(temp_path) as fh: @@ -104,7 +106,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path("/dev", "null")), tofile=str(Path(dsp_to_dir, file)), ) - diffs[file] = (ModulesDiffer.DiffEnum.CREATED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.CREATED, diff) elif curr_path.exists(): # The file was removed @@ -117,14 +119,14 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path(dsp_from_dir, file)), tofile=str(Path("/dev", "null")), ) - diffs[file] = (ModulesDiffer.DiffEnum.REMOVED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.REMOVED, diff) return diffs @staticmethod def write_diff_file( diff_path, - module, + component, repo_path, from_dir, to_dir, @@ -137,20 +139,19 @@ def write_diff_file( limit_output=False, ): """ - Writes the diffs of a module to the diff file. + Writes the diffs of a component to the diff file. 
Args: diff_path (str | Path): The path to the file that should be appended - module (str): The module name - repo_path (str): The name of the repo where the module resides - from_dir (str | Path): The directory containing the old module files - to_dir (str | Path): The directory containing the new module files - diffs (dict[str, (ModulesDiffer.DiffEnum, str)]): A dictionary containing + component (str): The component name + repo_path (str): The name of the repo where the component resides + from_dir (str | Path): The directory containing the old component files + to_dir (str | Path): The directory containing the new component files + diffs (dict[str, (ComponentsDiffer.DiffEnum, str)]): A dictionary containing the type of change and the diff (if any) - module_dir (str | Path): The path to the current installation of the module - current_version (str): The installed version of the module - new_version (str): The version of the module the diff is computed against + current_version (str): The installed version of the component + new_version (str): The version of the component the diff is computed against for_git (bool): indicates whether the diff file is to be compatible with `git apply`. If true it adds a/ and b/ prefixes to the file paths @@ -163,36 +164,36 @@ def write_diff_file( if dsp_to_dir is None: dsp_to_dir = to_dir - diffs = ModulesDiffer.get_module_diffs(from_dir, to_dir, for_git, dsp_from_dir, dsp_to_dir) - if all(diff_status == ModulesDiffer.DiffEnum.UNCHANGED for _, (diff_status, _) in diffs.items()): - raise UserWarning("Module is unchanged") - log.debug(f"Writing diff of '{module}' to '{diff_path}'") + diffs = ComponentsDiffer.get_component_diffs(from_dir, to_dir, for_git, dsp_from_dir, dsp_to_dir) + if all(diff_status == ComponentsDiffer.DiffEnum.UNCHANGED for _, (diff_status, _) in diffs.items()): + raise UserWarning("Component is unchanged") + log.debug(f"Writing diff of '{component}' to '{diff_path}'") with open(diff_path, file_action) as fh: if current_version is not None and new_version is not None: fh.write( - f"Changes in module '{Path(repo_path, module)}' between" + f"Changes in component '{Path(repo_path, component)}' between" f" ({current_version}) and" f" ({new_version})\n" ) else: - fh.write(f"Changes in module '{Path(repo_path, module)}'\n") + fh.write(f"Changes in component '{Path(repo_path, component)}'\n") for file, (diff_status, diff) in diffs.items(): - if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: + if diff_status == ComponentsDiffer.DiffEnum.UNCHANGED: # The files are identical fh.write(f"'{Path(dsp_from_dir, file)}' is unchanged\n") - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: # The file was created between the commits fh.write(f"'{Path(dsp_from_dir, file)}' was created\n") - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: # The file was removed between the commits fh.write(f"'{Path(dsp_from_dir, file)}' was removed\n") elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf - fh.write(f"Changes in '{Path(module, file)}' but not shown\n") + fh.write(f"Changes in '{Path(component, file)}' but not shown\n") else: # The file has changed write the diff lines to the file - fh.write(f"Changes in '{Path(module, file)}':\n") + fh.write(f"Changes in '{Path(component, file)}':\n") for line in diff: fh.write(line) fh.write("\n") @@ -235,7 +236,7 @@ def append_modules_json_diff(diff_path, 
old_modules_json, new_modules_json, modu @staticmethod def print_diff( - module, + component, repo_path, from_dir, to_dir, @@ -246,16 +247,15 @@ def print_diff( limit_output=False, ): """ - Prints the diffs between two module versions to the terminal + Prints the diffs between two component versions to the terminal Args: - module (str): The module name - repo_path (str): The name of the repo where the module resides - from_dir (str | Path): The directory containing the old module files - to_dir (str | Path): The directory containing the new module files - module_dir (str): The path to the current installation of the module - current_version (str): The installed version of the module - new_version (str): The version of the module the diff is computed against + component (str): The component name + repo_path (str): The name of the repo where the component resides + from_dir (str | Path): The directory containing the old component files + to_dir (str | Path): The directory containing the new component files + current_version (str): The installed version of the component + new_version (str): The version of the component the diff is computed against dsp_from_dir (str | Path): The 'from' directory displayed in the diff dsp_to_dir (str | Path): The 'to' directory displayed in the diff limit_output (bool): If true, don't print the diff for files other than main.nf @@ -265,35 +265,49 @@ def print_diff( if dsp_to_dir is None: dsp_to_dir = to_dir - diffs = ModulesDiffer.get_module_diffs( + diffs = ComponentsDiffer.get_component_diffs( from_dir, to_dir, for_git=False, dsp_from_dir=dsp_from_dir, dsp_to_dir=dsp_to_dir ) console = Console(force_terminal=nf_core.utils.rich_force_colors()) if current_version is not None and new_version is not None: log.info( - f"Changes in module '{Path(repo_path, module)}' between" f" ({current_version}) and" f" ({new_version})" + f"Changes in component '{Path(repo_path, component)}' between" + f" ({current_version}) and" + f" ({new_version})" ) else: - log.info(f"Changes in module '{Path(repo_path, module)}'") + log.info(f"Changes in component '{Path(repo_path, component)}'") + panel_group: list[RenderableType] = [] for file, (diff_status, diff) in diffs.items(): - if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: + if diff_status == ComponentsDiffer.DiffEnum.UNCHANGED: # The files are identical log.info(f"'{Path(dsp_from_dir, file)}' is unchanged") - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: # The file was created between the commits log.info(f"'{Path(dsp_from_dir, file)}' was created") - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: # The file was removed between the commits log.info(f"'{Path(dsp_from_dir, file)}' was removed") elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf - log.info(f"Changes in '{Path(module, file)}' but not shown") + log.info(f"Changes in '{Path(component, file)}' but not shown") else: # The file has changed - log.info(f"Changes in '{Path(module, file)}':") + log.info(f"Changes in '{Path(component, file)}':") # Pretty print the diff using the pygments diff lexer - console.print(Syntax("".join(diff), "diff", theme="ansi_dark", padding=1)) + syntax = Syntax("".join(diff), "diff", theme="ansi_dark", line_numbers=True) + panel_group.append(Panel(syntax, title=str(file), title_align="left", padding=0)) + console.print( + Panel( + Group(*panel_group), + 
title=f"[white]{str(component)}[/white]", + title_align="left", + padding=0, + border_style="blue", + box=box.HEAVY, + ) + ) @staticmethod def per_file_patch(patch_fn: Union[str, Path]) -> Dict[str, List[str]]: @@ -391,8 +405,8 @@ def get_new_and_old_lines(patch): def try_apply_single_patch(file_lines, patch, reverse=False): """ Tries to apply a patch to a modified file. Since the line numbers in - the patch does not agree if the file is modified, the old and new - lines inpatch are reconstructed and then we look for the old lines + the patch do not agree if the file is modified, the old and new + lines in the patch are reconstructed and then we look for the old lines in the modified file. If all hunk in the patch are found in the new file it is updated with the new lines from the patch file. @@ -408,7 +422,7 @@ def try_apply_single_patch(file_lines, patch, reverse=False): LookupError: If it fails to find the old lines from the patch in the file. """ - org_lines, patch_lines = ModulesDiffer.get_new_and_old_lines(patch) + org_lines, patch_lines = ComponentsDiffer.get_new_and_old_lines(patch) if reverse: patch_lines, org_lines = org_lines, patch_lines @@ -452,16 +466,22 @@ def try_apply_single_patch(file_lines, patch, reverse=False): @staticmethod def try_apply_patch( - module: str, repo_path: Union[str, Path], patch_path: Union[str, Path], module_dir: Path, reverse: bool = False + component_type: str, + component: str, + repo_path: Union[str, Path], + patch_path: Union[str, Path], + component_dir: Path, + reverse: bool = False, ) -> Dict[str, List[str]]: """ - Try applying a full patch file to a module + Try applying a full patch file to a module or subworkflow Args: - module (str): Name of the module - repo_path (str): Name of the repository where the module resides + component_type (str): The type of component (modules or subworkflows) + component (str): Name of the module or subworkflow + repo_path (str): Name of the repository where the component resides patch_path (str): The absolute path to the patch file to be applied - module_dir (Path): The directory containing the module + component_dir (Path): The directory containing the component reverse (bool): Apply the patch in reverse Returns: @@ -471,19 +491,19 @@ def try_apply_patch( Raises: LookupError: If the patch application fails in a file """ - module_relpath = Path("modules", repo_path, module) - patches = ModulesDiffer.per_file_patch(patch_path) + component_relpath = Path(component_type, repo_path, component) + patches = ComponentsDiffer.per_file_patch(patch_path) new_files = {} for file, patch in patches.items(): log.debug(f"Applying patch to {file}") - fn = Path(file).relative_to(module_relpath) - file_path = module_dir / fn + fn = Path(file).relative_to(component_relpath) + file_path = component_dir / fn try: with open(file_path) as fh: file_lines = fh.readlines() except FileNotFoundError: # The file was added with the patch file_lines = [""] - patched_new_lines = ModulesDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse) + patched_new_lines = ComponentsDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse) new_files[str(fn)] = patched_new_lines return new_files diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 8a00e758cb..3acacb4fe4 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -4,6 +4,7 @@ from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union import questionary +import requests import 
rich.prompt import yaml @@ -199,3 +200,29 @@ def get_components_to_install( current_comp_dict[component_name].update(component_dict) return list(modules.values()), list(subworkflows.values()) + + +def get_biotools_id(tool_name) -> str: + """ + Try to find a bio.tools ID for 'tool' + """ + url = f"https://bio.tools/api/t/?q={tool_name}&format=json" + try: + # Send a GET request to the API + response = requests.get(url) + response.raise_for_status() # Raise an error for bad status codes + # Parse the JSON response + data = response.json() + + # Iterate through the tools in the response to find the tool name + for tool in data["list"]: + if tool["name"].lower() == tool_name: + return tool["biotoolsCURIE"] + + # If the tool name was not found in the response + log.warning(f"Could not find a bio.tools ID for '{tool_name}'") + return "" + + except requests.exceptions.RequestException as e: + log.warning(f"Could not find a bio.tools ID for '{tool_name}': {e}") + return "" diff --git a/nf_core/components/create.py b/nf_core/components/create.py index c71b128415..c781905618 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -21,6 +21,7 @@ import nf_core import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.components.components_utils import get_biotools_id from nf_core.pipelines.lint_utils import run_prettier_on_file log = logging.getLogger(__name__) @@ -61,6 +62,7 @@ def __init__( self.file_paths: Dict[str, Path] = {} self.not_empty_template = not empty_template self.migrate_pytest = migrate_pytest + self.tool_identifier = "" def create(self) -> bool: """ @@ -149,6 +151,8 @@ def create(self) -> bool: if self.component_type == "modules": # Try to find a bioconda package for 'component' self._get_bioconda_tool() + # Try to find a biotools entry for 'component' + self.tool_identifier = get_biotools_id(self.component) # Prompt for GitHub username self._get_username() @@ -244,7 +248,7 @@ def _get_module_structure_components(self): if self.process_label is None: log.info( "Provide an appropriate resource label for the process, taken from the " - "[link=https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config#L29]nf-core pipeline template[/link].\n" + "[link=https://github.com/nf-core/tools/blob/main/nf_core/pipeline-template/conf/base.config#L29]nf-core pipeline template[/link].\n" "For example: {}".format(", ".join(process_label_defaults)) ) while self.process_label is None: diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 98f8be5272..31769785a1 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -211,9 +211,9 @@ def get_local_yaml(self) -> Optional[Dict]: return yaml.safe_load(fh) log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally") - return None + return {} - def get_remote_yaml(self) -> Optional[dict]: + def get_remote_yaml(self) -> Optional[Dict]: """Attempt to get the meta.yml file from a remote repo. 
Returns: @@ -229,6 +229,25 @@ def get_remote_yaml(self) -> Optional[dict]: self.remote_location = self.modules_repo.remote_url return yaml.safe_load(file_contents) + def generate_params_table(self, type) -> Table: + "Generate a rich table for inputs and outputs" + table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) + table.add_column(f":inbox_tray: {type}") + table.add_column("Description") + if self.component_type == "modules": + table.add_column("Pattern", justify="right", style="green") + elif self.component_type == "subworkflows": + table.add_column("Structure", justify="right", style="green") + return table + + def get_channel_structure(self, structure: dict) -> str: + "Get the structure of a channel" + structure_str = "" + for key, info in structure.items(): + pattern = f" - {info['pattern']}" if info.get("pattern") else "" + structure_str += f"{key} ({info['type']}{pattern})" + return structure_str + def generate_component_info_help(self): """Take the parsed meta.yml and generate rich help. @@ -277,33 +296,48 @@ def generate_component_info_help(self): # Inputs if self.meta.get("input"): - inputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) - inputs_table.add_column(":inbox_tray: Inputs") - inputs_table.add_column("Description") - inputs_table.add_column("Pattern", justify="right", style="green") - for input in self.meta["input"]: - for key, info in input.items(): - inputs_table.add_row( - f"[orange1 on black] {key} [/][dim i] ({info['type']})", - Markdown(info["description"] if info["description"] else ""), - info.get("pattern", ""), - ) + inputs_table = self.generate_params_table("Inputs") + for i, input in enumerate(self.meta["input"]): + inputs_table.add_row(f"[italic]input[{i}][/]", "", "") + if self.component_type == "modules": + for element in input: + for key, info in element.items(): + inputs_table.add_row( + f"[orange1 on black] {key} [/][dim i] ({info['type']})", + Markdown(info["description"] if info["description"] else ""), + info.get("pattern", ""), + ) + elif self.component_type == "subworkflows": + for key, info in input.items(): + inputs_table.add_row( + f"[orange1 on black] {key} [/][dim i]", + Markdown(info["description"] if info["description"] else ""), + self.get_channel_structure(info["structure"]) if info.get("structure") else "", + ) renderables.append(inputs_table) # Outputs if self.meta.get("output"): - outputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) - outputs_table.add_column(":outbox_tray: Outputs") - outputs_table.add_column("Description") - outputs_table.add_column("Pattern", justify="right", style="green") + outputs_table = self.generate_params_table("Outputs") for output in self.meta["output"]: - for key, info in output.items(): - outputs_table.add_row( - f"[orange1 on black] {key} [/][dim i] ({info['type']})", - Markdown(info["description"] if info["description"] else ""), - info.get("pattern", ""), - ) + if self.component_type == "modules": + for ch_name, elements in output.items(): + outputs_table.add_row(f"{ch_name}", "", "") + for element in elements: + for key, info in element.items(): + outputs_table.add_row( + f"[orange1 on black] {key} [/][dim i] ({info['type']})", + Markdown(info["description"] if info["description"] else ""), + info.get("pattern", ""), + ) + elif self.component_type == "subworkflows": + for key, info in output.items(): + outputs_table.add_row( + f"[orange1 on black] {key} [/][dim i]", + Markdown(info["description"] 
if info["description"] else ""), + self.get_channel_structure(info["structure"]) if info.get("structure") else "", + ) renderables.append(outputs_table) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index c1b1f24cb7..69740135a8 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -22,7 +22,7 @@ from nf_core.components.nfcore_component import NFCoreComponent from nf_core.modules.modules_json import ModulesJson from nf_core.pipelines.lint_utils import console -from nf_core.utils import LintConfigType +from nf_core.utils import NFCoreYamlLintConfig from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) @@ -57,6 +57,7 @@ def __init__( component_type: str, directory: Union[str, Path], fail_warned: bool = False, + fix: bool = False, remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -73,12 +74,13 @@ def __init__( ) self.fail_warned = fail_warned + self.fix = fix self.passed: List[LintResult] = [] self.warned: List[LintResult] = [] self.failed: List[LintResult] = [] self.all_local_components: List[NFCoreComponent] = [] - self.lint_config: Optional[LintConfigType] = None + self.lint_config: Optional[NFCoreYamlLintConfig] = None self.modules_json: Optional[ModulesJson] = None if self.component_type == "modules": diff --git a/nf_core/components/list.py b/nf_core/components/list.py index 05a8f71120..4c20e60864 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -25,7 +25,7 @@ def __init__( self.remote = remote super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) - def _configure_repo_and_paths(self, nf_dir_req=True) -> None: + def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ Override the default with nf_dir_req set to False to allow info to be run from anywhere and still return remote info diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 0f3cdcdfbd..37e43a536e 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -5,7 +5,7 @@ import logging import re from pathlib import Path -from typing import List, Optional, Tuple, Union +from typing import Any, Dict, List, Optional, Tuple, Union log = logging.getLogger(__name__) @@ -41,6 +41,7 @@ def __init__( remote_component (bool): Whether the module is to be treated as a nf-core or local component """ + self.component_type = component_type self.component_name = component_name self.repo_url = repo_url self.component_dir = component_dir @@ -49,7 +50,7 @@ def __init__( self.passed: List[Tuple[str, str, Path]] = [] self.warned: List[Tuple[str, str, Path]] = [] self.failed: List[Tuple[str, str, Path]] = [] - self.inputs: List[str] = [] + self.inputs: List[List[Dict[str, Dict[str, str]]]] = [] self.outputs: List[str] = [] self.has_meta: bool = False self.git_sha: Optional[str] = None @@ -170,45 +171,95 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st def get_inputs_from_main_nf(self) -> None: """Collect all inputs from the main.nf file.""" - inputs: List[str] = [] + inputs: Any = [] # Can be 'list[list[dict[str, dict[str, str]]]]' or 'list[str]' with open(self.main_nf) as f: data = f.read() - # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo - # regex matches: - # val(foo) - # path(bar) - # val foo - # val bar - # path bar - # path foo 
- # don't match anything inside comments or after "output:" - if "input:" not in data: - log.debug(f"Could not find any inputs in {self.main_nf}") - input_data = data.split("input:")[1].split("output:")[0] - regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" - matches = re.finditer(regex, input_data, re.MULTILINE) - for _, match in enumerate(matches, start=1): - if match.group(3): - input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases - inputs.append(input_val) - elif match.group(4): - input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases - inputs.append(input_val) - log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") - self.inputs = inputs + if self.component_type == "modules": + # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo + # regex matches: + # val(foo) + # path(bar) + # val foo + # val bar + # path bar + # path foo + # don't match anything inside comments or after "output:" + if "input:" not in data: + log.debug(f"Could not find any inputs in {self.main_nf}") + return + input_data = data.split("input:")[1].split("output:")[0] + for line in input_data.split("\n"): + channel_elements: Any = [] + regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + matches = re.finditer(regex, line) + for _, match in enumerate(matches, start=1): + input_val = None + if match.group(3): + input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + elif match.group(4): + input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + if input_val: + channel_elements.append({input_val: {}}) + if len(channel_elements) > 0: + inputs.append(channel_elements) + log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") + self.inputs = inputs + elif self.component_type == "subworkflows": + # get input values from main.nf after "take:" + if "take:" not in data: + log.debug(f"Could not find any inputs in {self.main_nf}") + return + # get all lines between "take" and "main" or "emit" + input_data = data.split("take:")[1].split("main:")[0].split("emit:")[0] + for line in input_data.split("\n"): + try: + inputs.append(line.split()[0]) + except IndexError: + pass # Empty lines + log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") + self.inputs = inputs def get_outputs_from_main_nf(self): outputs = [] with open(self.main_nf) as f: data = f.read() - # get output values from main.nf after "output:". the names are always after "emit:" - if "output:" not in data: - log.debug(f"Could not find any outputs in {self.main_nf}") - return outputs - output_data = data.split("output:")[1].split("when:")[0] - regex = r"emit:\s*([^)\s,]+)" - matches = re.finditer(regex, output_data, re.MULTILINE) - for _, match in enumerate(matches, start=1): - outputs.append(match.group(1)) - log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") - self.outputs = outputs + if self.component_type == "modules": + # get output values from main.nf after "output:". 
the names are always after "emit:" + if "output:" not in data: + log.debug(f"Could not find any outputs in {self.main_nf}") + return outputs + output_data = data.split("output:")[1].split("when:")[0] + regex_emit = r"emit:\s*([^)\s,]+)" + regex_elements = r"(val|path|env|stdout)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + for line in output_data.split("\n"): + match_emit = re.search(regex_emit, line) + matches_elements = re.finditer(regex_elements, line) + if not match_emit: + continue + output_channel = {match_emit.group(1): []} + for _, match_element in enumerate(matches_elements, start=1): + output_val = None + if match_element.group(3): + output_val = match_element.group(3) + elif match_element.group(4): + output_val = match_element.group(4) + if output_val: + output_val = output_val.strip("'").strip('"') # remove quotes + output_channel[match_emit.group(1)].append({output_val: {}}) + outputs.append(output_channel) + log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") + self.outputs = outputs + elif self.component_type == "subworkflows": + # get output values from main.nf after "emit:". Can be named outputs or not. + if "emit:" not in data: + log.debug(f"Could not find any outputs in {self.main_nf}") + return outputs + output_data = data.split("emit:")[1].split("}")[0] + for line in output_data.split("\n"): + try: + outputs.append(line.split("=")[0].split()[0]) + except IndexError: + # Empty lines + pass + log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") + self.outputs = outputs diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 41fccd8be2..59ec7a381b 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -8,7 +8,7 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand -from nf_core.modules.modules_differ import ModulesDiffer +from nf_core.components.components_differ import ComponentsDiffer from nf_core.modules.modules_json import ModulesJson log = logging.getLogger(__name__) @@ -65,7 +65,9 @@ def patch(self, component=None): component_fullname = str(Path(self.component_type, self.modules_repo.repo_path, component)) # Verify that the component has an entry in the modules.json file - if not self.modules_json.module_present(component, self.modules_repo.remote_url, component_dir): + if not self.modules_json.component_present( + component, self.modules_repo.remote_url, component_dir, self.component_type + ): raise UserWarning( f"The '{component_fullname}' {self.component_type[:-1]} does not have an entry in the 'modules.json' file. Cannot compute patch" ) @@ -112,7 +114,7 @@ def patch(self, component=None): # Write the patch to a temporary location (otherwise it is printed to the screen later) patch_temp_path = tempfile.mktemp() try: - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( patch_temp_path, component, self.modules_repo.repo_path, @@ -127,11 +129,13 @@ def patch(self, component=None): raise UserWarning(f"{self.component_type[:-1]} '{component_fullname}' is unchanged. 
No patch to compute") # Write changes to modules.json - self.modules_json.add_patch_entry(component, self.modules_repo.remote_url, component_dir, patch_relpath) + self.modules_json.add_patch_entry( + self.component_type, component, self.modules_repo.remote_url, component_dir, patch_relpath + ) log.debug(f"Wrote patch path for {self.component_type[:-1]} {component} to modules.json") # Show the changes made to the module - ModulesDiffer.print_diff( + ComponentsDiffer.print_diff( component, self.modules_repo.repo_path, component_install_dir, @@ -166,7 +170,9 @@ def remove(self, component): component_fullname = str(Path(self.component_type, component_dir, component)) # Verify that the component has an entry in the modules.json file - if not self.modules_json.module_present(component, self.modules_repo.remote_url, component_dir): + if not self.modules_json.component_present( + component, self.modules_repo.remote_url, component_dir, self.component_type + ): raise UserWarning( f"The '{component_fullname}' {self.component_type[:-1]} does not have an entry in the 'modules.json' file. Cannot compute patch" ) @@ -202,7 +208,7 @@ def remove(self, component): # Try to apply the patch in reverse and move resulting files to module dir temp_component_dir = self.modules_json.try_apply_patch_reverse( - component, self.modules_repo.repo_path, patch_relpath, component_path + self.component_type, component, self.modules_repo.repo_path, patch_relpath, component_path ) try: for file in Path(temp_component_dir).glob("*"): diff --git a/nf_core/components/remove.py b/nf_core/components/remove.py index c2c5843918..37208629c0 100644 --- a/nf_core/components/remove.py +++ b/nf_core/components/remove.py @@ -68,7 +68,7 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals if not component_dir.exists(): log.error(f"Installation directory '{component_dir}' does not exist.") - if modules_json.module_present(component, self.modules_repo.remote_url, repo_path): + if modules_json.component_present(component, self.modules_repo.remote_url, repo_path, self.component_type): log.error(f"Found entry for '{component}' in 'modules.json'. Removing...") modules_json.remove_entry(self.component_type, component, self.modules_repo.remote_url, repo_path) return False diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 97aba9aa20..1e80b05e3a 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -10,13 +10,13 @@ import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.components_utils import ( get_components_to_install, prompt_component_version_sha, ) from nf_core.components.install import ComponentInstall from nf_core.components.remove import ComponentRemove -from nf_core.modules.modules_differ import ModulesDiffer from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import ModulesRepo from nf_core.utils import plural_es, plural_s, plural_y @@ -58,7 +58,7 @@ def __init__( self.branch = branch def _parameter_checks(self): - """Checks the compatibilty of the supplied parameters. + """Checks the compatibility of the supplied parameters. Raises: UserWarning: if any checks fail. 
@@ -233,7 +233,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr f"Writing diff file for {self.component_type[:-1]} '{component_fullname}' to '{self.save_diff_fn}'" ) try: - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( self.save_diff_fn, component, modules_repo.repo_path, @@ -275,7 +275,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr self.manage_changes_in_linked_components(component, modules_to_update, subworkflows_to_update) elif self.show_diff: - ModulesDiffer.print_diff( + ComponentsDiffer.print_diff( component, modules_repo.repo_path, component_dir, @@ -323,7 +323,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr if self.save_diff_fn: # Write the modules.json diff to the file - ModulesDiffer.append_modules_json_diff( + ComponentsDiffer.append_modules_json_diff( self.save_diff_fn, old_modules_json, self.modules_json.get_modules_json(), @@ -459,7 +459,9 @@ def get_single_component_info(self, component): self.modules_repo.setup_branch(current_branch) # If there is a patch file, get its filename - patch_fn = self.modules_json.get_patch_fn(component, self.modules_repo.remote_url, install_dir) + patch_fn = self.modules_json.get_patch_fn( + self.component_type, component, self.modules_repo.remote_url, install_dir + ) return (self.modules_repo, component, sha, patch_fn) @@ -705,7 +707,12 @@ def get_all_components_info(self, branch=None): # Add patch filenames to the components that have them components_info = [ - (repo, comp, sha, self.modules_json.get_patch_fn(comp, repo.remote_url, repo.repo_path)) + ( + repo, + comp, + sha, + self.modules_json.get_patch_fn(self.component_type, comp, repo.remote_url, repo.repo_path), + ) for repo, comp, sha in components_info ] @@ -820,7 +827,9 @@ def try_apply_patch( shutil.copytree(component_install_dir, temp_component_dir) try: - new_files = ModulesDiffer.try_apply_patch(component, repo_path, patch_path, temp_component_dir) + new_files = ComponentsDiffer.try_apply_patch( + self.component_type, component, repo_path, patch_path, temp_component_dir + ) except LookupError: # Patch failed. Save the patch file by moving to the install dir shutil.move(patch_path, Path(component_install_dir, patch_path.relative_to(component_dir))) @@ -838,7 +847,7 @@ def try_apply_patch( # Create the new patch file log.debug("Regenerating patch file") - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( Path(temp_component_dir, patch_path.relative_to(component_dir)), component, repo_path, @@ -858,7 +867,12 @@ def try_apply_patch( # Add the patch file to the modules.json file self.modules_json.add_patch_entry( - component, self.modules_repo.remote_url, repo_path, patch_relpath, write_file=write_file + self.component_type, + component, + self.modules_repo.remote_url, + repo_path, + patch_relpath, + write_file=write_file, ) return True diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index c462c6a47e..a0002ed424 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -2,7 +2,7 @@ # docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile . # See https://docs.renovatebot.com/docker/#digest-pinning for why a digest is used. 
-FROM gitpod/workspace-base@sha256:f189a4195c3861365356f9c1b438ab26fd88e1ff46ce2843afc62861fc982e0c +FROM gitpod/workspace-base@sha256:12853f7c901eb2b677a549cb112c85f9679d18feb30093bcc63aa252540ecad9 USER root @@ -23,9 +23,9 @@ RUN apt-get update --quiet && \ add-apt-repository -y ppa:apptainer/ppa && \ apt-get update --quiet && \ apt-get install --quiet --yes apptainer && \ - wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh && \ - bash Miniconda3-latest-Linux-x86_64.sh -b -p /opt/conda && \ - rm Miniconda3-latest-Linux-x86_64.sh && \ + wget --quiet https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-x86_64.sh && \ + bash Miniforge3-Linux-x86_64.sh -b -p /opt/conda && \ + rm Miniforge3-Linux-x86_64.sh && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* diff --git a/nf_core/module-template/meta.yml b/nf_core/module-template/meta.yml index 9d3f3c1c12..d9d1cc8ae8 100644 --- a/nf_core/module-template/meta.yml +++ b/nf_core/module-template/meta.yml @@ -20,48 +20,67 @@ tools: tool_dev_url: "{{ tool_dev_url }}" doi: "" licence: {{ tool_licence }} + identifier: {{ tool_identifier }} {% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as input {% endif -%} input: #{% if has_meta %} Only when we have meta - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. `[ id:'sample1', single_end:false ]` + - - meta: + type: map + description: | + Groovy Map containing sample information + e.g. `[ id:'sample1', single_end:false ]` {% endif %} {% if not_empty_template -%} ## TODO nf-core: Delete / customise this example input {%- endif %} - - {{ 'bam:' if not_empty_template else "input:" }} - type: file - description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} - pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + - {{ 'bam:' if not_empty_template else "input:" }} + type: file + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + ontologies: + {% if not_empty_template -%} + - edam: "http://edamontology.org/format_25722" + - edam: "http://edamontology.org/format_2573" + - edam: "http://edamontology.org/format_3462" + {% else %} + - edam: "" + {%- endif %} {% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as output {% endif -%} output: + - {{ 'bam:' if not_empty_template else "output:" }} #{% if has_meta -%} Only when we have meta - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. `[ id:'sample1', single_end:false ]` - {% endif %} - - versions: - type: file - description: File containing software versions - pattern: "versions.yml" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
`[ id:'sample1', single_end:false ]` + {%- endif %} {% if not_empty_template -%} - ## TODO nf-core: Delete / customise this example output + ## TODO nf-core: Delete / customise this example output {%- endif %} - - {{ 'bam:' if not_empty_template else "output:" }} - type: file - description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} - pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + - {{ '"*.bam":' if not_empty_template else '"*":' }} + type: file + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + ontologies: + {% if not_empty_template -%} + - edam: "http://edamontology.org/format_25722" + - edam: "http://edamontology.org/format_2573" + - edam: "http://edamontology.org/format_3462" + {% else -%} + - edam: "" + {%- endif %} + - versions: + - "versions.yml": + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "{{ author }}" diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 017b3965b4..49012cff40 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -14,20 +14,22 @@ import questionary import rich import rich.progress +import ruamel.yaml import nf_core.components import nf_core.components.nfcore_component import nf_core.modules.modules_utils import nf_core.utils +from nf_core.components.components_utils import get_biotools_id from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.pipelines.lint_utils import console +from nf_core.pipelines.lint_utils import console, run_prettier_on_file log = logging.getLogger(__name__) from .environment_yml import environment_yml from .main_nf import main_nf -from .meta_yml import meta_yml +from .meta_yml import meta_yml, obtain_correct_and_specified_inputs, obtain_correct_and_specified_outputs, read_meta_yml from .module_changes import module_changes from .module_deprecations import module_deprecations from .module_patch import module_patch @@ -46,6 +48,9 @@ class ModuleLint(ComponentLint): environment_yml = environment_yml main_nf = main_nf meta_yml = meta_yml + obtain_correct_and_specified_inputs = obtain_correct_and_specified_inputs + obtain_correct_and_specified_outputs = obtain_correct_and_specified_outputs + read_meta_yml = read_meta_yml module_changes = module_changes module_deprecations = module_deprecations module_patch = module_patch @@ -57,6 +62,7 @@ def __init__( self, directory: Union[str, Path], fail_warned: bool = False, + fix: bool = False, remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -67,6 +73,7 @@ def __init__( component_type="modules", directory=directory, fail_warned=fail_warned, + fix=fix, remote_url=remote_url, branch=branch, no_pull=no_pull, @@ -237,6 +244,12 @@ def lint_module( # Otherwise run all the lint tests else: + mod.get_inputs_from_main_nf() + mod.get_outputs_from_main_nf() + # Update meta.yml file if requested + if self.fix: + self.update_meta_yml_file(mod) + if self.repo_type == "pipeline" and self.modules_json and mod.repo_url: # Set correct sha version = self.modules_json.get_module_version(mod.component_name, mod.repo_url, mod.org) @@ -256,3 +269,104 @@ def lint_module( self.failed += warned self.failed += [LintResult(mod, *m) for m in mod.failed] + + def update_meta_yml_file(self, mod): + """ + Update the meta.yml file with 
the correct inputs and outputs
+        """
+        meta_yml = self.read_meta_yml(mod)
+        corrected_meta_yml = meta_yml.copy()
+        yaml = ruamel.yaml.YAML()
+        yaml.preserve_quotes = True
+        yaml.indent(mapping=2, sequence=2, offset=0)
+
+        # Obtain inputs and outputs from main.nf and meta.yml
+        # Used to compare only the structure of channels and elements
+        # Do not compare features to allow for custom features in meta.yml (i.e. pattern)
+        if "input" in meta_yml:
+            correct_inputs, meta_inputs = self.obtain_correct_and_specified_inputs(mod, meta_yml)
+        if "output" in meta_yml:
+            correct_outputs, meta_outputs = self.obtain_correct_and_specified_outputs(mod, meta_yml)
+
+        if "input" in meta_yml and correct_inputs != meta_inputs:
+            log.debug(
+                f"Correct inputs: '{correct_inputs}' differ from current inputs: '{meta_inputs}' in '{mod.meta_yml}'"
+            )
+            corrected_meta_yml["input"] = mod.inputs.copy()  # list of lists (channels) of dicts (elements)
+            for i, channel in enumerate(corrected_meta_yml["input"]):
+                for j, element in enumerate(channel):
+                    element_name = list(element.keys())[0]
+                    for k, meta_element in enumerate(meta_yml["input"]):
+                        try:
+                            # Handle old format of meta.yml: list of dicts (channels)
+                            if element_name in meta_element.keys():
+                                # Copy current features of that input element from meta.yml
+                                for feature in meta_element[element_name].keys():
+                                    if feature not in element[element_name].keys():
+                                        corrected_meta_yml["input"][i][j][element_name][feature] = meta_element[
+                                            element_name
+                                        ][feature]
+                                break
+                        except AttributeError:
+                            # Handle new format of meta.yml: list of lists (channels) of elements (dicts)
+                            for x, meta_ch_element in enumerate(meta_element):
+                                if element_name in meta_ch_element.keys():
+                                    # Copy current features of that input element from meta.yml
+                                    for feature in meta_element[x][element_name].keys():
+                                        if feature not in element[element_name].keys():
+                                            corrected_meta_yml["input"][i][j][element_name][feature] = meta_element[x][
+                                                element_name
+                                            ][feature]
+                                    break
+
+        if "output" in meta_yml and correct_outputs != meta_outputs:
+            log.debug(
+                f"Correct outputs: '{correct_outputs}' differ from current outputs: '{meta_outputs}' in '{mod.meta_yml}'"
+            )
+            corrected_meta_yml["output"] = mod.outputs.copy()  # list of dicts (channels) with list of dicts (elements)
+            for i, channel in enumerate(corrected_meta_yml["output"]):
+                ch_name = list(channel.keys())[0]
+                for j, element in enumerate(channel[ch_name]):
+                    element_name = list(element.keys())[0]
+                    for k, meta_element in enumerate(meta_yml["output"]):
+                        if element_name in meta_element.keys():
+                            # Copy current features of that output element from meta.yml
+                            for feature in meta_element[element_name].keys():
+                                if feature not in element[element_name].keys():
+                                    corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = meta_element[
+                                        element_name
+                                    ][feature]
+                            break
+                        elif ch_name in meta_element.keys():
+                            # When the previous output element was using the name of the channel
+                            # Copy current features of that output element from meta.yml
+                            try:
+                                # Handle old format of meta.yml
+                                for feature in meta_element[ch_name].keys():
+                                    if feature not in element[element_name].keys():
+                                        corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = (
+                                            meta_element[ch_name][feature]
+                                        )
+                            except AttributeError:
+                                # Handle new format of meta.yml
+                                for x, meta_ch_element in enumerate(meta_element[ch_name]):
+                                    for meta_ch_element_name in meta_ch_element.keys():
+                                        for feature in meta_ch_element[meta_ch_element_name].keys():
+                                            if feature not in 
element[element_name].keys(): + corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = ( + meta_ch_element[meta_ch_element_name][feature] + ) + break + + # Add bio.tools identifier + for i, tool in enumerate(corrected_meta_yml["tools"]): + tool_name = list(tool.keys())[0] + if "identifier" not in tool[tool_name]: + corrected_meta_yml["tools"][i][tool_name]["identifier"] = get_biotools_id( + mod.component_name if "/" not in mod.component_name else mod.component_name.split("/")[0] + ) + + with open(mod.meta_yml, "w") as fh: + log.info(f"Updating {mod.meta_yml}") + yaml.dump(corrected_meta_yml, fh) + run_prettier_on_file(fh.name) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index dbc1bed737..848e17130e 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -15,8 +15,8 @@ import nf_core import nf_core.modules.modules_utils +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.modules.modules_differ import ModulesDiffer log = logging.getLogger(__name__) @@ -50,7 +50,8 @@ def main_nf( # otherwise read the lines directly from the module lines: List[str] = [] if module.is_patched: - lines = ModulesDiffer.try_apply_patch( + lines = ComponentsDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, module.patch_path, @@ -269,7 +270,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): url = None line = raw_line.strip(" \n'\"}:") - # Catch preceeding "container " + # Catch preceding "container " if line.startswith("container"): line = line.replace("container", "").strip(" \n'\"}:") @@ -342,6 +343,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): continue try: container_url = "https://" + urlunparse(url) if not url.scheme == "https" else urlunparse(url) + log.debug(f"Trying to connect to URL: {container_url}") response = requests.head( container_url, stream=True, diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 4a0ef6e01e..d0268a40cc 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -1,12 +1,16 @@ import json +import logging from pathlib import Path +from typing import Union -import yaml +import ruamel.yaml from jsonschema import exceptions, validators +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.lint import ComponentLint, LintExceptionError from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.modules.modules_differ import ModulesDiffer + +log = logging.getLogger(__name__) def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None: @@ -39,12 +43,11 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None """ - module.get_inputs_from_main_nf() - module.get_outputs_from_main_nf() # Check if we have a patch file, get original file in that case - meta_yaml = None + meta_yaml = read_meta_yml(module_lint_object, module) if module.is_patched and module_lint_object.modules_repo.repo_path is not None: - lines = ModulesDiffer.try_apply_patch( + lines = ComponentsDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, module.patch_path, @@ -52,17 +55,15 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None reverse=True, ).get("meta.yml") if lines is 
not None: + yaml = ruamel.yaml.YAML() meta_yaml = yaml.safe_load("".join(lines)) if module.meta_yml is None: raise LintExceptionError("Module does not have a `meta.yml` file") if meta_yaml is None: - try: - with open(module.meta_yml) as fh: - meta_yaml = yaml.safe_load(fh) - module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml)) - except FileNotFoundError: - module.failed.append(("meta_yml_exists", "Module `meta.yml` does not exist", module.meta_yml)) - return + module.failed.append(("meta_yml_exists", "Module `meta.yml` does not exist", module.meta_yml)) + return + else: + module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml)) # Confirm that the meta.yml file is valid according to the JSON schema valid_meta_yml = False @@ -93,79 +94,8 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None ) ) - # Confirm that all input and output channels are specified + # Confirm that all input and output channels are correctly specified if valid_meta_yml: - if "input" in meta_yaml: - meta_input = [list(x.keys())[0] for x in meta_yaml["input"]] - for input in module.inputs: - if input in meta_input: - module.passed.append(("meta_input_main_only", f"`{input}` specified", module.meta_yml)) - else: - module.warned.append( - ( - "meta_input_main_only", - f"`{input}` is present as an input in the `main.nf`, but missing in `meta.yml`", - module.meta_yml, - ) - ) - # check if there are any inputs in meta.yml that are not in main.nf - for input in meta_input: - if input in module.inputs: - module.passed.append( - ( - "meta_input_meta_only", - f"`{input}` is present as an input in `meta.yml` and `main.nf`", - module.meta_yml, - ) - ) - else: - module.warned.append( - ( - "meta_input_meta_only", - f"`{input}` is present as an input in `meta.yml` but not in `main.nf`", - module.meta_yml, - ) - ) - - if "output" in meta_yaml and meta_yaml["output"] is not None: - meta_output = [list(x.keys())[0] for x in meta_yaml["output"]] - for output in module.outputs: - if output in meta_output: - module.passed.append(("meta_output_main_only", f"`{output}` specified", module.meta_yml)) - else: - module.warned.append( - ( - "meta_output_main_only", - f"`{output}` is present as an output in the `main.nf`, but missing in `meta.yml`", - module.meta_yml, - ) - ) - # check if there are any outputs in meta.yml that are not in main.nf - for output in meta_output: - if output in module.outputs: - module.passed.append( - ( - "meta_output_meta_only", - f"`{output}` is present as an output in `meta.yml` and `main.nf`", - module.meta_yml, - ) - ) - elif output == "meta": - module.passed.append( - ( - "meta_output_meta_only", - f"`{output}` is skipped for `meta.yml` outputs", - module.meta_yml, - ) - ) - else: - module.warned.append( - ( - "meta_output_meta_only", - f"`{output}` is present as an output in `meta.yml` but not in `main.nf`", - module.meta_yml, - ) - ) # confirm that the name matches the process name in main.nf if meta_yaml["name"].upper() == module.process_name: module.passed.append( @@ -183,3 +113,180 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None module.meta_yml, ) ) + # Check that inputs are specified in meta.yml + if len(module.inputs) > 0 and "input" not in meta_yaml: + module.failed.append( + ( + "meta_input", + "Inputs not specified in module `meta.yml`", + module.meta_yml, + ) + ) + elif len(module.inputs) > 0: + module.passed.append( + ( + "meta_input", + "Inputs specified in module 
`meta.yml`", + module.meta_yml, + ) + ) + else: + log.debug(f"No inputs specified in module `main.nf`: {module.component_name}") + # Check that all inputs are correctly specified + if "input" in meta_yaml: + correct_inputs, meta_inputs = obtain_correct_and_specified_inputs(module_lint_object, module, meta_yaml) + + if correct_inputs == meta_inputs: + module.passed.append( + ( + "correct_meta_inputs", + "Correct inputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + else: + module.failed.append( + ( + "correct_meta_inputs", + f"Module `meta.yml` does not match `main.nf`. Inputs should contain: {correct_inputs}\nRun `nf-core modules lint --fix` to update the `meta.yml` file.", + module.meta_yml, + ) + ) + + # Check that outputs are specified in meta.yml + if len(module.outputs) > 0 and "output" not in meta_yaml: + module.failed.append( + ( + "meta_output", + "Outputs not specified in module `meta.yml`", + module.meta_yml, + ) + ) + elif len(module.outputs) > 0: + module.passed.append( + ( + "meta_output", + "Outputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + # Check that all outputs are correctly specified + if "output" in meta_yaml: + correct_outputs, meta_outputs = obtain_correct_and_specified_outputs(module_lint_object, module, meta_yaml) + + if correct_outputs == meta_outputs: + module.passed.append( + ( + "correct_meta_outputs", + "Correct outputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + else: + module.failed.append( + ( + "correct_meta_outputs", + f"Module `meta.yml` does not match `main.nf`. Outputs should contain: {correct_outputs}\nRun `nf-core modules lint --fix` to update the `meta.yml` file.", + module.meta_yml, + ) + ) + + +def read_meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> Union[dict, None]: + """ + Read a `meta.yml` file and return it as a dictionary + + Args: + module_lint_object (ComponentLint): The lint object for the module + module (NFCoreComponent): The module to read + + Returns: + dict: The `meta.yml` file as a dictionary + """ + meta_yaml = None + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + # Check if we have a patch file, get original file in that case + if module.is_patched: + lines = ComponentsDiffer.try_apply_patch( + module.component_type, + module.component_name, + module_lint_object.modules_repo.repo_path, + module.patch_path, + Path(module.component_dir).relative_to(module.base_dir), + reverse=True, + ).get("meta.yml") + if lines is not None: + meta_yaml = yaml.load("".join(lines)) + if meta_yaml is None: + if module.meta_yml is None: + return None + with open(module.meta_yml) as fh: + meta_yaml = yaml.load(fh) + return meta_yaml + + +def obtain_correct_and_specified_inputs(_, module, meta_yaml): + """ + Obtain the list of correct inputs and the elements of each input channel. + + Args: + module (object): The module object. + meta_yaml (dict): The meta.yml dictionary. + + Returns: + tuple: A tuple containing two lists. The first list contains the correct inputs, + and the second list contains the inputs specified in meta.yml. 
+ """ + correct_inputs = [] + for input_channel in module.inputs: + channel_elements = [] + for element in input_channel: + channel_elements.append(list(element.keys())[0]) + correct_inputs.append(channel_elements) + + meta_inputs = [] + for input_channel in meta_yaml["input"]: + if isinstance(input_channel, list): # Correct format + channel_elements = [] + for element in input_channel: + channel_elements.append(list(element.keys())[0]) + meta_inputs.append(channel_elements) + elif isinstance(input_channel, dict): # Old format + meta_inputs.append(list(input_channel.keys())[0]) + + return correct_inputs, meta_inputs + + +def obtain_correct_and_specified_outputs(_, module, meta_yaml): + """ + Obtain the dictionary of correct outputs and elements of each output channel. + + Args: + module (object): The module object. + meta_yaml (dict): The meta.yml dictionary. + + Returns: + correct_outputs (dict): A dictionary containing the correct outputs and their elements. + meta_outputs (dict): A dictionary containing the outputs specified in meta.yml. + """ + correct_outputs = {} + for output_channel in module.outputs: + channel_name = list(output_channel.keys())[0] + channel_elements = [] + for element in output_channel[channel_name]: + channel_elements.append(list(element.keys())[0]) + correct_outputs[channel_name] = channel_elements + + meta_outputs = {} + for output_channel in meta_yaml["output"]: + channel_name = list(output_channel.keys())[0] + if isinstance(output_channel[channel_name], list): # Correct format + channel_elements = [] + for element in output_channel[channel_name]: + channel_elements.append(list(element.keys())[0]) + meta_outputs[channel_name] = channel_elements + elif isinstance(output_channel[channel_name], dict): # Old format + meta_outputs[channel_name] = [] + + return correct_outputs, meta_outputs diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py index eb76f4b88b..121de00c0a 100644 --- a/nf_core/modules/lint/module_changes.py +++ b/nf_core/modules/lint/module_changes.py @@ -7,7 +7,7 @@ from pathlib import Path import nf_core.modules.modules_repo -from nf_core.modules.modules_differ import ModulesDiffer +from nf_core.components.components_differ import ComponentsDiffer def module_changes(module_lint_object, module): @@ -30,7 +30,8 @@ def module_changes(module_lint_object, module): tempdir = tempdir_parent / "tmp_module_dir" shutil.copytree(module.component_dir, tempdir) try: - new_lines = ModulesDiffer.try_apply_patch( + new_lines = ComponentsDiffer.try_apply_patch( + module.component_type, module.component_name, module.org, module.patch_path, diff --git a/nf_core/modules/lint/module_patch.py b/nf_core/modules/lint/module_patch.py index 29bf78a66b..6347c5c553 100644 --- a/nf_core/modules/lint/module_patch.py +++ b/nf_core/modules/lint/module_patch.py @@ -1,7 +1,7 @@ from pathlib import Path +from ...components.components_differ import ComponentsDiffer from ...components.nfcore_component import NFCoreComponent -from ..modules_differ import ModulesDiffer def module_patch(module_lint_obj, module: NFCoreComponent): @@ -66,11 +66,11 @@ def check_patch_valid(module, patch_path): continue topath = Path(line.split(" ")[1].strip("\n")) if frompath == Path("/dev/null"): - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.CREATED)) + paths_in_patch.append((frompath, ComponentsDiffer.DiffEnum.CREATED)) elif topath == Path("/dev/null"): - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.REMOVED)) + paths_in_patch.append((frompath, 
ComponentsDiffer.DiffEnum.REMOVED)) elif frompath == topath: - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.CHANGED)) + paths_in_patch.append((frompath, ComponentsDiffer.DiffEnum.CHANGED)) else: module.failed.append( ( @@ -105,7 +105,7 @@ def check_patch_valid(module, patch_path): # Warn about any created or removed files passed = True for path, diff_status in paths_in_patch: - if diff_status == ModulesDiffer.DiffEnum.CHANGED: + if diff_status == ComponentsDiffer.DiffEnum.CHANGED: if not Path(module.base_dir, path).exists(): module.failed.append( ( @@ -116,7 +116,7 @@ def check_patch_valid(module, patch_path): ) passed = False continue - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: if not Path(module.base_dir, path).exists(): module.failed.append( ( @@ -130,7 +130,7 @@ def check_patch_valid(module, patch_path): module.warned.append( ("patch", f"Patch file performs file creation of {path}. This is discouraged."), patch_path ) - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: if Path(module.base_dir, path).exists(): module.failed.append( ( @@ -161,7 +161,8 @@ def patch_reversible(module_lint_object, module, patch_path): (bool): False if any test failed, True otherwise """ try: - ModulesDiffer.try_apply_patch( + ComponentsDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, patch_path, diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 08e117b1ad..a9ba3b442c 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -19,7 +19,7 @@ from nf_core.modules.modules_repo import ModulesRepo from nf_core.pipelines.lint_utils import dump_json_with_prettier -from .modules_differ import ModulesDiffer +from ..components.components_differ import ComponentsDiffer log = logging.getLogger(__name__) @@ -308,7 +308,9 @@ def determine_branches_and_shas( # If the module/subworkflow is patched patch_file = component_path / f"{component}.diff" if patch_file.is_file(): - temp_module_dir = self.try_apply_patch_reverse(component, install_dir, patch_file, component_path) + temp_module_dir = self.try_apply_patch_reverse( + component_type, component, install_dir, patch_file, component_path + ) correct_commit_sha = self.find_correct_commit_sha( component_type, component, temp_module_dir, modules_repo ) @@ -432,7 +434,7 @@ def move_component_to_local(self, component_type: str, component: str, repo_name to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}" shutil.move(str(current_path), local_dir / to_name) - def unsynced_components(self) -> Tuple[List[str], List[str], dict]: + def unsynced_components(self) -> Tuple[List[str], List[str], Dict]: """ Compute the difference between the modules/subworkflows in the directory and the modules/subworkflows in the 'modules.json' file. 
This is done by looking at all @@ -805,7 +807,7 @@ def remove_entry(self, component_type, name, repo_url, install_dir, removed_by=N return False - def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, write_file=True): + def add_patch_entry(self, component_type, component_name, repo_url, install_dir, patch_filename, write_file=True): """ Adds (or replaces) the patch entry for a module """ @@ -815,9 +817,11 @@ def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, wr if repo_url not in self.modules_json["repos"]: raise LookupError(f"Repo '{repo_url}' not present in 'modules.json'") - if module_name not in self.modules_json["repos"][repo_url]["modules"][install_dir]: - raise LookupError(f"Module '{install_dir}/{module_name}' not present in 'modules.json'") - self.modules_json["repos"][repo_url]["modules"][install_dir][module_name]["patch"] = str(patch_filename) + if component_name not in self.modules_json["repos"][repo_url][component_type][install_dir]: + raise LookupError( + f"{component_type[:-1].title()} '{install_dir}/{component_name}' not present in 'modules.json'" + ) + self.modules_json["repos"][repo_url][component_type][install_dir][component_name]["patch"] = str(patch_filename) if write_file: self.dump() @@ -833,17 +837,17 @@ def remove_patch_entry(self, module_name, repo_url, install_dir, write_file=True if write_file: self.dump() - def get_patch_fn(self, module_name, repo_url, install_dir): + def get_patch_fn(self, component_type, component_name, repo_url, install_dir): """ - Get the patch filename of a module + Get the patch filename of a component Args: - module_name (str): The name of the module - repo_url (str): The URL of the repository containing the module - install_dir (str): The name of the directory where modules are installed + component_name (str): The name of the component + repo_url (str): The URL of the repository containing the component + install_dir (str): The name of the directory where components are installed Returns: - (str): The patch filename for the module, None if not present + (str): The patch filename for the component, None if not present """ if self.modules_json is None: self.load() @@ -851,48 +855,53 @@ def get_patch_fn(self, module_name, repo_url, install_dir): path = ( self.modules_json["repos"] .get(repo_url, {}) - .get("modules") + .get(component_type) .get(install_dir) - .get(module_name, {}) + .get(component_name, {}) .get("patch") ) return Path(path) if path is not None else None - def try_apply_patch_reverse(self, module, repo_name, patch_relpath, module_dir): + def try_apply_patch_reverse(self, component_type, component, repo_name, patch_relpath, component_dir): """ - Try reverse applying a patch file to the modified module files + Try reverse applying a patch file to the modified module or subworkflow files Args: - module (str): The name of the module - repo_name (str): The name of the repository where the module resides + component_type (str): The type of component [modules, subworkflows] + component (str): The name of the module or subworkflow + repo_name (str): The name of the repository where the component resides patch_relpath (Path | str): The path to patch file in the pipeline - module_dir (Path | str): The module directory in the pipeline + component_dir (Path | str): The component directory in the pipeline Returns: - (Path | str): The path of the folder where the module patched files are + (Path | str): The path of the folder where the component patched files are Raises: LookupError: 
If patch was not applied """ - module_fullname = str(Path(repo_name, module)) + component_fullname = str(Path(repo_name, component)) patch_path = Path(self.directory / patch_relpath) try: - new_files = ModulesDiffer.try_apply_patch(module, repo_name, patch_path, module_dir, reverse=True) + new_files = ComponentsDiffer.try_apply_patch( + component_type, component, repo_name, patch_path, component_dir, reverse=True + ) except LookupError as e: - raise LookupError(f"Failed to apply patch in reverse for module '{module_fullname}' due to: {e}") + raise LookupError( + f"Failed to apply patch in reverse for {component_type[:-1]} '{component_fullname}' due to: {e}" + ) # Write the patched files to a temporary directory log.debug("Writing patched files to tmpdir") temp_dir = Path(tempfile.mkdtemp()) - temp_module_dir = temp_dir / module - temp_module_dir.mkdir(parents=True, exist_ok=True) + temp_component_dir = temp_dir / component + temp_component_dir.mkdir(parents=True, exist_ok=True) for file, new_content in new_files.items(): - fn = temp_module_dir / file + fn = temp_component_dir / file with open(fn, "w") as fh: fh.writelines(new_content) - return temp_module_dir + return temp_component_dir def repo_present(self, repo_name): """ @@ -908,20 +917,21 @@ def repo_present(self, repo_name): return repo_name in self.modules_json.get("repos", {}) - def module_present(self, module_name, repo_url, install_dir): + def component_present(self, module_name, repo_url, install_dir, component_type): """ Checks if a module is present in the modules.json file Args: module_name (str): Name of the module repo_url (str): URL of the repository install_dir (str): Name of the directory where modules are installed + component_type (str): Type of component [modules, subworkflows] Returns: (bool): Whether the module is present in the 'modules.json' file """ if self.modules_json is None: self.load() assert self.modules_json is not None # mypy - return module_name in self.modules_json.get("repos", {}).get(repo_url, {}).get("modules", {}).get( + return module_name in self.modules_json.get("repos", {}).get(repo_url, {}).get(component_type, {}).get( install_dir, {} ) @@ -1119,8 +1129,10 @@ def dump(self, run_prettier: bool = False) -> None: """ Sort the modules.json, and write it to file """ + # Sort the modules.json + if self.modules_json is None: + self.load() if self.modules_json is not None: - # Sort the modules.json self.modules_json["repos"] = nf_core.utils.sort_dictionary(self.modules_json["repos"]) if run_prettier: dump_json_with_prettier(self.modules_json_path, self.modules_json) diff --git a/nf_core/pipeline-template/.editorconfig b/nf_core/pipeline-template/.editorconfig index 5145366e50..c78ec8e960 100644 --- a/nf_core/pipeline-template/.editorconfig +++ b/nf_core/pipeline-template/.editorconfig @@ -11,7 +11,7 @@ indent_style = space [*.{md,yml,yaml,html,css,scss,js}] indent_size = 2 -{%- if modules %} +{% if modules -%} # These files are edited and tested upstream in nf-core/modules [/modules/nf-core/**] charset = unset @@ -27,7 +27,7 @@ trim_trailing_whitespace = unset indent_style = unset {%- endif %} -{%- if email %} +{% if email -%} [/assets/email*] indent_size = unset {%- endif %} diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index f331d38673..37970c09e8 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -1,4 +1,4 @@ -# {{ name }}: Contributing Guidelines +# `{{ name 
}}`: Contributing Guidelines Hi there! Many thanks for taking an interest in improving {{ name }}. @@ -30,14 +30,14 @@ If you're not used to this workflow with git, you can start with some [docs from ## Tests -{%- if test_config %} +{% if test_config -%} You have the option to test your changes locally by running the pipeline. For receiving warnings about process selectors and other `debug` information, it is recommended to use the debug profile. Execute all the tests with the following command: ```bash nf-test test --profile debug,test,docker --verbose ``` -{% endif %} +{% endif -%} When you create a pull request with changes, [GitHub Actions](https://github.com/features/actions) will run automatic tests. Typically, pull-requests are only fully reviewed when these tests are passing, though of course we can help out before then. @@ -66,7 +66,7 @@ These tests are run both with the latest available version of `Nextflow` and als - On your own fork, make a new branch `patch` based on `upstream/master`. - Fix the bug, and bump version (X.Y.Z+1). -- A PR should be made on `master` from patch to directly this particular bug. +- Open a pull-request from `patch` to `master` with the changes. {% if is_nfcore -%} @@ -78,13 +78,13 @@ For further information/help, please consult the [{{ name }} documentation](http ## Pipeline contribution conventions -To make the {{ name }} code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written. +To make the `{{ name }}` code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written. ### Adding a new step If you wish to contribute a new step, please use the following coding standards: -1. Define the corresponding input channel into your new process from the expected previous process channel +1. Define the corresponding input channel into your new process from the expected previous process channel. 2. Write the process block (see below). 3. Define the output channel if needed (see below). 4. Add any new parameters to `nextflow.config` with a default (see below). @@ -95,17 +95,17 @@ If you wish to contribute a new step, please use the following coding standards: {%- if multiqc %} 9. Update MultiQC config `assets/multiqc_config.yml` so relevant suffixes, file name clean up and module plots are in the appropriate order. If applicable, add a [MultiQC](https://https://multiqc.info/) module. 10. Add a description of the output files and if relevant any appropriate images from the MultiQC report to `docs/output.md`. - {% endif %} + {%- endif %} ### Default values -Parameters should be initialised / defined with default values in `nextflow.config` under the `params` scope. +Parameters should be initialised / defined with default values within the `params` scope in `nextflow.config`. Once there, use `nf-core pipelines schema build` to add to `nextflow_schema.json`. ### Default processes resource requirements -Sensible defaults for process resource requirements (CPUs / memory / time) for a process should be defined in `conf/base.config`. These should generally be specified generic with `withLabel:` selectors so they can be shared across multiple processes/steps of the pipeline. 
A nf-core standard set of labels that should be followed where possible can be seen in the [nf-core pipeline template](https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config), which has the default process as a single core-process, and then different levels of multi-core configurations for increasingly large memory requirements defined with standardised labels.
+Sensible defaults for process resource requirements (CPUs / memory / time) for a process should be defined in `conf/base.config`. These should generally be specified generically with `withLabel:` selectors so they can be shared across multiple processes/steps of the pipeline. An nf-core standard set of labels that should be followed where possible can be seen in the [nf-core pipeline template](https://github.com/nf-core/tools/blob/main/nf_core/pipeline-template/conf/base.config), which has the default process as a single core-process, and then different levels of multi-core configurations for increasingly large memory requirements defined with standardised labels.

 The process resources can be passed on to the tool dynamically within the process with the `${task.cpus}` and `${task.memory}` variables in the `script:` block.

@@ -139,4 +139,4 @@ To get started:

 Devcontainer specs:

 - [DevContainer config](.devcontainer/devcontainer.json)
-  {% endif %}
+  {%- endif %}
diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml
index dc0450be43..1ca2ac2c74 100644
--- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml
+++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml
@@ -14,16 +14,19 @@ on:
 jobs:
   run-platform:
     name: Run AWS full tests
-    if: github.repository == '{{ name }}' && github.event.review.state == 'approved'
+    # Run only if the PR is approved by at least 2 reviewers and targets the master branch, or if triggered manually
+    if: github.repository == '{{ name }}' && github.event.review.state == 'approved' && github.event.pull_request.base.ref == 'master' || github.event_name == 'workflow_dispatch'
     runs-on: ubuntu-latest
     steps:
       - uses: octokit/request-action@v2.x
+        if: github.event_name != 'workflow_dispatch'
         id: check_approvals
         with:
-          route: GET /repos/{%- raw -%}${{ github.repository }}/pulls/${{ github.event.review.number }}/reviews
+          route: GET /repos/{%- raw -%}${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews?per_page=100
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       - id: test_variables
+        if: github.event_name != 'workflow_dispatch'
         run: |
           JSON_RESPONSE='${{ steps.check_approvals.outputs.data }}'{% endraw %}
           CURRENT_APPROVALS_COUNT=$(echo $JSON_RESPONSE | jq -c '[.[] | select(.state | contains("APPROVED")) ] | length')
diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml
index 63fa99cec8..9db393d9f0 100644
--- a/nf_core/pipeline-template/.github/workflows/ci.yml
+++ b/nf_core/pipeline-template/.github/workflows/ci.yml
@@ -1,5 +1,5 @@
 name: nf-core CI
-# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
+# {% raw %}This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
 on:
   push:
     branches:
@@ -7,40 +7,79 @@ on:
   pull_request:
   release:
     types: [published]
+  workflow_dispatch:

 env:
   NXF_ANSI_LOG: false
+  NXF_SINGULARITY_CACHEDIR: ${{ github.workspace }}/.singularity
+  NXF_SINGULARITY_LIBRARYDIR: 
${{ github.workspace }}/.singularity concurrency: - group: "{% raw %}${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}{% endraw %}" + group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" cancel-in-progress: true jobs: test: - name: Run pipeline with test data + name: "Run pipeline with test data (${{ matrix.NXF_VER }} | ${{ matrix.test_name }} | ${{ matrix.profile }})" # Only run on push if this is the nf-core dev branch (merged PRs) - if: "{% raw %}${{{% endraw %} github.event_name != 'push' || (github.event_name == 'push' && github.repository == '{{ name }}') {% raw %}}}{% endraw %}" + if: "${{{% endraw %} github.event_name != 'push' || (github.event_name == 'push' && github.repository == '{{ name }}') {% raw %}}}" runs-on: ubuntu-latest strategy: matrix: NXF_VER: - - "23.10.0" + - "24.04.2" - "latest-everything" + profile: + - "conda" + - "docker" + - "singularity" + test_name: + - "test" + isMaster: + - ${{ github.base_ref == 'master' }} + # Exclude conda and singularity on dev + exclude: + - isMaster: false + profile: "conda" + - isMaster: false + profile: "singularity" steps: - name: Check out pipeline code - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - - name: Install Nextflow + - name: Set up Nextflow uses: nf-core/setup-nextflow@v2 with: - version: "{% raw %}${{ matrix.NXF_VER }}{% endraw %}" + version: "${{ matrix.NXF_VER }}" - - name: Disk space cleanup + - name: Set up Apptainer + if: matrix.profile == 'singularity' + uses: eWaterCycle/setup-apptainer@main + + - name: Set up Singularity + if: matrix.profile == 'singularity' + run: | + mkdir -p $NXF_SINGULARITY_CACHEDIR + mkdir -p $NXF_SINGULARITY_LIBRARYDIR + + - name: Set up Miniconda + if: matrix.profile == 'conda' + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3 + with: + miniconda-version: "latest" + auto-update-conda: true + conda-solver: libmamba + channels: conda-forge,bioconda + + - name: Set up Conda + if: matrix.profile == 'conda' + run: | + echo $(realpath $CONDA)/condabin >> $GITHUB_PATH + echo $(realpath python) >> $GITHUB_PATH + + - name: Clean up Disk space uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1 - - name: Run pipeline with test data - # TODO nf-core: You can customise CI pipeline run tests as required - # For example: adding multiple test runs with different parameters - # Remember that you can parallelise this by using strategy.matrix + - name: "Run pipeline with test data ${{ matrix.NXF_VER }} | ${{ matrix.test_name }} | ${{ matrix.profile }}" run: | - nextflow run ${GITHUB_WORKSPACE} -profile test,docker --outdir ./results + nextflow run ${GITHUB_WORKSPACE} -profile ${{ matrix.test_name }},${{ matrix.profile }} --outdir ./results{% endraw %} diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index e7a28e5ac4..1bc42469c4 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -35,13 +35,15 @@ jobs: - name: Disk space cleanup uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1 - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" 
architecture: "x64"
-      - uses: eWaterCycle/setup-singularity@931d4e31109e875b13309ae1d07c70ca8fbc8537 # v7
+
+      - name: Setup Apptainer
+        uses: eWaterCycle/setup-apptainer@4bb22c52d4f63406c49e94c804632975787312b3 # v2.0.0
         with:
-          singularity-version: 3.8.3
+          apptainer-version: 1.3.4

       - name: Install dependencies
         run: |
@@ -54,33 +56,64 @@ jobs:
           echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV}
           echo "{% raw %}REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> ${GITHUB_ENV}

+      - name: Make a cache directory for the container images
+        run: |
+          mkdir -p ./singularity_container_images
+
       - name: Download the pipeline
         env:
-          NXF_SINGULARITY_CACHEDIR: ./
+          NXF_SINGULARITY_CACHEDIR: ./singularity_container_images
         run: |
           nf-core pipelines download ${{ env.REPO_LOWERCASE }} \
             --revision ${{ env.REPO_BRANCH }} \
             --outdir ./${{ env.REPOTITLE_LOWERCASE }} \
             --compress "none" \
             --container-system 'singularity' \
-            --container-library "quay.io" -l "docker.io" -l "ghcr.io" \
+            --container-library "quay.io" -l "docker.io" -l "community.wave.seqera.io/library/" \
             --container-cache-utilisation 'amend' \
-            --download-configuration
+            --download-configuration 'yes'

       - name: Inspect download
         run: tree ./${{ env.REPOTITLE_LOWERCASE }}{% endraw %}{% if test_config %}{% raw %}

+      - name: Count the downloaded number of container images
+        id: count_initial
+        run: |
+          image_count=$(ls -1 ./singularity_container_images | wc -l | xargs)
+          echo "Initial container image count: $image_count"
+          echo "IMAGE_COUNT_INITIAL=$image_count" >> ${GITHUB_ENV}
+
       - name: Run the downloaded pipeline (stub)
         id: stub_run_pipeline
         continue-on-error: true
         env:
-          NXF_SINGULARITY_CACHEDIR: ./
+          NXF_SINGULARITY_CACHEDIR: ./singularity_container_images
           NXF_SINGULARITY_HOME_MOUNT: true
         run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results
       - name: Run the downloaded pipeline (stub run not supported)
         id: run_pipeline
         if: ${{ job.steps.stub_run_pipeline.status == failure() }}
         env:
-          NXF_SINGULARITY_CACHEDIR: ./
+          NXF_SINGULARITY_CACHEDIR: ./singularity_container_images
           NXF_SINGULARITY_HOME_MOUNT: true
-        run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results{% endraw %}{% endif %}
+        run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results
+
+      - name: Count the downloaded number of container images
+        id: count_afterwards
+        run: |
+          image_count=$(ls -1 ./singularity_container_images | wc -l | xargs)
+          echo "Post-pipeline run container image count: $image_count"
+          echo "IMAGE_COUNT_AFTER=$image_count" >> ${GITHUB_ENV}
+
+      - name: Compare container image counts
+        run: |
+          if [ "${{ env.IMAGE_COUNT_INITIAL }}" -ne "${{ env.IMAGE_COUNT_AFTER }}" ]; then
+            initial_count=${{ env.IMAGE_COUNT_INITIAL }}
+            final_count=${{ env.IMAGE_COUNT_AFTER }}
+            difference=$((final_count - initial_count))
+            echo "$difference additional container images were downloaded at runtime. The pipeline has no support for offline runs!"
+            tree ./singularity_container_images
+            exit 1
+          else
+            echo "The pipeline can be downloaded successfully!" 
+ fi{% endraw %}{% endif %} diff --git a/nf_core/pipeline-template/.github/workflows/fix-linting.yml b/nf_core/pipeline-template/.github/workflows/fix-linting.yml index 18e6f9e158..4fa3d54d75 100644 --- a/nf_core/pipeline-template/.github/workflows/fix-linting.yml +++ b/nf_core/pipeline-template/.github/workflows/fix-linting.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: # Use the @nf-core-bot token to check out so we can push later - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: token: ${{ secrets.nf_core_bot_auth_token }} @@ -32,7 +32,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} # Install and run pre-commit - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index dbba830ec1..cfdbcc12a9 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -14,10 +14,10 @@ jobs: pre-commit: runs-on: ubuntu-latest steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" @@ -31,21 +31,21 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Install Nextflow uses: nf-core/setup-nextflow@v2 - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" architecture: "x64" - name: read .nf-core.yml - uses: pietrobolcato/action-read-yaml@1.0.0 + uses: pietrobolcato/action-read-yaml@1.1.0 id: read_yml with: - config: ${{ github.workspace }}/.nf-core.yaml + config: ${{ github.workspace }}/.nf-core.yml - name: Install dependencies run: | @@ -74,7 +74,7 @@ jobs: - name: Upload linting log file artifact if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 with: name: linting-logs path: | diff --git a/nf_core/pipeline-template/.github/workflows/linting_comment.yml b/nf_core/pipeline-template/.github/workflows/linting_comment.yml index 908dcea159..63b20bb311 100644 --- a/nf_core/pipeline-template/.github/workflows/linting_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/linting_comment.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download lint results - uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 + uses: dawidd6/action-download-artifact@80620a5d27ce0ae443b965134db88467fc607b43 # v7 with: workflow: linting.yml workflow_conclusion: completed diff --git a/nf_core/pipeline-template/.github/workflows/release-announcements.yml b/nf_core/pipeline-template/.github/workflows/release-announcements.yml index 8fee061fdd..e1b654d34b 100644 --- 
a/nf_core/pipeline-template/.github/workflows/release-announcements.yml +++ b/nf_core/pipeline-template/.github/workflows/release-announcements.yml @@ -12,7 +12,7 @@ jobs: - name: get topics and convert to hashtags id: get_topics run: | - echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" >> $GITHUB_OUTPUT + echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" | sed 's/-//g' >> $GITHUB_OUTPUT - uses: rzr/fediverse-action@master with: @@ -31,7 +31,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.10" - name: Install dependencies diff --git a/nf_core/pipeline-template/.github/workflows/template_version_comment.yml b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml index 58db2eb63a..27737afb47 100644 --- a/nf_core/pipeline-template/.github/workflows/template_version_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml @@ -9,10 +9,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + with: + ref: ${{ github.event.pull_request.head.sha }} - name: Read template version from .nf-core.yml - uses: pietrobolcato/action-read-yaml@1.0.0 + uses: nichmor/minimal-read-yaml@v0.0.2 id: read_yml with: config: ${{ github.workspace }}/.nf-core.yml @@ -24,20 +26,21 @@ jobs: - name: Check nf-core outdated id: nf_core_outdated - run: pip list --outdated | grep nf-core + run: echo "OUTPUT=$(pip list --outdated | grep nf-core)" >> ${GITHUB_ENV} - name: Post nf-core template version comment uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 if: | - ${{ steps.nf_core_outdated.outputs.stdout }} =~ 'nf-core' + contains(env.OUTPUT, 'nf-core') with: repo-token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} allow-repeats: false message: | - ## :warning: Newer version of the nf-core template is available. - - Your pipeline is using an old version of the nf-core template: ${{ steps.read_yml.outputs['nf_core_version'] }}. - Please update your pipeline to the latest version. - - For more documentation on how to update your pipeline, please see the [nf-core documentation](https://github.com/nf-core/tools?tab=readme-ov-file#sync-a-pipeline-with-the-template) and [Synchronisation documentation](https://nf-co.re/docs/contributing/sync). + > [!WARNING] + > Newer version of the nf-core template is available. + > + > Your pipeline is using an old version of the nf-core template: ${{ steps.read_yml.outputs['nf_core_version'] }}. + > Please update your pipeline to the latest version. + > + > For more documentation on how to update your pipeline, please see the [nf-core documentation](https://github.com/nf-core/tools?tab=readme-ov-file#sync-a-pipeline-with-the-template) and [Synchronisation documentation](https://nf-co.re/docs/contributing/sync). 
#{%- endraw %} diff --git a/nf_core/pipeline-template/.gitignore b/nf_core/pipeline-template/.gitignore index 5124c9ac77..a42ce0162e 100644 --- a/nf_core/pipeline-template/.gitignore +++ b/nf_core/pipeline-template/.gitignore @@ -6,3 +6,4 @@ results/ testing/ testing* *.pyc +null/ diff --git a/nf_core/pipeline-template/.pre-commit-config.yaml b/nf_core/pipeline-template/.pre-commit-config.yaml index 4dc0f1dcd7..9e9f0e1c4e 100644 --- a/nf_core/pipeline-template/.pre-commit-config.yaml +++ b/nf_core/pipeline-template/.pre-commit-config.yaml @@ -7,7 +7,7 @@ repos: - prettier@3.2.5 - repo: https://github.com/editorconfig-checker/editorconfig-checker.python - rev: "2.7.3" + rev: "3.0.3" hooks: - id: editorconfig-checker alias: ec diff --git a/nf_core/pipeline-template/.prettierignore b/nf_core/pipeline-template/.prettierignore index c8e8ad9e11..7ecc9b61cb 100644 --- a/nf_core/pipeline-template/.prettierignore +++ b/nf_core/pipeline-template/.prettierignore @@ -1,4 +1,4 @@ -{%- if email %} +{% if email -%} email_template.html {%- endif %} {%- if adaptivecard %} diff --git a/nf_core/pipeline-template/.vscode/settings.json b/nf_core/pipeline-template/.vscode/settings.json new file mode 100644 index 0000000000..a33b527cc7 --- /dev/null +++ b/nf_core/pipeline-template/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "markdown.styles": ["public/vscode_markdown.css"] +} diff --git a/nf_core/pipeline-template/CITATIONS.md b/nf_core/pipeline-template/CITATIONS.md index 2373f1de7f..16da9a4207 100644 --- a/nf_core/pipeline-template/CITATIONS.md +++ b/nf_core/pipeline-template/CITATIONS.md @@ -15,10 +15,14 @@ {% if fastqc %}- [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) > Andrews, S. (2010). FastQC: A Quality Control Tool for High Throughput Sequence Data [Online]. -> {% endif %} > {% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) + +{%- endif %} + +{% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) > Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. -> {%- endif %} + +{%- endif %} ## Software packaging/containerisation tools @@ -41,4 +45,5 @@ - [Singularity](https://pubmed.ncbi.nlm.nih.gov/28494014/) > Kurtzer GM, Sochat V, Bauer MW. Singularity: Scientific containers for mobility of compute. PLoS One. 2017 May 11;12(5):e0177459. doi: 10.1371/journal.pone.0177459. eCollection 2017. PubMed PMID: 28494014; PubMed Central PMCID: PMC5426675. 
- > {%- endif %} + + {%- endif %} diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index beb45ed511..a8f2e60546 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -7,7 +7,7 @@ -{%- else %} +{% else %} # {{ name }} @@ -20,7 +20,7 @@ [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) [![nf-test](https://img.shields.io/badge/unit_tests-nf--test-337ab7.svg)](https://www.nf-test.com) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.10.0-23aa62.svg)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A524.04.2-23aa62.svg)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) @@ -128,7 +128,7 @@ An extensive list of references for the tools used by the pipeline can be found You can cite the `nf-core` publication as follows: {% else -%} -This pipeline uses code and infrastructure developed and maintained by the [nf-core](https://nf-co.re) community, reused here under the [MIT license](https://github.com/nf-core/tools/blob/master/LICENSE). +This pipeline uses code and infrastructure developed and maintained by the [nf-core](https://nf-co.re) community, reused here under the [MIT license](https://github.com/nf-core/tools/blob/main/LICENSE). {% endif -%} diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml index cd4e539b31..e6fd878986 100644 --- a/nf_core/pipeline-template/assets/multiqc_config.yml +++ b/nf_core/pipeline-template/assets/multiqc_config.yml @@ -3,11 +3,11 @@ report_comment: > This report has been generated by the {{ name }} analysis pipeline.{% if is_nfcore %} For information about how to interpret these results, please see the documentation.{% endif %} - {%- else %} + {%- else -%} This report has been generated by the {{ name }} analysis pipeline.{% if is_nfcore %} For information about how to interpret these results, please see the documentation.{% endif %} - {% endif %} + {%- endif %} report_section_order: "{{ name_noslash }}-methods-description": order: -1000 diff --git a/nf_core/pipeline-template/conf/base.config b/nf_core/pipeline-template/conf/base.config index 9c62bf0634..16a4fe6cdf 100644 --- a/nf_core/pipeline-template/conf/base.config +++ b/nf_core/pipeline-template/conf/base.config @@ -11,46 +11,46 @@ process { // TODO nf-core: Check the defaults for all processes - cpus = { check_max( 1 * task.attempt, 'cpus' ) } - memory = { check_max( 6.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 1 * task.attempt } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } maxRetries = 1 maxErrors = '-1' // Process-specific resource requirements - // NOTE - Please try and re-use the labels below as much as possible. + // NOTE - Please try and reuse the labels below as much as possible. // These labels are used and recognised by default in DSL2 files hosted on nf-core/modules. 
// If possible, it would be nice to keep the same label naming convention when // adding in your local modules too. // TODO nf-core: Customise requirements for specific processes. // See https://www.nextflow.io/docs/latest/config.html#config-process-selectors withLabel:process_single { - cpus = { check_max( 1 , 'cpus' ) } - memory = { check_max( 6.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 1 } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } } withLabel:process_low { - cpus = { check_max( 2 * task.attempt, 'cpus' ) } - memory = { check_max( 12.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 2 * task.attempt } + memory = { 12.GB * task.attempt } + time = { 4.h * task.attempt } } withLabel:process_medium { - cpus = { check_max( 6 * task.attempt, 'cpus' ) } - memory = { check_max( 36.GB * task.attempt, 'memory' ) } - time = { check_max( 8.h * task.attempt, 'time' ) } + cpus = { 6 * task.attempt } + memory = { 36.GB * task.attempt } + time = { 8.h * task.attempt } } withLabel:process_high { - cpus = { check_max( 12 * task.attempt, 'cpus' ) } - memory = { check_max( 72.GB * task.attempt, 'memory' ) } - time = { check_max( 16.h * task.attempt, 'time' ) } + cpus = { 12 * task.attempt } + memory = { 72.GB * task.attempt } + time = { 16.h * task.attempt } } withLabel:process_long { - time = { check_max( 20.h * task.attempt, 'time' ) } + time = { 20.h * task.attempt } } withLabel:process_high_memory { - memory = { check_max( 200.GB * task.attempt, 'memory' ) } + memory = { 200.GB * task.attempt } } withLabel:error_ignore { errorStrategy = 'ignore' diff --git a/nf_core/pipeline-template/conf/igenomes_ignored.config b/nf_core/pipeline-template/conf/igenomes_ignored.config new file mode 100644 index 0000000000..b4034d8243 --- /dev/null +++ b/nf_core/pipeline-template/conf/igenomes_ignored.config @@ -0,0 +1,9 @@ +/* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Nextflow config file for iGenomes paths +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Empty genomes dictionary to use when igenomes is ignored. +---------------------------------------------------------------------------------------- +*/ + +params.genomes = [:] diff --git a/nf_core/pipeline-template/conf/modules.config b/nf_core/pipeline-template/conf/modules.config index 35e861d9b1..1614e2b1a9 100644 --- a/nf_core/pipeline-template/conf/modules.config +++ b/nf_core/pipeline-template/conf/modules.config @@ -18,13 +18,15 @@ process { saveAs: { filename -> filename.equals('versions.yml') ? null : filename } ] - {% if fastqc -%} + {%- if fastqc %} + withName: FASTQC { ext.args = '--quiet' } {%- endif %} {%- if multiqc %} + withName: 'MULTIQC' { ext.args = { params.multiqc_title ? 
"--title \"$params.multiqc_title\"" : '' } publishDir = [ diff --git a/nf_core/pipeline-template/conf/test.config b/nf_core/pipeline-template/conf/test.config index 827e21b7b7..bea6f670d0 100644 --- a/nf_core/pipeline-template/conf/test.config +++ b/nf_core/pipeline-template/conf/test.config @@ -10,15 +10,18 @@ ---------------------------------------------------------------------------------------- */ +process { + resourceLimits = [ + cpus: 4, + memory: '15.GB', + time: '1.h' + ] +} + params { config_profile_name = 'Test profile' config_profile_description = 'Minimal test dataset to check pipeline function' - // Limit resources so that this can run on GitHub Actions - max_cpus = 2 - max_memory = '6.GB' - max_time = '6.h' - // Input data // TODO nf-core: Specify the paths to your test data on nf-core/test-datasets // TODO nf-core: Give any required params for the test so that command line flags are not needed diff --git a/nf_core/pipeline-template/docs/output.md b/nf_core/pipeline-template/docs/output.md index 5e42d50cc5..83d5d23fe3 100644 --- a/nf_core/pipeline-template/docs/output.md +++ b/nf_core/pipeline-template/docs/output.md @@ -12,12 +12,16 @@ The directories listed below will be created in the results directory after the The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes data using the following steps: -{% if fastqc %}- [FastQC](#fastqc) - Raw read QC{% endif %} -{% if multiqc %}- [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline{% endif %} +{% if fastqc -%} +- [FastQC](#fastqc) - Raw read QC + {%- endif %} + {%- if multiqc %} +- [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline + {%- endif %} - [Pipeline information](#pipeline-information) - Report metrics generated during the workflow execution -{%- if fastqc %} +{% if fastqc -%} ### FastQC @@ -32,7 +36,8 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d [FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your sequenced reads. It provides information about the quality score distribution across your reads, per base sequence content (%A/T/G/C), adapter contamination and overrepresented sequences. For further reading and documentation see the [FastQC help pages](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/). {%- endif %} -{%- if multiqc %} + +{% if multiqc -%} ### MultiQC @@ -49,7 +54,7 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d [MultiQC](http://multiqc.info) is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory. Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQC. The pipeline has special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see . -{% endif %} +{%- endif %} ### Pipeline information @@ -58,7 +63,8 @@ Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQ - `pipeline_info/` - Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`, `execution_trace.txt` and `pipeline_dag.dot`/`pipeline_dag.svg`. 
- {% if email %}- Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameter's are used when running the pipeline. {% endif %} + {%- if email %} + - Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameter's are used when running the pipeline.{% endif %} - Reformatted samplesheet files used as input to the pipeline: `samplesheet.valid.csv`. - Parameters used by the pipeline run: `params.json`. diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index ae2761797a..16e6220aaf 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -79,9 +79,8 @@ If you wish to repeatedly use the same parameters for multiple runs, rather than Pipeline settings can be provided in a `yaml` or `json` file via `-params-file `. -:::warning -Do not use `-c ` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args). -::: +> [!WARNING] +> Do not use `-c ` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args). The above pipeline run specified with a params file in yaml format: @@ -110,23 +109,21 @@ nextflow pull {{ name }} ### Reproducibility -It is a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. +It is a good idea to specify the pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. First, go to the [{{ name }} releases page](https://github.com/{{ name }}/releases) and find the latest pipeline version - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. Of course, you can switch to another version by changing the number after the `-r` flag. This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. {% if multiqc %}For example, at the bottom of the MultiQC reports.{% endif %} -To further assist in reproducbility, you can use share and re-use [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter. 
+To further assist in reproducibility, you can use share and reuse [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter. -:::tip -If you wish to share such profile (such as upload as supplementary material for academic publications), make sure to NOT include cluster specific paths to files, nor institutional specific profiles. -::: +> [!TIP] +> If you wish to share such profile (such as upload as supplementary material for academic publications), make sure to NOT include cluster specific paths to files, nor institutional specific profiles. ## Core Nextflow arguments -:::note -These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen). -::: +> [!NOTE] +> These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen) ### `-profile` @@ -134,19 +131,18 @@ Use this parameter to choose a configuration profile. Profiles can give configur Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Apptainer, Conda) - see below. -:::info -We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported. -::: +> [!IMPORTANT] +> We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported. {%- if nf_core_configs %} -The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to see if your system is available in these configs please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). +The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to check if your system is suported, please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). {% else %} {% endif %} Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important! They are loaded in sequence, so later profiles can overwrite earlier profiles. -If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended, since it can lead to different results on different machines dependent on the computer enviroment. +If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended, since it can lead to different results on different machines dependent on the computer environment. {%- if test_config %} @@ -185,13 +181,13 @@ Specify the path to a specific config file (this is a core Nextflow command). Se ### Resource requests -Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. 
Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher requests (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped. +Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the pipeline steps, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher resource requests (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped. To change the resource requests, please see the [max resources](https://nf-co.re/docs/usage/configuration#max-resources) and [tuning workflow resources](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources) section of the nf-core website. ### Custom Containers -In some cases you may wish to change which container or conda environment a step of the pipeline uses for a particular tool. By default nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However in some cases the pipeline specified version maybe out of date. +In some cases, you may wish to change the container or conda environment used by a pipeline step for a particular tool. By default, nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However, in some cases the pipeline specified version may be out of date. To use a different container from the default container or conda environment specified in a pipeline, please see the [updating tool versions](https://nf-co.re/docs/usage/configuration#updating-tool-versions) section of the nf-core website.
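A minimal sketch of the custom `-c` config flow those docs describe, assuming a hypothetical FASTQC override (the process selector, resource values and container tag below are illustrative placeholders, not values taken from this template):

process {
    withName: 'FASTQC' {
        cpus      = 4
        memory    = 16.GB
        container = 'biocontainers/fastqc:0.12.1--hdfd78af_0' // hypothetical pinned tag
    }
}

Such a file would be passed at runtime with `nextflow run <pipeline> -c custom.config`, keeping actual pipeline parameters in `-params-file` as the warning above requires.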
diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index 6516ebf90e..70bdc274e2 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -84,7 +84,8 @@ workflow { params.outdir, params.input ) - {% endif %} + {%- endif %} + // // WORKFLOW: Run main workflow // @@ -108,8 +109,10 @@ workflow { {%- endif %} params.outdir, params.monochrome_logs, - {% if adaptivecard or slackreport %}params.hook_url,{% endif %} - {% if multiqc %}{{ prefix_nodash|upper }}_{{ short_name|upper }}.out.multiqc_report{% endif %} + {%- if adaptivecard or slackreport %} + params.hook_url,{% endif %} + {%- if multiqc %} + {{ prefix_nodash|upper }}_{{ short_name|upper }}.out.multiqc_report{% endif %} ) {%- endif %} } diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 367202155a..90c5728d9a 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -8,12 +8,12 @@ {%- if fastqc %} "fastqc": { "branch": "master", - "git_sha": "285a50500f9e02578d90b3ce6382ea3c30216acd", + "git_sha": "dc94b6ee04a05ddb9f7ae050712ff30a13149164", "installed_by": ["modules"] }{% endif %}{%- if multiqc %}{% if fastqc %},{% endif %} "multiqc": { "branch": "master", - "git_sha": "b7ebe95761cd389603f9cc0e0dc384c0f663815a", + "git_sha": "cf17ca47590cc578dfb47db1c2a44ef86f89976d", "installed_by": ["modules"] } {%- endif %} @@ -23,17 +23,17 @@ "nf-core": { "utils_nextflow_pipeline": { "branch": "master", - "git_sha": "d20fb2a9cc3e2835e9d067d1046a63252eb17352", + "git_sha": "c2b22d85f30a706a3073387f30380704fcae013b", "installed_by": ["subworkflows"] }, "utils_nfcore_pipeline": { "branch": "master", - "git_sha": "2fdce49d30c0254f76bc0f13c55c17455c1251ab", + "git_sha": "9a1e8bb6a5d205cf7807dcefca872a3314b2f3e6", "installed_by": ["subworkflows"] }{% if nf_schema %}, "utils_nfschema_plugin": { "branch": "master", - "git_sha": "bbd5a41f4535a8defafe6080e00ea74c45f4f96c", + "git_sha": "2fd2cd6d0e7b273747f32e465fdc6bcc3ae0814e", "installed_by": ["subworkflows"] }{% endif %} } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml index 0d5be45f26..691d4c7638 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml @@ -1,4 +1,3 @@ -name: fastqc channels: - conda-forge - bioconda diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf index d79f1c862d..752c3a10c6 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf @@ -24,9 +24,12 @@ process FASTQC { // Make list of old name and new name pairs to use for renaming in the bash while loop def old_new_pairs = reads instanceof Path || reads.size() == 1 ? 
[[ reads, "${prefix}.${reads.extension}" ]] : reads.withIndex().collect { entry, index -> [ entry, "${prefix}_${index + 1}.${entry.extension}" ] } def rename_to = old_new_pairs*.join(' ').join(' ') - def renamed_files = old_new_pairs.collect{ old_name, new_name -> new_name }.join(' ') + def renamed_files = old_new_pairs.collect{ _old_name, new_name -> new_name }.join(' ') - def memory_in_mb = MemoryUnit.of("${task.memory}").toUnit('MB') + // The total amount of allocated RAM by FastQC is equal to the number of threads defined (--threads) time the amount of RAM defined (--memory) + // https://github.com/s-andrews/FastQC/blob/1faeea0412093224d7f6a07f777fad60a5650795/fastqc#L211-L222 + // Dividing the task.memory by task.cpu allows to stick to requested amount of RAM in the label + def memory_in_mb = MemoryUnit.of("${task.memory}").toUnit('MB') / task.cpus // FastQC memory value allowed range (100 - 10000) def fastqc_memory = memory_in_mb > 10000 ? 10000 : (memory_in_mb < 100 ? 100 : memory_in_mb) diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml index ee5507e06b..2b2e62b8ae 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml @@ -11,40 +11,50 @@ tools: FastQC gives general quality metrics about your reads. It provides information about the quality score distribution across your reads, the per base sequence content (%A/C/G/T). + You get information about adapter contamination and other overrepresented sequences. homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ documentation: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/ licence: ["GPL-2.0-only"] + identifier: biotools:fastqc input: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - - reads: - type: file - description: | - List of input FastQ files of size 1 and 2 for single-end and paired-end data, - respectively. + - - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. output: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - html: - type: file - description: FastQC report - pattern: "*_{fastqc.html}" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - "*.html": + type: file + description: FastQC report + pattern: "*_{fastqc.html}" - zip: - type: file - description: FastQC report archive - pattern: "*_{fastqc.zip}" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - "*.zip": + type: file + description: FastQC report archive + pattern: "*_{fastqc.zip}" - versions: - type: file - description: File containing software versions - pattern: "versions.yml" + - versions.yml: + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "@drpatelh" - "@grst" diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test index 70edae4d99..e9d79a074e 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test @@ -23,17 +23,14 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - // NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it. - // looks like this:
<div id="header_filename">Mon 2 Oct 2023<br/>test.gz</div>
- // https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039 - - { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_single") } + { assert process.success }, + // NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it. + // looks like this:
<div id="header_filename">Mon 2 Oct 2023<br/>test.gz</div>
+ // https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039 + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -54,16 +51,14 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, - { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, - { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, - { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, - { assert path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, - { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_paired") } + { assert process.success }, + { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, + { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, + { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, + { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, + { assert path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, + { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -83,13 +78,11 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_interleaved") } + { assert process.success }, + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -109,13 +102,11 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_bam") } + { assert process.success }, + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -138,22 +129,20 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, - { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, - { assert process.out.html[0][1][2] ==~ ".*/test_3_fastqc.html" }, - { assert process.out.html[0][1][3] ==~ ".*/test_4_fastqc.html" }, - { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, - { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, - { assert process.out.zip[0][1][2] ==~ ".*/test_3_fastqc.zip" }, - { assert process.out.zip[0][1][3] ==~ ".*/test_4_fastqc.zip" }, - { assert 
path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, - { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, - { assert path(process.out.html[0][1][2]).text.contains("File typeConventional base calls") }, - { assert path(process.out.html[0][1][3]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_multiple") } + { assert process.success }, + { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, + { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, + { assert process.out.html[0][1][2] ==~ ".*/test_3_fastqc.html" }, + { assert process.out.html[0][1][3] ==~ ".*/test_4_fastqc.html" }, + { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, + { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, + { assert process.out.zip[0][1][2] ==~ ".*/test_3_fastqc.zip" }, + { assert process.out.zip[0][1][3] ==~ ".*/test_4_fastqc.zip" }, + { assert path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, + { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, + { assert path(process.out.html[0][1][2]).text.contains("File typeConventional base calls") }, + { assert path(process.out.html[0][1][3]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -173,21 +162,18 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1] ==~ ".*/mysample_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_custom_prefix") } + { assert process.success }, + { assert process.out.html[0][1] ==~ ".*/mysample_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } test("sarscov2 single-end [fastq] - stub") { - options "-stub" - + options "-stub" when { process { """ @@ -201,12 +187,123 @@ nextflow_process { then { assertAll ( - { assert process.success }, - { assert snapshot(process.out.html.collect { file(it[1]).getName() } + - process.out.zip.collect { file(it[1]).getName() } + - process.out.versions ).match("fastqc_stub") } + { assert process.success }, + { assert snapshot(process.out).match() } ) } } + test("sarscov2 paired-end [fastq] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("sarscov2 interleaved [fastq] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_interleaved.fastq.gz', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert 
snapshot(process.out).match() } + ) + } + } + + test("sarscov2 paired-end [bam] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("sarscov2 multiple [fastq] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_2.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("sarscov2 custom_prefix - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [ id:'mysample', single_end:true ], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap index 86f7c31154..d5db3092fb 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap @@ -1,88 +1,392 @@ { - "fastqc_versions_interleaved": { + "sarscov2 custom_prefix": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:40:07.293713" + "timestamp": "2024-07-22T11:02:16.374038" }, - "fastqc_stub": { + "sarscov2 single-end [fastq] - stub": { "content": [ - [ - "test.html", - "test.zip", - "versions.yml:md5,e1cc25ca8af856014824abd842e93978" - ] + { + "0": [ + [ + { + "id": "test", + "single_end": true + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": true + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": true + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": true + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:02:24.993809" + }, + "sarscov2 custom_prefix - stub": { + "content": [ + { + "0": [ + [ + { + "id": "mysample", + "single_end": true + }, + "mysample.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "mysample", + "single_end": true + }, + 
"mysample.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "mysample", + "single_end": true + }, + "mysample.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "mysample", + "single_end": true + }, + "mysample.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:31:01.425198" + "timestamp": "2024-07-22T11:03:10.93942" }, - "fastqc_versions_multiple": { + "sarscov2 interleaved [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:40:55.797907" + "timestamp": "2024-07-22T11:01:42.355718" }, - "fastqc_versions_bam": { + "sarscov2 paired-end [bam]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:40:26.795862" + "timestamp": "2024-07-22T11:01:53.276274" }, - "fastqc_versions_single": { + "sarscov2 multiple [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:39:27.043675" + "timestamp": "2024-07-22T11:02:05.527626" }, - "fastqc_versions_paired": { + "sarscov2 paired-end [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:01:31.188871" + }, + "sarscov2 paired-end [fastq] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:02:34.273566" + }, + "sarscov2 multiple [fastq] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": 
"2024-01-31T17:39:47.584191" + "timestamp": "2024-07-22T11:03:02.304411" }, - "fastqc_versions_custom_prefix": { + "sarscov2 single-end [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:01:19.095607" + }, + "sarscov2 interleaved [fastq] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:02:44.640184" + }, + "sarscov2 paired-end [bam] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:41:14.576531" + "timestamp": "2024-07-22T11:02:53.550742" } } \ No newline at end of file diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml index 329ddb4870..6f5b867b76 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml @@ -1,6 +1,5 @@ -name: multiqc channels: - conda-forge - bioconda dependencies: - - bioconda::multiqc=1.21 + - bioconda::multiqc=1.25.1 diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf index 47ac352f94..cc0643e1d5 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf @@ -3,14 +3,16 @@ process MULTIQC { conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/multiqc:1.21--pyhdfd78af_0' : - 'biocontainers/multiqc:1.21--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.25.1--pyhdfd78af_0' : + 'biocontainers/multiqc:1.25.1--pyhdfd78af_0' }" input: path multiqc_files, stageAs: "?/*" path(multiqc_config) path(extra_multiqc_config) path(multiqc_logo) + path(replace_names) + path(sample_names) output: path "*multiqc_report.html", emit: report @@ -23,16 +25,22 @@ process MULTIQC { script: def args = task.ext.args ?: '' + def prefix = task.ext.prefix ? "--filename ${task.ext.prefix}.html" : '' def config = multiqc_config ? "--config $multiqc_config" : '' def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : '' - def logo = multiqc_logo ? /--cl-config 'custom_logo: "${multiqc_logo}"'/ : '' + def logo = multiqc_logo ? "--cl-config 'custom_logo: \"${multiqc_logo}\"'" : '' + def replace = replace_names ? "--replace-names ${replace_names}" : '' + def samples = sample_names ? "--sample-names ${sample_names}" : '' """ multiqc \\ --force \\ $args \\ $config \\ + $prefix \\ $extra_config \\ $logo \\ + $replace \\ + $samples \\ . cat <<-END_VERSIONS > versions.yml @@ -44,7 +52,7 @@ process MULTIQC { stub: """ mkdir multiqc_data - touch multiqc_plots + mkdir multiqc_plots touch multiqc_report.html cat <<-END_VERSIONS > versions.yml diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml index 45a9bc35e1..b16c187923 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml @@ -1,5 +1,6 @@ name: multiqc -description: Aggregate results from bioinformatics analyses across many samples into a single report +description: Aggregate results from bioinformatics analyses across many samples into + a single report keywords: - QC - bioinformatics tools @@ -12,40 +13,59 @@ tools: homepage: https://multiqc.info/ documentation: https://multiqc.info/docs/ licence: ["GPL-3.0-or-later"] + identifier: biotools:multiqc input: - - multiqc_files: - type: file - description: | - List of reports / files recognised by MultiQC, for example the html and zip output of FastQC - - multiqc_config: - type: file - description: Optional config yml for MultiQC - pattern: "*.{yml,yaml}" - - extra_multiqc_config: - type: file - description: Second optional config yml for MultiQC. Will override common sections in multiqc_config. - pattern: "*.{yml,yaml}" - - multiqc_logo: - type: file - description: Optional logo file for MultiQC - pattern: "*.{png}" + - - multiqc_files: + type: file + description: | + List of reports / files recognised by MultiQC, for example the html and zip output of FastQC + - - multiqc_config: + type: file + description: Optional config yml for MultiQC + pattern: "*.{yml,yaml}" + - - extra_multiqc_config: + type: file + description: Second optional config yml for MultiQC. Will override common sections + in multiqc_config. + pattern: "*.{yml,yaml}" + - - multiqc_logo: + type: file + description: Optional logo file for MultiQC + pattern: "*.{png}" + - - replace_names: + type: file + description: | + Optional two-column sample renaming file. First column a set of + patterns, second column a set of corresponding replacements. Passed via + MultiQC's `--replace-names` option. + pattern: "*.{tsv}" + - - sample_names: + type: file + description: | + Optional TSV file with headers, passed to the MultiQC --sample_names + argument. 
+ pattern: "*.{tsv}" output: - report: - type: file - description: MultiQC report file - pattern: "multiqc_report.html" + - "*multiqc_report.html": + type: file + description: MultiQC report file + pattern: "multiqc_report.html" - data: - type: directory - description: MultiQC data dir - pattern: "multiqc_data" + - "*_data": + type: directory + description: MultiQC data dir + pattern: "multiqc_data" - plots: - type: file - description: Plots created by MultiQC - pattern: "*_data" + - "*_plots": + type: file + description: Plots created by MultiQC + pattern: "*_data" - versions: - type: file - description: File containing software versions - pattern: "versions.yml" + - versions.yml: + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "@abhi18av" - "@bunop" diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test index f1c4242ef2..33316a7ddb 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test @@ -8,6 +8,8 @@ nextflow_process { tag "modules_nfcore" tag "multiqc" + config "./nextflow.config" + test("sarscov2 single-end [fastqc]") { when { @@ -17,6 +19,8 @@ nextflow_process { input[1] = [] input[2] = [] input[3] = [] + input[4] = [] + input[5] = [] """ } } @@ -41,6 +45,8 @@ nextflow_process { input[1] = Channel.of(file("https://github.com/nf-core/tools/raw/dev/nf_core/pipeline-template/assets/multiqc_config.yml", checkIfExists: true)) input[2] = [] input[3] = [] + input[4] = [] + input[5] = [] """ } } @@ -66,6 +72,8 @@ nextflow_process { input[1] = [] input[2] = [] input[3] = [] + input[4] = [] + input[5] = [] """ } } diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap index bfebd80298..2fcbb5ff7d 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap @@ -2,14 +2,14 @@ "multiqc_versions_single": { "content": [ [ - "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" + "versions.yml:md5,41f391dcedce7f93ca188f3a3ffa0916" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.4" }, - "timestamp": "2024-02-29T08:48:55.657331" + "timestamp": "2024-10-02T17:51:46.317523" }, "multiqc_stub": { "content": [ @@ -17,25 +17,25 @@ "multiqc_report.html", "multiqc_data", "multiqc_plots", - "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" + "versions.yml:md5,41f391dcedce7f93ca188f3a3ffa0916" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.4" }, - "timestamp": "2024-02-29T08:49:49.071937" + "timestamp": "2024-10-02T17:52:20.680978" }, "multiqc_versions_config": { "content": [ [ - "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" + "versions.yml:md5,41f391dcedce7f93ca188f3a3ffa0916" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.4" }, - "timestamp": "2024-02-29T08:49:25.457567" + "timestamp": "2024-10-02T17:52:09.185842" } } \ No newline at end of file diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/nextflow.config b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/nextflow.config new file mode 100644 index 0000000000..c537a6a3e7 --- /dev/null +++ 
b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/nextflow.config @@ -0,0 +1,5 @@ +process { + withName: 'MULTIQC' { + ext.prefix = null + } +} diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 8201916b3f..21174bbdc5 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -14,6 +14,7 @@ params { input = null {%- if igenomes %} + // References genome = null igenomes_base = 's3://ngi-igenomes/igenomes/' @@ -21,31 +22,39 @@ params { {%- endif %} {%- if multiqc %} + // MultiQC options multiqc_config = null multiqc_title = null multiqc_logo = null max_multiqc_email_size = '25.MB' - {% if citations %}multiqc_methods_description = null{% endif %} + {%- if citations %} + multiqc_methods_description = null{% endif %} {%- endif %} // Boilerplate options outdir = null - {% if modules %}publish_dir_mode = 'copy'{% endif %} + {%- if modules %} + publish_dir_mode = 'copy'{% endif %} {%- if email %} email = null email_on_fail = null plaintext_email = false {%- endif %} - {% if modules %}monochrome_logs = false{% endif %} - {% if slackreport or adaptivecard %}hook_url = null{% endif %} - {% if nf_schema %}help = false + {%- if modules or nf_schema %} + monochrome_logs = false{% endif %} + {%- if slackreport or adaptivecard %} + hook_url = null{% endif %} + {%- if nf_schema %} + help = false help_full = false show_hidden = false{% endif %} version = false - {% if test_config %}pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/'{% endif %} + {%- if test_config %} + pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/'{% endif %} + trace_report_suffix = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') - {%- if nf_core_configs %} + {%- if nf_core_configs -%} // Config options config_profile_name = null config_profile_description = null @@ -56,47 +65,34 @@ params { config_profile_url = null {%- endif %} - // Max resource options - // Defaults only, expecting to be overwritten - max_memory = '128.GB' - max_cpus = 16 - max_time = '240.h' - {%- if nf_schema %} + // Schema validation default options validate_params = true - {% endif %} + {%- endif %} } -{% if modules %} + +{% if modules -%} // Load base.config by default for all pipelines includeConfig 'conf/base.config' {%- else %} + +{% if modules -%} +// Load modules.config for DSL2 module specific options +includeConfig 'conf/modules.config' +{%- endif %} + process { // TODO nf-core: Check the defaults for all processes - cpus = { check_max( 1 * task.attempt, 'cpus' ) } - memory = { check_max( 6.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 1 * task.attempt } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } maxRetries = 1 maxErrors = '-1' } -{% endif %} -{% if nf_core_configs -%} -// Load nf-core custom profiles from different Institutions -try { - includeConfig "${params.custom_config_base}/nfcore_custom.config" -} catch (Exception e) { - System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config") -} - -// Load {{ name }} custom profiles from different institutions. 
-try { - includeConfig "${params.custom_config_base}/pipeline/{{ short_name }}.config" -} catch (Exception e) { - System.err.println("WARNING: Could not load nf-core/config/{{ short_name }} profiles: ${params.custom_config_base}/pipeline/{{ short_name }}.config") -} -{% endif -%} +{%- endif %} profiles { debug { @@ -197,6 +193,13 @@ profiles { executor.name = 'local' executor.cpus = 4 executor.memory = 8.GB + process { + resourceLimits = [ + memory: 8.GB, + cpus : 4, + time : 1.h + ] + } } {%- endif %} {%- if test_config %} @@ -205,6 +208,15 @@ profiles { {%- endif %} } +{% if nf_core_configs -%} +// Load nf-core custom profiles from different Institutions +includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/nfcore_custom.config" : "/dev/null" + +// Load {{ name }} custom profiles from different institutions. +// TODO nf-core: Optionally, you can add a pipeline-specific nf-core config at https://github.com/nf-core/configs +// includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/pipeline/{{ short_name }}.config" : "/dev/null" +{%- endif %} + // Set default registry for Apptainer, Docker, Podman, Charliecloud and Singularity independent of -profile // Will not be used unless Apptainer / Docker / Podman / Charliecloud / Singularity are enabled // Set to your registry if you have a mirror of containers @@ -216,12 +228,8 @@ charliecloud.registry = 'quay.io' {% if igenomes -%} // Load igenomes.config if required -if (!params.igenomes_ignore) { - includeConfig 'conf/igenomes.config' -} else { - params.genomes = [:] -} -{% endif -%} +includeConfig !params.igenomes_ignore ? 'conf/igenomes.config' : 'conf/igenomes_ignored.config' +{%- endif %} // Export these variables to prevent local Python/R libraries from conflicting with those in the container // The JULIA depot path has been adjusted to a fixed path `/usr/local/share/julia` that needs to be used for packages in the container. @@ -247,31 +255,43 @@ set -C # No clobber - prevent output redirection from overwriting files. // Disable process selector warnings by default. Use debug profile to enable warnings. nextflow.enable.configProcessNamesValidation = false -def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') timeline { enabled = true - file = "${params.outdir}/pipeline_info/execution_timeline_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/execution_timeline_${params.trace_report_suffix}.html" } report { enabled = true - file = "${params.outdir}/pipeline_info/execution_report_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/execution_report_${params.trace_report_suffix}.html" } trace { enabled = true - file = "${params.outdir}/pipeline_info/execution_trace_${trace_timestamp}.txt" + file = "${params.outdir}/pipeline_info/execution_trace_${params.trace_report_suffix}.txt" } dag { enabled = true - file = "${params.outdir}/pipeline_info/pipeline_dag_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/pipeline_dag_${params.trace_report_suffix}.html" } manifest { name = '{{ name }}' - author = """{{ author }}""" + author = """{{ author }}""" // The author field is deprecated from Nextflow version 24.10.0, use contributors instead + contributors = [ + // TODO nf-core: Update the field with the details of the contributors to your pipeline. 
New with Nextflow version 24.10.0 + {%- for author_name in author.split(",") %} + [ + name: '{{ author_name }}', + affiliation: '', + email: '', + github: '', + contribution: [], // List of contribution types ('author', 'maintainer' or 'contributor') + orcid: '' + ], + {%- endfor %} + ] homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" mainScript = 'main.nf' - nextflowVersion = '!>=23.10.0' + nextflowVersion = '!>=24.04.2' version = '{{ version }}' doi = '' } @@ -279,17 +299,18 @@ manifest { {% if nf_schema -%} // Nextflow plugins plugins { - id 'nf-schema@2.1.0' // Validation of pipeline parameters and creation of an input channel from a sample sheet + id 'nf-schema@2.1.1' // Validation of pipeline parameters and creation of an input channel from a sample sheet } validation { - defaultIgnoreParams = ["genomes", "helpFull", "showHidden", "help-full", "show-hidden"] // The last 4 parameters are here because of a bug in nf-schema. This will be fixed in a later version + defaultIgnoreParams = ["genomes"] + monochromeLogs = params.monochrome_logs help { enabled = true - command = "nextflow run $manifest.name -profile --input samplesheet.csv --outdir " + command = "nextflow run {{ name }} -profile --input samplesheet.csv --outdir " fullParameter = "help_full" showHiddenParameter = "show_hidden" - {%- if is_nfcore %} + {% if is_nfcore -%} beforeText = """ -\033[2m----------------------------------------------------\033[0m- \033[0;32m,--.\033[0;30m/\033[0;32m,-.\033[0m @@ -297,15 +318,15 @@ validation { \033[0;34m |\\ | |__ __ / ` / \\ |__) |__ \033[0;33m} {\033[0m \033[0;34m | \\| | \\__, \\__/ | \\ |___ \033[0;32m\\`-._,-`-,\033[0m \033[0;32m`._,._,\'\033[0m -\033[0;35m ${manifest.name} ${manifest.version}\033[0m +\033[0;35m {{ name }} ${manifest.version}\033[0m -\033[2m----------------------------------------------------\033[0m- """ - afterText = """${manifest.doi ? "* The pipeline\n" : ""}${manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${manifest.doi ? "\n" : ""} + afterText = """${manifest.doi ? "\n* The pipeline\n" : ""}${manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${manifest.doi ? "\n" : ""} * The nf-core framework - https://doi.org/10.1038/s41587-020-0439-x + https://doi.org/10.1038/s41587-020-0439-x * Software dependencies - https://github.com/${manifest.name}/blob/master/CITATIONS.md + https://github.com/{{ name }}/blob/master/CITATIONS.md """{% endif %} }{% if is_nfcore %} summary { @@ -313,40 +334,4 @@ validation { afterText = validation.help.afterText }{% endif %} } -{% endif -%} -{%- if modules %} -// Load modules.config for DSL2 module specific options -includeConfig 'conf/modules.config' -{% endif %} -// Function to ensure that resource requirements don't go beyond -// a maximum limit -def check_max(obj, type) { - if (type == 'memory') { - try { - if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) - return params.max_memory as nextflow.util.MemoryUnit - else - return obj - } catch (all) { - println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj" - return obj - } - } else if (type == 'time') { - try { - if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1) - return params.max_time as nextflow.util.Duration - else - return obj - } catch (all) { - println " ### ERROR ### Max time '${params.max_time}' is not valid! 
Using default value: $obj" - return obj - } - } else if (type == 'cpus') { - try { - return Math.min( obj, params.max_cpus as int ) - } catch (all) { - println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj" - return obj - } - } -} +{%- endif %} diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 446da25aed..3e59a8ba54 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -136,41 +136,6 @@ } }, {%- endif %} - "max_job_request_options": { - "title": "Max job request options", - "type": "object", - "fa_icon": "fab fa-acquisitions-incorporated", - "description": "Set the top limit for requested resources for any single job.", - "help_text": "If you are running on a smaller system, a pipeline step requesting more resources than are available may cause the Nextflow to stop the run with an error. These options allow you to cap the maximum resources requested by any single job so that the pipeline will run on your system.\n\nNote that you can not _increase_ the resources requested by any job using these options. For that you will need your own configuration file. See [the nf-core website](https://nf-co.re/usage/configuration) for details.", - "properties": { - "max_cpus": { - "type": "integer", - "description": "Maximum number of CPUs that can be requested for any single job.", - "default": 16, - "fa_icon": "fas fa-microchip", - "hidden": true, - "help_text": "Use to set an upper-limit for the CPU requirement for each process. Should be an integer e.g. `--max_cpus 1`" - }, - "max_memory": { - "type": "string", - "description": "Maximum amount of memory that can be requested for any single job.", - "default": "128.GB", - "fa_icon": "fas fa-memory", - "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(K|M|G|T)?B$", - "hidden": true, - "help_text": "Use to set an upper-limit for the memory requirement for each process. Should be a string in the format integer-unit e.g. `--max_memory '8.GB'`" - }, - "max_time": { - "type": "string", - "description": "Maximum amount of time that can be requested for any single job.", - "default": "240.h", - "fa_icon": "far fa-clock", - "pattern": "^(\\d+\\.?\\s*(s|m|h|d|day)\\s*)+$", - "hidden": true, - "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. `--max_time '2.h'`" - } - } - }, "generic_options": { "title": "Generic options", "type": "object", @@ -217,7 +182,7 @@ "fa_icon": "fas fa-file-upload", "hidden": true },{% endif %} - {%- if modules %} + {%- if modules or nf_schema %} "monochrome_logs": { "type": "boolean", "description": "Do not use coloured log outputs.", @@ -264,7 +229,13 @@ "description": "Base URL or local path to location of pipeline test dataset files", "default": "https://raw.githubusercontent.com/nf-core/test-datasets/", "hidden": true - }{% endif %} + }{% endif %}, + "trace_report_suffix": { + "type": "string", + "fa_icon": "far calendar", + "description": "Suffix to add to the trace report filename. 
Default is the date and time in the format yyyy-MM-dd_HH-mm-ss.", + "hidden": true + } } } }, @@ -278,9 +249,6 @@ {% if nf_core_configs %}{ "$ref": "#/$defs/institutional_config_options" },{% endif %} - { - "$ref": "#/$defs/max_job_request_options" - }, { "$ref": "#/$defs/generic_options" } diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index ae66f3674c..be5776b836 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -22,9 +22,9 @@ include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline' include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SUBWORKFLOW TO INITIALISE PIPELINE -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ workflow PIPELINE_INITIALISATION { @@ -51,7 +51,8 @@ workflow PIPELINE_INITIALISATION { workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1 ) - {% if nf_schema %} + {%- if nf_schema %} + // // Validate parameters and generate parameter summary to stdout // @@ -60,7 +61,7 @@ workflow PIPELINE_INITIALISATION { validate_params, null ) - {% endif %} + {%- endif %} // // Check config provided to the pipeline @@ -70,6 +71,7 @@ workflow PIPELINE_INITIALISATION { ) {%- if igenomes %} + // // Custom validation for pipeline parameters // @@ -96,8 +98,8 @@ workflow PIPELINE_INITIALISATION { } } .groupTuple() - .map { - validateInputSamplesheet(it) + .map { samplesheet -> + validateInputSamplesheet(samplesheet) } .map { meta, fastqs -> @@ -111,9 +113,9 @@ workflow PIPELINE_INITIALISATION { } /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SUBWORKFLOW FOR PIPELINE COMPLETION -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ workflow PIPELINE_COMPLETION { @@ -123,11 +125,13 @@ workflow PIPELINE_COMPLETION { email // string: email address email_on_fail // string: email address sent on pipeline failure plaintext_email // boolean: Send plain-text email instead of HTML - {% endif %} + {%- endif %} outdir // path: Path to output directory where results will be published monochrome_logs // boolean: Disable ANSI colour codes in log output - {% if adaptivecard or slackreport %}hook_url // string: hook URL for notifications{% endif %} - {% if multiqc %}multiqc_report // string: Path to MultiQC report{% endif %} + {%- if adaptivecard or slackreport %} + hook_url // string: hook URL for notifications{% endif %} + {%- if multiqc %} + multiqc_report // string: Path to MultiQC report{% endif %} main: {%- if nf_schema %} @@ -169,9 +173,9 @@ workflow PIPELINE_COMPLETION { } /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FUNCTIONS 
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */

{%- if igenomes %}
@@ -190,7 +194,7 @@ def validateInputSamplesheet(input) {
     def (metas, fastqs) = input[1..2]

     // Check that multiple runs of the same sample are of the same datatype i.e. single-end / paired-end
-    def endedness_ok = metas.collect{ it.single_end }.unique().size == 1
+    def endedness_ok = metas.collect{ meta -> meta.single_end }.unique().size == 1
     if (!endedness_ok) {
         error("Please check input samplesheet -> Multiple runs of a sample must be of the same datatype i.e. single-end or paired-end: ${metas[0].id}")
     }
@@ -235,8 +239,10 @@ def toolCitationText() {
     // Uncomment function in methodsDescriptionText to render in MultiQC report
     def citation_text = [
             "Tools used in the workflow included:",
-            {% if fastqc %}"FastQC (Andrews 2010),",{% endif %}
-            {% if multiqc %}"MultiQC (Ewels et al. 2016)",{% endif %}
+            {%- if fastqc %}
+            "FastQC (Andrews 2010),",{% endif %}
+            {%- if multiqc %}
+            "MultiQC (Ewels et al. 2016)",{% endif %}
             "."
         ].join(' ').trim()

@@ -248,15 +254,17 @@ def toolBibliographyText() {
     // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "<li>Author (2023) Pub name, Journal, DOI</li>" : "",
     // Uncomment function in methodsDescriptionText to render in MultiQC report
     def reference_text = [
-            {% if fastqc %}"<li>Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).</li>",{% endif %}
-            {% if multiqc %}"<li>Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354</li>"{% endif %}
+            {%- if fastqc %}
+            "<li>Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).</li>",{% endif %}
+            {%- if multiqc %}
+            "<li>Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354</li>"{% endif %}
         ].join(' ').trim()

     return reference_text
 }

 def methodsDescriptionText(mqc_methods_yaml) {
-    // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file
+    // Convert to a named map so can be used as with familiar NXF ${workflow} variable syntax in the MultiQC YML file
     def meta = [:]
     meta.workflow = workflow.toMap()
     meta["manifest_map"] = workflow.manifest.toMap()
@@ -267,8 +275,10 @@ def methodsDescriptionText(mqc_methods_yaml) {
         // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers
         // Removing ` ` since the manifest.doi is a string and not a proper list
         def temp_doi_ref = ""
-        String[] manifest_doi = meta.manifest_map.doi.tokenize(",")
-        for (String doi_ref: manifest_doi) temp_doi_ref += "(doi: ${doi_ref.replace("https://doi.org/", "").replace(" ", "")}), "
+        def manifest_doi = meta.manifest_map.doi.tokenize(",")
+        manifest_doi.each { doi_ref ->
+            temp_doi_ref += "(doi: ${doi_ref.replace("https://doi.org/", "").replace(" ", "")}), "
+        }
         meta["doi_text"] = temp_doi_ref.substring(0, temp_doi_ref.length() - 2)
     } else meta["doi_text"] = ""
     meta["nodoi_text"] = meta.manifest_map.doi ? "" : "<li>If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used.</li>"
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
index 28e32b200e..d6e593e852 100644
--- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
@@ -3,13 +3,12 @@
 //

 /*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     SUBWORKFLOW DEFINITION
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */

 workflow UTILS_NEXTFLOW_PIPELINE {
-
     take:
     print_version        // boolean: print version
     dump_parameters      // boolean: dump parameters
@@ -22,7 +21,7 @@ workflow UTILS_NEXTFLOW_PIPELINE {
     // Print workflow version and exit on --version
     //
     if (print_version) {
-        log.info "${workflow.manifest.name} ${getWorkflowVersion()}"
+        log.info("${workflow.manifest.name} ${getWorkflowVersion()}")
         System.exit(0)
     }
@@ -45,9 +44,9 @@ workflow UTILS_NEXTFLOW_PIPELINE {
 }

 /*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     FUNCTIONS
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */

 //
@@ -72,11 +71,11 @@ def getWorkflowVersion() {
 // Dump pipeline parameters to a JSON file
 //
 def dumpParametersToJSON(outdir) {
-    def timestamp  = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
-    def filename   = "params_${timestamp}.json"
-    def temp_pf    = new File(workflow.launchDir.toString(), ".${filename}")
-    def jsonStr    = groovy.json.JsonOutput.toJson(params)
-    temp_pf.text   = groovy.json.JsonOutput.prettyPrint(jsonStr)
+    def timestamp = new java.util.Date().format('yyyy-MM-dd_HH-mm-ss')
+    def filename = "params_${timestamp}.json"
+    def temp_pf = new File(workflow.launchDir.toString(), ".${filename}")
+    def jsonStr = groovy.json.JsonOutput.toJson(params)
+    temp_pf.text = groovy.json.JsonOutput.prettyPrint(jsonStr)
     nextflow.extension.FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json")
     temp_pf.delete()
@@ -91,9 +90,16 @@ def checkCondaChannels() {
     try {
         def config = parser.load("conda config --show channels".execute().text)
         channels = config.channels
-    } catch(NullPointerException | IOException e) {
-        log.warn "Could not verify conda channel configuration."
- return + } + catch (NullPointerException e) { + log.debug(e) + log.warn("Could not verify conda channel configuration.") + return null + } + catch (IOException e) { + log.debug(e) + log.warn("Could not verify conda channel configuration.") + return null } // Check that all channels are present @@ -102,23 +108,19 @@ def checkCondaChannels() { def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean // Check that they are in the right order - def channel_priority_violation = false - - required_channels_in_order.eachWithIndex { channel, index -> - if (index < required_channels_in_order.size() - 1) { - channel_priority_violation |= !(channels.indexOf(channel) < channels.indexOf(required_channels_in_order[index+1])) - } - } + def channel_priority_violation = required_channels_in_order != channels.findAll { ch -> ch in required_channels_in_order } if (channels_missing | channel_priority_violation) { - log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + - " There is a problem with your Conda configuration!\n\n" + - " You will need to set-up the conda-forge and bioconda channels correctly.\n" + - " Please refer to https://bioconda.github.io/\n" + - " The observed channel order is \n" + - " ${channels}\n" + - " but the following channel order is required:\n" + - " ${required_channels_in_order}\n" + - "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + log.warn """\ + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + There is a problem with your Conda configuration! + You will need to set-up the conda-forge and bioconda channels correctly. + Please refer to https://bioconda.github.io/ + The observed channel order is + ${channels} + but the following channel order is required: + ${required_channels_in_order} + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + """.stripIndent(true) } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test index ca964ce8e1..02dbf094cd 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test @@ -52,10 +52,12 @@ nextflow_workflow { } then { - assertAll( - { assert workflow.success }, - { assert workflow.stdout.contains("nextflow_workflow v9.9.9") } - ) + expect { + with(workflow) { + assert success + assert "nextflow_workflow v9.9.9" in stdout + } + } } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index cbd8495bb6..228dbff897 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -3,13 +3,12 @@ // /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SUBWORKFLOW DEFINITION -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ workflow UTILS_NFCORE_PIPELINE { - take: nextflow_cli_args 
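The reworked checkCondaChannels() above collapses the old eachWithIndex loop into a single comparison: filtering the observed channel list down to the required channels must reproduce the required list in exactly the required order, otherwise the priority warning fires. A minimal standalone sketch of that check follows — written in Python for brevity, not part of the patch, and with illustrative channel names:

    # Sketch of the channel-presence and channel-order check from checkCondaChannels().
    # Standalone illustration only; names are illustrative, not taken from the patch.
    required_channels_in_order = ["conda-forge", "bioconda"]

    def channels_ok(observed):
        # Every required channel must be present...
        missing = set(required_channels_in_order) - set(observed)
        # ...and projecting the observed list onto the required channels must
        # yield the required order exactly (this mirrors the Groovy findAll check).
        projected = [ch for ch in observed if ch in required_channels_in_order]
        return not missing and projected == required_channels_in_order

    assert channels_ok(["conda-forge", "bioconda", "defaults"])
    assert not channels_ok(["bioconda", "conda-forge"])  # right channels, wrong order
    assert not channels_ok(["conda-forge"])              # bioconda missing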
@@ -22,9 +21,9 @@ workflow UTILS_NFCORE_PIPELINE { } /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FUNCTIONS -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ // @@ -33,12 +32,9 @@ workflow UTILS_NFCORE_PIPELINE { def checkConfigProvided() { def valid_config = true as Boolean if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) { - log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" + - "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + - " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + - " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + - " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" + - "Please refer to the quick start section and usage docs for the pipeline.\n " + log.warn( + "[${workflow.manifest.name}] You are attempting to run the pipeline without any custom configuration!\n\n" + "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" + "Please refer to the quick start section and usage docs for the pipeline.\n " + ) valid_config = false } return valid_config @@ -49,36 +45,17 @@ def checkConfigProvided() { // def checkProfileProvided(nextflow_cli_args) { if (workflow.profile.endsWith(',')) { - error "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" + - "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" + error( + "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" + "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" + ) } if (nextflow_cli_args[0]) { - log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${nextflow_cli_args[0]}` has been detected.\n" + - "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" + log.warn( + "nf-core pipelines do not accept positional arguments. The positional argument `${nextflow_cli_args[0]}` has been detected.\n" + "HINT: A common mistake is to provide multiple values separated by spaces e.g. 
`-profile test, docker`.\n"
+        )
     }
 }

-//
-// Citation string for pipeline
-//
-def workflowCitation() {
-    def temp_doi_ref = ""
-    def manifest_doi = workflow.manifest.doi.tokenize(",")
-    // Using a loop to handle multiple DOIs
-    // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers
-    // Removing ` ` since the manifest.doi is a string and not a proper list
-    manifest_doi.each { doi_ref ->
-        temp_doi_ref += "  https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n"
-    }
-    return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" +
-        "* The pipeline\n" +
-        temp_doi_ref + "\n" +
-        "* The nf-core framework\n" +
-        "  https://doi.org/10.1038/s41587-020-0439-x\n\n" +
-        "* Software dependencies\n" +
-        "  https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md"
-}
-
 //
 // Generate workflow version string
 //
@@ -102,7 +79,7 @@ def getWorkflowVersion() {
 //
 def processVersionsFromYAML(yaml_file) {
     def yaml = new org.yaml.snakeyaml.Yaml()
-    def versions = yaml.load(yaml_file).collectEntries { k, v -> [ k.tokenize(':')[-1], v ] }
+    def versions = yaml.load(yaml_file).collectEntries { k, v -> [k.tokenize(':')[-1], v] }
     return yaml.dumpAsMap(versions).trim()
 }
@@ -112,8 +89,8 @@ def processVersionsFromYAML(yaml_file) {
 def workflowVersionToYAML() {
     return """
     Workflow:
-        $workflow.manifest.name: ${getWorkflowVersion()}
-        Nextflow: $workflow.nextflow.version
+        ${workflow.manifest.name}: ${getWorkflowVersion()}
+        Nextflow: ${workflow.nextflow.version}
     """.stripIndent().trim()
 }
@@ -121,11 +98,7 @@ def workflowVersionToYAML() {
 // Get channel of software versions used in pipeline in YAML format
 //
 def softwareVersionsToYAML(ch_versions) {
-    return ch_versions
-        .unique()
-        .map { version -> processVersionsFromYAML(version) }
-        .unique()
-        .mix(Channel.of(workflowVersionToYAML()))
+    return ch_versions.unique().map { version -> processVersionsFromYAML(version) }.unique().mix(Channel.of(workflowVersionToYAML()))
 }

 //
@@ -133,56 +106,35 @@ def softwareVersionsToYAML(ch_versions) {
 //
 def paramsSummaryMultiqc(summary_params) {
     def summary_section = ''
-    summary_params.keySet().each { group ->
-        def group_params = summary_params.get(group) // This gets the parameters of that particular group
-        if (group_params) {
-            summary_section += "    <p style=\"font-size:110%\"><b>$group</b></p>\n"
-            summary_section += "    <dl class=\"dl-horizontal\">\n"
-            group_params.keySet().sort().each { param ->
-                summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n"
+    summary_params
+        .keySet()
+        .each { group ->
+            def group_params = summary_params.get(group)
+            // This gets the parameters of that particular group
+            if (group_params) {
+                summary_section += "    <p style=\"font-size:110%\"><b>${group}</b></p>\n"
+                summary_section += "    <dl class=\"dl-horizontal\">\n"
+                group_params
+                    .keySet()
+                    .sort()
+                    .each { param ->
+                        summary_section += "        <dt>${param}</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n"
+                    }
+                summary_section += "    </dl>\n"
             }
-            summary_section += "    </dl>\n"
         }
-    }

-    def yaml_file_text  = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" as String
-    yaml_file_text     += "description: ' - this information is collected when the pipeline is started.'\n"
-    yaml_file_text     += "section_name: '${workflow.manifest.name} Workflow Summary'\n"
-    yaml_file_text     += "section_href: 'https://github.com/${workflow.manifest.name}'\n"
-    yaml_file_text     += "plot_type: 'html'\n"
-    yaml_file_text     += "data: |\n"
-    yaml_file_text     += "${summary_section}"
+    def yaml_file_text = "id: '${workflow.manifest.name.replace('/', '-')}-summary'\n" as String
+    yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n"
+    yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n"
+    yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n"
+    yaml_file_text += "plot_type: 'html'\n"
+    yaml_file_text += "data: |\n"
+    yaml_file_text += "${summary_section}"

     return yaml_file_text
 }

-//
-// nf-core logo
-//
-def nfCoreLogo(monochrome_logs=true) {
-    def colors = logColours(monochrome_logs) as Map
-    String.format(
-        """\n
-        ${dashedLine(monochrome_logs)}
-                ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset}
-        ${colors.blue}        ___     __   __   __   ___     ${colors.green}/,-._.--~\'${colors.reset}
-        ${colors.blue}  |\\ | |__  __ /  ` /  \\ |__) |__         ${colors.yellow}}  {${colors.reset}
-        ${colors.blue}  | \\| |       \\__, \\__/ |  \\ |___     ${colors.green}\\`-._,-`-,${colors.reset}
-                ${colors.green}`._,._,\'${colors.reset}
-        ${colors.purple}  ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset}
-        ${dashedLine(monochrome_logs)}
-        """.stripIndent()
-    )
-}
-
-//
-// Return dashed line
-//
-def dashedLine(monochrome_logs=true) {
-    def colors = logColours(monochrome_logs) as Map
-    return "-${colors.dim}----------------------------------------------------${colors.reset}-"
-}
-
 //
 // ANSII colours used for terminal logging
 //
@@ -199,54 +151,54 @@ def logColours(monochrome_logs=true) {
     colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m"

     // Regular Colors
-    colorcodes['black']  = monochrome_logs ? '' : "\033[0;30m"
-    colorcodes['red']    = monochrome_logs ? '' : "\033[0;31m"
-    colorcodes['green']  = monochrome_logs ? '' : "\033[0;32m"
-    colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m"
-    colorcodes['blue']   = monochrome_logs ? '' : "\033[0;34m"
-    colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m"
-    colorcodes['cyan']   = monochrome_logs ? '' : "\033[0;36m"
-    colorcodes['white']  = monochrome_logs ? '' : "\033[0;37m"
+    colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m"
+    colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m"
+    colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m"
+    colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m"
+    colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m"
+    colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m"
+    colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m"
+    colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m"

     // Bold
-    colorcodes['bblack']  = monochrome_logs ? '' : "\033[1;30m"
-    colorcodes['bred']    = monochrome_logs ? '' : "\033[1;31m"
-    colorcodes['bgreen']  = monochrome_logs ? '' : "\033[1;32m"
-    colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m"
-    colorcodes['bblue']   = monochrome_logs ? '' : "\033[1;34m"
-    colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m"
-    colorcodes['bcyan']   = monochrome_logs ? '' : "\033[1;36m"
-    colorcodes['bwhite']  = monochrome_logs ?
'' : "\033[1;37m" + colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m" + colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m" + colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m" + colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m" + colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m" + colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m" + colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m" + colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m" // Underline - colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" - colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" - colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" - colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" - colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" - colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" - colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" - colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" + colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" + colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" + colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" + colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" + colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" + colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" + colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" + colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" // High Intensity - colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m" - colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" - colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" - colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m" - colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" - colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" - colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m" - colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" + colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m" + colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" + colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" + colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m" + colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" + colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" + colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m" + colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" // Bold High Intensity - colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" - colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" - colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" - colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" - colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" - colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" - colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m" - colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m" + colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" + colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" + colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" + colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" + colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" + colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" + colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m" + colorcodes['biwhite'] = monochrome_logs ? 
'' : "\033[1;97m" return colorcodes } @@ -261,14 +213,16 @@ def attachMultiqcReport(multiqc_report) { mqc_report = multiqc_report.getVal() if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) { if (mqc_report.size() > 1) { - log.warn "[$workflow.manifest.name] Found multiple reports from process 'MULTIQC', will use only one" + log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one") } mqc_report = mqc_report[0] } } - } catch (all) { + } + catch (Exception msg) { + log.debug(msg) if (multiqc_report) { - log.warn "[$workflow.manifest.name] Could not attach MultiQC report to summary email" + log.warn("[${workflow.manifest.name}] Could not attach MultiQC report to summary email") } } return mqc_report @@ -280,26 +234,35 @@ def attachMultiqcReport(multiqc_report) { def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs=true, multiqc_report=null) { // Set up the e-mail variables - def subject = "[$workflow.manifest.name] Successful: $workflow.runName" + def subject = "[${workflow.manifest.name}] Successful: ${workflow.runName}" if (!workflow.success) { - subject = "[$workflow.manifest.name] FAILED: $workflow.runName" + subject = "[${workflow.manifest.name}] FAILED: ${workflow.runName}" } def summary = [:] - summary_params.keySet().sort().each { group -> - summary << summary_params[group] - } + summary_params + .keySet() + .sort() + .each { group -> + summary << summary_params[group] + } def misc_fields = [:] misc_fields['Date Started'] = workflow.start misc_fields['Date Completed'] = workflow.complete misc_fields['Pipeline script file path'] = workflow.scriptFile misc_fields['Pipeline script hash ID'] = workflow.scriptId - if (workflow.repository) misc_fields['Pipeline repository Git URL'] = workflow.repository - if (workflow.commitId) misc_fields['Pipeline repository Git Commit'] = workflow.commitId - if (workflow.revision) misc_fields['Pipeline Git branch/tag'] = workflow.revision - misc_fields['Nextflow Version'] = workflow.nextflow.version - misc_fields['Nextflow Build'] = workflow.nextflow.build + if (workflow.repository) { + misc_fields['Pipeline repository Git URL'] = workflow.repository + } + if (workflow.commitId) { + misc_fields['Pipeline repository Git Commit'] = workflow.commitId + } + if (workflow.revision) { + misc_fields['Pipeline Git branch/tag'] = workflow.revision + } + misc_fields['Nextflow Version'] = workflow.nextflow.version + misc_fields['Nextflow Build'] = workflow.nextflow.build misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp def email_fields = [:] @@ -336,8 +299,8 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi def email_html = html_template.toString() // Render the sendmail template - def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit - def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ] + def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? 
params.max_multiqc_email_size : 0) as MemoryUnit + def smail_fields = [email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes()] def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt") def sendmail_template = engine.createTemplate(sf).make(smail_fields) def sendmail_html = sendmail_template.toString() @@ -346,30 +309,34 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi def colors = logColours(monochrome_logs) as Map if (email_address) { try { - if (plaintext_email) { throw new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') } + if (plaintext_email) { +new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') } // Try to send HTML e-mail using sendmail def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") sendmail_tf.withWriter { w -> w << sendmail_html } - [ 'sendmail', '-t' ].execute() << sendmail_html - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-" - } catch (all) { + ['sendmail', '-t'].execute() << sendmail_html + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (sendmail)-") + } + catch (Exception msg) { + log.debug(msg) + log.debug("Trying with mail instead of sendmail") // Catch failures and try with plaintext - def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ] + def mail_cmd = ['mail', '-s', subject, '--content-type=text/html', email_address] mail_cmd.execute() << email_html - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-" + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (mail)-") } } // Write summary e-mail HTML to a file def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html") output_hf.withWriter { w -> w << email_html } - nextflow.extension.FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html"); + nextflow.extension.FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html") output_hf.delete() // Write summary e-mail TXT to a file def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt") output_tf.withWriter { w -> w << email_txt } - nextflow.extension.FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt"); + nextflow.extension.FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt") output_tf.delete() } @@ -380,12 +347,14 @@ def completionSummary(monochrome_logs=true) { def colors = logColours(monochrome_logs) as Map if (workflow.success) { if (workflow.stats.ignoredCount == 0) { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" - } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Pipeline completed successfully${colors.reset}-") } - } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" + else { + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.yellow} Pipeline 
completed successfully, but with errored process(es) ${colors.reset}-") + } + } + else { + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.red} Pipeline completed with errors${colors.reset}-") } } @@ -394,21 +363,30 @@ def completionSummary(monochrome_logs=true) { // def imNotification(summary_params, hook_url) { def summary = [:] - summary_params.keySet().sort().each { group -> - summary << summary_params[group] - } + summary_params + .keySet() + .sort() + .each { group -> + summary << summary_params[group] + } def misc_fields = [:] - misc_fields['start'] = workflow.start - misc_fields['complete'] = workflow.complete - misc_fields['scriptfile'] = workflow.scriptFile - misc_fields['scriptid'] = workflow.scriptId - if (workflow.repository) misc_fields['repository'] = workflow.repository - if (workflow.commitId) misc_fields['commitid'] = workflow.commitId - if (workflow.revision) misc_fields['revision'] = workflow.revision - misc_fields['nxf_version'] = workflow.nextflow.version - misc_fields['nxf_build'] = workflow.nextflow.build - misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp + misc_fields['start'] = workflow.start + misc_fields['complete'] = workflow.complete + misc_fields['scriptfile'] = workflow.scriptFile + misc_fields['scriptid'] = workflow.scriptId + if (workflow.repository) { + misc_fields['repository'] = workflow.repository + } + if (workflow.commitId) { + misc_fields['commitid'] = workflow.commitId + } + if (workflow.revision) { + misc_fields['revision'] = workflow.revision + } + misc_fields['nxf_version'] = workflow.nextflow.version + misc_fields['nxf_build'] = workflow.nextflow.build + misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp def msg_fields = [:] msg_fields['version'] = getWorkflowVersion() @@ -433,13 +411,13 @@ def imNotification(summary_params, hook_url) { def json_message = json_template.toString() // POST - def post = new URL(hook_url).openConnection(); + def post = new URL(hook_url).openConnection() post.setRequestMethod("POST") post.setDoOutput(true) post.setRequestProperty("Content-Type", "application/json") - post.getOutputStream().write(json_message.getBytes("UTF-8")); - def postRC = post.getResponseCode(); - if (! 
postRC.equals(200)) { - log.warn(post.getErrorStream().getText()); + post.getOutputStream().write(json_message.getBytes("UTF-8")) + def postRC = post.getResponseCode() + if (!postRC.equals(200)) { + log.warn(post.getErrorStream().getText()) } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test index 1dc317f8f7..e43d208b1b 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test @@ -41,58 +41,6 @@ nextflow_function { } } - test("Test Function workflowCitation") { - - function "workflowCitation" - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function nfCoreLogo") { - - function "nfCoreLogo" - - when { - function { - """ - input[0] = false - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function dashedLine") { - - function "dashedLine" - - when { - function { - """ - input[0] = false - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - test("Test Function without logColours") { function "logColours" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap index 1037232c9e..02c6701413 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap @@ -17,26 +17,6 @@ }, "timestamp": "2024-02-28T12:02:59.729647" }, - "Test Function nfCoreLogo": { - "content": [ - "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow v9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:10.562934" - }, - "Test Function workflowCitation": { - "content": [ - "If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core framework\n https://doi.org/10.1038/s41587-020-0439-x\n\n* Software dependencies\n https://github.com/nextflow_workflow/blob/master/CITATIONS.md" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:07.019761" - }, "Test Function without logColours": { "content": [ { @@ -95,16 +75,6 @@ }, "timestamp": "2024-02-28T12:03:17.969323" }, - "Test Function dashedLine": { - "content": [ - "-\u001b[2m----------------------------------------------------\u001b[0m-" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:14.366181" - }, "Test Function with logColours": { "content": [ { diff 
--git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test index 842dc432af..8fb3016487 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test @@ -42,7 +42,7 @@ nextflow_workflow { params { test_data = '' - outdir = 1 + outdir = null } workflow { @@ -94,7 +94,7 @@ nextflow_workflow { params { test_data = '' - outdir = 1 + outdir = null } workflow { diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index 5b7e0ff143..6126f9ec69 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -5,12 +5,17 @@ */ {%- if modules %} -{% if fastqc %}include { FASTQC } from '../modules/nf-core/fastqc/main'{% endif %} -{% if multiqc %}include { MULTIQC } from '../modules/nf-core/multiqc/main'{% endif %} -{% if nf_schema %}include { paramsSummaryMap } from 'plugin/nf-schema'{% endif %} -{% if multiqc %}include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline'{% endif %} +{%- if fastqc %} +include { FASTQC } from '../modules/nf-core/fastqc/main'{% endif %} +{%- if multiqc %} +include { MULTIQC } from '../modules/nf-core/multiqc/main'{% endif %} +{%- if nf_schema %} +include { paramsSummaryMap } from 'plugin/nf-schema'{% endif %} +{%- if multiqc %} +include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline'{% endif %} include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' -{% if citations or multiqc %}include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'{% endif %} +{%- if citations or multiqc %} +include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'{% endif %} {%- endif %} /* @@ -28,7 +33,8 @@ workflow {{ short_name|upper }} { main: ch_versions = Channel.empty() - {% if multiqc %}ch_multiqc_files = Channel.empty(){% endif %} + {%- if multiqc %} + ch_multiqc_files = Channel.empty(){% endif %} {%- if fastqc %} // @@ -47,7 +53,7 @@ workflow {{ short_name|upper }} { softwareVersionsToYAML(ch_versions) .collectFile( storeDir: "${params.outdir}/pipeline_info", - name: {% if is_nfcore %}'nf_core_' {% else %} '' {% endif %} + 'pipeline_software_' + {% if multiqc %} 'mqc_' {% else %} '' {% endif %} + 'versions.yml', + name: {% if is_nfcore %}'nf_core_' + {% endif %} '{{ short_name }}_software_' {% if multiqc %} + 'mqc_' {% endif %} + 'versions.yml', sort: true, newLine: true ).set { ch_collated_versions } @@ -65,13 +71,14 @@ workflow {{ short_name|upper }} { Channel.fromPath(params.multiqc_logo, checkIfExists: true) : Channel.empty() - {% if nf_schema %} + {%- if nf_schema %} + summary_params = paramsSummaryMap( workflow, parameters_schema: "nextflow_schema.json") ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params)) ch_multiqc_files = ch_multiqc_files.mix( ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) - {% endif %} + {%- endif %} {%- if citations %} ch_multiqc_custom_methods_description = params.multiqc_methods_description ? 
@@ -95,7 +102,9 @@ workflow {{ short_name|upper }} { ch_multiqc_files.collect(), ch_multiqc_config.toList(), ch_multiqc_custom_config.toList(), - ch_multiqc_logo.toList() + ch_multiqc_logo.toList(), + [], + [] ) {% endif %} emit: diff --git a/nf_core/pipelines/bump_version.py b/nf_core/pipelines/bump_version.py index 18aa869328..3190ed70d4 100644 --- a/nf_core/pipelines/bump_version.py +++ b/nf_core/pipelines/bump_version.py @@ -5,9 +5,10 @@ import logging import re from pathlib import Path -from typing import List, Tuple, Union +from typing import List, Optional, Tuple, Union import rich.console +from ruamel.yaml import YAML import nf_core.utils from nf_core.utils import Pipeline @@ -60,6 +61,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: f"/releases/tag/{new_version}", ) ], + yaml_key=["report_comment"], ) if multiqc_current_version != "dev" and multiqc_new_version == "dev": update_file_version( @@ -71,6 +73,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: "/tree/dev", ) ], + yaml_key=["report_comment"], ) if multiqc_current_version == "dev" and multiqc_new_version != "dev": update_file_version( @@ -82,6 +85,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: f"/releases/tag/{multiqc_new_version}", ) ], + yaml_key=["report_comment"], ) update_file_version( Path("assets", "multiqc_config.yml"), @@ -92,6 +96,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: f"/{multiqc_new_version}/", ), ], + yaml_key=["report_comment"], ) # nf-test snap files pipeline_name = pipeline_obj.nf_config.get("manifest.name", "").strip(" '\"") @@ -107,6 +112,20 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: ) ], ) + # .nf-core.yml - pipeline version + # update entry: version: 1.0.0dev, but not `nf_core_version`, or `bump_version` + update_file_version( + ".nf-core.yml", + pipeline_obj, + [ + ( + current_version, + new_version, + ) + ], + required=False, + yaml_key=["template", "version"], + ) def bump_nextflow_version(pipeline_obj: Pipeline, new_version: str) -> None: @@ -147,10 +166,11 @@ def bump_nextflow_version(pipeline_obj: Pipeline, new_version: str) -> None: # example: # NXF_VER: # - "20.04.0" - rf"- \"{re.escape(current_version)}\"", - f'- "{new_version}"', + current_version, + new_version, ) ], + yaml_key=["jobs", "test", "strategy", "matrix", "NXF_VER"], ) # README.md - Nextflow version badge @@ -161,70 +181,128 @@ def bump_nextflow_version(pipeline_obj: Pipeline, new_version: str) -> None: ( rf"nextflow%20DSL2-%E2%89%A5{re.escape(current_version)}-23aa62.svg", f"nextflow%20DSL2-%E2%89%A5{new_version}-23aa62.svg", - ), - ( - # example: 1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=20.04.0`) - rf"1\.\s*Install\s*\[`Nextflow`\]\(https:\/\/www\.nextflow\.io\/docs\/latest\/getstarted\.html#installation\)\s*\(`>={re.escape(current_version)}`\)", - f"1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>={new_version}`)", - ), + ) ], ) -def update_file_version(filename: Union[str, Path], pipeline_obj: Pipeline, patterns: List[Tuple[str, str]]) -> None: - """Updates the version number in a requested file. +def update_file_version( + filename: Union[str, Path], + pipeline_obj: Pipeline, + patterns: List[Tuple[str, str]], + required: bool = True, + yaml_key: Optional[List[str]] = None, +) -> None: + """ + Updates a file with a new version number. 
Args: - filename (str): File to scan. - pipeline_obj (nf_core.pipelines.lint.PipelineLint): A PipelineLint object that holds information - about the pipeline contents and build files. - pattern (str): Regex pattern to apply. - - Raises: - ValueError, if the version number cannot be found. + filename (str): The name of the file to update. + pipeline_obj (nf_core.utils.Pipeline): A `Pipeline` object that holds information + about the pipeline contents. + patterns (List[Tuple[str, str]]): A list of tuples containing the regex patterns to + match and the replacement strings. + required (bool, optional): Whether the file is required to exist. Defaults to `True`. + yaml_key (Optional[List[str]], optional): The YAML key to update. Defaults to `None`. """ - # Load the file - fn = pipeline_obj._fp(filename) - content = "" - try: - with open(fn) as fh: - content = fh.read() - except FileNotFoundError: + fn: Path = pipeline_obj._fp(filename) + + if not fn.exists(): log.warning(f"File not found: '{fn}'") return - replacements = [] - for pattern in patterns: - found_match = False + if yaml_key: + update_yaml_file(fn, patterns, yaml_key, required) + else: + update_text_file(fn, patterns, required) - newcontent = [] - for line in content.splitlines(): - # Match the pattern - matches_pattern = re.findall(rf"^.*{pattern[0]}.*$", line) - if matches_pattern: - found_match = True - # Replace the match - newline = re.sub(pattern[0], pattern[1], line) - newcontent.append(newline) +def update_yaml_file(fn: Path, patterns: List[Tuple[str, str]], yaml_key: List[str], required: bool): + """ + Updates a YAML file with a new version number. - # Save for logging - replacements.append((line, newline)) + Args: + fn (Path): The name of the file to update. + patterns (List[Tuple[str, str]]): A list of tuples containing the regex patterns to + match and the replacement strings. + yaml_key (List[str]): The YAML key to update. + required (bool): Whether the file is required to exist. + """ + yaml = YAML() + yaml.preserve_quotes = True + with open(fn) as file: + yaml_content = yaml.load(file) + + try: + target = yaml_content + for key in yaml_key[:-1]: + target = target[key] - # No match, keep line as it is + last_key = yaml_key[-1] + current_value = target[last_key] + + new_value = current_value + for pattern, replacement in patterns: + # check if current value is list + if isinstance(current_value, list): + new_value = [re.sub(pattern, replacement, item) for item in current_value] else: - newcontent.append(line) + new_value = re.sub(pattern, replacement, current_value) - if found_match: - content = "\n".join(newcontent) + "\n" - else: - log.error(f"Could not find version number in {filename}: `{pattern}`") + if new_value != current_value: + target[last_key] = new_value + with open(fn, "w") as file: + yaml.dump(yaml_content, file) + log.info(f"Updated version in YAML file '{fn}'") + log_change(str(current_value), str(new_value)) + except KeyError as e: + handle_error(f"Could not find key {e} in the YAML structure of {fn}", required) - log.info(f"Updated version in '{filename}'") - for replacement in replacements: - stderr.print(f" [red] - {replacement[0].strip()}", highlight=False) - stderr.print(f" [green] + {replacement[1].strip()}", highlight=False) - stderr.print("\n") - with open(fn, "w") as fh: - fh.write(content) +def update_text_file(fn: Path, patterns: List[Tuple[str, str]], required: bool): + """ + Updates a text file with a new version number. + + Args: + fn (Path): The name of the file to update. 
+ patterns (List[Tuple[str, str]]): A list of tuples containing the regex patterns to + match and the replacement strings. + required (bool): Whether the file is required to exist. + """ + with open(fn) as file: + content = file.read() + + updated = False + for pattern, replacement in patterns: + new_content, count = re.subn(pattern, replacement, content) + if count > 0: + log_change(content, new_content) + content = new_content + updated = True + log.info(f"Updated version in '{fn}'") + log.debug(f"Replaced pattern '{pattern}' with '{replacement}' {count} times") + elif required: + handle_error(f"Could not find version number in {fn}: `{pattern}`", required) + + if updated: + with open(fn, "w") as file: + file.write(content) + + +def handle_error(message: str, required: bool): + if required: + raise ValueError(message) + else: + log.info(message) + + +def log_change(old_content: str, new_content: str): + old_lines = old_content.splitlines() + new_lines = new_content.splitlines() + + for old_line, new_line in zip(old_lines, new_lines): + if old_line != new_line: + stderr.print(f" [red] - {old_line.strip()}", highlight=False) + stderr.print(f" [green] + {new_line.strip()}", highlight=False) + + stderr.print("\n") diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 8b0edf34cf..6a610ccccb 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -1,14 +1,11 @@ """A Textual app to create a pipeline.""" import logging -from pathlib import Path import click -import yaml from textual.app import App -from textual.widgets import Button +from textual.widgets import Button, Switch -import nf_core from nf_core.pipelines.create import utils from nf_core.pipelines.create.basicdetails import BasicDetails from nf_core.pipelines.create.custompipeline import CustomPipeline @@ -41,11 +38,12 @@ class PipelineCreateApp(App[utils.CreateConfig]): """A Textual app to manage stopwatches.""" CSS_PATH = "create.tcss" - TITLE = "nf-core create" + TITLE = "nf-core pipelines create" SUB_TITLE = "Create a new pipeline with the nf-core pipeline template" BINDINGS = [ ("d", "toggle_dark", "Toggle dark mode"), ("q", "quit", "Quit"), + ("a", "toggle_all", "Toggle all"), ] SCREENS = { "welcome": WelcomeScreen(), @@ -105,3 +103,14 @@ def on_button_pressed(self, event: Button.Pressed) -> None: def action_toggle_dark(self) -> None: """An action to toggle dark mode.""" self.dark: bool = not self.dark + + def action_toggle_all(self) -> None: + """An action to toggle all Switches.""" + switches = self.query(Switch) + if not switches: + return # No Switches widgets found + # Determine the new state based on the first switch + new_state = not switches.first().value if switches.first() else True + for switch in switches: + switch.value = new_state + self.refresh() diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py index 09484fa2ea..2bd2ea1c79 100644 --- a/nf_core/pipelines/create/basicdetails.py +++ b/nf_core/pipelines/create/basicdetails.py @@ -69,7 +69,7 @@ def compose(self) -> ComposeResult: @on(Input.Submitted) def show_exists_warn(self): """Check if the pipeline exists on every input change or submitted. 
- If the pipeline exists, show warning message saying that it will be overriden.""" + If the pipeline exists, show warning message saying that it will be overridden.""" config = {} for text_input in self.query("TextInput"): this_input = text_input.query_one(Input) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 8e1d46c690..61e0b63ec3 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -8,7 +8,7 @@ import re import shutil from pathlib import Path -from typing import Dict, List, Optional, Tuple, Union, cast +from typing import Dict, List, Optional, Tuple, Union import git import git.config @@ -21,7 +21,8 @@ from nf_core.pipelines.create.utils import CreateConfig, features_yml_path, load_features_yaml from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file -from nf_core.utils import LintConfigType, NFCoreTemplateConfig +from nf_core.pipelines.rocrate import ROCrate +from nf_core.utils import NFCoreTemplateConfig, NFCoreYamlLintConfig log = logging.getLogger(__name__) @@ -67,7 +68,7 @@ def __init__( _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else Path().cwd()) # Obtain a CreateConfig object from `.nf-core.yml` config file if config_yml is not None and getattr(config_yml, "template", None) is not None: - self.config = CreateConfig(**config_yml["template"].model_dump()) + self.config = CreateConfig(**config_yml["template"].model_dump(exclude_none=True)) else: raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") # Update the output directory @@ -182,7 +183,7 @@ def update_config(self, organisation, version, force, outdir): self.config.force = force if force else False if self.config.outdir is None: self.config.outdir = outdir if outdir else "." 
- if self.config.is_nfcore is None: + if self.config.is_nfcore is None or self.config.is_nfcore == "null": self.config.is_nfcore = self.config.org == "nf-core" def obtain_jinja_params_dict( @@ -205,7 +206,7 @@ def obtain_jinja_params_dict( config_yml = None # Set the parameters for the jinja template - jinja_params = self.config.model_dump() + jinja_params = self.config.model_dump(exclude_none=True) # Add template areas to jinja params and create list of areas with paths to skip skip_areas = [] @@ -356,6 +357,11 @@ def render_template(self) -> None: # Make a logo and save it, if it is a nf-core pipeline self.make_pipeline_logo() + if self.config.skip_features is None or "ro-crate" not in self.config.skip_features: + # Create the RO-Crate metadata file + rocrate_obj = ROCrate(self.outdir) + rocrate_obj.create_rocrate(json_path=self.outdir / "ro-crate-metadata.json") + # Update the .nf-core.yml with linting configurations self.fix_linting() @@ -363,14 +369,12 @@ def render_template(self) -> None: config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: - self.config.outdir = str(self.config.outdir) - config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) - yaml.safe_dump(config_yml.model_dump(), fh) + config_yml.template = NFCoreTemplateConfig(**self.config.model_dump(exclude_none=True)) + yaml.safe_dump(config_yml.model_dump(exclude_none=True), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") - run_prettier_on_file(self.outdir / config_fn) # Run prettier on files - run_prettier_on_file(self.outdir) + run_prettier_on_file([str(f) for f in self.outdir.glob("**/*")]) def fix_linting(self): """ @@ -397,11 +401,9 @@ def fix_linting(self): # Add the lint content to the preexisting nf-core config config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and nf_core_yml is not None: - nf_core_yml.lint = cast(LintConfigType, lint_config) + nf_core_yml.lint = NFCoreYamlLintConfig(**lint_config) with open(self.outdir / config_fn, "w") as fh: - yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) - - run_prettier_on_file(Path(self.outdir, config_fn)) + yaml.dump(nf_core_yml.model_dump(exclude_none=True), fh, default_flow_style=False, sort_keys=False) def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 5debcfee7f..e433db41ec 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -2,9 +2,9 @@ from textual import on from textual.app import ComposeResult -from textual.containers import Center, ScrollableContainer +from textual.containers import Center, Horizontal, ScrollableContainer from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Markdown, Switch +from textual.widgets import Button, Footer, Header, Markdown, Static, Switch from nf_core.pipelines.create.utils import PipelineFeature @@ -22,7 +22,13 @@ def compose(self) -> ComposeResult: """ ) ) + yield Horizontal( + Switch(id="toggle_all", value=True), + Static("Toggle all features", classes="feature_title"), + classes="custom_grid", + ) yield ScrollableContainer(id="features") + yield Center( Button("Back", id="back", variant="default"), Button("Continue", id="continue", variant="success"), @@ 
-35,6 +41,7 @@ def on_mount(self) -> None: self.query_one("#features").mount( PipelineFeature(feature["help_text"], feature["short_description"], feature["description"], name) ) + self.query_one("#toggle_all", Switch).value = True @on(Button.Pressed, "#continue") def on_button_pressed(self, event: Button.Pressed) -> None: @@ -45,3 +52,10 @@ def on_button_pressed(self, event: Button.Pressed) -> None: if not this_switch.value: skip.append(this_switch.id) self.parent.TEMPLATE_CONFIG.__dict__.update({"skip_features": skip, "is_nfcore": False}) + + @on(Switch.Changed, "#toggle_all") + def on_toggle_all(self, event: Switch.Changed) -> None: + """Handle toggling all switches.""" + new_state = event.value + for feature in self.query("PipelineFeature"): + feature.query_one(Switch).value = new_state diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index bd15cf9ddd..dad81689a9 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -85,7 +85,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: @on(Input.Submitted) def show_exists_warn(self): """Check if the pipeline exists on every input change or submitted. - If the pipeline exists, show warning message saying that it will be overriden.""" + If the pipeline exists, show warning message saying that it will be overridden.""" outdir = "" for text_input in self.query("TextInput"): this_input = text_input.query_one(Input) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 99e7b09ab8..b37dfb6170 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -67,7 +67,7 @@ def compose(self) -> ComposeResult: yield TextInput( "repo_org", "Organisation name", - "The name of the organisation where the GitHub repo will be cretaed", + "The name of the organisation where the GitHub repo will be created", default=self.parent.TEMPLATE_CONFIG.org, classes="column", ) diff --git a/nf_core/pipelines/create/template_features.yml b/nf_core/pipelines/create/template_features.yml index b59a2e51a6..9841879e83 100644 --- a/nf_core/pipelines/create/template_features.yml +++ b/nf_core/pipelines/create/template_features.yml @@ -60,6 +60,7 @@ ci: igenomes: skippable_paths: - "conf/igenomes.config" + - "conf/igenomes_ignored.config" short_description: "Use reference genomes" description: "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes" help_text: | @@ -76,6 +77,7 @@ igenomes: linting: files_exist: - "conf/igenomes.config" + - "conf/igenomes_ignored.config" nfcore_pipelines: True custom_pipelines: True github_badges: @@ -99,7 +101,7 @@ github_badges: nf_core_configs: skippable_paths: False short_description: "Add configuration files" - description: "The pipeline will include configuration profiles containing custom parameters requried to run nf-core pipelines at different institutions" + description: "The pipeline will include configuration profiles containing custom parameters required to run nf-core pipelines at different institutions" help_text: | Nf-core has a repository with a collection of configuration profiles. 
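The template_features.yml hunks on either side of this point all follow the same schema: each feature declares `skippable_paths` that are dropped from the rendered template when the feature is skipped, plus optional `linting` overrides that relax the corresponding lint tests. A rough sketch of how such a file can be resolved against a user's `skip_features` selection (the helper below is hypothetical, not the code in this PR):

```python
# Hypothetical helper (illustration only): flatten the `skippable_paths` of
# every feature named in `skip_features` into a list of template paths to omit.
import yaml

def paths_to_skip(features_yml_path, skip_features):
    with open(features_yml_path) as fh:
        features = yaml.safe_load(fh)
    skipped = []
    for name, feature in features.items():
        # `skippable_paths` can be `False` for features that cannot be skipped
        if name in skip_features and feature.get("skippable_paths"):
            skipped.extend(feature["skippable_paths"])
    return skipped

# e.g. skipping "igenomes" and "rocrate" against this file would yield
# ["conf/igenomes.config", "conf/igenomes_ignored.config", "ro-crate-metadata.json"]
```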
@@ -118,6 +120,7 @@ nf_core_configs:
       - "custom_config"
       - "params.custom_config_version"
       - "params.custom_config_base"
+    included_configs: False
   nfcore_pipelines: False
   custom_pipelines: True
 is_nfcore:
@@ -183,6 +186,7 @@ code_linters:
 citations:
   skippable_paths:
     - "assets/methods_description_template.yml"
+    - "CITATIONS.md"
   short_description: "Include citations"
   description: "Include pipeline tools citations in CITATIONS.md and a method description in the MultiQC report (if enabled)."
   help_text: |
@@ -190,6 +194,9 @@ citations:
     Additionally, it will include a YAML file (`assets/methods_description_template.yml`)
     to add a Materials & Methods section describing the tools used in the pipeline,
     and the logic to add this section to the output MultiQC report (if the report is generated).
+  linting:
+    files_exist:
+      - "CITATIONS.md"
   nfcore_pipelines: False
   custom_pipelines: True
 gitpod:
@@ -425,3 +432,27 @@ seqera_platform:
     You can extend this file adding any other desired configuration.
   nfcore_pipelines: False
   custom_pipelines: True
+rocrate:
+  skippable_paths:
+    - "ro-crate-metadata.json"
+  short_description: "Add RO-Crate metadata"
+  description: "Add a RO-Crate metadata file to describe the pipeline"
+  help_text: |
+    RO-Crate is a metadata specification to describe research data and software.
+    This will add a `ro-crate-metadata.json` file to describe the pipeline.
+  nfcore_pipelines: False
+  custom_pipelines: True
+  linting:
+    files_warn:
+      - "ro-crate-metadata.json"
+vscode:
+  skippable_paths:
+    - ".vscode"
+  short_description: "Render website admonitions in VSCode"
+  description: "Add a VSCode configuration to render website admonitions"
+  help_text: |
+    This will add a VSCode configuration file to render the admonitions in markdown files with the same style as the nf-core website.
+
+    Adds the `.vscode` directory to the pipeline repository.
+  nfcore_pipelines: False
+  custom_pipelines: True
diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py
index 97453b127e..d37dce86d1 100644
--- a/nf_core/pipelines/download.py
+++ b/nf_core/pipelines/download.py
@@ -133,10 +133,8 @@ def __init__(
         self.force = force
         self.platform = platform
         self.fullname: Optional[str] = None
-        # if flag is not specified, do not assume deliberate choice and prompt config inclusion interactively.
-        # this implies that non-interactive "no" choice is only possible implicitly (e.g. with --platform or if prompt is suppressed by !stderr.is_interactive).
-        # only alternative would have been to make it a parameter with argument, e.g. -d="yes" or -d="no".
-        self.include_configs = True if download_configuration else False if bool(platform) else None
+        # downloading configs is not supported for Seqera Platform downloads.
+        self.include_configs = True if download_configuration == "yes" and not bool(platform) else False
         # Additional tags to add to the downloaded pipeline. This enables to mark particular commits or revisions with
         # additional tags, e.g. "stable", "testing", "validated", "production" etc. Since this requires a git-repo, it is only
         # available for the bare / Seqera Platform download.
@@ -748,7 +746,7 @@ def find_container_images(self, workflow_directory: str) -> None:
         self.nf_config is needed, because we need to restart search over raw input if no proper container matches are found.
""" - config_findings.append((k, v.strip('"').strip("'"), self.nf_config, "Nextflow configs")) + config_findings.append((k, v.strip("'\""), self.nf_config, "Nextflow configs")) # rectify the container paths found in the config # Raw config_findings may yield multiple containers, so better create a shallow copy of the list, since length of input and output may be different ?!? @@ -841,11 +839,12 @@ def rectify_raw_container_matches(self, raw_findings): url_regex = ( r"https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)" ) + oras_regex = r"oras:\/\/[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)" # Thanks Stack Overflow for the regex: https://stackoverflow.com/a/39672069/713980 docker_regex = r"^(?:(?=[^:\/]{1,253})(?!-)[a-zA-Z0-9-]{1,63}(? List[str]: """ Helper function that takes a list of container images (URLs and Docker URIs), eliminates all Docker URIs for which also a URL is contained and returns the @@ -995,13 +994,74 @@ def prioritize_direct_download(self, container_list): we want to keep it and not replace with with whatever we have now (which might be the Docker URI). A regex that matches http, r"^$|^http" could thus be used to prioritize the Docker URIs over http Downloads + + We also need to handle a special case: The https:// Singularity downloads from Seqera Containers all end in 'data', although + they are not equivalent, e.g.: + + 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data' + 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data' + + Lastly, we want to remove at least a few Docker URIs for those modules, that have an oras:// download link. """ - d = {} + d: Dict[str, str] = {} + seqera_containers_http: List[str] = [] + seqera_containers_oras: List[str] = [] + all_others: List[str] = [] + for c in container_list: + if bool(re.search(r"/data$", c)): + seqera_containers_http.append(c) + elif bool(re.search(r"^oras://", c)): + seqera_containers_oras.append(c) + else: + all_others.append(c) + + for c in all_others: if re.match(r"^$|(?!^http)", d.get(k := re.sub(".*/(.*)", "\\1", c), "")): log.debug(f"{c} matches and will be saved as {k}") d[k] = c - return sorted(list(d.values())) + + combined_with_oras = self.reconcile_seqera_container_uris(seqera_containers_oras, list(d.values())) + + # combine deduplicated others (Seqera containers oras, http others and Docker URI others) and Seqera containers http + return sorted(list(set(combined_with_oras + seqera_containers_http))) + + @staticmethod + def reconcile_seqera_container_uris(prioritized_container_list: List[str], other_list: List[str]) -> List[str]: + """ + Helper function that takes a list of Seqera container URIs, + extracts the software string and builds a regex from them to filter out + similar containers from the second container list. + + prioritzed_container_list = [ + ... "oras://community.wave.seqera.io/library/multiqc:1.25.1--f0e743d16869c0bf", + ... "oras://community.wave.seqera.io/library/multiqc_pip_multiqc-plugins:e1f4877f1515d03c" + ... 
] + + will be cleaned to + + ['library/multiqc:1.25.1', 'library/multiqc_pip_multiqc-plugins'] + + Subsequently, build a regex from those and filter out matching duplicates in other_list: + """ + if not prioritized_container_list: + return other_list + else: + # trim the URIs to the stem that contains the tool string, assign with Walrus operator to account for non-matching patterns + trimmed_priority_list = [ + match.group() + for c in set(prioritized_container_list) + if (match := re.search(r"library/.*?:[\d.]+", c) if "--" in c else re.search(r"library/[^\s:]+", c)) + ] + + # build regex + prioritized_containers = re.compile("|".join(f"{re.escape(c)}" for c in trimmed_priority_list)) + + # filter out matches in other list + filtered_containers = [c for c in other_list if not re.search(prioritized_containers, c)] + + # combine prioritized and regular container lists + return sorted(list(set(prioritized_container_list + filtered_containers))) def gather_registries(self, workflow_directory: str) -> None: """Fetch the registries from the pipeline config and CLI arguments and store them in a set. @@ -1025,7 +1085,13 @@ def gather_registries(self, workflow_directory: str) -> None: self.registry_set.add(self.nf_config[registry]) # add depot.galaxyproject.org to the set, because it is the default registry for singularity hardcoded in modules - self.registry_set.add("depot.galaxyproject.org") + self.registry_set.add("depot.galaxyproject.org/singularity") + + # add community.wave.seqera.io/library to the set to support the new Seqera Docker container registry + self.registry_set.add("community.wave.seqera.io/library") + + # add chttps://community-cr-prod.seqera.io/docker/registry/v2/ to the set to support the new Seqera Singularity container registry + self.registry_set.add("community-cr-prod.seqera.io/docker/registry/v2") def symlink_singularity_images(self, image_out_path: str) -> None: """Create a symlink for each registry in the registry set that points to the image. @@ -1042,10 +1108,13 @@ def symlink_singularity_images(self, image_out_path: str) -> None: if self.registry_set: # Create a regex pattern from the set, in case trimming is needed. - trim_pattern = "|".join(f"^{re.escape(registry)}-?" for registry in self.registry_set) + trim_pattern = "|".join(f"^{re.escape(registry)}-?".replace("/", "[/-]") for registry in self.registry_set) for registry in self.registry_set: - if not os.path.basename(image_out_path).startswith(registry): + # Nextflow will convert it like this as well, so we need it mimic its behavior + registry = registry.replace("/", "-") + + if not bool(re.search(trim_pattern, os.path.basename(image_out_path))): symlink_name = os.path.join("./", f"{registry}-{os.path.basename(image_out_path)}") else: trimmed_name = re.sub(f"{trim_pattern}", "", os.path.basename(image_out_path)) @@ -1265,7 +1334,7 @@ def singularity_image_filenames(self, container: str) -> Tuple[str, Optional[str # if docker.registry / singularity.registry are set to empty strings at runtime, which can be included in the HPC config profiles easily. if self.registry_set: # Create a regex pattern from the set of registries - trim_pattern = "|".join(f"^{re.escape(registry)}-?" 
for registry in self.registry_set) + trim_pattern = "|".join(f"^{re.escape(registry)}-?".replace("/", "[/-]") for registry in self.registry_set) # Use the pattern to trim the string out_name = re.sub(f"{trim_pattern}", "", out_name) @@ -1347,9 +1416,10 @@ def singularity_download_image( log.debug(f"Copying {container} from cache: '{os.path.basename(out_path)}'") progress.update(task, description="Copying from cache to target directory") shutil.copyfile(cache_path, out_path) + self.symlink_singularity_images(cache_path) # symlinks inside the cache directory # Create symlinks to ensure that the images are found even with different registries being used. - self.symlink_singularity_images(output_path) + self.symlink_singularity_images(out_path) progress.remove_task(task) @@ -1393,9 +1463,10 @@ def singularity_pull_image( # Sometimes, container still contain an explicit library specification, which # resulted in attempted pulls e.g. from docker://quay.io/quay.io/qiime2/core:2022.11 # Thus, if an explicit registry is specified, the provided -l value is ignored. + # Additionally, check if the container to be pulled is native Singularity: oras:// protocol. container_parts = container.split("/") if len(container_parts) > 2: - address = f"docker://{container}" + address = container if container.startswith("oras://") else f"docker://{container}" absolute_URI = True else: address = f"docker://{library}/{container.replace('docker://', '')}" @@ -1458,9 +1529,10 @@ def singularity_pull_image( log.debug(f"Copying {container} from cache: '{os.path.basename(out_path)}'") progress.update(task, current_log="Copying from cache to target directory") shutil.copyfile(cache_path, out_path) + self.symlink_singularity_images(cache_path) # symlinks inside the cache directory # Create symlinks to ensure that the images are found even with different registries being used. - self.symlink_singularity_images(output_path) + self.symlink_singularity_images(out_path) progress.remove_task(task) @@ -1694,7 +1766,7 @@ def tidy_tags_and_branches(self): self.repo.create_head("latest", "latest") # create a new head for latest self.checkout("latest") else: - # desired revisions may contain arbitrary branch names that do not correspond to valid sematic versioning patterns. + # desired revisions may contain arbitrary branch names that do not correspond to valid semantic versioning patterns. 
                    valid_versions = [
                        Version(v) for v in desired_revisions if re.match(r"\d+\.\d+(?:\.\d+)*(?:[\w\-_])*", v)
                    ]
@@ -1816,6 +1888,9 @@ def __init__(
             elif re.search(r"manifest\sunknown", line):
                 self.error_type = self.InvalidTagError(self)
                 break
+            elif re.search(r"ORAS\sSIF\simage\sshould\shave\sa\ssingle\slayer", line):
+                self.error_type = self.NoSingularityContainerError(self)
+                break
             elif re.search(r"Image\sfile\salready\sexists", line):
                 self.error_type = self.ImageExistsError(self)
                 break
@@ -1880,6 +1955,17 @@ def __init__(self, error_log):
             self.helpmessage = f'Saving image of "{self.error_log.container}" failed, because "{self.error_log.out_path}" exists.\nPlease troubleshoot the command \n"{" ".join(self.error_log.singularity_command)}" manually.\n'
             super().__init__(self.message)

+    class NoSingularityContainerError(RuntimeError):
+        """The container image is not in native Singularity Image Format."""
+
+        def __init__(self, error_log):
+            self.error_log = error_log
+            self.message = (
+                f'[bold red]"{self.error_log.container}" is not a valid Singularity Image Format container.[/]\n'
+            )
+            self.helpmessage = f"Pulling \"{self.error_log.container}\" failed, because it appears invalid. To convert from Docker's OCI format, prefix the URI with 'docker://' instead of 'oras://'.\n"
+            super().__init__(self.message)
+
     class OtherError(RuntimeError):
         """Undefined error with the container"""

diff --git a/nf_core/pipelines/launch.py b/nf_core/pipelines/launch.py
index a80639ea94..aab0ec4287 100644
--- a/nf_core/pipelines/launch.py
+++ b/nf_core/pipelines/launch.py
@@ -276,7 +276,7 @@ def merge_nxf_flag_schema(self):
             self.schema_obj.schema["definitions"] = {}
         self.schema_obj.schema["definitions"].update(self.nxf_flag_schema)
         self.schema_obj.schema["allOf"].insert(0, {"$ref": "#/definitions/coreNextflow"})
-        # Add the new defintion to the allOf key so that it's included in validation
+        # Add the new definition to the allOf key so that it's included in validation
         # Put it at the start of the list so that it comes first

     def prompt_web_gui(self):
@@ -316,7 +316,7 @@ def launch_web_gui(self):
             raise AssertionError('"api_url" not in web_response')
         if "web_url" not in web_response:
             raise AssertionError('"web_url" not in web_response')
-        # DO NOT FIX THIS TYPO. Needs to stay in sync with the website. Maintaining for backwards compatability.
+        # DO NOT FIX THIS TYPO. Needs to stay in sync with the website. Maintaining for backwards compatibility.
if web_response["status"] != "recieved": raise AssertionError( f'web_response["status"] should be "recieved", but it is "{web_response["status"]}"' @@ -434,7 +434,7 @@ def prompt_param(self, param_id, param_obj, is_required, answers): question = self.single_param_to_questionary(param_id, param_obj, answers) answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style) - # If required and got an empty reponse, ask again + # If required and got an empty response, ask again while isinstance(answer[param_id], str) and answer[param_id].strip() == "" and is_required: log.error(f"'--{param_id}' is required") answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style) @@ -457,7 +457,7 @@ def prompt_group(self, group_id, group_obj): Prompt for edits to a group of parameters (subschema in 'definitions') Args: - group_id: Paramater ID (string) + group_id: Parameter ID (string) group_obj: JSON Schema keys (dict) Returns: diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index 6d27351b62..154e38aea6 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -27,8 +27,8 @@ from nf_core import __version__ from nf_core.components.lint import ComponentLint from nf_core.pipelines.lint_utils import console +from nf_core.utils import NFCoreYamlConfig, NFCoreYamlLintConfig, strip_ansi_codes from nf_core.utils import plural_s as _s -from nf_core.utils import strip_ansi_codes from .actions_awsfulltest import actions_awsfulltest from .actions_awstest import actions_awstest @@ -37,6 +37,7 @@ from .configs import base_config, modules_config from .files_exist import files_exist from .files_unchanged import files_unchanged +from .included_configs import included_configs from .merge_markers import merge_markers from .modules_json import modules_json from .modules_structure import modules_structure @@ -101,6 +102,7 @@ class PipelineLint(nf_core.utils.Pipeline): system_exit = system_exit template_strings = template_strings version_consistency = version_consistency + included_configs = included_configs def __init__( self, wf_path, release_mode=False, fix=(), key=None, fail_ignored=False, fail_warned=False, hide_progress=False @@ -110,7 +112,7 @@ def __init__( # Initialise the parent object super().__init__(wf_path) - self.lint_config = {} + self.lint_config: Optional[NFCoreYamlLintConfig] = None self.release_mode = release_mode self.fail_ignored = fail_ignored self.fail_warned = fail_warned @@ -152,7 +154,7 @@ def _get_all_lint_tests(release_mode): "base_config", "modules_config", "nfcore_yml", - ] + (["version_consistency"] if release_mode else []) + ] + (["version_consistency", "included_configs"] if release_mode else []) def _load(self) -> bool: """Load information about the pipeline into the PipelineLint object""" @@ -171,13 +173,12 @@ def _load_lint_config(self) -> bool: Add parsed config to the `self.lint_config` class attribute. 
""" _, tools_config = nf_core.utils.load_tools_config(self.wf_path) - self.lint_config = getattr(tools_config, "lint", {}) or {} + self.lint_config = getattr(tools_config, "lint", None) or None is_correct = True - # Check if we have any keys that don't match lint test names if self.lint_config is not None: - for k in self.lint_config: - if k != "nfcore_components" and k not in self.lint_tests: + for k, v in self.lint_config: + if v is not None and k != "nfcore_components" and k not in self.lint_tests: # nfcore_components is an exception to allow custom pipelines without nf-core components log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") is_correct = False @@ -592,7 +593,7 @@ def run_linting( lint_obj._load_lint_config() lint_obj.load_pipeline_config() - if "nfcore_components" in lint_obj.lint_config and not lint_obj.lint_config["nfcore_components"]: + if lint_obj.lint_config and not lint_obj.lint_config["nfcore_components"]: module_lint_obj = None subworkflow_lint_obj = None else: @@ -677,5 +678,4 @@ def run_linting( if len(lint_obj.failed) > 0: if release_mode: log.info("Reminder: Lint tests were run in --release mode.") - return lint_obj, module_lint_obj, subworkflow_lint_obj diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py index bd25ff33d0..19c2498263 100644 --- a/nf_core/pipelines/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -66,6 +66,7 @@ def files_exist(self) -> Dict[str, List[str]]: conf/igenomes.config .github/workflows/awstest.yml .github/workflows/awsfulltest.yml + ro-crate-metadata.json Files that *must not* be present, due to being renamed or removed in the template: @@ -167,9 +168,11 @@ def files_exist(self) -> Dict[str, List[str]]: [Path("assets", "multiqc_config.yml")], [Path("conf", "base.config")], [Path("conf", "igenomes.config")], + [Path("conf", "igenomes_ignored.config")], [Path(".github", "workflows", "awstest.yml")], [Path(".github", "workflows", "awsfulltest.yml")], [Path("modules.json")], + [Path("ro-crate-metadata.json")], ] # List of strings. Fails / warns if any of the strings exist. @@ -197,6 +200,12 @@ def files_exist(self) -> Dict[str, List[str]]: ] files_warn_ifexists = [Path(".travis.yml")] + files_hint = [ + [ + ["ro-crate-metadata.json"], + ". Run `nf-core rocrate` to generate this file. Read more about RO-Crates in the [nf-core/tools docs](https://nf-co.re/tools#create-a-ro-crate-metadata-file).", + ], + ] # Remove files that should be ignored according to the linting config ignore_files = self.lint_config.get("files_exist", []) if self.lint_config is not None else [] @@ -224,7 +233,11 @@ def pf(file_path: Union[str, Path]) -> Path: if any([pf(f).is_file() for f in files]): passed.append(f"File found: {self._wrap_quotes(files)}") else: - warned.append(f"File not found: {self._wrap_quotes(files)}") + hint = "" + for file_hint in files_hint: + if file_hint[0] == files: + hint = str(file_hint[1]) + warned.append(f"File not found: {self._wrap_quotes(files)}{hint}") # Files that cause an error if they exist for file in files_fail_ifexists: diff --git a/nf_core/pipelines/lint/included_configs.py b/nf_core/pipelines/lint/included_configs.py new file mode 100644 index 0000000000..75c4594f41 --- /dev/null +++ b/nf_core/pipelines/lint/included_configs.py @@ -0,0 +1,36 @@ +from pathlib import Path + + +def included_configs(self): + """Check that the pipeline nextflow.config includes the pipeline custom configs. + + If the include line is uncommented, the test passes. 
+    If the include line is commented, the test fails.
+    If the include line is missing, the test warns.
+
+    Can be skipped by adding the following to the .nf-core.yml file:
+    lint:
+        included_configs: False
+    """
+    passed = []
+    failed = []
+    warned = []
+
+    config_file = Path(self.wf_path / "nextflow.config")
+
+    with open(config_file) as fh:
+        config = fh.read()
+    if (
+        f"// includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? \"${{params.custom_config_base}}/pipeline/{self.pipeline_name}.config\""
+        in config
+    ):
+        failed.append("Pipeline config does not include custom configs. Please uncomment the includeConfig line.")
+    elif (
+        f"includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? \"${{params.custom_config_base}}/pipeline/{self.pipeline_name}.config\""
+        in config
+    ):
+        passed.append("Pipeline config includes custom configs.")
+    else:
+        warned.append("Pipeline config does not include custom configs. Please add the includeConfig line.")
+
+    return {"passed": passed, "failed": failed, "warned": warned}
diff --git a/nf_core/pipelines/lint/multiqc_config.py b/nf_core/pipelines/lint/multiqc_config.py
index 2b0fc7902e..fec5b518e3 100644
--- a/nf_core/pipelines/lint/multiqc_config.py
+++ b/nf_core/pipelines/lint/multiqc_config.py
@@ -31,6 +31,15 @@ def multiqc_config(self) -> Dict[str, List[str]]:
         lint:
             multiqc_config: False

+    To disable this test only for specific sections, you can specify a list of section names.
+    For example:
+
+    .. code-block:: yaml
+        lint:
+            multiqc_config:
+                - report_section_order
+                - report_comment
+
     """

     passed: List[str] = []
diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py
index 790fc21797..6ae55501b2 100644
--- a/nf_core/pipelines/lint/nextflow_config.py
+++ b/nf_core/pipelines/lint/nextflow_config.py
@@ -80,6 +80,9 @@ def nextflow_config(self) -> Dict[str, List[str]]:
     * ``params.nf_required_version``: The old method for specifying the minimum Nextflow version. Replaced by ``manifest.nextflowVersion``
     * ``params.container``: The old method for specifying the dockerhub container address. Replaced by ``process.container``
     * ``igenomesIgnore``: Changed to ``igenomes_ignore``
+    * ``params.max_cpus``: Old method of specifying the maximum number of CPUs a process can request. Replaced by the native Nextflow ``resourceLimits`` directive in config files.
+    * ``params.max_memory``: Old method of specifying the maximum amount of memory a process can request. Replaced by the native Nextflow ``resourceLimits`` directive.
+    * ``params.max_time``: Old method of specifying the maximum time a process can request. Replaced by the native Nextflow ``resourceLimits`` directive.

     .. tip:: The ``snake_case`` convention should now be used when defining pipeline parameters
@@ -146,7 +149,13 @@ def nextflow_config(self) -> Dict[str, List[str]]:
         ["params.input"],
     ]
     # Throw a warning if these are missing
-    config_warn = [["manifest.mainScript"], ["timeline.file"], ["trace.file"], ["report.file"], ["dag.file"]]
+    config_warn = [
+        ["manifest.mainScript"],
+        ["timeline.file"],
+        ["trace.file"],
+        ["report.file"],
+        ["dag.file"],
+    ]
     # Old deprecated vars - fail if present
     config_fail_ifdefined = [
         "params.nf_required_version",
@@ -155,6 +164,9 @@ def nextflow_config(self) -> Dict[str, List[str]]:
         "params.igenomesIgnore",
         "params.name",
         "params.enable_conda",
+        "params.max_cpus",
+        "params.max_memory",
+        "params.max_time",
     ]

     # Lint for plugins
@@ -334,7 +346,7 @@ def nextflow_config(self) -> Dict[str, List[str]]:
             failed.append(f"Config `params.custom_config_base` is not set to `{custom_config_base}`")

     # Check that lines for loading custom profiles exist
-    lines = [
+    old_lines = [
         r"// Load nf-core custom profiles from different Institutions",
         r"try {",
         r'includeConfig "${params.custom_config_base}/nfcore_custom.config"',
@@ -342,11 +354,19 @@
         r'System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")',
         r"}",
     ]
+    lines = [
+        r"// Load nf-core custom profiles from different Institutions",
+        r'''includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/nfcore_custom.config" : "/dev/null"''',
+    ]
     path = Path(self.wf_path, "nextflow.config")
     i = 0
     with open(path) as f:
         for line in f:
-            if lines[i] in line:
+            if old_lines[i] in line:
+                i += 1
+                if i == len(old_lines):
+                    break
+            elif lines[i] in line:
                 i += 1
                 if i == len(lines):
                     break
@@ -354,6 +374,12 @@
                 i = 0
     if i == len(lines):
         passed.append("Lines for loading custom profiles found")
+    elif i == len(old_lines):
+        failed.append(
+            "Old lines for loading custom profiles found. File should contain: ```groovy\n{}".format(
+                "\n".join(lines)
+            )
+        )
     else:
         lines[2] = f"\t{lines[2]}"
         lines[4] = f"\t{lines[4]}"
@@ -439,6 +465,7 @@ def nextflow_config(self) -> Dict[str, List[str]]:
                     f"Config default value incorrect: `{param}` is set as {self._wrap_quotes(schema_default)} in `nextflow_schema.json` but is {self._wrap_quotes(self.nf_config[param])} in `nextflow.config`."
                 )
             else:
+                schema_default = str(schema.schema_defaults[param_name])
                 failed.append(
                     f"Default value from the Nextflow schema `{param} = {self._wrap_quotes(schema_default)}` not found in `nextflow.config`."
) diff --git a/nf_core/pipelines/lint/nfcore_yml.py b/nf_core/pipelines/lint/nfcore_yml.py index e0d5fb2005..3395696d1d 100644 --- a/nf_core/pipelines/lint/nfcore_yml.py +++ b/nf_core/pipelines/lint/nfcore_yml.py @@ -1,7 +1,8 @@ -import re from pathlib import Path from typing import Dict, List +from ruamel.yaml import YAML + from nf_core import __version__ REPOSITORY_TYPES = ["pipeline", "modules"] @@ -26,21 +27,23 @@ def nfcore_yml(self) -> Dict[str, List[str]]: failed: List[str] = [] ignored: List[str] = [] + yaml = YAML() + # Remove field that should be ignored according to the linting config ignore_configs = self.lint_config.get(".nf-core", []) if self.lint_config is not None else [] - try: - with open(Path(self.wf_path, ".nf-core.yml")) as fh: - content = fh.read() - except FileNotFoundError: - with open(Path(self.wf_path, ".nf-core.yaml")) as fh: - content = fh.read() + for ext in (".yml", ".yaml"): + try: + nf_core_yml = yaml.load(Path(self.wf_path) / f".nf-core{ext}") + break + except FileNotFoundError: + continue + else: + raise FileNotFoundError("No `.nf-core.yml` file found.") if "repository_type" not in ignore_configs: # Check that the repository type is set in the .nf-core.yml - repo_type_re = r"repository_type: (.+)" - match = re.search(repo_type_re, content) - if match: - repo_type = match.group(1) + if "repository_type" in nf_core_yml: + repo_type = nf_core_yml["repository_type"] if repo_type not in REPOSITORY_TYPES: failed.append( f"Repository type in `.nf-core.yml` is not valid. " @@ -55,10 +58,8 @@ def nfcore_yml(self) -> Dict[str, List[str]]: if "nf_core_version" not in ignore_configs: # Check that the nf-core version is set in the .nf-core.yml - nf_core_version_re = r"nf_core_version: (.+)" - match = re.search(nf_core_version_re, content) - if match: - nf_core_version = match.group(1).strip('"') + if "nf_core_version" in nf_core_yml: + nf_core_version = nf_core_yml["nf_core_version"] if nf_core_version != __version__ and "dev" not in nf_core_version: warned.append( f"nf-core version in `.nf-core.yml` is not set to the latest version. " diff --git a/nf_core/pipelines/lint/readme.py b/nf_core/pipelines/lint/readme.py index 1c09104258..75b05f16ed 100644 --- a/nf_core/pipelines/lint/readme.py +++ b/nf_core/pipelines/lint/readme.py @@ -23,6 +23,21 @@ def readme(self): * If pipeline is released but still contains a 'zenodo.XXXXXXX' tag, the test fails + To disable this test, add the following to the pipeline's ``.nf-core.yml`` file: + + .. code-block:: yaml + lint: + readme: False + + To disable subsets of these tests, add the following to the pipeline's ``.nf-core.yml`` file: + + .. 
code-block:: yaml + + lint: + readme: + - nextflow_badge + - zenodo_release + """ passed = [] warned = [] @@ -36,7 +51,7 @@ def readme(self): if "nextflow_badge" not in ignore_configs: # Check that there is a readme badge showing the minimum required version of Nextflow - # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.10.0-23aa62.svg)](https://www.nextflow.io/) + # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A524.04.2-23aa62.svg)](https://www.nextflow.io/) # and that it has the correct version nf_badge_re = r"\[!\[Nextflow\]\(https://img\.shields\.io/badge/nextflow%20DSL2-!?(?:%E2%89%A5|%3E%3D)([\d\.]+)-23aa62\.svg\)\]\(https://www\.nextflow\.io/\)" match = re.search(nf_badge_re, content) diff --git a/nf_core/pipelines/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py index 37a1f64daf..0cb669e553 100644 --- a/nf_core/pipelines/lint/template_strings.py +++ b/nf_core/pipelines/lint/template_strings.py @@ -5,7 +5,7 @@ def template_strings(self): """Check for template placeholders. - The ``nf-core create`` pipeline template uses + The ``nf-core pipelines create`` pipeline template uses `Jinja `_ behind the scenes. This lint test fails if any Jinja template variables such as @@ -39,8 +39,8 @@ def template_strings(self): ignored = [] # Files that should be ignored according to the linting config ignore_files = self.lint_config.get("template_strings", []) if self.lint_config is not None else [] - files = self.list_files() + files = self.list_files() # Loop through files, searching for string num_matches = 0 for fn in files: diff --git a/nf_core/pipelines/lint_utils.py b/nf_core/pipelines/lint_utils.py index ff65fb0e56..b4c56c6007 100644 --- a/nf_core/pipelines/lint_utils.py +++ b/nf_core/pipelines/lint_utils.py @@ -2,9 +2,10 @@ import logging import subprocess from pathlib import Path -from typing import List +from typing import List, Union import rich +import yaml from rich.console import Console from rich.table import Table @@ -69,7 +70,7 @@ def print_fixes(lint_obj): ) -def run_prettier_on_file(file): +def run_prettier_on_file(file: Union[Path, str, List[str]]) -> None: """Run the pre-commit hook prettier on a file. Args: @@ -80,12 +81,15 @@ def run_prettier_on_file(file): """ nf_core_pre_commit_config = Path(nf_core.__file__).parent / ".pre-commit-prettier-config.yaml" + args = ["pre-commit", "run", "--config", str(nf_core_pre_commit_config), "prettier"] + if isinstance(file, List): + args.extend(["--files", *file]) + else: + args.extend(["--files", str(file)]) + try: - subprocess.run( - ["pre-commit", "run", "--config", nf_core_pre_commit_config, "prettier", "--files", file], - capture_output=True, - check=True, - ) + subprocess.run(args, capture_output=True, check=True) + log.debug(f"${subprocess.STDOUT}") except subprocess.CalledProcessError as e: if ": SyntaxError: " in e.stdout.decode(): log.critical(f"Can't format {file} because it has a syntax error.\n{e.stdout.decode()}") @@ -111,6 +115,18 @@ def dump_json_with_prettier(file_name, file_content): run_prettier_on_file(file_name) +def dump_yaml_with_prettier(file_name: Union[Path, str], file_content: dict) -> None: + """Dump a YAML file and run prettier on it. + + Args: + file_name (Path | str): A file identifier as a string or pathlib.Path. 
+ file_content (dict): Content to dump into the YAML file + """ + with open(file_name, "w") as fh: + yaml.safe_dump(file_content, fh) + run_prettier_on_file(file_name) + + def ignore_file(lint_name: str, file_path: Path, dir_path: Path) -> List[List[str]]: """Ignore a file and add the result to the ignored list. Return the passed, failed, ignored and ignore_configs lists.""" diff --git a/nf_core/pipelines/params_file.py b/nf_core/pipelines/params_file.py index d61b7cfbc8..69326c142d 100644 --- a/nf_core/pipelines/params_file.py +++ b/nf_core/pipelines/params_file.py @@ -2,9 +2,9 @@ import json import logging -import os import textwrap -from typing import Literal, Optional +from pathlib import Path +from typing import Dict, List, Literal, Optional import questionary @@ -27,7 +27,7 @@ ModeLiteral = Literal["both", "start", "end", "none"] -def _print_wrapped(text, fill_char="-", mode="both", width=80, indent=0, drop_whitespace=True): +def _print_wrapped(text, fill_char="-", mode="both", width=80, indent=0, drop_whitespace=True) -> str: """Helper function to format text for the params-file template. Args: @@ -100,7 +100,7 @@ def __init__( self.wfs = nf_core.pipelines.list.Workflows() self.wfs.get_remote_workflows() - def get_pipeline(self): + def get_pipeline(self) -> Optional[bool]: """ Prompt the user for a pipeline name and get the schema """ @@ -124,11 +124,14 @@ def get_pipeline(self): ).unsafe_ask() # Get the schema - self.schema_obj = nf_core.pipelines.schema.PipelineSchema() + self.schema_obj = PipelineSchema() + if self.schema_obj is None: + return False self.schema_obj.get_schema_path(self.pipeline, local_only=False, revision=self.pipeline_revision) self.schema_obj.get_wf_params() + return True - def format_group(self, definition, show_hidden=False): + def format_group(self, definition, show_hidden=False) -> str: """Format a group of parameters of the schema as commented YAML. Args: @@ -167,7 +170,9 @@ def format_group(self, definition, show_hidden=False): return out - def format_param(self, name, properties, required_properties=(), show_hidden=False): + def format_param( + self, name: str, properties: Dict, required_properties: List[str] = [], show_hidden: bool = False + ) -> Optional[str]: """ Format a single parameter of the schema as commented YAML @@ -188,6 +193,9 @@ def format_param(self, name, properties, required_properties=(), show_hidden=Fal return None description = properties.get("description", "") + if self.schema_obj is None: + log.error("No schema object found") + return "" self.schema_obj.get_schema_defaults() default = properties.get("default") type = properties.get("type") @@ -209,7 +217,7 @@ def format_param(self, name, properties, required_properties=(), show_hidden=Fal return out - def generate_params_file(self, show_hidden=False): + def generate_params_file(self, show_hidden: bool = False) -> str: """Generate the contents of a parameter template file. Assumes the pipeline has been fetched (if remote) and the schema loaded. 
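`generate_params_file` walks the schema's parameter groups and renders each one as commented YAML (a later hunk in this file also adds a fallback from `definitions` to the newer `$defs` key). A simplified illustration of that output shape; this is a sketch of the idea, not the actual `format_group`/`format_param` implementation:

```python
# Simplified sketch (not the real format_group code): render one JSON-schema
# parameter group as the commented-YAML stub style used in nf-params.yaml.
def format_group_stub(definition: dict) -> str:
    out = f"## {definition.get('title', 'Parameters')}\n"
    required = definition.get("required", [])
    for name, prop in definition.get("properties", {}).items():
        default = prop.get("default", "null")
        # required parameters stay uncommented so schema validation flags them
        prefix = "" if name in required else "# "
        out += f"{prefix}{name}: {default}\n"
    return out

group = {
    "title": "Input/output options",
    "required": ["input"],
    "properties": {
        "input": {"type": "string"},
        "outdir": {"type": "string", "default": "./results"},
    },
}
print(format_group_stub(group))
# ## Input/output options
# input: null
# # outdir: ./results
```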
@@ -220,6 +228,10 @@ def generate_params_file(self, show_hidden=False): Returns: str: Formatted output for the pipeline schema """ + if self.schema_obj is None: + log.error("No schema object found") + return "" + schema = self.schema_obj.schema pipeline_name = self.schema_obj.pipeline_manifest.get("name", self.pipeline) pipeline_version = self.schema_obj.pipeline_manifest.get("version", "0.0.0") @@ -234,13 +246,13 @@ def generate_params_file(self, show_hidden=False): out += "\n" # Add all parameter groups - for definition in schema.get("definitions", {}).values(): + for definition in schema.get("definitions", schema.get("$defs", {})).values(): out += self.format_group(definition, show_hidden=show_hidden) out += "\n" return out - def write_params_file(self, output_fn="nf-params.yaml", show_hidden=False, force=False): + def write_params_file(self, output_fn: Path = Path("nf-params.yaml"), show_hidden=False, force=False) -> bool: """Build a template file for the pipeline schema. Args: @@ -254,7 +266,9 @@ def write_params_file(self, output_fn="nf-params.yaml", show_hidden=False, force """ self.get_pipeline() - + if self.schema_obj is None: + log.error("No schema object found") + return False try: self.schema_obj.load_schema() self.schema_obj.validate_schema() @@ -265,11 +279,10 @@ def write_params_file(self, output_fn="nf-params.yaml", show_hidden=False, force schema_out = self.generate_params_file(show_hidden=show_hidden) - if os.path.exists(output_fn) and not force: + if output_fn.exists() and not force: log.error(f"File '{output_fn}' exists! Please delete first, or use '--force'") return False - with open(output_fn, "w") as fh: - fh.write(schema_out) - log.info(f"Parameter file written to '{output_fn}'") + output_fn.write_text(schema_out) + log.info(f"Parameter file written to '{output_fn}'") return True diff --git a/nf_core/pipelines/refgenie.py b/nf_core/pipelines/refgenie.py index 426ca5eb7d..46197e9cc8 100644 --- a/nf_core/pipelines/refgenie.py +++ b/nf_core/pipelines/refgenie.py @@ -144,14 +144,14 @@ def update_config(rgc): This function is executed after running 'refgenie pull /' The refgenie config file is transformed into a nextflow.config file, which is used to - overwrited the 'refgenie_genomes.config' file. + overwrite the 'refgenie_genomes.config' file. 
     The path to the target config file is inferred from the following options, in order:

     - the 'nextflow_config' attribute in the refgenie config file
     - the NXF_REFGENIE_PATH environment variable
     - otherwise defaults to: $NXF_HOME/nf-core/refgenie_genomes.config

-    Additionaly, an 'includeConfig' statement is added to the file $NXF_HOME/config
+    Additionally, an 'includeConfig' statement is added to the file $NXF_HOME/config
     """

     # Compile nextflow refgenie_genomes.config from refgenie config
diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py
new file mode 100644
index 0000000000..915f203f00
--- /dev/null
+++ b/nf_core/pipelines/rocrate.py
@@ -0,0 +1,362 @@
+#!/usr/bin/env python
+"""Code to deal with pipeline RO (Research Object) Crates"""
+
+import logging
+import os
+import re
+import sys
+from datetime import datetime
+from pathlib import Path
+from typing import Optional, Set, Union
+
+import requests
+import rocrate.rocrate
+from git import GitCommandError, InvalidGitRepositoryError
+from repo2rocrate.nextflow import NextflowCrateBuilder
+from rich.progress import BarColumn, Progress
+from rocrate.model.person import Person
+from rocrate.rocrate import ROCrate as BaseROCrate
+
+from nf_core.utils import Pipeline
+
+log = logging.getLogger(__name__)
+
+
+class CustomNextflowCrateBuilder(NextflowCrateBuilder):
+    DATA_ENTITIES = NextflowCrateBuilder.DATA_ENTITIES + [
+        ("docs/usage.md", "File", "Usage documentation"),
+        ("docs/output.md", "File", "Output documentation"),
+        ("subworkflows/local", "Dataset", "Pipeline-specific subworkflows"),
+        ("subworkflows/nf-core", "Dataset", "nf-core subworkflows"),
+        (".nf-core.yml", "File", "nf-core configuration file, configuring template features and linting rules"),
+        (".pre-commit-config.yaml", "File", "Configuration file for pre-commit hooks"),
+        (".prettierignore", "File", "Ignore file for prettier"),
+        (".prettierrc", "File", "Configuration file for prettier"),
+    ]
+
+
+def custom_make_crate(
+    root: Path,
+    workflow: Optional[Path] = None,
+    repo_url: Optional[str] = None,
+    wf_name: Optional[str] = None,
+    wf_version: Optional[str] = None,
+    lang_version: Optional[str] = None,
+    ci_workflow: Optional[str] = "ci.yml",
+    diagram: Optional[Path] = None,
+) -> BaseROCrate:
+    builder = CustomNextflowCrateBuilder(root, repo_url=repo_url)
+
+    return builder.build(
+        workflow,
+        wf_name=wf_name,
+        wf_version=wf_version,
+        lang_version=lang_version,
+        license=None,
+        ci_workflow=ci_workflow,
+        diagram=diagram,
+    )
+
+
+class ROCrate:
+    """
+    Class to generate an RO Crate for a pipeline
+
+    """
+
+    def __init__(self, pipeline_dir: Path, version="") -> None:
+        """
+        Initialise the ROCrate object
+
+        Args:
+            pipeline_dir (Path): Path to the pipeline directory
+            version (str): Version of the pipeline to checkout
+        """
+        from nf_core.utils import is_pipeline_directory, setup_requests_cachedir
+
+        is_pipeline_directory(pipeline_dir)
+        self.pipeline_dir = pipeline_dir
+        self.version: str = version
+        self.crate: rocrate.rocrate.ROCrate
+        self.pipeline_obj = Pipeline(self.pipeline_dir)
+        self.pipeline_obj._load()
+
+        setup_requests_cachedir()
+
+    def create_rocrate(self, json_path: Union[None, Path] = None, zip_path: Union[None, Path] = None) -> bool:
+        """
+        Create an RO Crate for a pipeline
+
+        Args:
+            json_path (Path): Path to the metadata file
+            zip_path (Path): Path to the zip file
+
+        """
+
+        # Check that the checkout pipeline version is the same as the requested version
+        if self.version != "":
+            if self.version != self.pipeline_obj.nf_config.get("manifest.version"):
+                # using git checkout to get the requested version
+                log.info(f"Checking out pipeline version {self.version}")
+                if self.pipeline_obj.repo is None:
+                    log.error(f"Pipeline repository not found in {self.pipeline_dir}")
+                    sys.exit(1)
+                try:
+                    self.pipeline_obj.repo.git.checkout(self.version)
+                    self.pipeline_obj = Pipeline(self.pipeline_dir)
+                    self.pipeline_obj._load()
+                except InvalidGitRepositoryError:
+                    log.error(f"Could not find a git repository in {self.pipeline_dir}")
+                    sys.exit(1)
+                except GitCommandError:
+                    log.error(f"Could not checkout version {self.version}")
+                    sys.exit(1)
+        self.version = self.pipeline_obj.nf_config.get("manifest.version", "")
+        self.make_workflow_rocrate()
+
+        # Save just the JSON metadata file
+        if json_path is not None:
+            if json_path.name == "ro-crate-metadata.json":
+                json_path = json_path.parent
+
+            log.info(f"Saving metadata file to '{json_path}'")
+            self.crate.metadata.write(json_path)
+
+        # Save the whole crate zip file
+        if zip_path is not None:
+            if zip_path.name != "ro-crate.crate.zip":
+                zip_path = zip_path / "ro-crate.crate.zip"
+            log.info(f"Saving zip file '{zip_path}'")
+            self.crate.write_zip(zip_path)
+
+        if json_path is None and zip_path is None:
+            log.error("Please provide a path to save the ro-crate file or the zip file.")
+            return False
+
+        return True
+
+    def make_workflow_rocrate(self) -> None:
+        """
+        Create an RO Crate for a pipeline
+        """
+        if self.pipeline_obj is None:
+            raise ValueError("Pipeline object not loaded")
+
+        diagram: Optional[Path] = None
+        # find files (metro|tube|subway)_map*.png in the pipeline directory or docs/ using pathlib
+        pattern = re.compile(r".*?(metro|tube|subway)_(map).*?\.png", re.IGNORECASE)
+        for file in self.pipeline_dir.rglob("*.png"):
+            if pattern.match(file.name):
+                log.debug(f"Found diagram: {file}")
+                diagram = file.relative_to(self.pipeline_dir)
+                break
+
+        # Create the RO Crate object
+
+        self.crate = custom_make_crate(
+            self.pipeline_dir,
+            self.pipeline_dir / "main.nf",
+            self.pipeline_obj.nf_config.get("manifest.homePage", ""),
+            self.pipeline_obj.nf_config.get("manifest.name", ""),
+            self.pipeline_obj.nf_config.get("manifest.version", ""),
+            self.pipeline_obj.nf_config.get("manifest.nextflowVersion", ""),
+            diagram=diagram,
+        )
+
+        # add readme as description
+        readme = self.pipeline_dir / "README.md"
+
+        try:
+            self.crate.description = readme.read_text()
+        except FileNotFoundError:
+            log.error(f"Could not find README.md in {self.pipeline_dir}")
+        # get license from LICENSE file
+        license_file = self.pipeline_dir / "LICENSE"
+        try:
+            license = license_file.read_text()
+            if license.startswith("MIT"):
+                self.crate.license = "MIT"
+            else:
+                # prompt for license
+                log.info("Could not determine license from LICENSE file")
+                self.crate.license = input("Please enter the license for this pipeline: ")
+        except FileNotFoundError:
+            log.error(f"Could not find LICENSE file in {self.pipeline_dir}")
+
+        self.crate.add_jsonld(
+            {"@id": "https://nf-co.re/", "@type": "Organization", "name": "nf-core", "url": "https://nf-co.re/"}
+        )
+
+        # Set metadata for main entity file
+        self.set_main_entity("main.nf")
+
+    def set_main_entity(self, main_entity_filename: str):
+        """
+        Set the main.nf as the main entity of the crate and add necessary metadata
+        """
+        if self.crate.mainEntity is None:
+            raise ValueError("Main entity not set")
+
+        self.crate.mainEntity.append_to(
+            "dct:conformsTo", "https://bioschemas.org/profiles/ComputationalWorkflow/1.0-RELEASE/", compact=True
+        )
+        # add dateCreated and dateModified, based on the current date
+        self.crate.mainEntity.append_to("dateCreated", self.crate.root_dataset.get("dateCreated", ""), compact=True)
+        self.crate.mainEntity.append_to(
+            "dateModified", str(datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")), compact=True
+        )
+        self.crate.mainEntity.append_to("sdPublisher", {"@id": "https://nf-co.re/"}, compact=True)
+        if self.version.endswith("dev"):
+            url = "dev"
+        else:
+            url = self.version
+        self.crate.mainEntity.append_to(
+            "url", f"https://nf-co.re/{self.crate.name.replace('nf-core/','')}/{url}/", compact=True
+        )
+        self.crate.mainEntity.append_to("version", self.version, compact=True)
+
+        # get keywords from nf-core website
+        remote_workflows = requests.get("https://nf-co.re/pipelines.json").json()["remote_workflows"]
+        # go through all remote workflows and find the one that matches the pipeline name
+        topics = ["nf-core", "nextflow"]
+        for remote_wf in remote_workflows:
+            assert self.pipeline_obj.pipeline_name is not None  # mypy
+            if remote_wf["name"] == self.pipeline_obj.pipeline_name.replace("nf-core/", ""):
+                topics = topics + remote_wf["topics"]
+                break
+
+        log.debug(f"Adding topics: {topics}")
+        self.crate.mainEntity.append_to("keywords", topics)
+
+        self.add_main_authors(self.crate.mainEntity)
+
+        self.crate.mainEntity = self.crate.mainEntity
+
+        self.crate.mainEntity.append_to("license", self.crate.license)
+        self.crate.mainEntity.append_to("name", self.crate.name)
+
+        if "dev" in self.version:
+            self.crate.creativeWorkStatus = "InProgress"
+        else:
+            self.crate.creativeWorkStatus = "Stable"
+            if self.pipeline_obj.repo is None:
+                log.error(f"Pipeline repository not found in {self.pipeline_dir}")
+            else:
+                tags = self.pipeline_obj.repo.tags
+                if tags:
+                    # get the tag for this version
+                    for tag in tags:
+                        if tag.commit.hexsha == self.pipeline_obj.repo.head.commit.hexsha:
+                            self.crate.mainEntity.append_to(
+                                "dateCreated",
+                                tag.commit.committed_datetime.strftime("%Y-%m-%dT%H:%M:%SZ"),
+                                compact=True,
+                            )
+
+    def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None:
+        """
+        Add workflow authors to the crate
+        """
+        # add author entity to crate
+
+        try:
+            authors = self.pipeline_obj.nf_config["manifest.author"].split(",")
+            # remove spaces
+            authors = [a.strip() for a in authors]
+            # add manifest authors as maintainer to crate
+
+        except KeyError:
+            log.error("No author field found in manifest of nextflow.config")
+            return
+        # look at git contributors for author names
+        try:
+            git_contributors: Set[str] = set()
+            if self.pipeline_obj.repo is None:
+                log.info("No git repository found. No git contributors will be added as authors.")
+                return
+            commits_touching_path = list(self.pipeline_obj.repo.iter_commits(paths="main.nf"))
+
+            for commit in commits_touching_path:
+                if commit.author.name is not None:
+                    git_contributors.add(commit.author.name)
+            # exclude bots
+            contributors = {c for c in git_contributors if not c.endswith("bot") and c != "Travis CI User"}
+
+            log.debug(f"Found {len(contributors)} git authors")
+
+            progress_bar = Progress(
+                "[bold blue]{task.description}",
+                BarColumn(bar_width=None),
+                "[magenta]{task.completed} of {task.total}[reset] » [bold yellow]{task.fields[test_name]}",
+                transient=True,
+                disable=os.environ.get("HIDE_PROGRESS", None) is not None,
+            )
+            with progress_bar:
+                bump_progress = progress_bar.add_task(
+                    "Searching for author names on GitHub", total=len(contributors), test_name=""
+                )
+
+                for git_author in contributors:
+                    progress_bar.update(bump_progress, advance=1, test_name=git_author)
+                    git_author = (
+                        requests.get(f"https://api.github.com/users/{git_author}").json().get("name", git_author)
+                    )
+                    if git_author is None:
+                        log.debug(f"Could not find name for {git_author}")
+                        continue
+
+        except AttributeError:
+            log.debug("Could not find git contributors")
+
+        # remove usernames (just keep names with spaces)
+        named_contributors = {c for c in contributors if " " in c}
+
+        for author in named_contributors:
+            log.debug(f"Adding author: {author}")
+
+            if self.pipeline_obj.repo is None:
+                log.info("No git repository found. No git contributors will be added as authors.")
+                return
+            # get email from git log
+            email = self.pipeline_obj.repo.git.log(f"--author={author}", "--pretty=format:%ae", "-1")
+            orcid = get_orcid(author)
+            author_entity = self.crate.add(
+                Person(
+                    self.crate, orcid if orcid is not None else "#" + email, properties={"name": author, "email": email}
+                )
+            )
+            wf_file.append_to("creator", author_entity)
+            if author in authors:
+                wf_file.append_to("maintainer", author_entity)
+
+
+def get_orcid(name: str) -> Optional[str]:
+    """
+    Get the ORCID for a given name
+
+    Args:
+        name (str): Name of the author
+
+    Returns:
+        str: ORCID URI or None
+    """
+    base_url = "https://pub.orcid.org/v3.0/search/"
+    headers = {
+        "Accept": "application/json",
+    }
+    params = {"q": f'family-name:"{name.split()[-1]}" AND given-names:"{name.split()[0]}"'}
+    response = requests.get(base_url, params=params, headers=headers)
+
+    if response.status_code == 200:
+        json_response = response.json()
+        if json_response.get("num-found") == 1:
+            orcid_uri = json_response.get("result")[0].get("orcid-identifier", {}).get("uri")
+            log.info(f"Using found ORCID for {name}. Please double-check: {orcid_uri}")
+            return orcid_uri
+        else:
+            log.debug(f"No exact ORCID found for {name}. See {response.url}")
+            return None
+    else:
+        log.info(f"API request to ORCID unsuccessful.
Status code: {response.status_code}") + return None diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 95ed5e5b6e..61fd6bc2d7 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -43,7 +43,7 @@ def __init__(self): self.schema_from_scratch = False self.no_prompts = False self.web_only = False - self.web_schema_build_url = "https://nf-co.re/pipeline_schema_builder" + self.web_schema_build_url = "https://oldsite.nf-co.re/pipeline_schema_builder" self.web_schema_build_web_url = None self.web_schema_build_api_url = None self.validation_plugin = None @@ -51,7 +51,7 @@ def __init__(self): self.defs_notation = None self.ignored_params = [] - # Update the validation plugin code everytime the schema gets changed + # Update the validation plugin code every time the schema gets changed def set_schema_filename(self, schema: str) -> None: self._schema_filename = schema self._update_validation_plugin_from_config() @@ -71,7 +71,7 @@ def _update_validation_plugin_from_config(self) -> None: else: conf = nf_core.utils.fetch_wf_config(Path(self.pipeline_dir)) - plugins = str(conf.get("plugins", "")).strip('"').strip("'").strip(" ").split(",") + plugins = str(conf.get("plugins", "")).strip("'\"").strip(" ").split(",") plugin_found = False for plugin_instance in plugins: if "nf-schema" in plugin_instance: @@ -96,11 +96,18 @@ def _update_validation_plugin_from_config(self) -> None: conf.get("validation.help.shortParameter", "help"), conf.get("validation.help.fullParameter", "helpFull"), conf.get("validation.help.showHiddenParameter", "showHidden"), + "trace_report_suffix", # report suffix should be ignored by default as it is a Java Date object ] # Help parameter should be ignored by default - ignored_params_config = conf.get("validation", {}).get("defaultIgnoreParams", []) + ignored_params_config_str = conf.get("validation.defaultIgnoreParams", "") + ignored_params_config = [ + item.strip().strip("'") for item in ignored_params_config_str[1:-1].split(",") + ] # Extract list elements and remove whitespace + if len(ignored_params_config) > 0: + log.debug(f"Ignoring parameters from config: {ignored_params_config}") ignored_params.extend(ignored_params_config) self.ignored_params = ignored_params + log.debug(f"Ignoring parameters: {self.ignored_params}") self.schema_draft = "https://json-schema.org/draft/2020-12/schema" else: @@ -118,6 +125,7 @@ def get_schema_path( # Supplied path exists - assume a local pipeline directory or schema if path.exists(): log.debug(f"Path exists: {path}. 
Assuming local pipeline directory or schema") + local_only = True if revision is not None: log.warning(f"Local workflow supplied, ignoring revision '{revision}'") if path.is_dir(): @@ -373,7 +381,7 @@ def validate_config_default_parameter(self, param, schema_param, config_default) # If we have a default in the schema, check it matches the config if "default" in schema_param and ( (schema_param["type"] == "boolean" and str(config_default).lower() != str(schema_param["default"]).lower()) - and (str(schema_param["default"]) != str(config_default).strip('"').strip("'")) + and (str(schema_param["default"]) != str(config_default).strip("'\"")) ): # Check that we are not deferring the execution of this parameter in the schema default with squiggly brakcets if schema_param["type"] != "string" or "{" not in schema_param["default"]: @@ -950,6 +958,7 @@ def launch_web_builder(self): """ Send pipeline schema to web builder and wait for response """ + content = { "post_content": "json_schema", "api": "true", @@ -958,12 +967,13 @@ def launch_web_builder(self): "schema": json.dumps(self.schema), } web_response = nf_core.utils.poll_nfcore_web_api(self.web_schema_build_url, content) + try: if "api_url" not in web_response: raise AssertionError('"api_url" not in web_response') if "web_url" not in web_response: raise AssertionError('"web_url" not in web_response') - # DO NOT FIX THIS TYPO. Needs to stay in sync with the website. Maintaining for backwards compatability. + # DO NOT FIX THIS TYPO. Needs to stay in sync with the website. Maintaining for backwards compatibility. if web_response["status"] != "recieved": raise AssertionError( f'web_response["status"] should be "recieved", but it is "{web_response["status"]}"' diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index fced35dc20..781b4f5f00 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -6,7 +6,7 @@ import re import shutil from pathlib import Path -from typing import Dict, Optional, Union +from typing import Any, Dict, Optional, Tuple, Union import git import questionary @@ -21,6 +21,7 @@ import nf_core.pipelines.create.create import nf_core.pipelines.list import nf_core.utils +from nf_core.pipelines.lint_utils import dump_yaml_with_prettier log = logging.getLogger(__name__) @@ -104,7 +105,7 @@ def __init__( with open(template_yaml_path) as f: self.config_yml.template = yaml.safe_load(f) with open(self.config_yml_path, "w") as fh: - yaml.safe_dump(self.config_yml.model_dump(), fh) + yaml.safe_dump(self.config_yml.model_dump(exclude_none=True), fh) log.info(f"Saved pipeline creation settings to '{self.config_yml_path}'") raise SystemExit( f"Please commit your changes and delete the {template_yaml_path} file. Then run the sync command again." 
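(Editorial aside, not part of the patch series.) The hunk above swaps `model_dump()` for `model_dump(exclude_none=True)` when re-serialising `.nf-core.yml`. A minimal sketch of the effect, assuming only pydantic v2; `DemoConfig` below is a hypothetical stand-in for `NFCoreYamlConfig`:

    from typing import Optional

    from pydantic import BaseModel


    class DemoConfig(BaseModel):  # hypothetical stand-in for NFCoreYamlConfig
        repository_type: Optional[str] = "pipeline"
        org_path: Optional[str] = None


    cfg = DemoConfig()
    print(cfg.model_dump())                   # {'repository_type': 'pipeline', 'org_path': None}
    print(cfg.model_dump(exclude_none=True))  # {'repository_type': 'pipeline'}
    # Unset optional fields are dropped, so the dumped YAML does not
    # accumulate `org_path: null`-style keys on every sync.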
@@ -119,7 +120,7 @@ def __init__( requests.auth.HTTPBasicAuth(self.gh_username, os.environ["GITHUB_AUTH_TOKEN"]) ) - def sync(self): + def sync(self) -> None: """Find workflow attributes, create a new template pipeline on TEMPLATE""" # Clear requests_cache so that we don't get stale API responses @@ -270,22 +271,27 @@ def make_template_pipeline(self): self.config_yml.template.force = True with open(self.config_yml_path, "w") as config_path: - yaml.safe_dump(self.config_yml.model_dump(), config_path) + yaml.safe_dump(self.config_yml.model_dump(exclude_none=True), config_path) try: - nf_core.pipelines.create.create.PipelineCreate( + pipeline_create_obj = nf_core.pipelines.create.create.PipelineCreate( outdir=str(self.pipeline_dir), from_config_file=True, no_git=True, force=True, - ).init_pipeline() + ) + pipeline_create_obj.init_pipeline() # set force to false to avoid overwriting files in the future if self.config_yml.template is not None: + self.config_yml.template = pipeline_create_obj.config # Set force true in config to overwrite existing files self.config_yml.template.force = False - with open(self.config_yml_path, "w") as config_path: - yaml.safe_dump(self.config_yml.model_dump(), config_path) + # Set outdir as the current directory to avoid local info leaking + self.config_yml.template.outdir = "." + # Update nf-core version + self.config_yml.nf_core_version = nf_core.__version__ + dump_yaml_with_prettier(self.config_yml_path, self.config_yml.model_dump(exclude_none=True)) except Exception as err: # Reset to where you were to prevent git getting messed up. @@ -410,12 +416,8 @@ def close_open_template_merge_prs(self): list_prs_url = f"https://api.github.com/repos/{self.gh_repo}/pulls" with self.gh_api.cache_disabled(): list_prs_request = self.gh_api.get(list_prs_url) - try: - list_prs_json = json.loads(list_prs_request.content) - list_prs_pp = json.dumps(list_prs_json, indent=4) - except Exception: - list_prs_json = list_prs_request.content - list_prs_pp = list_prs_request.content + + list_prs_json, list_prs_pp = self._parse_json_response(list_prs_request) log.debug(f"GitHub API listing existing PRs:\n{list_prs_url}\n{list_prs_pp}") if list_prs_request.status_code != 200: @@ -456,12 +458,8 @@ def close_open_pr(self, pr) -> bool: # Update the PR status to be closed with self.gh_api.cache_disabled(): pr_request = self.gh_api.patch(url=pr["url"], data=json.dumps({"state": "closed"})) - try: - pr_request_json = json.loads(pr_request.content) - pr_request_pp = json.dumps(pr_request_json, indent=4) - except Exception: - pr_request_json = pr_request.content - pr_request_pp = pr_request.content + + pr_request_json, pr_request_pp = self._parse_json_response(pr_request) # PR update worked if pr_request.status_code == 200: @@ -475,6 +473,22 @@ def close_open_pr(self, pr) -> bool: log.warning(f"Could not close PR ('{pr_request.status_code}'):\n{pr['url']}\n{pr_request_pp}") return False + @staticmethod + def _parse_json_response(response) -> Tuple[Any, str]: + """Helper method to parse JSON response and create pretty-printed string. + + Args: + response: requests.Response object + + Returns: + Tuple of (parsed_json, pretty_printed_str) + """ + try: + json_data = json.loads(response.content) + return json_data, json.dumps(json_data, indent=4) + except Exception: + return response.content, str(response.content) + def reset_target_dir(self): """ Reset the target pipeline directory. Check out the original branch. 
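(Editorial aside, not part of the patch series.) The new `_parse_json_response` static method above deduplicates two identical try/except blocks in `close_open_template_merge_prs` and `close_open_pr`. A self-contained sketch of its behaviour under both input shapes; `FakeResponse` is a hypothetical stand-in for `requests.Response`:

    import json
    from typing import Any, Tuple


    class FakeResponse:  # hypothetical stand-in for requests.Response
        def __init__(self, content: bytes) -> None:
            self.content = content


    def parse_json_response(response) -> Tuple[Any, str]:
        # Same logic as the static method added in the diff above
        try:
            json_data = json.loads(response.content)
            return json_data, json.dumps(json_data, indent=4)
        except Exception:
            return response.content, str(response.content)


    body, pretty = parse_json_response(FakeResponse(b'{"state": "closed"}'))
    assert body == {"state": "closed"}
    # Non-JSON payloads (e.g. an HTML error page from GitHub) are passed
    # through unparsed instead of raising, so the caller can still log them:
    body, pretty = parse_json_response(FakeResponse(b"<html>rate limited</html>"))
    assert body == b"<html>rate limited</html>"

Returning the raw body on parse failure means the surrounding logging code always has something useful to print, whichever shape the GitHub API response takes.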
diff --git a/nf_core/subworkflows/__init__.py b/nf_core/subworkflows/__init__.py
index 88e8a09388..8e3c85a271 100644
--- a/nf_core/subworkflows/__init__.py
+++ b/nf_core/subworkflows/__init__.py
@@ -3,5 +3,6 @@
 from .install import SubworkflowInstall
 from .lint import SubworkflowLint
 from .list import SubworkflowList
+from .patch import SubworkflowPatch
 from .remove import SubworkflowRemove
 from .update import SubworkflowUpdate
diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py
index b366ddfb51..cedae62f11 100644
--- a/nf_core/subworkflows/lint/__init__.py
+++ b/nf_core/subworkflows/lint/__init__.py
@@ -11,11 +11,12 @@

 import questionary
 import rich
+import ruamel.yaml

 import nf_core.modules.modules_utils
 import nf_core.utils
 from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult
-from nf_core.pipelines.lint_utils import console
+from nf_core.pipelines.lint_utils import console, run_prettier_on_file

 log = logging.getLogger(__name__)

@@ -45,6 +46,7 @@ def __init__(
         self,
         directory,
         fail_warned=False,
+        fix=False,
         remote_url=None,
         branch=None,
         no_pull=False,
@@ -55,6 +57,7 @@
             component_type="subworkflows",
             directory=directory,
             fail_warned=fail_warned,
+            fix=fix,
             remote_url=remote_url,
             branch=branch,
             no_pull=no_pull,
@@ -214,6 +217,10 @@ def lint_subworkflow(self, swf, progress_bar, registry, local=False):

         # Otherwise run all the lint tests
         else:
+            # Update meta.yml file if requested
+            if self.fix:
+                self.update_meta_yml_file(swf)
+
             if self.repo_type == "pipeline" and self.modules_json:
                 # Set correct sha
                 version = self.modules_json.get_subworkflow_version(swf.component_name, swf.repo_url, swf.org)
@@ -230,3 +237,56 @@

         self.failed += warned
         self.failed += [LintResult(swf, *s) for s in swf.failed]
+
+    def update_meta_yml_file(self, swf):
+        """
+        Update the meta.yml file with the correct inputs and outputs
+        """
+        yaml = ruamel.yaml.YAML()
+        yaml.preserve_quotes = True
+        yaml.indent(mapping=2, sequence=2, offset=0)
+
+        # Read meta.yml
+        with open(swf.meta_yml) as fh:
+            meta_yaml = yaml.load(fh)
+        meta_yaml_corrected = meta_yaml.copy()
+        # Obtain inputs and outputs from main.nf
+        swf.get_inputs_from_main_nf()
+        swf.get_outputs_from_main_nf()
+
+        # Compare inputs and add them if missing
+        if "input" in meta_yaml:
+            # Delete inputs from meta.yml which are not present in main.nf
+            meta_yaml_corrected["input"] = [
+                input for input in meta_yaml["input"] if list(input.keys())[0] in swf.inputs
+            ]
+            # Obtain inputs from main.nf missing in meta.yml
+            inputs_correct = [
+                list(input.keys())[0] for input in meta_yaml_corrected["input"] if list(input.keys())[0] in swf.inputs
+            ]
+            inputs_missing = [input for input in swf.inputs if input not in inputs_correct]
+            # Add missing inputs to meta.yml
+            for missing_input in inputs_missing:
+                meta_yaml_corrected["input"].append({missing_input: {"description": ""}})
+
+        if "output" in meta_yaml:
+            # Delete outputs from meta.yml which are not present in main.nf
+            meta_yaml_corrected["output"] = [
+                output for output in meta_yaml["output"] if list(output.keys())[0] in swf.outputs
+            ]
+            # Obtain outputs from main.nf missing in meta.yml
+            outputs_correct = [
+                list(output.keys())[0]
+                for output in meta_yaml_corrected["output"]
+                if list(output.keys())[0] in swf.outputs
+            ]
+            outputs_missing = [output for output in swf.outputs if output not in outputs_correct]
+            # Add missing outputs to meta.yml
+            for missing_output in
outputs_missing: + meta_yaml_corrected["output"].append({missing_output: {"description": ""}}) + + # Write corrected meta.yml to file + with open(swf.meta_yml, "w") as fh: + log.info(f"Updating {swf.meta_yml}") + yaml.dump(meta_yaml_corrected, fh) + run_prettier_on_file(fh.name) diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py index 9c96df7563..91242e0869 100644 --- a/nf_core/subworkflows/lint/meta_yml.py +++ b/nf_core/subworkflows/lint/meta_yml.py @@ -1,4 +1,5 @@ import json +import logging from pathlib import Path import jsonschema.validators @@ -6,6 +7,8 @@ import nf_core.components.components_utils +log = logging.getLogger(__name__) + def meta_yml(subworkflow_lint_object, subworkflow): """ @@ -65,6 +68,8 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.passed.append(("meta_input", f"`{input}` specified", subworkflow.meta_yml)) else: subworkflow.failed.append(("meta_input", f"`{input}` missing in `meta.yml`", subworkflow.meta_yml)) + else: + log.debug(f"No inputs specified in subworkflow `main.nf`: {subworkflow.component_name}") if "output" in meta_yaml: meta_output = [list(x.keys())[0] for x in meta_yaml["output"]] @@ -75,6 +80,8 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.failed.append( ("meta_output", f"`{output}` missing in `meta.yml`", subworkflow.meta_yml) ) + else: + log.debug(f"No outputs specified in subworkflow `main.nf`: {subworkflow.component_name}") # confirm that the name matches the process name in main.nf if meta_yaml["name"].upper() == subworkflow.workflow_name: diff --git a/nf_core/subworkflows/lint/subworkflow_changes.py b/nf_core/subworkflows/lint/subworkflow_changes.py index a9c9616a21..cf0fd7211c 100644 --- a/nf_core/subworkflows/lint/subworkflow_changes.py +++ b/nf_core/subworkflows/lint/subworkflow_changes.py @@ -2,9 +2,12 @@ Check whether the content of a subworkflow has changed compared to the original repository """ +import shutil +import tempfile from pathlib import Path import nf_core.modules.modules_repo +from nf_core.components.components_differ import ComponentsDiffer def subworkflow_changes(subworkflow_lint_object, subworkflow): @@ -20,7 +23,29 @@ def subworkflow_changes(subworkflow_lint_object, subworkflow): Only runs when linting a pipeline, not the modules repository """ - tempdir = subworkflow.component_dir + if subworkflow.is_patched: + # If the subworkflow is patched, we need to apply + # the patch in reverse before comparing with the remote + tempdir_parent = Path(tempfile.mkdtemp()) + tempdir = tempdir_parent / "tmp_subworkflow_dir" + shutil.copytree(subworkflow.component_dir, tempdir) + try: + new_lines = ComponentsDiffer.try_apply_patch( + subworkflow.component_type, + subworkflow.component_name, + subworkflow.org, + subworkflow.patch_path, + tempdir, + reverse=True, + ) + for file, lines in new_lines.items(): + with open(tempdir / file, "w") as fh: + fh.writelines(lines) + except LookupError: + # This error is already reported by subworkflow_patch, so just return + return + else: + tempdir = subworkflow.component_dir subworkflow.branch = subworkflow_lint_object.modules_json.get_component_branch( "subworkflows", subworkflow.component_name, subworkflow.repo_url, subworkflow.org ) diff --git a/nf_core/subworkflows/patch.py b/nf_core/subworkflows/patch.py new file mode 100644 index 0000000000..3c8b3d5e4d --- /dev/null +++ b/nf_core/subworkflows/patch.py @@ -0,0 +1,10 @@ +import logging + +from nf_core.components.patch import ComponentPatch + +log = 
logging.getLogger(__name__)
+
+
+class SubworkflowPatch(ComponentPatch):
+    def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False, installed_by=False):
+        super().__init__(pipeline_dir, "subworkflows", remote_url, branch, no_pull, installed_by)
diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py
index e2a76ccaeb..dd61b72a2b 100644
--- a/nf_core/synced_repo.py
+++ b/nf_core/synced_repo.py
@@ -395,8 +395,16 @@ def get_component_git_log(
             old_component_path = Path("modules", component_name)
             commits_old_iter = self.repo.iter_commits(max_count=depth, paths=old_component_path)

-        commits_old = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_old_iter]
-        commits_new = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_new_iter]
+        try:
+            commits_old = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_old_iter]
+            commits_new = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_new_iter]
+        except git.GitCommandError as e:
+            log.error(
+                f"Git error: {e}\n"
+                "To solve this, you can try to remove the cloned repository and run the command again.\n"
+                f"This repository is typically found at `{self.local_repo_dir}`"
+            )
+            raise UserWarning
         commits = iter(commits_new + commits_old)
         return commits
diff --git a/nf_core/utils.py b/nf_core/utils.py
index 663efb6b46..30b0743493 100644
--- a/nf_core/utils.py
+++ b/nf_core/utils.py
@@ -5,6 +5,7 @@
 import concurrent.futures
 import datetime
 import errno
+import fnmatch
 import hashlib
 import io
 import json
@@ -19,7 +20,7 @@
 import time
 from contextlib import contextmanager
 from pathlib import Path
-from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Literal, Optional, Tuple, Union

 import git
 import prompt_toolkit.styles
@@ -36,6 +37,9 @@

 import nf_core

+if TYPE_CHECKING:
+    from nf_core.pipelines.schema import PipelineSchema
+
 log = logging.getLogger(__name__)

 # ASCII nf-core logo
@@ -52,14 +56,29 @@
     [
         ("qmark", "fg:ansiblue bold"),  # token in front of the question
         ("question", "bold"),  # question text
-        ("answer", "fg:ansigreen nobold bg:"),  # submitted answer text behind the question
-        ("pointer", "fg:ansiyellow bold"),  # pointer used in select and checkbox prompts
-        ("highlighted", "fg:ansiblue bold"),  # pointed-at choice in select and checkbox prompts
-        ("selected", "fg:ansiyellow noreverse bold"),  # style for a selected item of a checkbox
+        (
+            "answer",
+            "fg:ansigreen nobold bg:",
+        ),  # submitted answer text behind the question
+        (
+            "pointer",
+            "fg:ansiyellow bold",
+        ),  # pointer used in select and checkbox prompts
+        (
+            "highlighted",
+            "fg:ansiblue bold",
+        ),  # pointed-at choice in select and checkbox prompts
+        (
+            "selected",
+            "fg:ansiyellow noreverse bold",
+        ),  # style for a selected item of a checkbox
         ("separator", "fg:ansiblack"),  # separator in lists
         ("instruction", ""),  # user instructions for select, rawselect, checkbox
         ("text", ""),  # plain text
-        ("disabled", "fg:gray italic"),  # disabled choices for select and checkbox prompts
+        (
+            "disabled",
+            "fg:gray italic",
+        ),  # disabled choices for select and checkbox prompts
         ("choice-default", "fg:ansiblack"),
         ("choice-default-changed", "fg:ansiyellow"),
         ("choice-required", "fg:ansired"),
@@ -79,7 +98,11 @@ def fetch_remote_version(source_url):
     return remote_version


-def check_if_outdated(current_version=None, remote_version=None,
source_url="https://nf-co.re/tools_version"): +def check_if_outdated( + current_version=None, + remote_version=None, + source_url="https://nf-co.re/tools_version", +): """ Check if the current version of nf-core is outdated """ @@ -146,11 +169,12 @@ def __init__(self, wf_path: Path) -> None: self.wf_path = Path(wf_path) self.pipeline_name: Optional[str] = None self.pipeline_prefix: Optional[str] = None - self.schema_obj: Optional[Dict] = None + self.schema_obj: Optional[PipelineSchema] = None + self.repo: Optional[git.Repo] = None try: - repo = git.Repo(self.wf_path) - self.git_sha = repo.head.object.hexsha + self.repo = git.Repo(self.wf_path) + self.git_sha = self.repo.head.object.hexsha except Exception as e: log.debug(f"Could not find git hash for pipeline: {self.wf_path}. {e}") @@ -254,7 +278,7 @@ def fetch_wf_config(wf_path: Path, cache_config: bool = True) -> dict: """ log.debug(f"Got '{wf_path}' as path") - + wf_path = Path(wf_path) config = {} cache_fn = None cache_basedir = None @@ -323,7 +347,7 @@ def fetch_wf_config(wf_path: Path, cache_config: bool = True) -> dict: # If we can, save a cached copy # HINT: during testing phase (in test_download, for example) we don't want - # to save configuration copy in $HOME, otherwise the tests/test_download.py::DownloadTest::test_wf_use_local_configs + # to save configuration copy in $HOME, otherwise the tests/pipelines/test_download.py::DownloadTest::test_wf_use_local_configs # will fail after the first attempt. It's better to not save temporary data # in others folders than tmp when doing tests in general if cache_path and cache_config: @@ -414,7 +438,7 @@ def wait_cli_function(poll_func: Callable[[], bool], refresh_per_second: int = 2 refresh_per_second (int): Refresh this many times per second. Default: 20. Returns: - None. Just sits in an infite loop until the function returns True. + None. Just sits in an infinite loop until the function returns True. """ try: spinner = Spinner("dots2", "Use ctrl+c to stop waiting and force exit.") @@ -433,7 +457,7 @@ def poll_nfcore_web_api(api_url: str, post_data: Optional[Dict] = None) -> Dict: Takes argument api_url for URL - Expects API reponse to be valid JSON and contain a top-level 'status' key. + Expects API response to be valid JSON and contain a top-level 'status' key. 
""" # Run without requests_cache so that we get the updated statuses with requests_cache.disabled(): @@ -441,6 +465,7 @@ def poll_nfcore_web_api(api_url: str, post_data: Optional[Dict] = None) -> Dict: if post_data is None: response = requests.get(api_url, headers={"Cache-Control": "no-cache"}) else: + log.debug(f"requesting {api_url} with {post_data}") response = requests.post(url=api_url, data=post_data) except requests.exceptions.Timeout: raise AssertionError(f"URL timed out: {api_url}") @@ -526,7 +551,8 @@ def __call__(self, r): with open(gh_cli_config_fn) as fh: gh_cli_config = yaml.safe_load(fh) self.auth = requests.auth.HTTPBasicAuth( - gh_cli_config["github.com"]["user"], gh_cli_config["github.com"]["oauth_token"] + gh_cli_config["github.com"]["user"], + gh_cli_config["github.com"]["oauth_token"], ) self.auth_mode = f"gh CLI config: {gh_cli_config['github.com']['user']}" except Exception: @@ -607,11 +633,11 @@ def request_retry(self, url, post_data=None): while True: # GET request if post_data is None: - log.debug(f"Seding GET request to {url}") + log.debug(f"Sending GET request to {url}") r = self.get(url=url) # POST request else: - log.debug(f"Seding POST request to {url}") + log.debug(f"Sending POST request to {url}") r = self.post(url=url, json=post_data) # Failed but expected - try again @@ -717,12 +743,12 @@ def parse_anaconda_licence(anaconda_response, version=None): license = re.sub(r"GNU GENERAL PUBLIC LICENSE", "GPL", license, flags=re.IGNORECASE) license = license.replace("GPL-", "GPLv") license = re.sub(r"GPL\s*([\d\.]+)", r"GPL v\1", license) # Add v prefix to GPL version if none found - license = re.sub(r"GPL\s*v(\d).0", r"GPL v\1", license) # Remove superflous .0 from GPL version + license = re.sub(r"GPL\s*v(\d).0", r"GPL v\1", license) # Remove superfluous .0 from GPL version license = re.sub(r"GPL \(([^\)]+)\)", r"GPL \1", license) license = re.sub(r"GPL\s*v", "GPL v", license) # Normalise whitespace to one space between GPL and v license = re.sub(r"\s*(>=?)\s*(\d)", r" \1\2", license) # Normalise whitespace around >= GPL versions - license = license.replace("Clause", "clause") # BSD capitilisation - license = re.sub(r"-only$", "", license) # Remove superflous GPL "only" version suffixes + license = license.replace("Clause", "clause") # BSD capitalisation + license = re.sub(r"-only$", "", license) # Remove superfluous GPL "only" version suffixes clean_licences.append(license) return clean_licences @@ -794,12 +820,18 @@ def get_tag_date(tag_date): # Obtain version and build match = re.search(r"(?::)+([A-Za-z\d\-_.]+)", img["image_name"]) if match is not None: - all_docker[match.group(1)] = {"date": get_tag_date(img["updated"]), "image": img} + all_docker[match.group(1)] = { + "date": get_tag_date(img["updated"]), + "image": img, + } elif img["image_type"] == "Singularity": # Obtain version and build match = re.search(r"(?::)+([A-Za-z\d\-_.]+)", img["image_name"]) if match is not None: - all_singularity[match.group(1)] = {"date": get_tag_date(img["updated"]), "image": img} + all_singularity[match.group(1)] = { + "date": get_tag_date(img["updated"]), + "image": img, + } # Obtain common builds from Docker and Singularity images common_keys = list(all_docker.keys() & all_singularity.keys()) current_date = None @@ -929,13 +961,19 @@ def prompt_pipeline_release_branch( # Releases if len(wf_releases) > 0: for tag in map(lambda release: release.get("tag_name"), wf_releases): - tag_display = [("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]")] + tag_display = 
[ + ("fg:ansiblue", f"{tag} "), + ("class:choice-default", "[release]"), + ] choices.append(questionary.Choice(title=tag_display, value=tag)) tag_set.append(str(tag)) # Branches for branch in wf_branches.keys(): - branch_display = [("fg:ansiyellow", f"{branch} "), ("class:choice-default", "[branch]")] + branch_display = [ + ("fg:ansiyellow", f"{branch} "), + ("class:choice-default", "[branch]"), + ] choices.append(questionary.Choice(title=branch_display, value=branch)) tag_set.append(branch) @@ -966,7 +1004,8 @@ def validate(self, value): return True else: raise questionary.ValidationError( - message="Invalid remote cache index file", cursor_position=len(value.text) + message="Invalid remote cache index file", + cursor_position=len(value.text), ) else: return True @@ -996,7 +1035,13 @@ def get_repo_releases_branches(pipeline, wfs): pipeline = wf.full_name # Store releases and stop loop - wf_releases = list(sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True)) + wf_releases = list( + sorted( + wf.releases, + key=lambda k: k.get("published_at_timestamp", 0), + reverse=True, + ) + ) break # Arbitrary GitHub repo @@ -1016,7 +1061,13 @@ def get_repo_releases_branches(pipeline, wfs): raise AssertionError(f"Not able to find pipeline '{pipeline}'") except AttributeError: # Success! We have a list, which doesn't work with .get() which is looking for a dict key - wf_releases = list(sorted(rel_r.json(), key=lambda k: k.get("published_at_timestamp", 0), reverse=True)) + wf_releases = list( + sorted( + rel_r.json(), + key=lambda k: k.get("published_at_timestamp", 0), + reverse=True, + ) + ) # Get release tag commit hashes if len(wf_releases) > 0: @@ -1050,15 +1101,26 @@ def get_repo_releases_branches(pipeline, wfs): class NFCoreTemplateConfig(BaseModel): + """Template configuration schema""" + org: Optional[str] = None + """ Organisation name """ name: Optional[str] = None + """ Pipeline name """ description: Optional[str] = None + """ Pipeline description """ author: Optional[str] = None + """ Pipeline author """ version: Optional[str] = None + """ Pipeline version """ force: Optional[bool] = True + """ Force overwrite of existing files """ outdir: Optional[Union[str, Path]] = None + """ Output directory """ skip_features: Optional[list] = None + """ Skip features. See https://nf-co.re/docs/nf-core-tools/pipelines/create for a list of features. """ is_nfcore: Optional[bool] = None + """ Whether the pipeline is an nf-core pipeline. """ # convert outdir to str @field_validator("outdir") @@ -1077,17 +1139,121 @@ def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) -LintConfigType = Optional[Dict[str, Union[List[str], List[Dict[str, List[str]]], bool]]] +class NFCoreYamlLintConfig(BaseModel): + """ + schema for linting config in `.nf-core.yml` should cover: + + .. 
code-block:: yaml
+        files_unchanged:
+            - .github/workflows/branch.yml
+        modules_config: False
+        modules_config:
+            - fastqc
+        # merge_markers: False
+        merge_markers:
+            - docs/my_pdf.pdf
+        nextflow_config: False
+        nextflow_config:
+            - manifest.name
+            - config_defaults:
+                - params.annotation_db
+                - params.multiqc_comment_headers
+                - params.custom_table_headers
+        # multiqc_config: False
+        multiqc_config:
+            - report_section_order
+            - report_comment
+        files_exist:
+            - .github/CONTRIBUTING.md
+            - CITATIONS.md
+        template_strings: False
+        template_strings:
+            - docs/my_pdf.pdf
+        nfcore_components: False
+    """
+
+    files_unchanged: Optional[Union[bool, List[str]]] = None
+    """ List of files that should not be changed """
+    modules_config: Optional[Union[bool, List[str]]] = None
+    """ List of modules that should not be changed """
+    merge_markers: Optional[Union[bool, List[str]]] = None
+    """ List of files that should not contain merge markers """
+    nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = None
+    """ List of Nextflow config options that should not be changed """
+    multiqc_config: Optional[Union[bool, List[str]]] = None
+    """ List of MultiQC config options that should not be changed """
+    files_exist: Optional[Union[bool, List[str]]] = None
+    """ List of files that cannot exist """
+    template_strings: Optional[Union[bool, List[str]]] = None
+    """ List of files that can contain template strings """
+    readme: Optional[Union[bool, List[str]]] = None
+    """ Lint the README.md file """
+    nfcore_components: Optional[bool] = None
+    """ Lint all required files to use nf-core modules and subworkflows """
+    actions_ci: Optional[bool] = None
+    """ Lint all required files to use GitHub Actions CI """
+    actions_awstest: Optional[bool] = None
+    """ Lint all required files to run tests on AWS """
+    actions_awsfulltest: Optional[bool] = None
+    """ Lint all required files to run full tests on AWS """
+    pipeline_todos: Optional[bool] = None
+    """ Lint for TODO statements """
+    plugin_includes: Optional[bool] = None
+    """ Lint for Nextflow plugin includes """
+    pipeline_name_conventions: Optional[bool] = None
+    """ Lint for pipeline name conventions """
+    schema_lint: Optional[bool] = None
+    """ Lint nextflow_schema.json file """
+    schema_params: Optional[bool] = None
+    """ Lint schema for all params """
+    system_exit: Optional[bool] = None
+    """ Lint for System.exit calls in groovy/nextflow code """
+    schema_description: Optional[bool] = None
+    """ Check that every parameter in the schema has a description.
""" + actions_schema_validation: Optional[bool] = None + """ Lint GitHub Action workflow files with schema""" + modules_json: Optional[bool] = None + """ Lint modules.json file """ + modules_structure: Optional[bool] = None + """ Lint modules structure """ + base_config: Optional[bool] = None + """ Lint base.config file """ + nfcore_yml: Optional[bool] = None + """ Lint nf-core.yml """ + version_consistency: Optional[bool] = None + """ Lint for version consistency """ + included_configs: Optional[bool] = None + """ Lint for included configs """ + + def __getitem__(self, item: str) -> Any: + return getattr(self, item) + + def get(self, item: str, default: Any = None) -> Any: + if getattr(self, item, default) is None: + return default + return getattr(self, item, default) + + def __setitem__(self, item: str, value: Any) -> None: + setattr(self, item, value) class NFCoreYamlConfig(BaseModel): - repository_type: str + """.nf-core.yml configuration file schema""" + + repository_type: Optional[Literal["pipeline", "modules"]] = None + """ Type of repository """ nf_core_version: Optional[str] = None + """ Version of nf-core/tools used to create/update the pipeline """ org_path: Optional[str] = None - lint: Optional[LintConfigType] = None + """ Path to the organisation's modules repository (used for modules repo_type only) """ + lint: Optional[NFCoreYamlLintConfig] = None + """ Pipeline linting configuration, see https://nf-co.re/docs/nf-core-tools/pipelines/lint#linting-config for examples and documentation """ template: Optional[NFCoreTemplateConfig] = None + """ Pipeline template configuration """ bump_version: Optional[Dict[str, bool]] = None + """ Disable bumping of the version for a module/subworkflow (when repository_type is modules). See https://nf-co.re/docs/nf-core-tools/modules/bump-versions for more information. """ update: Optional[Dict[str, Union[str, bool, Dict[str, Union[str, Dict[str, Union[str, bool]]]]]]] = None + """ Disable updating specific modules/subworkflows (when repository_type is pipeline). See https://nf-co.re/docs/nf-core-tools/modules/update for more information. 
""" def __getitem__(self, item: str) -> Any: return getattr(self, item) @@ -1095,6 +1261,26 @@ def __getitem__(self, item: str) -> Any: def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) + def __setitem__(self, item: str, value: Any) -> None: + setattr(self, item, value) + + def model_dump(self, **kwargs) -> Dict[str, Any]: + # Get the initial data + config = super().model_dump(**kwargs) + + if self.repository_type == "modules": + # Fields to exclude for modules + fields_to_exclude = ["template", "update"] + else: # pipeline + # Fields to exclude for pipeline + fields_to_exclude = ["bump_version", "org_path"] + + # Remove the fields based on repository_type + for field in fields_to_exclude: + config.pop(field, None) + + return config + def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path], Optional[NFCoreYamlConfig]]: """ @@ -1133,9 +1319,38 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] except ValidationError as e: error_message = f"Config file '{config_fn}' is invalid" for error in e.errors(): - error_message += f"\n{error['loc'][0]}: {error['msg']}" + error_message += f"\n{error['loc'][0]}: {error['msg']}\ninput: {error['input']}" raise AssertionError(error_message) + wf_config = fetch_wf_config(Path(directory)) + if nf_core_yaml_config["repository_type"] == "pipeline" and wf_config: + # Retrieve information if template from config file is empty + template = tools_config.get("template") + config_template_keys = template.keys() if template is not None else [] + if nf_core_yaml_config.template is None: + # The .nf-core.yml file did not contain template information + nf_core_yaml_config.template = NFCoreTemplateConfig( + org="nf-core", + name=wf_config["manifest.name"].strip("'\"").split("/")[-1], + description=wf_config["manifest.description"].strip("'\""), + author=wf_config["manifest.author"].strip("'\""), + version=wf_config["manifest.version"].strip("'\""), + outdir=str(directory), + is_nfcore=True, + ) + elif "prefix" in config_template_keys or "skip" in config_template_keys: + # The .nf-core.yml file contained the old prefix or skip keys + nf_core_yaml_config.template = NFCoreTemplateConfig( + org=tools_config["template"].get("prefix", tools_config["template"].get("org", "nf-core")), + name=tools_config["template"].get("name", wf_config["manifest.name"].strip("'\"").split("/")[-1]), + description=tools_config["template"].get("description", wf_config["manifest.description"].strip("'\"")), + author=tools_config["template"].get("author", wf_config["manifest.author"].strip("'\"")), + version=tools_config["template"].get("version", wf_config["manifest.version"].strip("'\"")), + outdir=tools_config["template"].get("outdir", str(directory)), + skip_features=tools_config["template"].get("skip", tools_config["template"].get("skip_features")), + is_nfcore=tools_config["template"].get("prefix", tools_config["template"].get("org")) == "nf-core", + ) + log.debug("Using config file: %s", config_fn) return config_fn, nf_core_yaml_config @@ -1158,7 +1373,7 @@ def get_first_available_path(directory: Union[Path, str], paths: List[str]) -> U return None -def sort_dictionary(d): +def sort_dictionary(d: Dict) -> Dict: """Sorts a nested dictionary recursively""" result = {} for k, v in sorted(d.items()): @@ -1299,3 +1514,21 @@ def set_wd(path: Path) -> Generator[None, None, None]: yield finally: os.chdir(start_wd) + + +def get_wf_files(wf_path: Path): + """Return a list of all files in a directory 
(ignores files listed in .gitignore)"""
+
+    wf_files = []
+
+    with open(Path(wf_path, ".gitignore")) as f:
+        lines = f.read().splitlines()
+        ignore = [line for line in lines if line and not line.startswith("#")]
+
+    for path in Path(wf_path).rglob("*"):
+        if any(fnmatch.fnmatch(str(path), pattern) for pattern in ignore):
+            continue
+        if path.is_file():
+            wf_files.append(str(path))
+
+    return wf_files
diff --git a/requirements-dev.txt b/requirements-dev.txt
index aa43ee3fe3..aab9b1e5d7 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -6,7 +6,7 @@ responses
 ruff
 Sphinx
 sphinx-rtd-theme
-textual-dev==1.5.1
+textual-dev==1.6.1
 types-PyYAML
 types-requests
 types-jsonschema
@@ -16,7 +16,7 @@ types-requests
 types-setuptools
 typing_extensions >=4.0.0
 pytest-asyncio
-pytest-textual-snapshot==0.4.0
+pytest-textual-snapshot==1.0.0
 pytest-workflow>=2.0.0
 pytest>=8.0.0
 ruff
diff --git a/requirements.txt b/requirements.txt
index eba6460f03..51259938a8 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -9,7 +9,7 @@ packaging
 pillow
 pdiff
 pre-commit
-prompt_toolkit<=3.0.36
+prompt_toolkit<=3.0.48
 pydantic>=2.2.1
 pyyaml
 questionary>=2.0.1
@@ -18,7 +18,10 @@ requests
 requests_cache
 rich-click==1.8.*
 rich>=13.3.1
+rocrate
+repo2rocrate
 tabulate
 textual==0.71.0
 trogon
 pdiff
+ruamel.yaml
diff --git a/setup.py b/setup.py
index 45df29b8bc..11b3022494 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@

 from setuptools import find_packages, setup

-version = "3.0.0dev"
+version = "3.0.3dev"

 with open("README.md") as f:
     readme = f.read()
diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container_http.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_http.nf
new file mode 100644
index 0000000000..20c7075481
--- /dev/null
+++ b/tests/data/mock_module_containers/modules/mock_seqera_container_http.nf
@@ -0,0 +1,11 @@
+process CAT_FASTQ {
+    label 'process_single'
+
+    conda "${moduleDir}/environment.yml"
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data' :
+        'community.wave.seqera.io/library/coreutils:9.5--ae99c88a9b28c264' }"
+
+    // truncated
+
+}
diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf
new file mode 100644
index 0000000000..8278ac7917
--- /dev/null
+++ b/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf
@@ -0,0 +1,11 @@
+process UMI_TRANSFER {
+    label 'process_single'
+
+    conda "${moduleDir}/environment.yml"
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6' :
+        'community.wave.seqera.io/library/umi-transfer:1.0.0--d30e8812ea280fa1' }"
+
+    // truncated
+
+}
diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf
new file mode 100644
index 0000000000..234ca04a45
--- /dev/null
+++ b/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf
@@ -0,0 +1,11 @@
+process UMI_TRANSFER_MULLED {
+    label 'process_single'
+
+    conda "${moduleDir}/environment.yml"
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+ 'oras://community.wave.seqera.io/library/umi-transfer_umicollapse:796a995ff53da9e3' : + 'community.wave.seqera.io/library/umi-transfer_umicollapse:3298d4f1b49e33bd' }" + + // truncated + +} diff --git a/tests/modules/test_lint.py b/tests/modules/test_lint.py index 51c814b88c..5372807987 100644 --- a/tests/modules/test_lint.py +++ b/tests/modules/test_lint.py @@ -274,7 +274,7 @@ def test_modules_lint_patched_modules(self): all_modules=True, ) - assert len(module_lint.failed) == 1 + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 @@ -305,6 +305,14 @@ def test_modules_lint_check_url(self): len(mocked_ModuleLint.failed) == failed ), f"{test}: Expected {failed} FAIL, got {len(mocked_ModuleLint.failed)}." + def test_modules_lint_update_meta_yml(self): + """update the meta.yml of a module""" + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules, fix=True) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + def test_modules_lint_snapshot_file(self): """Test linting a module with a snapshot file""" module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) @@ -432,7 +440,7 @@ def test_modules_environment_yml_file_sorted_incorrectly(self): ) as fh: yaml_content = yaml.safe_load(fh) # Add a new dependency to the environment.yml file and reverse the order - yaml_content["dependencies"].append("z") + yaml_content["dependencies"].append("z=0.0.0") yaml_content["dependencies"].reverse() yaml_content = yaml.dump(yaml_content) with open( @@ -513,25 +521,6 @@ def test_modules_meta_yml_incorrect_licence_field(self): assert len(module_lint.warned) >= 0 assert module_lint.failed[0].lint_test == "meta_yml_valid" - def test_modules_meta_yml_input_mismatch(self): - """Test linting a module with an extra entry in input fields in meta.yml compared to module.input""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: - main_nf = fh.read() - main_nf_new = main_nf.replace("path bam", "path bai") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: - fh.write(main_nf_new) - module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: - fh.write(main_nf) - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) == 2, f"Linting warning with {[x.__dict__ for x in module_lint.warned]}" - lint_tests = [x.lint_test for x in module_lint.warned] - # check that it is there twice: - assert lint_tests.count("meta_input_meta_only") == 1 - assert lint_tests.count("meta_input_main_only") == 1 - def test_modules_meta_yml_output_mismatch(self): """Test linting a module with an extra entry in output fields in meta.yml compared to module.output""" with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: @@ -543,13 +532,9 @@ def test_modules_meta_yml_output_mismatch(self): module_lint.lint(print_results=False, module="bpipe/test") with open(Path(self.nfcore_modules, 
"modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf) - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) == 2 - lint_tests = [x.lint_test for x in module_lint.warned] - # check that it is there twice: - assert lint_tests.count("meta_output_meta_only") == 1 - assert lint_tests.count("meta_output_main_only") == 1 + assert "Module `meta.yml` does not match `main.nf`" in module_lint.failed[0].message def test_modules_meta_yml_incorrect_name(self): """Test linting a module with an incorrect name in meta.yml""" diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index 0368c146c4..325a8073b7 100644 --- a/tests/modules/test_modules_json.py +++ b/tests/modules/test_modules_json.py @@ -175,14 +175,17 @@ def test_mod_json_repo_present(self): assert mod_json_obj.repo_present(NF_CORE_MODULES_REMOTE) is True assert mod_json_obj.repo_present("INVALID_REPO") is False - def test_mod_json_module_present(self): - """Tests the module_present function""" + def test_mod_json_component_present(self): + """Tests the component_present function""" mod_json_obj = ModulesJson(self.pipeline_dir) - assert mod_json_obj.module_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is True - assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is False - assert mod_json_obj.module_present("fastqc", "INVALID_REPO", "INVALID_DIR") is False - assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR") is False + assert mod_json_obj.component_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME, "modules") is True + assert ( + mod_json_obj.component_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME, "modules") + is False + ) + assert mod_json_obj.component_present("fastqc", "INVALID_REPO", "INVALID_DIR", "modules") is False + assert mod_json_obj.component_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR", "modules") is False def test_mod_json_get_module_version(self): """Test the get_module_version function""" diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index c3eb94d374..f608278618 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -21,10 +21,10 @@ testing if the update commands works correctly with patch files """ -ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2" -CORRECT_SHA = "1dff30bfca2d98eb7ac7b09269a15e822451d99f" -SUCCEED_SHA = "ba15c20c032c549d77c5773659f19c2927daf48e" -FAIL_SHA = "67b642d4471c4005220a342cad3818d5ba2b5a73" +ORG_SHA = "3dc7c14d29af40f1a0871a675364e437559d97a8" +CORRECT_SHA = "63e780200600e340365b669f9c673b670764c569" +SUCCEED_SHA = "0d0515c3f11266e1314e129bec3e308f804c8dc7" +FAIL_SHA = "cb64a5c1ef85619b89ab99dec2e9097fe84e1dc8" BISMARK_ALIGN = "bismark/align" REPO_NAME = "nf-core-test" PATCH_BRANCH = "patch-tester" @@ -76,11 +76,11 @@ def test_create_patch_no_change(self): module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) # Check that no patch file has been added to the directory - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"} + assert not (module_path / "bismark-align.diff").exists() # Check the 'modules.json' contains no patch file for the module modules_json_obj = 
nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) is None def test_create_patch_change(self): """Test creating a patch when there is a change to the module""" @@ -94,11 +94,11 @@ def test_create_patch_change(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -127,11 +127,11 @@ def test_create_patch_try_apply_successful(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -153,11 +153,11 @@ def test_create_patch_try_apply_successful(self): update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -195,11 +195,11 @@ def test_create_patch_try_apply_failed(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -234,11 +234,11 @@ def test_create_patch_update_success(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert 
modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path(
+        assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path(
             "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
         )

@@ -254,13 +254,13 @@ def test_create_patch_update_success(self):
         assert update_obj.update(BISMARK_ALIGN)

         # Check that a patch file with the correct name has been created
-        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
+        assert (module_path / patch_fn).exists()

         # Check the 'modules.json' contains a patch file for the module
         modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path(
+        assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path(
             "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
-        ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME)
+        ), modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME)

         # Check that the correct lines are in the patch file
         with open(module_path / patch_fn) as fh:
@@ -295,11 +295,11 @@ def test_create_patch_update_fail(self):
         patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"

         # Check that a patch file with the correct name has been created
-        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
+        assert (module_path / patch_fn).exists()

         # Check the 'modules.json' contains a patch file for the module
         modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
+        assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
             "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
         )

@@ -349,11 +349,11 @@ def test_remove_patch(self):

         # Check that a patch file with the correct name has been created
         patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
-        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
+        assert (module_path / patch_fn).exists()

         # Check the 'modules.json' contains a patch file for the module
         modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
+        assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
             "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
         )

@@ -361,8 +361,8 @@
             mock_questionary.unsafe_ask.return_value = True
             patch_obj.remove(BISMARK_ALIGN)
         # Check that the diff file has been removed
-        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"}
+        assert not (module_path / patch_fn).exists()

         # Check that the 'modules.json' entry has been removed
         modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None
+        assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) is None
diff --git a/tests/pipelines/__snapshots__/test_create_app.ambr b/tests/pipelines/__snapshots__/test_create_app.ambr
deleted file mode 100644
index 6e3009e18e..0000000000
--- a/tests/pipelines/__snapshots__/test_create_app.ambr
+++ /dev/null
@@ -1,3321 +0,0 @@
[3,321 deleted lines of serialized Textual TUI snapshot output omitted: terminal screenshots of the `nf-core create` wizard for test_basic_details_custom, test_basic_details_nfcore, test_choose_type, test_customisation_help, and test_final_details]
-     [… remaining ~3,300 deleted lines of tests/pipelines/__snapshots__/test_create_app.ambr omitted: plain-text Textual renders of the "nf-core create" wizard screens test_basic_details_custom, test_basic_details_nfcore, test_choose_type, test_customisation_help, test_final_details, test_github_details, test_github_exit_message, test_github_question, test_type_custom, test_type_nfcore, test_type_nfcore_validation and test_welcome …]
+     [… twelve new SVG snapshot files added under tests/pipelines/__snapshots__/test_create_app/, one per screen listed above and roughly 270 lines each: the same screens re-rendered with the title "nf-core pipelines create" instead of "nf-core create", typo fixes ("cretaed" → "created", "requried" → "required"), and a new "a Toggle all" footer key plus a "Toggle all features" switch; the closing lines of the test_welcome.svg render follow …]
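These SVG files are generated renders rather than hand-written assets: with the pytest-textual-snapshot plugin, each test drives the Textual app and compares the resulting terminal render against the stored SVG. A rough sketch of the assumed test shape (the app path, terminal size and key presses are illustrative):

# Sketch of a Textual snapshot test, assuming the pytest-textual-snapshot
# plugin; snap_compare is the fixture that plugin provides.
def test_basic_details_nfcore(snap_compare):
    """Render the create-pipeline app, replay key presses, then diff the
    SVG render against __snapshots__/test_create_app/<test_name>.svg."""
    assert snap_compare(
        "path/to/create_app.py",   # entry point of the Textual app (illustrative)
        terminal_size=(100, 50),   # roughly matches the wide renders above
        press=["enter"],           # e.g. move on from the welcome screen
    )

On an intentional UI change the stored SVGs are regenerated rather than edited by hand (with this plugin, via pytest's --snapshot-update flag), which is presumably how the batch of files above was produced.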
See the  +nf-core guidelines and the #new-pipelines Slack channel for more information. + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Let's go!  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + d Toggle dark mode  q Quit  a Toggle all  + + + diff --git a/tests/pipelines/lint/test_files_exist.py b/tests/pipelines/lint/test_files_exist.py index 97dd346cdf..ebc529247e 100644 --- a/tests/pipelines/lint/test_files_exist.py +++ b/tests/pipelines/lint/test_files_exist.py @@ -1,5 +1,7 @@ from pathlib import Path +from ruamel.yaml import YAML + import nf_core.pipelines.lint from ..test_lint import TestLint @@ -9,17 +11,17 @@ class TestLintFilesExist(TestLint): def setUp(self) -> None: super().setUp() self.new_pipeline = self._make_pipeline_copy() + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) def test_files_exist_missing_config(self): """Lint test: critical files missing FAIL""" Path(self.new_pipeline, "CHANGELOG.md").unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" + assert self.lint_obj._load() + self.lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert "File not found: `CHANGELOG.md`" in results["failed"] def test_files_exist_missing_main(self): @@ -27,31 +29,27 @@ def test_files_exist_missing_main(self): Path(self.new_pipeline, "main.nf").unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert "File not found: `main.nf`" in results["warned"] def test_files_exist_deprecated_file(self): """Check whether deprecated file issues warning""" - nf = Path(self.new_pipeline, "parameters.settings.json") - nf.touch() + Path(self.new_pipeline, "parameters.settings.json").touch() - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert results["failed"] == ["File must be removed: `parameters.settings.json`"] def test_files_exist_pass(self): """Lint check should pass if all files are there""" - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert results["failed"] == [] def test_files_exist_pass_conditional_nfschema(self): @@ -62,9 +60,58 @@ def test_files_exist_pass_conditional_nfschema(self): with open(Path(self.new_pipeline, "nextflow.config"), "w") as f: f.write(config) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - lint_obj.nf_config["manifest.schema"] = "nf-core" - results = lint_obj.files_exist() + assert self.lint_obj._load() + self.lint_obj.nf_config["manifest.schema"] = "nf-core" + results = self.lint_obj.files_exist() assert results["failed"] == [] assert results["ignored"] == [] + + def test_files_exists_pass_nf_core_yml_config(self): + """Check if linting passes with a valid nf-core.yml config""" + valid_yaml = """ + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + """ + yaml = YAML() + nf_core_yml_path = Path(self.new_pipeline, ".nf-core.yml") + nf_core_yml = yaml.load(nf_core_yml_path) + + nf_core_yml["lint"] = yaml.load(valid_yaml) + yaml.dump(nf_core_yml, nf_core_yml_path) + + 
self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + assert self.lint_obj._load() + + results = self.lint_obj.files_exist() + assert results["failed"] == [] + assert "File is ignored: `.github/CONTRIBUTING.md`" in results["ignored"] + assert "File is ignored: `CITATIONS.md`" in results["ignored"] + + def test_files_exists_fail_nf_core_yml_config(self): + """Check if linting fails with a valid nf-core.yml config""" + valid_yaml = """ + files_exist: + - CITATIONS.md + """ + + # remove CITATIONS.md + Path(self.new_pipeline, "CITATIONS.md").unlink() + assert self.lint_obj._load() + # test first if linting fails correctly + results = self.lint_obj.files_exist() + assert "File not found: `CITATIONS.md`" in results["failed"] + + yaml = YAML() + nf_core_yml_path = Path(self.new_pipeline, ".nf-core.yml") + nf_core_yml = yaml.load(nf_core_yml_path) + + nf_core_yml["lint"] = yaml.load(valid_yaml) + yaml.dump(nf_core_yml, nf_core_yml_path) + + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + assert self.lint_obj._load() + + results = self.lint_obj.files_exist() + assert results["failed"] == [] + assert "File is ignored: `CITATIONS.md`" in results["ignored"] diff --git a/tests/pipelines/lint/test_nextflow_config.py b/tests/pipelines/lint/test_nextflow_config.py index 3cc9355452..f8c3c1f31f 100644 --- a/tests/pipelines/lint/test_nextflow_config.py +++ b/tests/pipelines/lint/test_nextflow_config.py @@ -6,7 +6,6 @@ import nf_core.pipelines.create.create import nf_core.pipelines.lint -from nf_core.utils import NFCoreYamlConfig from ..test_lint import TestLint @@ -30,7 +29,6 @@ def test_default_values_match(self): result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 assert len(result["warned"]) == 0 - assert "Config default value correct: params.max_cpus" in str(result["passed"]) assert "Config default value correct: params.validate_params" in str(result["passed"]) def test_nextflow_config_bad_name_fail(self): @@ -71,18 +69,18 @@ def test_nextflow_config_missing_test_profile_failed(self): def test_default_values_fail(self): """Test linting fails if the default values in nextflow.config do not match the ones defined in the nextflow_schema.json.""" - # Change the default value of max_cpus in nextflow.config + # Change the default value of max_multiqc_email_size in nextflow.config nf_conf_file = Path(self.new_pipeline) / "nextflow.config" with open(nf_conf_file) as f: content = f.read() - fail_content = re.sub(r"\bmax_cpus\s*=\s*16\b", "max_cpus = 0", content) + fail_content = re.sub(r"\bmax_multiqc_email_size\s*=\s*'25.MB'", "max_multiqc_email_size = '0'", content) with open(nf_conf_file, "w") as f: f.write(fail_content) - # Change the default value of max_memory in nextflow_schema.json + # Change the default value of custom_config_version in nextflow_schema.json nf_schema_file = Path(self.new_pipeline) / "nextflow_schema.json" with open(nf_schema_file) as f: content = f.read() - fail_content = re.sub(r'"default": "128.GB"', '"default": "18.GB"', content) + fail_content = re.sub(r'"default": "master"', '"default": "main"', content) with open(nf_schema_file, "w") as f: f.write(fail_content) lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) @@ -90,11 +88,11 @@ def test_default_values_fail(self): result = lint_obj.nextflow_config() assert len(result["failed"]) == 2 assert ( - "Config default value incorrect: `params.max_cpus` is set as `16` in `nextflow_schema.json` but is `0` in `nextflow.config`." 
+ "Config default value incorrect: `params.max_multiqc_email_size` is set as `25.MB` in `nextflow_schema.json` but is `0` in `nextflow.config`." in result["failed"] ) assert ( - "Config default value incorrect: `params.max_memory` is set as `18.GB` in `nextflow_schema.json` but is `128.GB` in `nextflow.config`." + "Config default value incorrect: `params.custom_config_version` is set as `main` in `nextflow_schema.json` but is `master` in `nextflow.config`." in result["failed"] ) @@ -103,14 +101,14 @@ def test_catch_params_assignment_in_main_nf(self): # Add parameter assignment in main.nf main_nf_file = Path(self.new_pipeline) / "main.nf" with open(main_nf_file, "a") as f: - f.write("params.max_time = 42") + f.write("params.custom_config_base = 'test'") lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj.load_pipeline_config() result = lint_obj.nextflow_config() - assert len(result["failed"]) == 1 + assert len(result["failed"]) == 2 assert ( - result["failed"][0] - == "Config default value incorrect: `params.max_time` is set as `240.h` in `nextflow_schema.json` but is `null` in `nextflow.config`." + result["failed"][1] + == "Config default value incorrect: `params.custom_config_base` is set as `https://raw.githubusercontent.com/nf-core/configs/master` in `nextflow_schema.json` but is `null` in `nextflow.config`." ) def test_allow_params_reference_in_main_nf(self): @@ -118,7 +116,7 @@ def test_allow_params_reference_in_main_nf(self): # Add parameter reference in main.nf main_nf_file = Path(self.new_pipeline) / "main.nf" with open(main_nf_file, "a") as f: - f.write("params.max_time == 42") + f.write("params.custom_config_version == 'main'") lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj.load_pipeline_config() result = lint_obj.nextflow_config() @@ -126,22 +124,30 @@ def test_allow_params_reference_in_main_nf(self): def test_default_values_ignored(self): """Test ignoring linting of default values.""" - # Add max_cpus to the ignore list + valid_yaml = """ + nextflow_config: + - manifest.name + - config_defaults: + - params.custom_config_version + """ + # Add custom_config_version to the ignore list nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" - nf_core_yml = NFCoreYamlConfig( - repository_type="pipeline", lint={"nextflow_config": [{"config_defaults": ["params.max_cpus"]}]} - ) + + with open(nf_core_yml_path) as f: + nf_core_yml = yaml.safe_load(f) + nf_core_yml["lint"] = yaml.safe_load(valid_yaml) with open(nf_core_yml_path, "w") as f: - yaml.dump(nf_core_yml.model_dump(), f) + yaml.dump(nf_core_yml, f) lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj.load_pipeline_config() lint_obj._load_lint_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 - assert len(result["ignored"]) == 1 - assert "Config default value correct: params.max_cpu" not in str(result["passed"]) - assert "Config default ignored: params.max_cpus" in str(result["ignored"]) + assert len(result["ignored"]) == 2 + assert "Config default value correct: params.custom_config_version" not in str(result["passed"]) + assert "Config default ignored: params.custom_config_version" in str(result["ignored"]) + assert "Config variable ignored: `manifest.name`" in str(result["ignored"]) def test_default_values_float(self): """Test comparing two float values.""" @@ -150,7 +156,9 @@ def test_default_values_float(self): with open(nf_conf_file) as f: content = f.read() fail_content = re.sub( - r"validate_params\s*=\s*true", 
"params.validate_params = true\ndummy = 0.000000001", content + r"validate_params\s*=\s*true", + "params.validate_params = true\ndummy = 0.000000001", + content, ) with open(nf_conf_file, "w") as f: f.write(fail_content) @@ -180,7 +188,9 @@ def test_default_values_float_fail(self): with open(nf_conf_file) as f: content = f.read() fail_content = re.sub( - r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content + r"validate_params\s*=\s*true", + "params.validate_params = true\ndummy = 0.000000001", + content, ) with open(nf_conf_file, "w") as f: f.write(fail_content) diff --git a/tests/pipelines/lint/test_nfcore_yml.py b/tests/pipelines/lint/test_nfcore_yml.py index 955c00da81..2ac36ffe0c 100644 --- a/tests/pipelines/lint/test_nfcore_yml.py +++ b/tests/pipelines/lint/test_nfcore_yml.py @@ -1,8 +1,9 @@ -import re from pathlib import Path -import nf_core.pipelines.create +from ruamel.yaml import YAML + import nf_core.pipelines.lint +from nf_core.utils import NFCoreYamlConfig from ..test_lint import TestLint @@ -11,11 +12,14 @@ class TestLintNfCoreYml(TestLint): def setUp(self) -> None: super().setUp() self.new_pipeline = self._make_pipeline_copy() - self.nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" + self.nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" + self.yaml = YAML() + self.nf_core_yml: NFCoreYamlConfig = self.yaml.load(self.nf_core_yml_path) + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) def test_nfcore_yml_pass(self): """Lint test: nfcore_yml - PASS""" - self.lint_obj._load() + assert self.lint_obj._load() results = self.lint_obj.nfcore_yml() assert "Repository type in `.nf-core.yml` is valid" in str(results["passed"]) @@ -27,31 +31,95 @@ def test_nfcore_yml_pass(self): def test_nfcore_yml_fail_repo_type(self): """Lint test: nfcore_yml - FAIL - repository type not set""" - with open(self.nf_core_yml) as fh: - content = fh.read() - new_content = content.replace("repository_type: pipeline", "repository_type: foo") - with open(self.nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - results = lint_obj.nfcore_yml() - assert "Repository type in `.nf-core.yml` is not valid." in str(results["failed"]) - assert len(results.get("warned", [])) == 0 - assert len(results.get("passed", [])) >= 0 - assert len(results.get("ignored", [])) == 0 + self.nf_core_yml["repository_type"] = "foo" + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + with self.assertRaises(AssertionError): + self.lint_obj._load() def test_nfcore_yml_fail_nfcore_version(self): """Lint test: nfcore_yml - FAIL - nf-core version not set""" - with open(self.nf_core_yml) as fh: - content = fh.read() - new_content = re.sub(r"nf_core_version:.+", "nf_core_version: foo", content) - with open(self.nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - results = lint_obj.nfcore_yml() + self.nf_core_yml["nf_core_version"] = "foo" + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() assert "nf-core version in `.nf-core.yml` is not set to the latest version." 
in str(results["warned"]) assert len(results.get("failed", [])) == 0 assert len(results.get("passed", [])) >= 0 assert len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_nested_lint_config(self) -> None: + """Lint test: nfcore_yml with nested lint config - PASS""" + valid_yaml = """ + lint: + files_unchanged: + - .github/workflows/branch.yml + # modules_config: False + modules_config: + - fastqc + # merge_markers: False + merge_markers: + - docs/my_pdf.pdf + # nextflow_config: False + nextflow_config: + - manifest.name + - config_defaults: + - params.annotation_db + - params.multiqc_comment_headers + - params.custom_table_headers + multiqc_config: + - report_section_order + - report_comment + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + # template_strings: False + template_strings: + - docs/my_pdf.pdf + """ + self.nf_core_yml["lint"] = self.yaml.load(valid_yaml) + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() + assert len(results.get("failed", [])) == 0 + assert len(results.get("warned", [])) == 0 + assert len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_nested_lint_config_bool(self) -> None: + """Lint test: nfcore_yml with nested lint config - PASS""" + valid_yaml = """ + lint: + files_unchanged: + - .github/workflows/branch.yml + modules_config: False + # modules_config: + # - fastqc + merge_markers: False + # merge_markers: + # - docs/my_pdf.pdf + # nextflow_config: False + nextflow_config: + - manifest.name + - config_defaults: + - params.annotation_db + - params.multiqc_comment_headers + - params.custom_table_headers + multiqc_config: + - report_section_order + - report_comment + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + template_strings: False + # template_strings: + # - docs/my_pdf.pdf + """ + self.nf_core_yml["lint"] = self.yaml.load(valid_yaml) + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() + assert len(results.get("failed", [])) == 0 + assert len(results.get("warned", [])) == 0 + assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/lint/test_template_strings.py b/tests/pipelines/lint/test_template_strings.py index 406ba63e0c..37b7604806 100644 --- a/tests/pipelines/lint/test_template_strings.py +++ b/tests/pipelines/lint/test_template_strings.py @@ -1,6 +1,8 @@ import subprocess from pathlib import Path +import yaml + import nf_core.pipelines.create import nf_core.pipelines.lint @@ -11,6 +13,9 @@ class TestLintTemplateStrings(TestLint): def setUp(self) -> None: super().setUp() self.new_pipeline = self._make_pipeline_copy() + self.nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" + with open(self.nf_core_yml_path) as f: + self.nf_core_yml = yaml.safe_load(f) def test_template_strings(self): """Tests finding a template string in a file fails linting.""" @@ -28,9 +33,12 @@ def test_template_strings(self): def test_template_strings_ignored(self): """Tests ignoring template_strings""" # Ignore template_strings test - nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write("repository_type: pipeline\nlint:\n template_strings: False") + valid_yaml = """ + template_strings: false + """ + self.nf_core_yml["lint"] = yaml.safe_load(valid_yaml) + with open(self.nf_core_yml_path, "w") as f: + yaml.safe_dump(self.nf_core_yml, f) lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) 
lint_obj._load() lint_obj._lint_pipeline() @@ -43,13 +51,21 @@ def test_template_strings_ignore_file(self): txt_file = Path(self.new_pipeline) / "docs" / "test.txt" with open(txt_file, "w") as f: f.write("my {{ template_string }}") + subprocess.check_output(["git", "add", "docs"], cwd=self.new_pipeline) + # Ignore template_strings test - nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write("repository_type: pipeline\nlint:\n template_strings:\n - docs/test.txt") + valid_yaml = """ + template_strings: + - docs/test.txt + """ + self.nf_core_yml["lint"] = yaml.safe_load(valid_yaml) + with open(self.nf_core_yml_path, "w") as f: + yaml.safe_dump(self.nf_core_yml, f) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj._load() result = lint_obj.template_strings() + assert len(result["failed"]) == 0 assert len(result["ignored"]) == 1 diff --git a/tests/pipelines/test_bump_version.py b/tests/pipelines/test_bump_version.py index 709e82427d..8af5c0e4d1 100644 --- a/tests/pipelines/test_bump_version.py +++ b/tests/pipelines/test_bump_version.py @@ -13,12 +13,25 @@ def test_bump_pipeline_version(self): """Test that making a release with the working example files works""" # Bump the version number - nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "1.1") + nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "1.1.0") new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir) # Check nextflow.config new_pipeline_obj.load_pipeline_config() - assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1" + assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1.0" + + # Check multiqc_config.yml + with open(new_pipeline_obj._fp("assets/multiqc_config.yml")) as fh: + multiqc_config = yaml.safe_load(fh) + + assert "report_comment" in multiqc_config + assert "/releases/tag/1.1.0" in multiqc_config["report_comment"] + + # Check .nf-core.yml + with open(new_pipeline_obj._fp(".nf-core.yml")) as fh: + nf_core_yml = yaml.safe_load(fh) + if nf_core_yml["template"]: + assert nf_core_yml["template"]["version"] == "1.1.0" def test_dev_bump_pipeline_version(self): """Test that making a release works with a dev name and a leading v""" @@ -33,7 +46,7 @@ def test_dev_bump_pipeline_version(self): def test_bump_nextflow_version(self): # Bump the version number to a specific version, preferably one # we're not already on - version = "22.04.3" + version = "25.04.2" nf_core.pipelines.bump_version.bump_nextflow_version(self.pipeline_obj, version) new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir) new_pipeline_obj._load() diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index a898d37b70..d1e2c41a68 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -257,6 +257,141 @@ def test_find_container_images_modules(self, tmp_path, mock_fetch_wf_config): not in download_obj.containers ) + # mock_seqera_container_oras.nf + assert "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6" in download_obj.containers + assert "community.wave.seqera.io/library/umi-transfer:1.0.0--d30e8812ea280fa1" not in download_obj.containers + + # mock_seqera_container_oras_mulled.nf + assert ( + "oras://community.wave.seqera.io/library/umi-transfer_umicollapse:796a995ff53da9e3" + in download_obj.containers + ) + assert ( + "community.wave.seqera.io/library/umi-transfer_umicollapse:3298d4f1b49e33bd" not in 
download_obj.containers + ) + + # mock_seqera_container_http.nf + assert ( + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" + in download_obj.containers + ) + + # ToDO: This URI should actually NOT be in there, but prioritize_direct_download() can not handle this case. + # + # It works purely by comparing the strings, thus can establish the equivalence of 'https://depot.galaxyproject.org/singularity/umi_tools:1.1.5--py39hf95cd2a_0' + # and 'biocontainers/umi_tools:1.1.5--py39hf95cd2a_0' because of the identical string 'umi_tools:1.1.5--py39hf95cd2a_0', but has no means to establish that + # 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data' and + # 'community.wave.seqera.io/library/coreutils:9.5--ae99c88a9b28c264' are the equivalent container. It would need to query an API at Seqera for that. + + assert "community.wave.seqera.io/library/coreutils:9.5--ae99c88a9b28c264" in download_obj.containers + + # + # Test for 'prioritize_direct_download' + # + @with_temporary_folder + def test_prioritize_direct_download(self, tmp_path): + download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) + + # tests deduplication and https priority as well as Seqera Container exception + + test_container = [ + "https://depot.galaxyproject.org/singularity/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "biocontainers/umi-transfer:1.5.0--h715e4b3_0", + "https://depot.galaxyproject.org/singularity/umi-transfer:1.5.0--h715e4b3_0", + "biocontainers/umi-transfer:1.5.0--h715e4b3_0", + "quay.io/nf-core/sortmerna:4.3.7--6502243397c065ba", + "nf-core/sortmerna:4.3.7--6502243397c065ba", + "https://depot.galaxyproject.org/singularity/sortmerna:4.3.7--hdbdd923_1", + "https://depot.galaxyproject.org/singularity/sortmerna:4.3.7--hdbdd923_0", + "https://depot.galaxyproject.org/singularity/sortmerna:4.2.0--h9ee0642_1", + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data", + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data", + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data", + ] + + result = download_obj.prioritize_direct_download(test_container) + + # Verify that the priority works for regular https downloads (https encountered first) + assert "https://depot.galaxyproject.org/singularity/ubuntu:22.04" in result + assert "nf-core/ubuntu:22.04" not in result + + # Verify that the priority works for regular https downloads (https encountered second) + assert "biocontainers/umi-transfer:1.5.0--h715e4b3_0" not in result + assert "https://depot.galaxyproject.org/singularity/umi-transfer:1.5.0--h715e4b3_0" in result + + # Verify that the priority works for images with and without explicit registry + # No priority here, though - the first is retained. 
+ assert "nf-core/sortmerna:4.3.7--6502243397c065ba" in result + assert "quay.io/nf-core/sortmerna:4.3.7--6502243397c065ba" not in result + + # Verify that different versions of the same tool and different build numbers are retained + assert "https://depot.galaxyproject.org/singularity/sortmerna:4.3.7--hdbdd923_1" in result + assert "https://depot.galaxyproject.org/singularity/sortmerna:4.3.7--hdbdd923_0" in result + assert "https://depot.galaxyproject.org/singularity/sortmerna:4.2.0--h9ee0642_1" in result + + # Verify that Seqera containers are not deduplicated... + assert ( + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data" + in result + ) + assert ( + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" + in result + ) + # ...but identical ones are. + assert ( + result.count( + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" + ) + == 1 + ) + + # + # Test for 'reconcile_seqera_container_uris' + # + @with_temporary_folder + def test_reconcile_seqera_container_uris(self, tmp_path): + download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) + + prioritized_container = [ + "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6", + "oras://community.wave.seqera.io/library/sylph:0.6.1--b97274cdc1caa649", + ] + + test_container = [ + "https://depot.galaxyproject.org/singularity/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "community.wave.seqera.io/library/umi-transfer:1.5.0--73c1a6b65e5b0b81", + "community.wave.seqera.io/library/sylph:0.6.1--a21713a57a65a373", + "biocontainers/sylph:0.6.1--b97274cdc1caa649", + ] + + # test that the test_container list is returned as it is, if no prioritized_containers are specified + result_empty = download_obj.reconcile_seqera_container_uris([], test_container) + assert result_empty == test_container + + result = download_obj.reconcile_seqera_container_uris(prioritized_container, test_container) + + # Verify that unrelated images are retained + assert "https://depot.galaxyproject.org/singularity/ubuntu:22.04" in result + assert "nf-core/ubuntu:22.04" in result + + # Verify that the priority works for regular Seqera container (Native Singularity over Docker, but only for Seqera registry) + assert "oras://community.wave.seqera.io/library/sylph:0.6.1--b97274cdc1caa649" in result + assert "community.wave.seqera.io/library/sylph:0.6.1--a21713a57a65a373" not in result + assert "biocontainers/sylph:0.6.1--b97274cdc1caa649" in result + + # Verify that version strings are respected: Version 1.0.0 does not replace version 1.5.0 + assert "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6" in result + assert "community.wave.seqera.io/library/umi-transfer:1.5.0--73c1a6b65e5b0b81" in result + + # assert that the deduplication works + assert test_container.count("nf-core/ubuntu:22.04") == 3 + assert result.count("nf-core/ubuntu:22.04") == 1 + # # Tests for 'singularity_pull_image' # @@ -287,11 +422,30 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p "docker.io/bschiffthaler/sed", f"{tmp_dir}/sed.sif", None, "docker.io", mock_rich_progress ) + # Test successful pull with absolute oras:// URI + download_obj.singularity_pull_image( + 
"oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6", + f"{tmp_dir}/umi-transfer-oras.sif", + None, + "docker.io", + mock_rich_progress, + ) + + # try pulling Docker container image with oras:// + with pytest.raises(ContainerError.NoSingularityContainerError): + download_obj.singularity_pull_image( + "oras://ghcr.io/matthiaszepper/umi-transfer:dev", + f"{tmp_dir}/umi-transfer-oras_impostor.sif", + None, + "docker.io", + mock_rich_progress, + ) + # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExistsError is raised before attempting to pull.) with pytest.raises(ContainerError.RegistryNotFoundError): download_obj.singularity_pull_image( "hello-world", - f"{tmp_dir}/hello-world_new.sif", + f"{tmp_dir}/break_the_registry_test.sif", None, "register-this-domain-to-break-the-test.io", mock_rich_progress, @@ -327,7 +481,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p with pytest.raises(ContainerError.InvalidTagError): download_obj.singularity_pull_image( "ewels/multiqc:go-rewrite", - f"{tmp_dir}/umi-transfer.sif", + f"{tmp_dir}/multiqc-go.sif", None, "ghcr.io", mock_rich_progress, @@ -376,10 +530,72 @@ def test_get_singularity_images(self, tmp_path, mock_fetch_wf_config): @mock.patch("os.symlink") @mock.patch("os.open") @mock.patch("os.close") - @mock.patch("re.sub") @mock.patch("os.path.basename") @mock.patch("os.path.dirname") def test_symlink_singularity_images( + self, + tmp_path, + mock_dirname, + mock_basename, + mock_close, + mock_open, + mock_symlink, + mock_makedirs, + ): + # Setup + mock_dirname.return_value = f"{tmp_path}/path/to" + mock_basename.return_value = "singularity-image.img" + mock_open.return_value = 12 # file descriptor + mock_close.return_value = 12 # file descriptor + + download_obj = DownloadWorkflow( + pipeline="dummy", + outdir=tmp_path, + container_library=( + "quay.io", + "community-cr-prod.seqera.io/docker/registry/v2", + "depot.galaxyproject.org/singularity", + ), + ) + + # Call the method + download_obj.symlink_singularity_images(f"{tmp_path}/path/to/singularity-image.img") + + # Check that os.makedirs was called with the correct arguments + mock_makedirs.assert_any_call(f"{tmp_path}/path/to", exist_ok=True) + + # Check that os.open was called with the correct arguments + mock_open.assert_any_call(f"{tmp_path}/path/to", os.O_RDONLY) + + # Check that os.symlink was called with the correct arguments + expected_calls = [ + mock.call( + "./singularity-image.img", + "./quay.io-singularity-image.img", + dir_fd=12, + ), + mock.call( + "./singularity-image.img", + "./community-cr-prod.seqera.io-docker-registry-v2-singularity-image.img", + dir_fd=12, + ), + mock.call( + "./singularity-image.img", + "./depot.galaxyproject.org-singularity-singularity-image.img", + dir_fd=12, + ), + ] + mock_symlink.assert_has_calls(expected_calls, any_order=True) + + @with_temporary_folder + @mock.patch("os.makedirs") + @mock.patch("os.symlink") + @mock.patch("os.open") + @mock.patch("os.close") + @mock.patch("re.sub") + @mock.patch("os.path.basename") + @mock.patch("os.path.dirname") + def test_symlink_singularity_images_registry( self, tmp_path, mock_dirname, @@ -400,23 +616,22 @@ def test_symlink_singularity_images( download_obj = DownloadWorkflow( pipeline="dummy", outdir=tmp_path, - container_library=("mirage-the-imaginative-registry.io", "quay.io"), + container_library=("quay.io", "community-cr-prod.seqera.io/docker/registry/v2"), ) - # Call the method + 
download_obj.registry_set = {"quay.io", "community-cr-prod.seqera.io/docker/registry/v2"} + + # Call the method with a registry already in the filename - should not normally happen, but preserve it if it does. download_obj.symlink_singularity_images(f"{tmp_path}/path/to/quay.io-singularity-image.img") print(mock_resub.call_args) # Check that os.makedirs was called with the correct arguments mock_makedirs.assert_any_call(f"{tmp_path}/path/to", exist_ok=True) - # Check that os.open was called with the correct arguments - mock_open.assert_called_once_with(f"{tmp_path}/path/to", os.O_RDONLY) - # Check that os.symlink was called with the correct arguments - mock_symlink.assert_any_call( + mock_symlink.assert_called_with( "./quay.io-singularity-image.img", - "./mirage-the-imaginative-registry.io-quay.io-singularity-image.img", + "./community-cr-prod.seqera.io-docker-registry-v2-singularity-image.img", dir_fd=12, ) # Check that there is no attempt to symlink to itself (test parameters would result in that behavior if not checked in the function) @@ -425,6 +640,10 @@ def test_symlink_singularity_images( not in mock_symlink.call_args_list ) + + # Normally it would be called for each registry, but since quay.io is part of the name, it + # will only be called once, as a symlink to itself must not be created. + mock_open.assert_called_once_with(f"{tmp_path}/path/to", os.O_RDONLY) + # # Test for gather_registries' # @@ -446,10 +665,16 @@ def test_gather_registries(self, tmp_path, mock_fetch_wf_config): download_obj.gather_registries(tmp_path) assert download_obj.registry_set assert isinstance(download_obj.registry_set, set) - assert len(download_obj.registry_set) == 6 + assert len(download_obj.registry_set) == 8 assert "quay.io" in download_obj.registry_set # default registry, if no container library is provided.
- assert "depot.galaxyproject.org" in download_obj.registry_set # default registry, often hardcoded in modules + assert ( + "depot.galaxyproject.org/singularity" in download_obj.registry_set + ) # default registry, often hardcoded in modules + assert "community.wave.seqera.io/library" in download_obj.registry_set # Seqera containers Docker + assert ( + "community-cr-prod.seqera.io/docker/registry/v2" in download_obj.registry_set + ) # Seqera containers Singularity https:// download assert "apptainer-registry.io" in download_obj.registry_set assert "docker.io" in download_obj.registry_set assert "podman-registry.io" in download_obj.registry_set @@ -483,7 +708,14 @@ def test_singularity_image_filenames(self, tmp_path): download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) download_obj.outdir = tmp_path download_obj.container_cache_utilisation = "amend" - download_obj.registry_set = {"docker.io", "quay.io", "depot.galaxyproject.org"} + + download_obj.registry_set = { + "docker.io", + "quay.io", + "depot.galaxyproject.org/singularity", + "community.wave.seqera.io/library", + "community-cr-prod.seqera.io/docker/registry/v2", + } ## Test phase I: Container not yet cached, should be amended to cache # out_path: str, Path to cache @@ -501,11 +733,13 @@ def test_singularity_image_filenames(self, tmp_path): self.assertTrue(all((isinstance(element, str), element is None) for element in result)) # assert that the correct out_path is returned that points to the cache - assert result[0].endswith("/cachedir/singularity-bbmap-38.93--he522d1c_0.img") + assert result[0].endswith("/cachedir/bbmap-38.93--he522d1c_0.img") ## Test phase II: Test various container names # out_path: str, Path to cache # cache_path: None + + # Test --- mulled containers # result = download_obj.singularity_image_filenames( "quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:59cdd445419f14abac76b31dd0d71217994cbcc9-0" ) @@ -513,10 +747,33 @@ def test_singularity_image_filenames(self, tmp_path): "/cachedir/biocontainers-mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2-59cdd445419f14abac76b31dd0d71217994cbcc9-0.img" ) + # Test --- Docker containers without registry # result = download_obj.singularity_image_filenames("nf-core/ubuntu:20.04") assert result[0].endswith("/cachedir/nf-core-ubuntu-20.04.img") - ## Test phase III: Container wil lbe cached but also copied to out_path + # Test --- Docker container with explicit registry -> should be trimmed # + result = download_obj.singularity_image_filenames("docker.io/nf-core/ubuntu:20.04") + assert result[0].endswith("/cachedir/nf-core-ubuntu-20.04.img") + + # Test --- Docker container with explicit registry not in registry set -> can't be trimmed + result = download_obj.singularity_image_filenames("mirage-the-imaginative-registry.io/nf-core/ubuntu:20.04") + assert result[0].endswith("/cachedir/mirage-the-imaginative-registry.io-nf-core-ubuntu-20.04.img") + + # Test --- Seqera Docker containers: Trimmed, because it is hard-coded in the registry set. + result = download_obj.singularity_image_filenames( + "community.wave.seqera.io/library/coreutils:9.5--ae99c88a9b28c264" + ) + assert result[0].endswith("/cachedir/coreutils-9.5--ae99c88a9b28c264.img") + + # Test --- Seqera Singularity containers: Trimmed, because it is hard-coded in the registry set. 
+ result = download_obj.singularity_image_filenames( + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" + ) + assert result[0].endswith( + "cachedir/blobs-sha256-c2-c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975-data.img" + ) + + ## Test phase III: Container will be cached but also copied to out_path # out_path: str, Path to cache # cache_path: str, Path to cache download_obj.container_cache_utilisation = "copy" @@ -525,8 +782,8 @@ def test_singularity_image_filenames(self, tmp_path): ) self.assertTrue(all(isinstance(element, str) for element in result)) - assert result[0].endswith("/singularity-images/singularity-bbmap-38.93--he522d1c_0.img") - assert result[1].endswith("/cachedir/singularity-bbmap-38.93--he522d1c_0.img") + assert result[0].endswith("/singularity-images/bbmap-38.93--he522d1c_0.img") + assert result[1].endswith("/cachedir/bbmap-38.93--he522d1c_0.img") ## Test phase IV: Expect an error if no NXF_SINGULARITY_CACHEDIR is defined os.environ["NXF_SINGULARITY_CACHEDIR"] = "" diff --git a/tests/pipelines/test_launch.py b/tests/pipelines/test_launch.py index d63e7b6dc5..ed23872f66 100644 --- a/tests/pipelines/test_launch.py +++ b/tests/pipelines/test_launch.py @@ -101,7 +101,12 @@ def test_ob_to_questionary_string(self): "default": "data/*{1,2}.fastq.gz", } result = self.launcher.single_param_to_questionary("input", sc_obj) - assert result == {"type": "input", "name": "input", "message": "", "default": "data/*{1,2}.fastq.gz"} + assert result == { + "type": "input", + "name": "input", + "message": "", + "default": "data/*{1,2}.fastq.gz", + } @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Web based"}]) def test_prompt_web_gui_true(self, mock_prompt): @@ -123,7 +128,8 @@ def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api): assert exc_info.value.args[0].startswith("Web launch response not recognised:") @mock.patch( - "nf_core.utils.poll_nfcore_web_api", side_effect=[{"api_url": "foo", "web_url": "bar", "status": "recieved"}] + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"api_url": "foo", "web_url": "bar", "status": "recieved"}], ) @mock.patch("webbrowser.open") @mock.patch("nf_core.utils.wait_cli_function") @@ -133,7 +139,10 @@ def test_launch_web_gui(self, mock_poll_nfcore_web_api, mock_webbrowser, mock_wa self.launcher.merge_nxf_flag_schema() assert self.launcher.launch_web_gui() is None - @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "error", "message": "foo"}]) + @mock.patch( + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"status": "error", "message": "foo"}], + ) def test_get_web_launch_response_error(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - status error""" with pytest.raises(AssertionError) as exc_info: @@ -147,12 +156,18 @@ def test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api): self.launcher.get_web_launch_response() assert exc_info.value.args[0].startswith("Web launch GUI returned unexpected status (foo): ") - @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "waiting_for_user"}]) + @mock.patch( + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"status": "waiting_for_user"}], + ) def test_get_web_launch_response_waiting(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - status waiting_for_user""" assert self.launcher.get_web_launch_response() is False - 
@mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "launch_params_complete"}]) + @mock.patch( + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"status": "launch_params_complete"}], + ) def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - complete, but missing keys""" with pytest.raises(AssertionError) as exc_info: @@ -185,11 +200,9 @@ def test_sanitise_web_response(self): self.launcher.get_pipeline_schema() self.launcher.nxf_flags["-name"] = "" self.launcher.schema_obj.input_params["igenomes_ignore"] = "true" - self.launcher.schema_obj.input_params["max_cpus"] = "12" self.launcher.sanitise_web_response() assert "-name" not in self.launcher.nxf_flags assert self.launcher.schema_obj.input_params["igenomes_ignore"] is True - assert self.launcher.schema_obj.input_params["max_cpus"] == 12 def test_ob_to_questionary_bool(self): """Check converting a python dict to a pyenquirer format - booleans""" @@ -262,7 +275,10 @@ def test_ob_to_questionary_enum(self): def test_ob_to_questionary_pattern(self): """Check converting a python dict to a questionary format - with pattern""" - sc_obj = {"type": "string", "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$"} + sc_obj = { + "type": "string", + "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$", + } result = self.launcher.single_param_to_questionary("email", sc_obj) assert result["type"] == "input" assert result["validate"]("test@email.com") is True @@ -282,7 +298,7 @@ def test_strip_default_params(self): assert self.launcher.schema_obj.input_params == {"input": "custom_input"} def test_build_command_empty(self): - """Test the functionality to build a nextflow command - nothing customsied""" + """Test the functionality to build a nextflow command - nothing customised""" self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() self.launcher.build_command() diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 9ca29d249f..ca7353d50d 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -48,7 +48,8 @@ def test_init_pipeline_lint(self): def test_load_lint_config_not_found(self): """Try to load a linting config file that doesn't exist""" assert self.lint_obj._load_lint_config() - assert self.lint_obj.lint_config == {} + assert self.lint_obj.lint_config is not None + assert self.lint_obj.lint_config.model_dump(exclude_none=True) == {} def test_load_lint_config_ignore_all_tests(self): """Try to load a linting config file that ignores all tests""" @@ -64,7 +65,8 @@ def test_load_lint_config_ignore_all_tests(self): # Load the new lint config file and check lint_obj._load_lint_config() - assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) + assert lint_obj.lint_config is not None + assert sorted(list(lint_obj.lint_config.model_dump(exclude_none=True))) == sorted(lint_obj.lint_tests) # Try running linting and make sure that all tests are ignored lint_obj._lint_pipeline() diff --git a/tests/pipelines/test_params_file.py b/tests/pipelines/test_params_file.py index 22a6182acd..0450d3f99d 100644 --- a/tests/pipelines/test_params_file.py +++ b/tests/pipelines/test_params_file.py @@ -1,79 +1,67 @@ import json -import os -import shutil -import tempfile from pathlib import Path -import nf_core.pipelines.create.create -import nf_core.pipelines.schema from nf_core.pipelines.params_file import ParamsFileBuilder +from 
..test_pipelines import TestPipelines -class TestParamsFileBuilder: + +class TestParamsFileBuilder(TestPipelines): """Class for schema tests""" - @classmethod - def setup_class(cls): + def setUp(self): """Create a new PipelineSchema object""" - cls.schema_obj = nf_core.pipelines.schema.PipelineSchema() - cls.root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - - # Create a test pipeline in temp directory - cls.tmp_dir = tempfile.mkdtemp() - cls.template_dir = Path(cls.tmp_dir, "wf") - create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", "a description", "Me", outdir=cls.template_dir, no_git=True - ) - create_obj.init_pipeline() - - cls.template_schema = Path(cls.template_dir, "nextflow_schema.json") - cls.params_template_builder = ParamsFileBuilder(cls.template_dir) - cls.invalid_template_schema = Path(cls.template_dir, "nextflow_schema_invalid.json") - - # Remove the allOf section to make the schema invalid - with open(cls.template_schema) as fh: - o = json.load(fh) - del o["allOf"] - - with open(cls.invalid_template_schema, "w") as fh: - json.dump(o, fh) - - @classmethod - def teardown_class(cls): - if Path(cls.tmp_dir).exists(): - shutil.rmtree(cls.tmp_dir) + super().setUp() + + self.template_schema = Path(self.pipeline_dir, "nextflow_schema.json") + self.params_template_builder = ParamsFileBuilder(self.pipeline_dir) + self.outfile = Path(self.pipeline_dir, "params-file.yml") def test_build_template(self): - outfile = Path(self.tmp_dir, "params-file.yml") - self.params_template_builder.write_params_file(str(outfile)) + self.params_template_builder.write_params_file(self.outfile) - assert outfile.exists() + assert self.outfile.exists() - with open(outfile) as fh: + with open(self.outfile) as fh: out = fh.read() assert "nf-core/testpipeline" in out - def test_build_template_invalid_schema(self, caplog): + def test_build_template_invalid_schema(self): """Build a schema from a template""" - outfile = Path(self.tmp_dir, "params-file-invalid.yml") - builder = ParamsFileBuilder(self.invalid_template_schema) - res = builder.write_params_file(str(outfile)) + schema = {} + with open(self.template_schema) as fh: + schema = json.load(fh) + del schema["allOf"] + + with open(self.template_schema, "w") as fh: + json.dump(schema, fh) + + builder = ParamsFileBuilder(self.template_schema) + res = builder.write_params_file(self.outfile) assert res is False - assert "Pipeline schema file is invalid" in caplog.text + assert "Pipeline schema file is invalid" in self.caplog.text - def test_build_template_file_exists(self, caplog): + def test_build_template_file_exists(self): """Build a schema from a template""" # Creates a new empty file - outfile = Path(self.tmp_dir) / "params-file.yml" - with open(outfile, "w"): - pass + self.outfile.touch() - res = self.params_template_builder.write_params_file(outfile) + res = self.params_template_builder.write_params_file(self.outfile) assert res is False - assert f"File '{outfile}' exists!" in caplog.text + assert f"File '{self.outfile}' exists!" 
in self.caplog.text + + self.outfile.unlink() - outfile.unlink() + def test_build_template_content(self): + """Test that the content of the params file is correct""" + self.params_template_builder.write_params_file(self.outfile) + + with open(self.outfile) as fh: + out = fh.read() + + assert "nf-core/testpipeline" in out + assert "# input = null" in out diff --git a/tests/pipelines/test_rocrate.py b/tests/pipelines/test_rocrate.py new file mode 100644 index 0000000000..01a77ecd76 --- /dev/null +++ b/tests/pipelines/test_rocrate.py @@ -0,0 +1,127 @@ +"""Test the nf-core pipelines rocrate command""" + +import shutil +import tempfile +from pathlib import Path + +import git +import rocrate.rocrate +from git import Repo + +import nf_core.pipelines.create +import nf_core.pipelines.create.create +import nf_core.pipelines.rocrate +import nf_core.utils + +from ..test_pipelines import TestPipelines + + +class TestROCrate(TestPipelines): + """Class for RO-Crate tests""" + + def setUp(self) -> None: + super().setUp() + # add fake metro map + Path(self.pipeline_dir, "docs", "images", "nf-core-testpipeline_metro_map.png").touch() + # commit the changes + repo = Repo(self.pipeline_dir) + repo.git.add(A=True) + repo.index.commit("Initial commit") + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir) + + def tearDown(self): + """Clean up temporary files and folders""" + + if self.tmp_dir.exists(): + shutil.rmtree(self.tmp_dir) + + def test_rocrate_creation(self): + """Run the nf-core rocrate command""" + + # Run the command + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) + + # Check that the crate was created + self.assertTrue(Path(self.pipeline_dir, "ro-crate-metadata.json").exists()) + + # Check that the entries in the crate are correct + crate = rocrate.rocrate.ROCrate(self.pipeline_dir) + entities = crate.get_entities() + + # Check if the correct entities are set: + for entity in entities: + entity_json = entity.as_jsonld() + if entity_json["@id"] == "./": + self.assertEqual(entity_json.get("name"), "nf-core/testpipeline") + self.assertEqual(entity_json["mainEntity"], {"@id": "main.nf"}) + elif entity_json["@id"] == "#main.nf": + self.assertEqual(entity_json["programmingLanguage"], [{"@id": "#nextflow"}]) + self.assertEqual(entity_json["image"], [{"@id": "nf-core-testpipeline_metro_map.png"}]) + # assert there is a metro map + # elif entity_json["@id"] == "nf-core-testpipeline_metro_map.png": # FIXME waiting for https://github.com/ResearchObject/ro-crate-py/issues/174 + # self.assertEqual(entity_json["@type"], ["File", "ImageObject"]) + # assert that author is set as a person + elif "name" in entity_json and entity_json["name"] == "Test McTestFace": + self.assertEqual(entity_json["@type"], "Person") + # check that it is set as author of the main entity + if crate.mainEntity is not None: + self.assertEqual(crate.mainEntity["author"][0].id, entity_json["@id"]) + + def test_rocrate_creation_wrong_pipeline_dir(self): + """Run the nf-core rocrate command with a wrong pipeline directory""" + # Run the command + + # Check that it raises a UserWarning + with self.assertRaises(UserWarning): + nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir / "bad_dir") + + # assert that the crate was not created + self.assertFalse(Path(self.pipeline_dir / "bad_dir", "ro-crate-metadata.json").exists()) + + def test_rocrate_creation_with_wrong_version(self): + """Run the nf-core rocrate command with a wrong pipeline version""" + # Run the command + + self.rocrate_obj
= nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir, version="1.0.0") + + # Check that crate creation fails with the wrong version + with self.assertRaises(SystemExit): + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) + + def test_rocrate_creation_without_git(self): + """Run the nf-core rocrate command without a git repository""" + + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir, version="1.0.0") + # remove git repo + shutil.rmtree(self.pipeline_dir / ".git") + # Check that crate creation fails without the git repo + with self.assertRaises(SystemExit): + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) + + def test_rocrate_creation_to_zip(self): + """Run the nf-core rocrate command with a zip output""" + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, zip_path=self.pipeline_dir) + # Check that the crate was created + self.assertTrue(Path(self.pipeline_dir, "ro-crate.crate.zip").exists()) + + def test_rocrate_creation_for_fetchngs(self): + """Run the nf-core rocrate command with nf-core/fetchngs""" + tmp_dir = Path(tempfile.mkdtemp()) + # git clone nf-core/fetchngs + git.Repo.clone_from("https://github.com/nf-core/fetchngs", tmp_dir / "fetchngs") + # Run the command + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(tmp_dir / "fetchngs", version="1.12.0") + assert self.rocrate_obj.create_rocrate(tmp_dir / "fetchngs", self.pipeline_dir) + + # Check that Sateesh Peri is mentioned in creator field + + crate = rocrate.rocrate.ROCrate(self.pipeline_dir) + entities = crate.get_entities() + for entity in entities: + entity_json = entity.as_jsonld() + if entity_json["@id"] == "#main.nf": + assert "https://orcid.org/0000-0002-9879-9070" in entity_json["creator"] + + # Clean up + shutil.rmtree(tmp_dir) diff --git a/tests/pipelines/test_schema.py b/tests/pipelines/test_schema.py index 2abaf07bd2..ab543d8b90 100644 --- a/tests/pipelines/test_schema.py +++ b/tests/pipelines/test_schema.py @@ -49,7 +49,7 @@ def test_load_lint_schema(self): self.schema_obj.load_lint_schema() def test_load_lint_schema_nofile(self): - """Check that linting raises properly if a non-existant file is given""" + """Check that linting raises properly if a non-existent file is given""" with pytest.raises(RuntimeError): self.schema_obj.get_schema_path("fake_file") diff --git a/tests/pipelines/test_sync.py b/tests/pipelines/test_sync.py index ffbe75510b..8bf8a3c4ec 100644 --- a/tests/pipelines/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -56,6 +56,8 @@ def mocked_requests_get(url) -> MockResponse: for branch_no in range(3, 7) ] return MockResponse(response_data, 200, url) + if url == "https://nf-co.re/pipelines.json": + return MockResponse({"remote_workflows": [{"name": "testpipeline", "topics": ["test", "pipeline"]}]}, 200, url) return MockResponse([{"html_url": url}], 404, url) @@ -398,3 +400,33 @@ def test_reset_target_dir_fake_branch(self): with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.reset_target_dir() assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`") + + def test_sync_no_changes(self): + """Test pipeline sync when no changes are needed""" + with mock.patch("requests.get", side_effect=mocked_requests_get), mock.patch( + "requests.post", side_effect=mocked_requests_post + ) as mock_post: + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) + + # Mock that no changes were made + psync.made_changes = False + + # Run sync + psync.sync() + + # Verify no PR was created +
mock_post.assert_not_called() + + def test_sync_no_github_token(self): + """Test sync fails appropriately when GitHub token is missing""" + # Ensure GitHub token is not set + if "GITHUB_AUTH_TOKEN" in os.environ: + del os.environ["GITHUB_AUTH_TOKEN"] + + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir, make_pr=True) + psync.made_changes = True # Force changes to trigger PR attempt + + # Run sync and check for appropriate error + with self.assertRaises(nf_core.pipelines.sync.PullRequestExceptionError) as exc_info: + psync.sync() + self.assertIn("GITHUB_AUTH_TOKEN not set!", str(exc_info.exception)) diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py index 56574b865c..d94b55b3d3 100644 --- a/tests/subworkflows/test_lint.py +++ b/tests/subworkflows/test_lint.py @@ -63,6 +63,14 @@ def test_subworkflows_lint_multiple_remotes(self): assert len(subworkflow_lint.passed) > 0 assert len(subworkflow_lint.warned) >= 0 + def test_subworkflows_lint_update_meta_yml(self): + """Update the meta.yml of a subworkflow with 'lint --fix'""" + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules, fix=True) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + def test_subworkflows_lint_snapshot_file(self): """Test linting a subworkflow with a snapshot file""" subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) diff --git a/tests/subworkflows/test_patch.py b/tests/subworkflows/test_patch.py new file mode 100644 index 0000000000..5bb6a6798e --- /dev/null +++ b/tests/subworkflows/test_patch.py @@ -0,0 +1,307 @@ +import os +import tempfile +from pathlib import Path +from unittest import mock + +import pytest + +import nf_core.components.components_command +import nf_core.components.patch +import nf_core.subworkflows + +from ..test_subworkflows import TestSubworkflows +from ..utils import GITLAB_REPO, GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL + +OLD_SHA = "dbb12457e32d3da8eea7dc4ae096201fff4747c5" +SUCCEED_SHA = "0a33e6a0d730ad22a0ec9f7f9a7540af6e943221" +FAIL_SHA = "b6e5e8739de9a1a0c4f85267144e43dbaf8f1461" + + +class TestSubworkflowsPatch(TestSubworkflows): + """ + Test the 'nf-core subworkflows patch' command + """ + + def modify_main_nf(self, path): + """Modify a file to test patch creation""" + with open(path) as fh: + lines = fh.readlines() + # We want a patch file that looks something like: + # - ch_fasta // channel: [ fasta ] + for line_index in range(len(lines)): + if lines[line_index] == " ch_fasta // channel: [ fasta ]\n": + to_pop = line_index + lines.pop(to_pop) + with open(path, "w") as fh: + fh.writelines(lines) + + def setup_patch(self, pipeline_dir, modify_subworkflow): + # Install the subworkflow bam_sort_stats_samtools + install_obj = nf_core.subworkflows.SubworkflowInstall( + pipeline_dir, + prompt=False, + force=False, + remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_BRANCH, + sha=OLD_SHA, + ) + + # Install the subworkflow + install_obj.install("bam_sort_stats_samtools") + + if modify_subworkflow: + # Modify the subworkflow + subworkflow_path = Path(pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + self.modify_main_nf(subworkflow_path / "main.nf") + + def test_create_patch_no_change(self): + """Test creating a patch when there is no change to the subworkflow""" +
self.setup_patch(self.pipeline_dir, False) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + with pytest.raises(UserWarning): + patch_obj.patch("bam_sort_stats_samtools") + + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Check that no patch file has been added to the directory + assert not (subworkflow_path / "bam_sort_stats_samtools.diff").exists() + + def test_create_patch_change(self): + """Test creating a patch when there is a change to the subworkflow""" + self.setup_patch(self.pipeline_dir, True) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() + + # Check that the correct lines are in the patch file + with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: + patch_lines = fh.readlines() + print(patch_lines) + subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) + assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" + assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines + assert "- ch_fasta // channel: [ fasta ]\n" in patch_lines + + def test_create_patch_try_apply_successful(self): + """Test creating a patch file and applying it to a new version of the files""" + self.setup_patch(self.pipeline_dir, True) + subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() + + update_obj = nf_core.subworkflows.SubworkflowUpdate( + self.pipeline_dir, sha=OLD_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files("bam_sort_stats_samtools", OLD_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" + patch_relpath = subworkflow_relpath / "bam_sort_stats_samtools.diff" + assert ( + update_obj.try_apply_patch( + "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir + ) + is True + ) + + # Move the files from the temporary directory + update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, GITLAB_REPO, OLD_SHA) + + # Check that a patch file with the correct name has been created + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() + + # Check that the correct lines are in the patch file + with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: + patch_lines = fh.readlines() + subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) + assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" + assert f"+++ {subworkflow_relpath /
'main.nf'}\n" in patch_lines + assert "- ch_fasta // channel: [ fasta ]\n" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(subworkflow_path / "main.nf") as fh: + main_nf_lines = fh.readlines() + # These lines should have been removed by the patch + assert "- ch_fasta // channel: [ fasta ]\n" not in main_nf_lines + + def test_create_patch_try_apply_failed(self): + """Test creating a patch file and applying it to a new version of the the files""" + self.setup_patch(self.pipeline_dir, True) + subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() + + update_obj = nf_core.subworkflows.SubworkflowUpdate( + self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files("bam_sort_stats_samtools", FAIL_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" + patch_relpath = subworkflow_relpath / "bam_sort_stats_samtools.diff" + assert ( + update_obj.try_apply_patch( + "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir + ) + is False + ) + + def test_create_patch_update_success(self): + """ + Test creating a patch file and the updating the subworkflow + + Should have the same effect as 'test_create_patch_try_apply_successful' + but uses higher level api + """ + self.setup_patch(self.pipeline_dir, True) + swf_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + patch_fn = "bam_sort_stats_samtools.diff" + # Check that a patch file with the correct name has been created + assert (swf_path / patch_fn).exists() + + # Check the 'modules.json' contains a patch file for the subworkflow + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) == Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools", patch_fn) + + # Update the subworkflow + update_obj = nf_core.subworkflows.update.SubworkflowUpdate( + self.pipeline_dir, + sha=OLD_SHA, + show_diff=False, + update_deps=True, + remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_BRANCH, + ) + assert update_obj.update("bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert (swf_path / patch_fn).exists() + + # Check the 'modules.json' contains a patch file for the subworkflow + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) == Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools", patch_fn), modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) + + # Check that the 
correct lines are in the patch file + with open(swf_path / patch_fn) as fh: + patch_lines = fh.readlines() + swf_relpath = swf_path.relative_to(self.pipeline_dir) + assert f"--- {swf_relpath / 'main.nf'}\n" in patch_lines + assert f"+++ {swf_relpath / 'main.nf'}\n" in patch_lines + assert "- ch_fasta // channel: [ fasta ]\n" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(swf_path / "main.nf") as fh: + main_nf_lines = fh.readlines() + # this line should have been removed by the patch + assert " ch_fasta // channel: [ fasta ]\n" not in main_nf_lines + + def test_create_patch_update_fail(self): + """ + Test creating a patch file and updating a subworkflow when there is a diff conflict + """ + self.setup_patch(self.pipeline_dir, True) + swf_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + patch_fn = "bam_sort_stats_samtools.diff" + # Check that a patch file with the correct name has been created + assert (swf_path / patch_fn).exists() + + # Check the 'modules.json' contains a patch file for the subworkflow + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) == Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools", patch_fn) + + # Save the file contents for downstream comparison + with open(swf_path / patch_fn) as fh: + patch_contents = fh.read() + + update_obj = nf_core.subworkflows.update.SubworkflowUpdate( + self.pipeline_dir, + sha=FAIL_SHA, + show_diff=False, + update_deps=True, + remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_BRANCH, + ) + update_obj.update("bam_sort_stats_samtools") + + # Check that the installed files have not been affected by the attempted patch + temp_dir = Path(tempfile.mkdtemp()) + nf_core.components.components_command.ComponentCommand( + "subworkflows", self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH + ).install_component_files("bam_sort_stats_samtools", FAIL_SHA, update_obj.modules_repo, temp_dir) + + temp_module_dir = temp_dir / "bam_sort_stats_samtools" + for file in os.listdir(temp_module_dir): + assert file in os.listdir(swf_path) + with open(swf_path / file) as fh: + installed = fh.read() + with open(temp_module_dir / file) as fh: + shouldbe = fh.read() + assert installed == shouldbe + + # Check that the patch file is unaffected + with open(swf_path / patch_fn) as fh: + new_patch_contents = fh.read() + assert patch_contents == new_patch_contents + + def test_remove_patch(self): + """Test removing a patch file from a subworkflow""" + self.setup_patch(self.pipeline_dir, True) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() + + with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary: + mock_questionary.unsafe_ask.return_value = True + patch_obj.remove("bam_sort_stats_samtools") + # Check that the diff file has been
removed + assert not (subworkflow_path / "bam_sort_stats_samtools.diff").exists() diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py index 423eb516bc..63cc7c0efc 100644 --- a/tests/subworkflows/test_update.py +++ b/tests/subworkflows/test_update.py @@ -99,7 +99,7 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self): with open(patch_path) as fh: line = fh.readline() assert line.startswith( - "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" + "Changes in component 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" ) def test_install_at_hash_and_update_and_save_diff_limit_output(self): diff --git a/tests/test_cli.py b/tests/test_cli.py index 026efd1e6a..8df1e210b0 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -167,7 +167,7 @@ def test_cli_download(self, mock_dl): "compress": "tar.gz", "force": None, "platform": None, - "download-configuration": None, + "download-configuration": "yes", "tag": "3.12=testing", "container-system": "singularity", "container-library": "quay.io", @@ -188,7 +188,7 @@ def test_cli_download(self, mock_dl): params["compress"], "force" in params, "platform" in params, - "download-configuration" in params, + params["download-configuration"], (params["tag"],), params["container-system"], (params["container-library"],), @@ -358,7 +358,7 @@ def test_schema_lint(self, mock_get_schema_path): with open("nextflow_schema.json", "w") as f: f.write("{}") self.invoke_cli(cmd) - mock_get_schema_path.assert_called_with("nextflow_schema.json") + mock_get_schema_path.assert_called_with(Path("nextflow_schema.json")) @mock.patch("nf_core.pipelines.schema.PipelineSchema.get_schema_path") def test_schema_lint_filename(self, mock_get_schema_path): @@ -368,7 +368,7 @@ def test_schema_lint_filename(self, mock_get_schema_path): with open("some_other_filename", "w") as f: f.write("{}") self.invoke_cli(cmd) - mock_get_schema_path.assert_called_with("some_other_filename") + mock_get_schema_path.assert_called_with(Path("some_other_filename")) @mock.patch("nf_core.pipelines.create_logo.create_logo") def test_create_logo(self, mock_create_logo): diff --git a/tests/test_modules.py b/tests/test_modules.py index 0e16497176..d0692236e8 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -7,7 +7,7 @@ import pytest import requests_cache import responses -import yaml +import ruamel.yaml import nf_core.modules import nf_core.modules.create @@ -16,6 +16,7 @@ import nf_core.modules.remove import nf_core.pipelines.create.create from nf_core import __version__ +from nf_core.pipelines.lint_utils import run_prettier_on_file from nf_core.utils import NFCoreYamlConfig from .utils import ( @@ -28,11 +29,15 @@ create_tmp_pipeline, mock_anaconda_api_calls, mock_biocontainers_api_calls, + mock_biotools_api_calls, ) def create_modules_repo_dummy(tmp_dir): """Create a dummy copy of the nf-core/modules repo""" + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + yaml.indent(mapping=2, sequence=2, offset=0) root_dir = Path(tmp_dir, "modules") Path(root_dir, "modules", "nf-core").mkdir(parents=True) @@ -42,13 +47,14 @@ def create_modules_repo_dummy(tmp_dir): nf_core_yml = NFCoreYamlConfig(nf_core_version=__version__, repository_type="modules", org_path="nf-core") with open(Path(root_dir, ".nf-core.yml"), "w") as fh: yaml.dump(nf_core_yml.model_dump(), fh) - # mock biocontainers and anaconda response + # mock biocontainers and anaconda response and biotools 
response with responses.RequestsMock() as rsps: mock_anaconda_api_calls(rsps, "bpipe", "0.9.13--hdfd78af_0") mock_biocontainers_api_calls(rsps, "bpipe", "0.9.13--hdfd78af_0") + mock_biotools_api_calls(rsps, "bpipe") # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules module_create = nf_core.modules.create.ModuleCreate( - root_dir, "bpipe/test", "@author", "process_single", False, False + root_dir, "bpipe/test", "@author", "process_single", True, False ) with requests_cache.disabled(): assert module_create.create() @@ -57,10 +63,11 @@ def create_modules_repo_dummy(tmp_dir): meta_yml_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml") with open(str(meta_yml_path)) as fh: - meta_yml = yaml.safe_load(fh) + meta_yml = yaml.load(fh) del meta_yml["tools"][0]["bpipe"]["doi"] with open(str(meta_yml_path), "w") as fh: yaml.dump(meta_yml, fh) + run_prettier_on_file(fh.name) # Add dummy content to main.nf.test.snap test_snap_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap") diff --git a/tests/test_pipelines.py b/tests/test_pipelines.py index 656ccbef55..455b2b71c2 100644 --- a/tests/test_pipelines.py +++ b/tests/test_pipelines.py @@ -1,6 +1,8 @@ import shutil from unittest import TestCase +import pytest + from nf_core.utils import Pipeline from .utils import create_tmp_pipeline @@ -24,3 +26,7 @@ def _make_pipeline_copy(self): new_pipeline = self.tmp_dir / "nf-core-testpipeline-copy" shutil.copytree(self.pipeline_dir, new_pipeline) return new_pipeline + + @pytest.fixture(autouse=True) + def _use_caplog(self, caplog): + self.caplog = caplog diff --git a/tests/test_utils.py b/tests/test_utils.py index bde561d95e..b13c8eb37d 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -113,7 +113,7 @@ def test_pip_package_pass(self): @mock.patch("requests.get") def test_pip_package_timeout(self, mock_get): """Tests the PyPi connection and simulates a request timeout, which should - return in an addiional warning in the linting""" + return in an additional warning in the linting""" # Define the behaviour of the request get mock mock_get.side_effect = requests.exceptions.Timeout() # Now do the test diff --git a/tests/utils.py b/tests/utils.py index 9a661c5927..cffe8ba103 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -100,13 +100,22 @@ def mock_biocontainers_api_calls(rsps: responses.RequestsMock, module: str, vers rsps.get(biocontainers_api_url, json=biocontainers_mock, status=200) +def mock_biotools_api_calls(rsps: responses.RequestsMock, module: str) -> None: + """Mock biotools api calls for module""" + biotools_api_url = f"https://bio.tools/api/t/?q={module}&format=json" + biotools_mock = { + "list": [{"name": "Bpipe", "biotoolsCURIE": "biotools:bpipe"}], + } + rsps.get(biotools_api_url, json=biotools_mock, status=200) + + def create_tmp_pipeline(no_git: bool = False) -> Tuple[Path, Path, str, Path]: """Create a new Pipeline for testing""" tmp_dir = Path(tempfile.TemporaryDirectory().name) root_repo_dir = Path(__file__).resolve().parent.parent template_dir = root_repo_dir / "nf_core" / "pipeline-template" - pipeline_name = "mypipeline" + pipeline_name = "testpipeline" pipeline_dir = tmp_dir / pipeline_name pipeline_dir.mkdir(parents=True) @@ -116,7 +125,7 @@ def create_tmp_pipeline(no_git: bool = False) -> Tuple[Path, Path, str, Path]: org_path="nf-core", lint=None, template=NFCoreTemplateConfig( - name="mypipeline", + name="testpipeline", author="me", description="it is mine", org="nf-core", 
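For reviewers unfamiliar with the new helper: a minimal sketch of how mock_biotools_api_calls is meant to be combined with the existing Anaconda and Biocontainers mocks when creating a module in the test suite. The wrapper name create_module_offline is illustrative; the bpipe tool name, the mocked versions and the ModuleCreate call all follow the fixture in the diff above.

import responses
import requests_cache

import nf_core.modules.create
from tests.utils import (
    mock_anaconda_api_calls,
    mock_biocontainers_api_calls,
    mock_biotools_api_calls,
)


def create_module_offline(root_dir):
    """Create a dummy module without hitting the real Anaconda, Biocontainers or bio.tools APIs."""
    with responses.RequestsMock() as rsps:
        # Register mocked responses for all three APIs queried during module creation
        mock_anaconda_api_calls(rsps, "bpipe", "0.9.13--hdfd78af_0")
        mock_biocontainers_api_calls(rsps, "bpipe", "0.9.13--hdfd78af_0")
        mock_biotools_api_calls(rsps, "bpipe")
        module_create = nf_core.modules.create.ModuleCreate(
            root_dir, "bpipe/test", "@author", "process_single", True, False
        )
        # Disable requests_cache so the mocked responses are actually hit
        with requests_cache.disabled():
            assert module_create.create()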
From 1ecb3ec15299d3f581e7691423d19cd337c6ef69 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Cavalcante?= Date: Mon, 16 Dec 2024 10:54:08 -0300 Subject: [PATCH 58/60] refact: Use same org_path as modulesrepo first (#22) * refact: Use same org_path as modulesrepo first * refact: Change current_remote to a modulesrepo instance * refact: Try using modulesrepo in get_comps * refact: Try using utility functions to grab the yml * fix: Restore old regex behaviour * refact: Use parent path instead of regex in comp_utils * refact: Use modulesrepo for current_repo in install * refact: Move constants to new file and use MRepo Moves constants from component_utils to constants.py so that ModulesRepo can be used from inside of component_utils, thereby avoiding a circular import * Revert "refact: Use modulesrepo for current_repo in install" This reverts commit f4cc0fe7ae8bdb3b7c857ec59cf6c63f74b14507. * Reapply "refact: Use modulesrepo for current_repo in install" This reverts commit c5bebcf418ee641b9f149dde4ca73c49f8580337. * Revert "refact: Move constants to new file and use MRepo" This reverts commit c0a7a007ae15597bfd60304628e90d9c2b372a60. * refact: Change constants to diff file, use MRepo * fix: Change import in test_update --- nf_core/__main__.py | 2 +- nf_core/components/components_utils.py | 24 ++++++------------------ nf_core/components/constants.py | 4 ++++ nf_core/components/info.py | 2 +- nf_core/components/install.py | 14 ++++++++------ nf_core/components/update.py | 17 +++++++---------- nf_core/modules/modules_json.py | 3 ++- nf_core/modules/modules_repo.py | 3 +-- nf_core/synced_repo.py | 2 +- tests/modules/test_modules_json.py | 2 +- tests/modules/test_update.py | 2 +- tests/subworkflows/test_update.py | 2 +- 12 files changed, 34 insertions(+), 43 deletions(-) create mode 100644 nf_core/components/constants.py diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 9f16188e95..06d4be8f75 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -54,7 +54,7 @@ subworkflows_test, subworkflows_update, ) -from nf_core.components.components_utils import NF_CORE_MODULES_REMOTE +from nf_core.components.constants import NF_CORE_MODULES_REMOTE from nf_core.pipelines.download import DownloadError from nf_core.utils import check_if_outdated, nfcore_logo, rich_force_colors, setup_nfcore_dir diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 3acacb4fe4..be28a0d870 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -1,25 +1,18 @@ import logging import re from pathlib import Path -from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union +from typing import Dict, List, Optional, Tuple, Union import questionary import requests import rich.prompt import yaml -if TYPE_CHECKING: - from nf_core.modules.modules_repo import ModulesRepo - import nf_core.utils +from nf_core.modules.modules_repo import ModulesRepo log = logging.getLogger(__name__) -# Constants for the nf-core/modules repo used throughout the module files -NF_CORE_MODULES_NAME = "nf-core" -NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" -NF_CORE_MODULES_DEFAULT_BRANCH = "master" - def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[Path, Optional[str], str]: """ @@ -181,20 +174,15 @@ def get_components_to_install( for component in components: if isinstance(component, dict): component_name = list(component.keys())[0].lower() + branch = component[component_name].get("branch") 
git_remote = component[component_name]["git_remote"] - org_path_match = re.search(r"(?:https://|git@)[\w\.]+[:/](.*?)/", git_remote) - if org_path_match: - org_path = org_path_match.group(1) - else: - raise UserWarning( - f"The organisation path of {component_name} could not be established from '{git_remote}'" - ) + modules_repo = ModulesRepo(git_remote, branch=branch) current_comp_dict = subworkflows if component_name in subworkflows else modules component_dict = { - "org_path": org_path, + "org_path": modules_repo.repo_path, "git_remote": git_remote, - "branch": component[component_name].get("branch"), + "branch": branch, } current_comp_dict[component_name].update(component_dict) diff --git a/nf_core/components/constants.py b/nf_core/components/constants.py new file mode 100644 index 0000000000..cc155f3d58 --- /dev/null +++ b/nf_core/components/constants.py @@ -0,0 +1,4 @@ +# Constants for the nf-core/modules repo used throughout the module files +NF_CORE_MODULES_NAME = "nf-core" +NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" +NF_CORE_MODULES_DEFAULT_BRANCH = "master" diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 31769785a1..a8cb5a0fa1 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -15,7 +15,7 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand -from nf_core.components.components_utils import NF_CORE_MODULES_REMOTE +from nf_core.components.constants import NF_CORE_MODULES_REMOTE from nf_core.modules.modules_json import ModulesJson log = logging.getLogger(__name__) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index f40b0712bf..d45b4d2c33 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -15,10 +15,12 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand from nf_core.components.components_utils import ( - NF_CORE_MODULES_NAME, get_components_to_install, prompt_component_version_sha, ) +from nf_core.components.constants import ( + NF_CORE_MODULES_NAME, +) from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import ModulesRepo @@ -39,7 +41,7 @@ def __init__( installed_by: Optional[List[str]] = None, ): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) - self.current_remote = remote_url + self.current_remote = ModulesRepo(remote_url, branch) self.branch = branch self.force = force self.prompt = prompt @@ -53,15 +55,15 @@ def __init__( def install(self, component: Union[str, Dict[str, str]], silent: bool = False) -> bool: if isinstance(component, dict): # Override modules_repo when the component to install is a dependency from a subworkflow. 
- remote_url = component.get("git_remote", self.current_remote) + remote_url = component.get("git_remote", self.current_remote.remote_url) branch = component.get("branch", self.branch) self.modules_repo = ModulesRepo(remote_url, branch) component = component["name"] if self.current_remote is None: - self.current_remote = self.modules_repo.remote_url + self.current_remote = self.modules_repo - if self.current_remote == self.modules_repo.remote_url and self.sha is not None: + if self.current_remote.remote_url == self.modules_repo.remote_url and self.sha is not None: self.current_sha = self.sha else: self.current_sha = None @@ -244,7 +246,7 @@ def collect_and_verify_name( raise ValueError - if self.current_remote == modules_repo.remote_url: + if self.current_remote.remote_url == modules_repo.remote_url: if not modules_repo.component_exists(component, self.component_type, commit=self.current_sha): warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote '{modules_repo.remote_url}' ({modules_repo.branch})" log.warning(warn_msg) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 1e80b05e3a..7c61b6b00e 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -1,6 +1,5 @@ import logging import os -import re import shutil import tempfile from pathlib import Path @@ -42,7 +41,7 @@ def __init__( limit_output=False, ): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) - self.current_remote = remote_url + self.current_remote = ModulesRepo(remote_url, branch) self.branch = branch self.force = force self.prompt = prompt @@ -97,7 +96,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr """ if isinstance(component, dict): # Override modules_repo when the component to install is a dependency from a subworkflow. 
- remote_url = component.get("git_remote", self.current_remote) + remote_url = component.get("git_remote", self.current_remote.remote_url) branch = component.get("branch", self.branch) self.modules_repo = ModulesRepo(remote_url, branch) component = component["name"] @@ -892,7 +891,7 @@ def get_components_to_update(self, component): if self.component_type == "modules": # All subworkflow names in the installed_by section of a module are subworkflows using this module # We need to update them too - git_remote = self.current_remote + git_remote = self.current_remote.remote_url for subworkflow in installed_by: if subworkflow != component: for remote_url, content in mods_json["repos"].items(): @@ -965,9 +964,7 @@ def update_linked_components( def manage_changes_in_linked_components(self, component, modules_to_update, subworkflows_to_update): """Check for linked components added or removed in the new subworkflow version""" if self.component_type == "subworkflows": - org_path_match = re.search(r"(?:https://|git@)[\w\.]+[:/](.*?)/", self.current_remote) - if org_path_match: - org_path = org_path_match.group(1) + org_path = self.current_remote.repo_path subworkflow_directory = Path(self.directory, self.component_type, org_path, component) included_modules, included_subworkflows = get_components_to_install(subworkflow_directory) @@ -991,7 +988,7 @@ def manage_changes_in_linked_components(self, component, modules_to_update, subw # If a new module/subworkflow is included in the subworklfow and wasn't included before for module in included_modules: module_name = module["name"] - module["git_remote"] = module.get("git_remote", self.current_remote) + module["git_remote"] = module.get("git_remote", self.current_remote.remote_url) module["branch"] = module.get("branch", self.branch) if module_name not in modules_to_update: log.info(f"Installing newly included module '{module_name}' for '{component}'") @@ -999,7 +996,7 @@ def manage_changes_in_linked_components(self, component, modules_to_update, subw install_module_object.install(module, silent=True) for subworkflow in included_subworkflows: subworkflow_name = subworkflow["name"] - subworkflow["git_remote"] = subworkflow.get("git_remote", self.current_remote) + subworkflow["git_remote"] = subworkflow.get("git_remote", self.current_remote.remote_url) subworkflow["branch"] = subworkflow.get("branch", self.branch) if subworkflow_name not in subworkflows_to_update: log.info(f"Installing newly included subworkflow '{subworkflow_name}' for '{component}'") @@ -1022,4 +1019,4 @@ def _reset_component_type(self, original_component_type, original_update_all): self.modules_json.pipeline_components = None self.update_all = original_update_all if self.current_remote is None: - self.current_remote = self.modules_repo.remote_url + self.current_remote = self.modules_repo diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index a9ba3b442c..f53097935a 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -15,7 +15,8 @@ from typing_extensions import NotRequired, TypedDict # for py<3.11 import nf_core.utils -from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, get_components_to_install +from nf_core.components.components_utils import get_components_to_install +from nf_core.components.constants import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.modules.modules_repo import ModulesRepo from nf_core.pipelines.lint_utils import dump_json_with_prettier diff --git 
a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 357fc49cc5..30a724d736 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -10,9 +10,8 @@ import rich.prompt from git.exc import GitCommandError, InvalidGitRepositoryError -import nf_core.modules.modules_json import nf_core.modules.modules_utils -from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE +from nf_core.components.constants import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.synced_repo import RemoteProgressbar, SyncedRepo from nf_core.utils import NFCORE_CACHE_DIR, NFCORE_DIR, load_tools_config diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index dd61b72a2b..efb7a8e03e 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -9,7 +9,7 @@ import git from git.exc import GitCommandError -from nf_core.components.components_utils import ( +from nf_core.components.constants import ( NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, ) diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index 325a8073b7..029eb32ccd 100644 --- a/tests/modules/test_modules_json.py +++ b/tests/modules/test_modules_json.py @@ -3,7 +3,7 @@ import shutil from pathlib import Path -from nf_core.components.components_utils import ( +from nf_core.components.constants import ( NF_CORE_MODULES_DEFAULT_BRANCH, NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, diff --git a/tests/modules/test_update.py b/tests/modules/test_update.py index 6c8eacc666..807f67cb81 100644 --- a/tests/modules/test_update.py +++ b/tests/modules/test_update.py @@ -8,7 +8,7 @@ import yaml import nf_core.utils -from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE +from nf_core.components.constants import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.modules.install import ModuleInstall from nf_core.modules.modules_json import ModulesJson from nf_core.modules.patch import ModulePatch diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py index 63cc7c0efc..b540d35556 100644 --- a/tests/subworkflows/test_update.py +++ b/tests/subworkflows/test_update.py @@ -8,7 +8,7 @@ import yaml import nf_core.utils -from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE +from nf_core.components.constants import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.modules.modules_json import ModulesJson from nf_core.modules.update import ModuleUpdate from nf_core.subworkflows.install import SubworkflowInstall From fd98f956af55c70579a9b6a3f04f410131e13287 Mon Sep 17 00:00:00 2001 From: jvfe Date: Mon, 16 Dec 2024 11:04:21 -0300 Subject: [PATCH 59/60] Squashed commit of the following: MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit 769f8ddda11f5beded6baf83713b0cac79a2b333 Merge: 5ac5767a d44bcdf7 Author: Júlia Mir Pedrol Date: Mon Dec 16 14:23:39 2024 +0100 Merge pull request #3358 from mirpedrol/fix-pre-commit-template Fix pre commit template commit d44bcdf7e2065bd3ac281b27bcb33800e9af602d Author: mirpedrol Date: Mon Dec 16 13:06:36 2024 +0100 more prettier fixes commit 2c78c5ebc5fec773649e1f00ec5cc71f4c0f06a0 Author: mirpedrol Date: Mon Dec 16 12:28:59 2024 +0100 more template prittier fixing commit d19e3e071e7a792354221664f0b0b0ba4e409625 Author: mirpedrol Date: Mon Dec 16 12:11:49 2024 +0100 don't try running pre-commit if code-linters or github are not used in the template commit 
13e553fca62efd06c810363625787396fe0e387c Author: mirpedrol Date: Mon Dec 16 12:08:20 2024 +0100 fix more prettier modifications form the template commit ac703bfed375f14c1c00798a9eba7260b0c57355 Author: mirpedrol Date: Mon Dec 16 11:58:43 2024 +0100 don't create rocrate file when we skip the feature commit 709e67ff4e395419b3de91dcf1b9b242f7689241 Author: mirpedrol Date: Mon Dec 16 11:46:35 2024 +0100 Remove Dumpler from ruamel yaml commit 0c4b0c0cf1f1cf110ab9a4b9695b098232e85d62 Merge: 2b2b4359 5ac5767a Author: Júlia Mir Pedrol Date: Mon Dec 16 11:35:31 2024 +0100 Merge branch 'dev' into fix-pre-commit-template commit 2b2b4359a9af852e8204d223659ae79b946a4864 Author: mirpedrol Date: Mon Dec 16 11:34:30 2024 +0100 prettify template and printing .nf-core.yml commit 5ac5767a4e901887d22c8cdaa090a19e282d7fc7 Merge: a8f63cd3 76ac617b Author: Matthias Zepper <6963520+MatthiasZepper@users.noreply.github.com> Date: Fri Dec 13 19:49:50 2024 +0100 Merge pull request #3351 from MatthiasZepper/no_env_in_download_action Replace Github environment usage in Download Action commit 76ac617b291caa37a53017c8758a0dc938f4c8b1 Author: Matthias Zepper Date: Fri Dec 13 14:11:56 2024 +0100 Remove endraw statement that 'nf-core pipelines' create does not like. commit 1fbd0119b561d32dfef7c6cbc618567ff8feb95c Author: nf-core-bot Date: Fri Dec 13 12:44:24 2024 +0000 [automated] Update CHANGELOG.md commit 9f61e8584ffc2f7a8417c226c04d8e15fc4dc2a4 Author: mirpedrol Date: Fri Dec 13 13:30:05 2024 +0100 run pre-commit from a git repo commit f6159c99490b7530bf3cbe081c78d71a828482a2 Author: mirpedrol Date: Fri Dec 13 13:29:17 2024 +0100 don't run prettier on ro-crate json file commit a8f63cd360b7ad18893c89140c01db765862c107 Merge: 6eaa9980 4db108c9 Author: Sateesh_Peri <33637490+sateeshperi@users.noreply.github.com> Date: Fri Dec 13 17:58:57 2024 +0530 Merge pull request #3357 from nf-core/usage-typo-fix Add missing p commit 059473c2e138aecfb451f2f848265767761d798a Author: mirpedrol Date: Fri Dec 13 13:21:09 2024 +0100 fix pre-commit linting failures commit 4db108c90e3b3816f2237669505f628e7461ee9d Author: nf-core-bot Date: Fri Dec 13 12:08:36 2024 +0000 [automated] Update CHANGELOG.md commit edd29e0fc02d9d34528a48c244c2b18880acb94b Author: James A. 
Fellows Yates Date: Fri Dec 13 13:07:30 2024 +0100 Add missing p commit ac26924b6b5b849180eb4a6ca76ee9268805ef24 Merge: f02d6bb5 6eaa9980 Author: Matthias Zepper <6963520+MatthiasZepper@users.noreply.github.com> Date: Fri Dec 13 11:10:17 2024 +0100 Merge branch 'dev' into no_env_in_download_action commit 6eaa9980a4cdd0ca752c29ceca62240438482e4e Merge: 06980d49 a2c2339d Author: Júlia Mir Pedrol Date: Fri Dec 13 11:07:58 2024 +0100 Merge pull request #3356 from mirpedrol/fix-include-modules-config fix including modules.config commit f02d6bb5f96cee457a2fa2526808875349c6afb5 Author: Júlia Mir Pedrol Date: Thu Dec 12 11:14:52 2024 +0100 Update nf_core/pipeline-template/.github/workflows/awsfulltest.yml commit e32293d1508ae1590183a16b70e9aedbc964ee59 Author: mashehu Date: Tue Dec 10 16:01:04 2024 +0100 update changelog commit 4c32e4795214f847760a4e91482f682fa769dd5f Author: mashehu Date: Tue Dec 10 16:00:03 2024 +0100 fix if clause commit 4958cf0135b6257d14bd4ef7c84c0e21f12497dc Author: mashehu Date: Tue Dec 10 15:40:28 2024 +0100 Be more verbose in approval check action commit a8065da8a87bcc888530e03bb645d12909803f96 Author: Matthias Hörtenhuber Date: Thu Dec 12 10:59:54 2024 +0100 Revert "don't set up loggin level to error [no changelog]" commit 060eff51c3562fef14f188678f2a8eea2a8a05a7 Author: Matthias Zepper <6963520+MatthiasZepper@users.noreply.github.com> Date: Fri Dec 13 10:58:08 2024 +0100 Apply suggestions from code review Co-authored-by: Matthias Hörtenhuber commit a2c2339d4a2e89215b25e7f549721bc94c23f566 Author: mirpedrol Date: Fri Dec 13 10:47:18 2024 +0100 revert #3301 until we add linting from #2508 commit eed09ba43a0b85924bb3f8526f02dbdf12d85588 Author: nf-core-bot Date: Fri Dec 13 09:38:20 2024 +0000 [automated] Update CHANGELOG.md commit 4cf860785d5735d4cd5b026b93653e67533b88c9 Author: mirpedrol Date: Fri Dec 13 10:36:28 2024 +0100 fix including modules.config commit 06980d49cdb60cecff9315a998d7939c0ba30b21 Merge: aef5e166 8c5855f1 Author: Júlia Mir Pedrol Date: Fri Dec 13 09:38:59 2024 +0100 Merge pull request #3354 from mashehu/fix-ro-crate-in-bump-version Fix ro crate in bump version commit 8c5855f19a12bad41d8db15dc9238b6e89d634d2 Author: Júlia Mir Pedrol Date: Fri Dec 13 08:19:50 2024 +0000 get max of 100 branch names commit b1ebca0ab010a2af1448ccef1100e7c57b9ffb41 Merge: 307d0d4b aef5e166 Author: Júlia Mir Pedrol Date: Fri Dec 13 08:53:30 2024 +0100 Merge branch 'dev' into fix-ro-crate-in-bump-version commit aef5e1669abd3f06d266f58ec3411e5748d95784 Merge: 3c20e003 d65da79d Author: Júlia Mir Pedrol Date: Fri Dec 13 08:46:32 2024 +0100 Merge pull request #3353 from mashehu/bump-version-3_1_1dev bump version to 3.1.1dev commit 307d0d4b91729ba9d708f6fbd761414559c45e45 Author: mashehu Date: Thu Dec 12 16:09:53 2024 +0100 only bump ro-crate if it already exists commit d65da79dc24adb9c36f6d114eef9279f4c55b07c Author: mashehu Date: Thu Dec 12 15:04:39 2024 +0100 bump version to 3.1.1dev commit 62ebfc1af318b3bdc749fe75ad1bce407273ae54 Author: Matthias Zepper Date: Thu Dec 12 11:35:57 2024 +0100 Replace Github environment usage in Download Action. 
commit 3c20e003e87c46d451c66534d1f1baeb11efc675 Merge: f77737da c2a9dee1 Author: Júlia Mir Pedrol Date: Thu Dec 12 11:30:04 2024 +0100 Merge pull request #3338 from mashehu/more-verbose-awsfulltest-checks Be more verbose in approval check action commit c2a9dee17bc981f0b20bcea7dc167a620d409bb9 Author: Júlia Mir Pedrol Date: Thu Dec 12 11:14:52 2024 +0100 Update nf_core/pipeline-template/.github/workflows/awsfulltest.yml commit f77737da918a2bbc120690c27cdac5f66095fab8 Merge: d56b528f 8f00659c Author: Matthias Hörtenhuber Date: Thu Dec 12 11:12:01 2024 +0100 Merge pull request #3350 from nf-core/revert-3349-sync-debugging commit 8f00659ca4c54144eee4415a98dedad1eaf87d67 Author: Matthias Hörtenhuber Date: Thu Dec 12 10:59:54 2024 +0100 Revert "don't set up loggin level to error [no changelog]" commit d56b528f29351fb78b9ae65dc27fefd8df6cd816 Merge: 1af4c615 e92f327c Author: Júlia Mir Pedrol Date: Thu Dec 12 10:21:37 2024 +0100 Merge pull request #3349 from mirpedrol/sync-debugging don't set up loggin level to error [no changelog] commit e92f327cdc75e93253524b4e1c277e9ea8d91149 Author: mirpedrol Date: Thu Dec 12 10:00:43 2024 +0100 don't set up loggin level to error commit 1af4c6156e23c4ef3202aeaa1b5c45ec4addb7b8 Merge: 380be6c2 be38c157 Author: Matthias Hörtenhuber Date: Thu Dec 12 09:24:47 2024 +0100 Merge pull request #3347 from mashehu/specify-ref-in-sync-yml commit be38c157038786f6e1ae60ce82ffa666674d9d0e Author: mashehu Date: Wed Dec 11 20:06:58 2024 +0100 use same ref in checkout as triggered the workflow commit 380be6c2c20cd8306bd73089b3999bea21440e89 Author: mirpedrol Date: Wed Dec 11 16:10:05 2024 +0100 cleanup debugging echo commit 3858e38a5410684bfd58a1f6a27bcc4dc65fa81a Author: mirpedrol Date: Wed Dec 11 16:03:12 2024 +0100 try exiting pipeline directory before saving defaultBranch commit 1de1034c0888f515bc26c32559707b24e835890c Author: mirpedrol Date: Wed Dec 11 15:34:37 2024 +0100 echo message to sync GHA commit e8b6a9f5b52ec0f71f8c2204ca7a0f0428033c41 Merge: a4965f3e df5b3233 Author: Júlia Mir Pedrol Date: Wed Dec 11 14:58:34 2024 +0100 Merge pull request #3345 from mirpedrol/sync-default-branch add more debugging on sync GHA [no changelog] commit df5b3233a38b6732b9f45946fbe6493b8bc73151 Author: mirpedrol Date: Wed Dec 11 14:51:17 2024 +0100 add more debugging on sync GHA commit a4965f3e3892c0610c9553146bdd9a46fed60ef1 Merge: 06db03f3 6148983c Author: Júlia Mir Pedrol Date: Wed Dec 11 12:12:13 2024 +0100 Merge pull request #3342 from mirpedrol/sync-default-branch go back to original directory after configuring git defaultBranch [no changelog] commit 6148983c8469149cda6e834998cc01c78102f418 Author: mirpedrol Date: Wed Dec 11 11:44:22 2024 +0100 go back to original directory after configuring git defaultBranch commit 06db03f30e970733f25f215dfceb333d188bc609 Merge: 1a221067 379bad0e Author: Júlia Mir Pedrol Date: Wed Dec 11 11:13:59 2024 +0100 Merge pull request #3339 from mashehu/add-verbose-mode-to-sync Add verbose mode to sync action commit 379bad0e97a6a1ec407cd9a0705cf5be06557873 Author: Júlia Mir Pedrol Date: Wed Dec 11 10:03:37 2024 +0000 add input debug for workflow_dispatch sync action commit bb3a25294af0326cbbc81857f623bca50a12acc8 Author: Júlia Mir Pedrol Date: Wed Dec 11 10:42:54 2024 +0100 Apply suggestions from code review commit 5c8937e38b406f5748ebf3ed936de9b1ed0ddaa6 Author: mashehu Date: Tue Dec 10 17:19:32 2024 +0100 update changelog commit fa4e07424bf60a42559f4fa756495d7f326eea3c Author: mashehu Date: Tue Dec 10 17:17:02 2024 +0100 add verbose mode to sync action 
commit 174861324f70c243c71484d3216c5edfb8f78013 Author: mashehu Date: Tue Dec 10 16:01:04 2024 +0100 update changelog commit f7ced6bcd6e07baf89e6ee7f67d7f8f8c532ab0e Author: mashehu Date: Tue Dec 10 16:00:03 2024 +0100 fix if clause commit 0b7fe501992f288c8e5164f46ac8fd63d95ddb99 Author: mashehu Date: Tue Dec 10 15:40:28 2024 +0100 Be more verbose in approval check action commit 1a22106771d3c6b468e4d3cfb2046157188564cb Author: mashehu Date: Tue Dec 10 15:17:06 2024 +0100 fix Changelog commit 15b044d1b3a1a29b2406d42b5c791fd40e96ce9b Author: mashehu Date: Tue Dec 10 15:15:26 2024 +0100 change working dir when setting defaultBranch commit 7a69b8cec1115281c1cee10003203104d40e4cb1 Merge: 666fc9f5 0e7c8701 Author: mashehu Date: Tue Dec 10 14:44:40 2024 +0100 Merge branch 'dev' of github.com:nf-core/tools into fix-sync-grep commit 666fc9f522f8dc94d314758a36bbd260271df7b1 Author: mashehu Date: Tue Dec 10 14:44:12 2024 +0100 fix path for nextflow.config grep commit 0e7c87015e8e72fff540cc76809512a122644df9 Merge: a7917071 5ee2d52d Author: Matthias Hörtenhuber Date: Tue Dec 10 14:33:03 2024 +0100 Merge pull request #3337 from mashehu/fix-init-branch-in-sync-action set git defaultBranch to master in sync action commit 5ee2d52d5ef6a2ffe6e6970b65fbde7e7f7539c8 Author: mashehu Date: Tue Dec 10 14:30:14 2024 +0100 use nextflow.config to set defaultBranch commit 60ae1afaa7e568806602f579ee63577f5c0e4832 Author: mashehu Date: Tue Dec 10 13:26:07 2024 +0100 update changelog commit de00162797ddd833a592b918e6405d7f87d26f15 Author: mashehu Date: Tue Dec 10 13:20:46 2024 +0100 set git defaultBranch to master in sync action commit a7917071da2f4170204aa1962f799a8c9104b992 Merge: 05e7b950 47054ad1 Author: Matthias Hörtenhuber Date: Tue Dec 10 12:44:04 2024 +0100 Merge pull request #3335 from mashehu/set-default-branch-to-master set default_branch to master for now commit 47054ad1500238aa7561e7e6cee812d0d5d489c1 Merge: 7110fff5 05e7b950 Author: Matthias Hörtenhuber Date: Tue Dec 10 12:25:27 2024 +0100 Merge branch 'dev' into set-default-branch-to-master commit 7110fff5287cdeb2e5e769d7cf18bd725436f0ad Author: mashehu Date: Tue Dec 10 12:20:01 2024 +0100 update changelog commit e9cde5910a6ebd3e8bccef91a84ee9a47a56381a Author: mashehu Date: Tue Dec 10 12:17:54 2024 +0100 set default_branch to master for now commit 05e7b9503312b8987a7b178d38dd944a05d0d040 Merge: 55ca4d48 953055d3 Author: Matthias Hörtenhuber Date: Tue Dec 10 11:38:06 2024 +0100 Merge pull request #3334 from mashehu/update-crate-with-version-bump rocrate: Update crate with version bump and handle new contributor field commit 953055d337811a6c45db9f49b71008485e51e6b0 Author: mashehu Date: Tue Dec 10 11:22:01 2024 +0100 fix type error commit 8a78d4bcf489c1073ca4376a93c763e4c4927154 Author: Matthias Hörtenhuber Date: Tue Dec 10 10:51:44 2024 +0100 Update nf_core/pipelines/rocrate.py commit b976ec32a49f17e51784c425b2fc1a49be3a3307 Author: mashehu Date: Tue Dec 10 10:16:12 2024 +0100 update changelog commit 6ae3de544a77354c2276d4be16863b65ccaaa823 Author: mashehu Date: Tue Dec 10 10:14:47 2024 +0100 handle new author field commit 3e81adb62de4649c17a0c7233c8d3feec77e82e8 Author: mashehu Date: Tue Dec 10 10:13:51 2024 +0100 update version in ro crate on version bump commit 55ca4d4808b8527e15a11ee05cec2325d46b3a13 Merge: 5497cacb 0e62e6ed Author: Matthias Hörtenhuber Date: Mon Dec 9 18:57:09 2024 +0100 Merge pull request #3333 from mashehu/fix-duplicated-ro-crate-entries commit 0e62e6edcf77f46ff34d640b04019d781828ade2 Author: mashehu Date: Mon Dec 9 18:13:37 2024 
+0100 update changelog commit 3c4fa811d521fe4090b2aca78154471d707ee58d Author: mashehu Date: Mon Dec 9 18:12:08 2024 +0100 rocrate: remove duplicated entries for name and version commit 5497cacb1e0ddcf9a7a73b7cb92276904a7abe4b Merge: 4ea867a4 045aaec4 Author: Júlia Mir Pedrol Date: Mon Dec 9 17:30:06 2024 +0100 Merge pull request #3332 from mashehu/fix-gitpod-yml Don't break gitpod.yml with template string commit 045aaec42944e2ef7dfd2d1d6e2fcd1a6775d320 Author: mashehu Date: Mon Dec 9 17:11:04 2024 +0100 fix template string commit 5b1cf24f7fc9548928221f914253570c7238b63d Author: Matthias Hörtenhuber Date: Mon Dec 9 17:09:32 2024 +0100 Update nf_core/pipeline-template/.gitpod.yml Co-authored-by: Júlia Mir Pedrol commit 15880be40d065d4d89dd2c2c65513713a029289b Author: mashehu Date: Mon Dec 9 16:54:17 2024 +0100 make prettier happy commit 337143daba96d73a78ff2c324c2ff38381f60e4f Author: mashehu Date: Mon Dec 9 16:43:57 2024 +0100 update changelog commit 5bc9952308c1e99ec24ee92f4b29cec52399fca7 Merge: 03854903 77ccc6a3 Author: mashehu Date: Mon Dec 9 16:41:43 2024 +0100 Merge branch 'fix-gitpod-yml' of github.com:mashehu/tools into fix-gitpod-yml commit 038549034dadeed8835719b893429cd8b29a80a3 Author: mashehu Date: Mon Dec 9 16:41:31 2024 +0100 don't break gitpod.yml with template string commit 4ea867a4e7732c72fb6f5cff6f4ec3fd62ecce5a Merge: 3f85556c 8c9888bf Author: Matthias Hörtenhuber Date: Mon Dec 9 16:09:09 2024 +0100 Merge pull request #3330 from mashehu/skip-GRCh37 Template: Remove mention of GRCh37 if igenomes is skipped commit 3f85556c65a8122b40a5acc3043bc1a255f89671 Merge: 7586248b a59e929b Author: Matthias Hörtenhuber Date: Mon Dec 9 16:02:06 2024 +0100 Merge pull request #3331 from mirpedrol/revert-textual-dev-1.7.0 Revert textual dev 1.7.0 [no changelog] commit 8c9888bf893ac17fa943c2998a09a7e24c619d56 Merge: 1da1fc60 7586248b Author: mashehu Date: Mon Dec 9 15:51:16 2024 +0100 Merge branch 'dev' of github.com:nf-core/tools into skip-GRCh37 # Conflicts: # nf_core/pipeline-template/.gitpod.yml commit a59e929bd677e4437ba59fcf8ac3d921b0570fd2 Author: mirpedrol Date: Mon Dec 9 15:44:24 2024 +0100 revert updating textual-dev to 1.7.0 commit 1da1fc600271fccdccc41b4c1cb8f5724a85c148 Author: mashehu Date: Mon Dec 9 15:27:29 2024 +0100 update changelog commit e073622e8bdfb1879eb38fc981cf931152322aac Author: mashehu Date: Mon Dec 9 15:23:56 2024 +0100 add missing closing tag commit 76ac145edee3cc5ee8e60c3959dba41e296d66ab Author: mashehu Date: Mon Dec 9 15:23:20 2024 +0100 remove mention of GRCh37 from template if igenomes is skipped closes #3322 commit 7586248b02c7f03fb510ff962b0074bd98404393 Merge: edbeee36 afdb0128 Author: Matthias Hörtenhuber Date: Mon Dec 9 13:52:54 2024 +0100 Merge pull request #3329 from mashehu/bump-version-3_1_0 Bump version 3.1.0 commit afdb012820ffbb07c037d36da0233e30502624a5 Author: Júlia Mir Pedrol Date: Mon Dec 9 12:29:16 2024 +0000 add highlights and version updates sections to changelog commit ae1066c79dccda7068b957360234feecdfe0d5c4 Author: mashehu Date: Mon Dec 9 13:10:57 2024 +0100 bump to 3.1.0 commit edbeee366f0407cb43b5580d86af04a3bce26f4b Merge: 7e30b29f ee12866a Author: Júlia Mir Pedrol Date: Mon Dec 9 12:58:26 2024 +0100 Merge pull request #3328 from mirpedrol/update-template-components Update template components commit 7e30b29fa51ff4329caa754ff59ea8a608d4d657 Merge: 99dba72b 4ab2637d Author: Júlia Mir Pedrol Date: Mon Dec 9 12:51:02 2024 +0100 Merge pull request #3308 from nf-core/renovate/textual-dev-1.x Update dependency textual-dev to v1.7.0 
commit ee12866a96d2b4038be1765034f0d7511cb25be0 Author: nf-core-bot Date: Mon Dec 9 11:45:14 2024 +0000 [automated] Update CHANGELOG.md commit f2090961118d94c5be201b9e6bbbf03899ba920f Author: mirpedrol Date: Mon Dec 9 12:43:50 2024 +0100 fix jinja template missing endif commit 69f00bc5003830677c42bceab34a60aec56e7365 Author: mirpedrol Date: Mon Dec 9 12:40:42 2024 +0100 update utils_nfcore_pipeline swf commit 4ab2637dd0dad73ba4d3574acd0e3fb9dc0aca0e Merge: c5ffcf87 99dba72b Author: Júlia Mir Pedrol Date: Mon Dec 9 12:34:23 2024 +0100 Merge branch 'dev' into renovate/textual-dev-1.x commit 99dba72b19b5ee18a135c810c7beb66e54dfc351 Merge: 98285e34 e5f0bd35 Author: Júlia Mir Pedrol Date: Mon Dec 9 12:31:05 2024 +0100 Merge pull request #3292 from nf-core/vs-code-tests build: Setup VS Code tests commit e5f0bd35c18d94f05f28327af37bb77206e32c93 Merge: a39ed597 98285e34 Author: Júlia Mir Pedrol Date: Mon Dec 9 12:04:05 2024 +0100 Merge branch 'dev' into vs-code-tests commit 98285e3480cd68b5d9309bdcbd3ba4a1bb8950c6 Merge: 582774ba 7a03d907 Author: Matthias Hörtenhuber Date: Mon Dec 9 12:03:18 2024 +0100 Merge pull request #3323 from mashehu/fix-api-doc-headers fix headers in api docs commit 582774baedefe239f5d35ea6bde8af4e3aa8de72 Merge: b14b5048 9f4036c5 Author: Júlia Mir Pedrol Date: Mon Dec 9 12:03:04 2024 +0100 Merge pull request #3304 from nf-core/remove-channel-operations-from-oncomplete Remove toList() channel operation from inside onComplete block commit 9f4036c5125ea364e06a90cef4eb0c69e0eec113 Merge: f22d7d9e b14b5048 Author: Júlia Mir Pedrol Date: Mon Dec 9 11:06:25 2024 +0100 Merge branch 'dev' into remove-channel-operations-from-oncomplete commit 7a03d907f6cdf8d75344edbbf2a141854346da11 Author: nf-core-bot Date: Mon Dec 9 09:47:45 2024 +0000 [automated] Fix code linting commit efdced268bddc3e217045c978d42efaf6e640d0d Merge: f54e1234 b14b5048 Author: Matthias Hörtenhuber Date: Mon Dec 9 10:44:03 2024 +0100 Merge branch 'dev' into fix-api-doc-headers commit b14b50487d16ec2a21dd92a2548b7890b26743b8 Merge: 0450503e 9c0f2acb Author: Phil Ewels Date: Sat Dec 7 08:10:25 2024 +0100 Merge pull request #3326 from nf-core/modules-outside-nf-core-too commit 0450503e049d884ac4242a598ee46342a359c93c Merge: b2132cf1 cc38f8d7 Author: lmReef <60915116+lmReef@users.noreply.github.com> Date: Sat Dec 7 10:47:24 2024 +1300 Merge pull request #3317 from lmReef/fix-linting-error Fix meta_yml linting error commit cc38f8d75b664141596be4e7289c94bdaf1c8119 Merge: 3cc7646c b2132cf1 Author: lmReef <60915116+lmReef@users.noreply.github.com> Date: Sat Dec 7 10:27:12 2024 +1300 Merge branch 'dev' into fix-linting-error commit b2132cf116e26925668c44d02daf5d44cb6f7395 Merge: 0ba1c700 af05e4ac Author: Matthias Hörtenhuber Date: Fri Dec 6 22:24:03 2024 +0100 Merge pull request #3321 from lmReef/fix-module-section-regex commit af05e4ac3aff1b8ae3b38485c05fac66fe49c1ce Merge: faded6a9 0ba1c700 Author: lmReef <60915116+lmReef@users.noreply.github.com> Date: Sat Dec 7 10:04:17 2024 +1300 Merge branch 'dev' into fix-module-section-regex commit 3cc7646ce4f2ac3a5e2b3eeb7b4ad681cbb88aa8 Merge: c5dc6ac7 0ba1c700 Author: lmReef <60915116+lmReef@users.noreply.github.com> Date: Sat Dec 7 10:04:15 2024 +1300 Merge branch 'dev' into fix-linting-error commit 9c0f2acb3ed544130b2ab71f12f64ee1eeb93fcb Merge: ba447fbf 0ba1c700 Author: Phil Ewels Date: Fri Dec 6 19:25:24 2024 +0100 Merge branch 'dev' into modules-outside-nf-core-too commit 0ba1c7007490f10a0f985a5d70fef474b58f0cac Author: nf-core-bot Date: Fri Dec 6 13:55:42 2024 +0000 
[automated] Update CHANGELOG.md commit 6a59a9749450bcf796aab68506302145e5f93efd Author: Mahesh Binzer-Panchal Date: Fri Dec 6 13:50:30 2024 +0000 Update gitpod vscode extensions to use nf-core extension pack commit ba447fbf7027312d7562ac533b9ca26ff68574fe Author: nf-core-bot Date: Thu Dec 5 22:57:35 2024 +0000 [automated] Update CHANGELOG.md commit da16adb69b352d9a2e5120137a5c2ac0be88264a Author: Phil Ewels Date: Thu Dec 5 23:56:27 2024 +0100 Make prompt less nf-core specific commit c5dc6ac70825396bc7b54232c2115db25cd76efc Merge: c3fd9bb0 978f8656 Author: lmReef <60915116+lmReef@users.noreply.github.com> Date: Fri Dec 6 08:45:34 2024 +1300 Merge branch 'dev' into fix-linting-error commit faded6a9f52f7b8a1e331388dde35a87239a3d5b Merge: c22c571c 978f8656 Author: lmReef <60915116+lmReef@users.noreply.github.com> Date: Fri Dec 6 08:45:21 2024 +1300 Merge branch 'dev' into fix-module-section-regex commit 978f86564b74fa924dc9a576b2158c8b503313ab Merge: e8510ccf c03f2128 Author: Matthias Hörtenhuber Date: Thu Dec 5 19:49:45 2024 +0100 Merge pull request #3325 from nf-core/renovate/astral-sh-ruff-pre-commit-0.x commit c03f21288a0ff05dc42f2b5dddf135531564fbed Author: nf-core-bot Date: Thu Dec 5 16:52:26 2024 +0000 [automated] Update CHANGELOG.md commit 2c5932e66e2cdbf85728fb0ec03c695e5aebdb8a Author: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Date: Thu Dec 5 16:49:54 2024 +0000 Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.2 commit f54e1234e9451afbafecb26f124261ba8bc3977b Author: nf-core-bot Date: Thu Dec 5 16:04:21 2024 +0000 [automated] Update CHANGELOG.md commit e8510ccf4438d7c1e01045a3b090d7f7ebc65703 Merge: f4b5b5d0 ce866c62 Author: Júlia Mir Pedrol Date: Thu Dec 5 16:47:32 2024 +0100 Merge pull request #3142 from mashehu/master-to-main Allow `main` as default channel commit f4b5b5d00764ba05b07f9c2cca894b5140b67264 Merge: a9d97e4f c23af55f Author: Matthias Hörtenhuber Date: Thu Dec 5 16:36:36 2024 +0100 Merge pull request #3298 from nf-core/subworkflow-messages Improve subworkflow nf-test migration warning commit 6ddfe4d35b4a159dd5f18a12c77c6761ec66068f Author: mashehu Date: Thu Dec 5 16:30:44 2024 +0100 fix headers commit c23af55faa65b9f4bc6735917bdcbb77acdc500e Author: nf-core-bot Date: Thu Dec 5 15:19:54 2024 +0000 [automated] Update CHANGELOG.md commit a9d97e4f296551fd0c7b9e071a0990007b3b3770 Author: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Date: Thu Dec 5 12:16:57 2024 +0000 Update python:3.12-slim Docker digest to 2b00791 (#3319) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> commit ce866c62c141ce1e0126dad9942f879e8bea3f89 Author: Júlia Mir Pedrol Date: Thu Dec 5 11:55:09 2024 +0100 Remove not needed setup-java step commit c22c571cff747b1fb47ae11d3dc1fa76e14a9a2f Author: lmReef Date: Thu Dec 5 10:14:01 2024 +1300 Revert "[automated] Update CHANGELOG.md" This reverts commit 355dff2c8e8fa28b628459bab65efeadf3793dfc. 
commit 9996df9e061a7512f6cce1d34fc0e8f7a937df95 Author: lmReef Date: Thu Dec 5 10:11:58 2024 +1300 update changelog commit 355dff2c8e8fa28b628459bab65efeadf3793dfc Author: nf-core-bot Date: Wed Dec 4 21:10:36 2024 +0000 [automated] Update CHANGELOG.md commit 57a92a0eff27c3570ae2cfcf73a765a66ed3c541 Author: lmReef Date: Thu Dec 5 09:34:08 2024 +1300 fix: module section linter regex commit c3fd9bb084c86e1204e9d4cbeebef413444b1834 Author: lmReef Date: Thu Dec 5 08:59:07 2024 +1300 fix: add explicit str type to self.process_name commit f22d7d9e03f33c078967e38bc7174e6a5de6547c Author: Rob Syme Date: Wed Dec 4 08:09:57 2024 -0500 Revert changes to nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf commit b94435b92891b2cee0a78c47890b1d7349d30376 Author: Júlia Mir Pedrol Date: Wed Dec 4 09:48:13 2024 +0000 remove too many raw/endraw in branch.yml commit 4a8e97631b6164b92f039de74e462cd9b9dd94ee Author: lmReef Date: Wed Dec 4 10:41:19 2024 +1300 update changelog entry with link commit d3d9c72d7a20b617e417a8e0ba22ad9b29944d07 Author: lmReef Date: Wed Dec 4 10:29:10 2024 +1300 update changelog commit b04351db539fc3962b816595ee7ab9eae3a83bd3 Author: lmReef Date: Wed Dec 4 09:54:53 2024 +1300 fix: python linting warn commit dc9c3b1763d3f41ae42a4c97a405ba9bfad61878 Author: lmReef Date: Wed Dec 4 09:43:11 2024 +1300 fix: linting error in meta_yml where module.process_name is always "" commit 229a2329dfefc5981897006c546a00d61da587da Author: mashehu Date: Tue Dec 3 18:14:33 2024 +0100 disambiguate in more places commit 2e4a8ede4b1f7af1efc003cd0ce8524b002e8612 Author: mashehu Date: Tue Dec 3 16:50:53 2024 +0100 use contect to figure out if it master or main Co-authored-by: @mirpredrol commit 9c43f15ea589c17374b593dabcf6dea621c07e64 Author: mashehu Date: Tue Dec 3 15:36:20 2024 +0100 avoid confusion by separating the string commit 14480d280739292c69c32bc2dd0f1f0127abe136 Author: mashehu Date: Tue Dec 3 15:26:31 2024 +0100 use java v17 commit 1857ebf51658a0100d70fa324303b6e4fa3adb12 Author: mashehu Date: Tue Dec 3 14:46:55 2024 +0100 add missing setup-java action commit 8bc1a307d1ef53a9311f0ce5e44064217f2e1006 Author: mashehu Date: Tue Dec 3 14:43:11 2024 +0100 nope, setup-java it is commit a604cb5595e9f93059d58ba1e6a9d613430803b6 Author: mashehu Date: Tue Dec 3 14:37:17 2024 +0100 set the variable manually commit 2475ad1604a73aaff7901ce8edb99312cd408298 Merge: 4b2338ff a7351a24 Author: Júlia Mir Pedrol Date: Tue Dec 3 13:35:46 2024 +0000 Merge branch 'dev' of https://github.com/nf-core/tools into remove-channel-operations-from-oncomplete commit 4b2338ff161a1ddea799c8ab5270e1cefe0f7da0 Author: Júlia Mir Pedrol Date: Tue Dec 3 13:32:17 2024 +0000 update modules and subworkflows commit 00b8ae0da46a7562be763c9efa6b82c696a362ba Author: mashehu Date: Tue Dec 3 14:18:06 2024 +0100 pin java version 21 in CI commit e9ed94ec0f8ece41b406e2f15cd1d7089bc54621 Author: mashehu Date: Tue Dec 3 14:08:04 2024 +0100 set defaultBranch in nextflow config to allow `main` commit 831548e31b0771eb2e1e0eaade2a6a46562ad554 Merge: 73957fd1 a7351a24 Author: Matthias Hörtenhuber Date: Tue Dec 3 13:55:21 2024 +0100 Merge branch 'dev' into master-to-main commit 73957fd113da99ec85f7c54dfe71a93007308b54 Merge: bd9608d6 41b4516e Author: mashehu Date: Tue Dec 3 13:54:11 2024 +0100 Merge branch 'main' of github.com:nf-core/tools into master-to-main # Conflicts: # .github/RELEASE_CHECKLIST.md # .github/workflows/branch.yml # .github/workflows/pytest.yml # nf_core/pipeline-template/README.md # 
nf_core/pipeline-template/assets/schema_input.json # nf_core/pipeline-template/nextflow_schema.json commit bd9608d688f722d17398d8a7fe1d5dfeaeed02be Author: Matthias Hörtenhuber Date: Tue Dec 3 13:38:33 2024 +0100 Update nf_core/components/create.py commit a39ed5974bcc4ee454023990d3908d99c19b5ec9 Merge: 35eb958d a7351a24 Author: Júlia Mir Pedrol Date: Tue Dec 3 13:27:47 2024 +0100 Merge branch 'dev' into vs-code-tests commit abb0fa2f4bee2123559583f27c2855c52588d3be Author: Robert Syme Date: Fri Nov 29 14:34:37 2024 -0500 Remove toList() channel operation from inside onComplete block This PR resolves an important bug in the nf-core template, whereby all workflows will hang if the --email parameter is supplied. The onComplete block will hang if there are any (queue) channel operations inside the block. All values in the onComplete block must be resolved to single values or value channels _before_ the onComplete block starts. The async channels are not available inside onComplete, so calling the toList() operation will hang forever as the async queue channel will never be completed. commit c5ffcf87e6c742fb224e79b116c8d318b448244e Author: nf-core-bot Date: Sun Dec 1 02:09:38 2024 +0000 [automated] Update CHANGELOG.md commit 456aa055b48af339a04ce7170d7aad3d5732b940 Author: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Date: Sun Dec 1 02:08:49 2024 +0000 Update dependency textual-dev to v1.7.0 commit 35eb958d39fe52b938e535c100afcb6bb19d1549 Author: Edmund Miller Date: Tue Nov 26 02:17:48 2024 +0000 Add Python terminal env settings commit 805620b5dee7185dd9e5e4f3a409208fdf66c03a Author: Edmund Miller Date: Mon Nov 25 11:09:22 2024 -0600 fix(#3297): Update warning message for pytest-workflow commit 3456346eea3f6104cf0de7e38ccc8543a44d3c3f Author: nf-core-bot Date: Thu Nov 21 17:29:49 2024 +0000 [automated] Update CHANGELOG.md commit dd477ab82f7a162e1515bc0f43a72af7147d3618 Author: Edmund Miller Date: Wed Nov 20 22:50:15 2024 -0600 build: Setup VS Code tests commit fa2750a6c3fbc2ce3ccaf1f97e862fbbb301940e Author: mashehu Date: Tue Aug 27 15:18:27 2024 +0200 use format instead of f-string commit 19ec18810aafff259a85bb848520e9671c919672 Author: mashehu Date: Tue Aug 27 10:30:07 2024 +0200 revert to format commit 20545509ff899d82683baca5a7395a84febb9c73 Author: mashehu Date: Tue Aug 27 10:07:47 2024 +0200 fix quotes commit 9d2a3abf8af09b172b5065e40d7618f0dbfbecca Author: mashehu Date: Tue Aug 27 09:12:38 2024 +0200 fix order in lint commit ed0fb1b4bb0b7c21ea61d6ed84197b692b02905a Author: mashehu Date: Tue Aug 27 09:08:51 2024 +0200 set default value for default branch commit da915195f6450f6870eec2b2dbccf1dbd7f2206d Author: mashehu Date: Tue Aug 27 09:00:05 2024 +0200 fix schema check commit 5b3dabd588f84f2e2dc111aa639e07a3ebfd3c87 Author: mashehu Date: Tue Aug 27 08:47:44 2024 +0200 fix order of initialization commit 889e59e15f2d77377ebfd7912ee5205bd853a403 Author: mashehu Date: Tue Aug 27 08:41:53 2024 +0200 handle missing self.default_branch commit 6fc6da2514c0aa73a6c971f639e52bd4d44590cf Author: mashehu Date: Tue Aug 27 07:30:15 2024 +0200 change pipeline template and tooling to allow `main` as default branch commit 77ccc6a3608b4656d157cf729799636b676d201b Author: mashehu Date: Wed Dec 20 15:51:04 2023 +0100 set gitpod.yml docker image to latest --- .github/actions/create-lint-wf/action.yml | 1 - .github/workflows/create-lint-wf.yml | 4 +- .../create-test-lint-wf-template.yml | 1 + .github/workflows/create-test-wf.yml | 4 +- .github/workflows/sync.yml | 23 +++++- .gitpod.yml | 9 
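The onComplete explanation in commit abb0fa2f above describes a general async pitfall: once nothing is driving a queue, any blocking read on it can never return, so values must be resolved to plain objects before the completion hook runs. A rough Python asyncio analogy of that failure mode (a hypothetical standalone sketch, not the Nextflow implementation; all names are invented):

import asyncio

async def main() -> None:
    queue: "asyncio.Queue[str]" = asyncio.Queue()  # stands in for a Nextflow queue channel
    resolved: list[str] = []

    # "Pipeline" phase: producers are still alive, so queue operations complete.
    for name in ("fastqc", "multiqc"):
        await queue.put(name)
    while not queue.empty():
        resolved.append(queue.get_nowait())  # resolve to a plain list up front

    # "onComplete" phase: only pre-resolved values are safe to touch.
    print("completed with:", resolved)

    # The buggy pattern: a blocking read on the queue after all producers are
    # done. Without the timeout guard this would wait forever, which is the
    # analogue of calling toList() on a queue channel inside onComplete.
    try:
        await asyncio.wait_for(queue.get(), timeout=0.1)
    except asyncio.TimeoutError:
        print("a late queue.get() never returns; the timeout exists only for this demo")

asyncio.run(main())
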
+-- .pre-commit-config.yaml | 2 +- .vscode/settings.json | 9 +++ CHANGELOG.md | 69 ++++++++++++++---- Dockerfile | 2 +- docs/api/_src/api/pipelines/bump_version.md | 2 +- docs/api/_src/api/pipelines/create.md | 2 +- docs/api/_src/api/pipelines/download.md | 2 +- docs/api/_src/api/pipelines/launch.md | 2 +- docs/api/_src/api/pipelines/lint.md | 2 +- docs/api/_src/api/pipelines/list.md | 2 +- docs/api/_src/api/pipelines/params-file.md | 2 +- docs/api/_src/api/pipelines/schema.md | 2 +- docs/api/_src/api/pipelines/sync.md | 2 +- docs/api/_src/api/pipelines/utils.md | 2 +- nf_core/__main__.py | 4 +- nf_core/components/components_utils.py | 4 +- nf_core/components/nfcore_component.py | 10 ++- nf_core/modules/lint/main_nf.py | 11 ++- .../pipeline-template/.github/CONTRIBUTING.md | 4 +- .../.github/ISSUE_TEMPLATE/bug_report.yml | 1 - .../.github/PULL_REQUEST_TEMPLATE.md | 4 +- .../.github/workflows/awsfulltest.yml | 18 ++++- .../.github/workflows/branch.yml | 18 +++-- .../.github/workflows/download_pipeline.yml | 49 ++++++++----- nf_core/pipeline-template/.gitpod.yml | 14 +--- nf_core/pipeline-template/.prettierignore | 3 + nf_core/pipeline-template/CITATIONS.md | 2 +- nf_core/pipeline-template/README.md | 13 ++-- .../assets/schema_input.json | 2 +- nf_core/pipeline-template/docs/output.md | 11 ++- nf_core/pipeline-template/docs/usage.md | 8 +- nf_core/pipeline-template/modules.json | 2 +- nf_core/pipeline-template/nextflow.config | 15 ++-- .../pipeline-template/nextflow_schema.json | 8 +- .../utils_nfcore_pipeline_pipeline/main.nf | 6 +- .../nf-core/utils_nfcore_pipeline/main.nf | 42 +++++------ .../tests/main.function.nf.test | 44 +++++++++++ nf_core/pipelines/bump_version.py | 5 ++ nf_core/pipelines/create/create.py | 73 ++++++++++++------- .../pipelines/create/template_features.yml | 6 ++ nf_core/pipelines/download.py | 2 +- nf_core/pipelines/lint/actions_awsfulltest.py | 2 +- nf_core/pipelines/lint/version_consistency.py | 2 +- nf_core/pipelines/lint_utils.py | 2 +- nf_core/pipelines/rocrate.py | 45 +++++++++++- nf_core/pipelines/schema.py | 8 +- .../subworkflows/lint/subworkflow_tests.py | 2 +- nf_core/synced_repo.py | 7 +- nf_core/utils.py | 2 +- requirements-dev.txt | 2 +- setup.py | 2 +- tests/pipelines/lint/test_actions_awstest.py | 2 +- tests/pipelines/test_lint.py | 2 +- tests/pipelines/test_rocrate.py | 35 +++++++++ tests/pipelines/test_sync.py | 8 +- tests/utils.py | 4 +- 62 files changed, 444 insertions(+), 204 deletions(-) create mode 100644 .vscode/settings.json diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index 3ef0760513..3ffd960d23 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -15,7 +15,6 @@ runs: cd create-lint-wf export NXF_WORK=$(pwd) - # Set up Nextflow - name: Install Nextflow uses: nf-core/setup-nextflow@v2 with: diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 37ab71bc3b..fa6c38ef07 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -27,14 +27,12 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true -env: - NXF_ANSI_LOG: false - jobs: MakeTestWorkflow: runs-on: ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} env: NXF_ANSI_LOG: false + strategy: matrix: NXF_VER: diff --git a/.github/workflows/create-test-lint-wf-template.yml 
b/.github/workflows/create-test-lint-wf-template.yml index fffa9ffe7a..cabd4b9abe 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -51,6 +51,7 @@ jobs: needs: prepare-matrix env: NXF_ANSI_LOG: false + strategy: matrix: TEMPLATE: ${{ fromJson(needs.prepare-matrix.outputs.all_features) }} diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 93581b9153..53f84b72c4 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -27,15 +27,13 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true -env: - NXF_ANSI_LOG: false - jobs: RunTestWorkflow: # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default runs-on: ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} env: NXF_ANSI_LOG: false + strategy: matrix: NXF_VER: diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index 625f00d247..7061294783 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -23,6 +23,10 @@ on: description: "Pipeline to sync" type: string default: "all" + debug: + description: "Enable debug/verbose mode (true or false)" + type: boolean + default: false # Cancel if a newer run is started concurrency: @@ -62,6 +66,8 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out nf-core/tools + with: + ref: ${{ github.ref_name }} - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out nf-core/${{ matrix.pipeline }} @@ -87,6 +93,19 @@ jobs: with: version: "latest-everything" + - name: Set Git default branch from nextflow.config and set git default branch to that or "master" + + run: | + pushd nf-core/${{ matrix.pipeline }} + defaultBranch=$(grep -B5 -A5 "nextflowVersion" nextflow.config | grep "defaultBranch" | cut -d"=" -f2) + if [ -z "$defaultBranch" ]; then + defaultBranch="master" + fi + popd + echo "Default branch: $defaultBranch" + echo "defaultBranch=$defaultBranch" >> GITHUB_OUTPUT + git config --global init.defaultBranch $defaultBranch + - name: Run synchronisation if: github.repository == 'nf-core/tools' env: @@ -94,7 +113,9 @@ jobs: run: | git config --global user.email "core@nf-co.re" git config --global user.name "nf-core-bot" - nf-core --log-file sync_log_${{ matrix.pipeline }}.txt pipelines sync -d nf-core/${{ matrix.pipeline }} \ + nf-core --log-file sync_log_${{ matrix.pipeline }}.txt \ + ${{ github.event.inputs.debug == 'true' && '--verbose' || '' }} \ + pipelines sync -d nf-core/${{ matrix.pipeline }} \ --from-branch dev \ --pull-request \ --username nf-core-bot \ diff --git a/.gitpod.yml b/.gitpod.yml index efe193f35f..d5948695bf 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -9,11 +9,4 @@ tasks: vscode: extensions: - - esbenp.prettier-vscode # Markdown/CommonMark linting and style checking for Visual Studio Code - - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files - - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar - - mechatroner.rainbow-csv # Highlight columns in csv files in different colors - - nextflow.nextflow # Nextflow syntax highlighting - - oderwat.indent-rainbow # Highlight indentation level - - streetsidesoftware.code-spell-checker # Spelling checker for source code - - 
charliermarsh.ruff # Code linter Ruff + - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1494f58182..68a6fa3ed7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.1 + rev: v0.8.2 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000000..5ffdff086c --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,9 @@ +{ + "python.testing.pytestEnabled": true, + "python.testing.unittestEnabled": false, + "python.testing.nosetestsEnabled": false, + "python.testing.pytestArgs": ["tests", "-v", "--tb=short"], + "python.testing.autoTestDiscoverOnSaveEnabled": true, + "python.terminal.activateEnvInCurrentTerminal": true, + "python.terminal.shellIntegration.enabled": true +} diff --git a/CHANGELOG.md b/CHANGELOG.md index da5f72c357..603dd664c6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,13 +1,42 @@ # nf-core/tools: Changelog -## v3.0.3dev +## v3.1.1dev + +### Template + +- Use outputs instead of the environment to pass around values between steps in the Download Test Action ([#3351](https://github.com/nf-core/tools/pull/3351)) +- Fix pre commit template ([#3358](https://github.com/nf-core/tools/pull/3358)) + +### Download + +### Linting + +### Modules + +- fix including modules.config ([#3356](https://github.com/nf-core/tools/pull/3356)) + +### Subworkflows + +### General + +- Add missing p ([#3357](https://github.com/nf-core/tools/pull/3357)) + +### Version updates + +## [v3.1.0 - Brass Boxfish](https://github.com/nf-core/tools/releases/tag/3.1.0) - [2024-12-09] + +**Highlights** + +- We added the new `contributors` field to the pipeline template `manifest`. +- The `nf-core pipelines download` command supports ORAS container URIs. +- New command `nf-core subworkflows patch`. 
### Template - Keep pipeline name in version.yml file ([#3223](https://github.com/nf-core/tools/pull/3223)) - Fix Manifest DOI text ([#3224](https://github.com/nf-core/tools/pull/3224)) - Do not assume pipeline name is url ([#3225](https://github.com/nf-core/tools/pull/3225)) -- fix workflow_dispatch trigger and parse more review comments in awsfulltest ([#3235](https://github.com/nf-core/tools/pull/3235)) +- fix `workflow_dispatch` trigger and parse more review comments in awsfulltest ([#3235](https://github.com/nf-core/tools/pull/3235)) - Add resource limits to Gitpod profile([#3255](https://github.com/nf-core/tools/pull/3255)) - Fix a typo ([#3268](https://github.com/nf-core/tools/pull/3268)) - Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) @@ -15,16 +44,21 @@ - Use `params.monochrome_logs` in the template and update nf-core components ([#3310](https://github.com/nf-core/tools/pull/3310)) - Fix some typos and improve writing in `usage.md` and `CONTRIBUTING.md` ([#3302](https://github.com/nf-core/tools/pull/3302)) - Add `manifest.contributors` to `nextflow.config` ([#3311](https://github.com/nf-core/tools/pull/3311)) +- Update template components ([#3328](https://github.com/nf-core/tools/pull/3328)) +- Template: Remove mention of GRCh37 if igenomes is skipped ([#3330](https://github.com/nf-core/tools/pull/3330)) +- Be more verbose in approval check action ([#3338](https://github.com/nf-core/tools/pull/3338)) ### Download -- First steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Modify `prioritize_direct_download()` to retain Seqera Singularity https:// Container URIs and hardcode Seqera Containers into `gather_registries()` ([#3244](https://github.com/nf-core/tools/pull/3244)). +- First steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Modify `prioritize_direct_download()` to retain Seqera Singularity `https://` Container URIs and hardcode Seqera Containers into `gather_registries()` ([#3244](https://github.com/nf-core/tools/pull/3244)). - Further steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Enable limited support for `oras://` container paths (_only absolute URIs, no flexible registries like with Docker_) and prevent unnecessary image downloads for Seqera Container modules with `reconcile_seqera_container_uris()` ([#3293](https://github.com/nf-core/tools/pull/3293)). 
- Update dawidd6/action-download-artifact action to v7 ([#3306](https://github.com/nf-core/tools/pull/3306)) ### Linting - allow mixed `str` and `dict` entries in lint config ([#3228](https://github.com/nf-core/tools/pull/3228)) +- fix `meta_yml` linting test failing due to `module.process_name` always being `""` ([#3317](https://github.com/nf-core/tools/pull/3317)) +- fix module section regex matching wrong things ([#3321](https://github.com/nf-core/tools/pull/3321)) ### Modules @@ -33,29 +67,38 @@ ### Subworkflows - Add `nf-core subworkflows patch` command ([#2861](https://github.com/nf-core/tools/pull/2861)) +- Improve subworkflow nf-test migration warning ([#3298](https://github.com/nf-core/tools/pull/3298)) ### General -- Include .nf-core.yml in `nf-core pipelines bump-version` ([#3220](https://github.com/nf-core/tools/pull/3220)) +- Include `.nf-core.yml` in `nf-core pipelines bump-version` ([#3220](https://github.com/nf-core/tools/pull/3220)) - create: add shortcut to toggle all switches ([#3226](https://github.com/nf-core/tools/pull/3226)) - Remove unrelated values when saving `.nf-core` file ([#3227](https://github.com/nf-core/tools/pull/3227)) -- chore(deps): update pre-commit hook pre-commit/mirrors-mypy to v1.12.0 ([#3230](https://github.com/nf-core/tools/pull/3230)) -- chore(deps): update pre-commit hook astral-sh/ruff-pre-commit to v0.7.0 ([#3229](https://github.com/nf-core/tools/pull/3229)) -- Update python:3.12-slim Docker digest to 032c526 ([#3232](https://github.com/nf-core/tools/pull/3232)) - use correct `--profile` options for `nf-core subworkflows test` ([#3233](https://github.com/nf-core/tools/pull/3233)) - Update GitHub Actions ([#3237](https://github.com/nf-core/tools/pull/3237)) - add `--dir/-d` option to schema commands ([#3247](https://github.com/nf-core/tools/pull/3247)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.1 ([#3250](https://github.com/nf-core/tools/pull/3250)) +- fix headers in api docs ([#3323](https://github.com/nf-core/tools/pull/3323)) - handle new schema structure in `nf-core pipelines create-params-file` ([#3276](https://github.com/nf-core/tools/pull/3276)) - Update Gitpod image to use Miniforge instead of Miniconda([#3274](https://github.com/nf-core/tools/pull/3274)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.3 ([#3275](https://github.com/nf-core/tools/pull/3275)) - Add hint to solve git errors with a synced repo ([#3279](https://github.com/nf-core/tools/pull/3279)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.4 ([#3282](https://github.com/nf-core/tools/pull/3282)) +- Run pre-commit when testing linting the template pipeline ([#3280](https://github.com/nf-core/tools/pull/3280)) +- Make CLI prompt less nf-core specific ([#3326](https://github.com/nf-core/tools/pull/3326)) +- Update gitpod vscode extensions to use nf-core extension pack ([#3327](https://github.com/nf-core/tools/pull/3327)) +- Remove toList() channel operation from inside onComplete block ([#3304](https://github.com/nf-core/tools/pull/3304)) +- build: Setup VS Code tests ([#3292](https://github.com/nf-core/tools/pull/3292)) +- Don't break gitpod.yml with template string ([#3332](https://github.com/nf-core/tools/pull/3332)) +- rocrate: remove duplicated entries for name and version ([#3333](https://github.com/nf-core/tools/pull/3333)) +- rocrate: Update crate with version bump and handle new contributor field ([#3334](https://github.com/nf-core/tools/pull/3334)) +- set default_branch to master for now 
([#3335](https://github.com/nf-core/tools/issues/3335)) +- Set git defaultBranch to master in sync action ([#3337](https://github.com/nf-core/tools/pull/3337)) +- Add verbose mode to sync action ([#3339](https://github.com/nf-core/tools/pull/3339)) + +### Version updates + +- chore(deps): update pre-commit hook pre-commit/mirrors-mypy to v1.12.0 ([#3230](https://github.com/nf-core/tools/pull/3230)) - Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) -- Update python:3.12-slim Docker digest to 2a6386a ([#3284](https://github.com/nf-core/tools/pull/3284)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 ([#3299](https://github.com/nf-core/tools/pull/3299)) - Update gitpod/workspace-base Docker digest to 12853f7 ([#3309](https://github.com/nf-core/tools/pull/3309)) -- Run pre-commit when testing linting the template pipeline ([#3280](https://github.com/nf-core/tools/pull/3280)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.2 ([#3325](https://github.com/nf-core/tools/pull/3325)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] diff --git a/Dockerfile b/Dockerfile index dc9948ea4b..f2141145b8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:2a6386ad2db20e7f55073f69a98d6da2cf9f168e05e7487d2670baeb9b7601c5 +FROM python:3.12-slim@sha256:2b0079146a74e23bf4ae8f6a28e1b484c6292f6fb904cbb51825b4a19812fcd8 LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" diff --git a/docs/api/_src/api/pipelines/bump_version.md b/docs/api/_src/api/pipelines/bump_version.md index cd7dc280f6..76db67837a 100644 --- a/docs/api/_src/api/pipelines/bump_version.md +++ b/docs/api/_src/api/pipelines/bump_version.md @@ -1,4 +1,4 @@ -# nf_core.bump_version +# nf_core.pipelines.bump_version ```{eval-rst} .. automodule:: nf_core.pipelines.bump_version diff --git a/docs/api/_src/api/pipelines/create.md b/docs/api/_src/api/pipelines/create.md index 576335e951..5019a5f3c8 100644 --- a/docs/api/_src/api/pipelines/create.md +++ b/docs/api/_src/api/pipelines/create.md @@ -1,4 +1,4 @@ -# nf_core.create +# nf_core.pipelines.create ```{eval-rst} .. automodule:: nf_core.pipelines.create diff --git a/docs/api/_src/api/pipelines/download.md b/docs/api/_src/api/pipelines/download.md index 540fb92c49..afb31ddea6 100644 --- a/docs/api/_src/api/pipelines/download.md +++ b/docs/api/_src/api/pipelines/download.md @@ -1,4 +1,4 @@ -# nf_core.download +# nf_core.pipelines.download ```{eval-rst} .. automodule:: nf_core.pipelines.download diff --git a/docs/api/_src/api/pipelines/launch.md b/docs/api/_src/api/pipelines/launch.md index 0f7fc03f64..0d0260cae6 100644 --- a/docs/api/_src/api/pipelines/launch.md +++ b/docs/api/_src/api/pipelines/launch.md @@ -1,4 +1,4 @@ -# nf_core.launch +# nf_core.pipelines.launch ```{eval-rst} .. automodule:: nf_core.pipelines.launch diff --git a/docs/api/_src/api/pipelines/lint.md b/docs/api/_src/api/pipelines/lint.md index aa62c404b8..91b37c26f6 100644 --- a/docs/api/_src/api/pipelines/lint.md +++ b/docs/api/_src/api/pipelines/lint.md @@ -1,4 +1,4 @@ -# nf_core.lint +# nf_core.pipelines.lint :::{seealso} See the [Lint Tests](/docs/nf-core-tools/api_reference/dev/pipeline_lint_tests) docs for information about specific linting functions. 
diff --git a/docs/api/_src/api/pipelines/list.md b/docs/api/_src/api/pipelines/list.md index 7df7564544..5f404b91c3 100644 --- a/docs/api/_src/api/pipelines/list.md +++ b/docs/api/_src/api/pipelines/list.md @@ -1,4 +1,4 @@ -# nf_core.list +# nf_core.pipelines.list ```{eval-rst} .. automodule:: nf_core.pipelines.list diff --git a/docs/api/_src/api/pipelines/params-file.md b/docs/api/_src/api/pipelines/params-file.md index 06f27cc592..37e91f458a 100644 --- a/docs/api/_src/api/pipelines/params-file.md +++ b/docs/api/_src/api/pipelines/params-file.md @@ -1,4 +1,4 @@ -# nf_core.params_file +# nf_core.pipelines.params_file ```{eval-rst} .. automodule:: nf_core.pipelines.params_file diff --git a/docs/api/_src/api/pipelines/schema.md b/docs/api/_src/api/pipelines/schema.md index c885d9ed23..4ca1aab480 100644 --- a/docs/api/_src/api/pipelines/schema.md +++ b/docs/api/_src/api/pipelines/schema.md @@ -1,4 +1,4 @@ -# nf_core.schema +# nf_core.pipelines.schema ```{eval-rst} .. automodule:: nf_core.pipelines.schema diff --git a/docs/api/_src/api/pipelines/sync.md b/docs/api/_src/api/pipelines/sync.md index da1f468fe5..f78733bb7d 100644 --- a/docs/api/_src/api/pipelines/sync.md +++ b/docs/api/_src/api/pipelines/sync.md @@ -1,4 +1,4 @@ -# nf_core.sync +# nf_core.pipelines.sync ```{eval-rst} .. automodule:: nf_core.pipelines.sync diff --git a/docs/api/_src/api/pipelines/utils.md b/docs/api/_src/api/pipelines/utils.md index 86b8c3f36f..36c2ecca4d 100644 --- a/docs/api/_src/api/pipelines/utils.md +++ b/docs/api/_src/api/pipelines/utils.md @@ -1,4 +1,4 @@ -# nf_core.utils +# nf_core.pipelines.utils ```{eval-rst} .. automodule:: nf_core.pipelines.utils diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 06d4be8f75..7cfba64531 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -288,7 +288,7 @@ def command_pipelines_create(ctx, name, description, author, version, force, out @click.option( "--release", is_flag=True, - default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" + default=Path(os.environ.get("GITHUB_REF", "").strip(" '\"")).parent.name in ["master", "main"] and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", help="Execute additional checks for release-ready workflows.", @@ -2240,7 +2240,7 @@ def command_download( @click.option( "--release", is_flag=True, - default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" + default=Path(os.environ.get("GITHUB_REF", "").strip(" '\"")).parent.name in ["master", "main"] and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", help="Execute additional checks for release-ready workflows.", diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index be28a0d870..ab4cc79ae6 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -37,10 +37,10 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P if not repo_type and use_prompt: log.warning("'repository_type' not defined in %s", config_fn.name) repo_type = questionary.select( - "Is this repository an nf-core pipeline or a fork of nf-core/modules?", + "Is this repository a pipeline or a modules repository?", choices=[ {"name": "Pipeline", "value": "pipeline"}, - {"name": "nf-core/modules", "value": "modules"}, + {"name": "Modules repository", 
"value": "modules"}, ], style=nf_core.utils.nfcore_question_style, ).unsafe_ask() diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 37e43a536e..81c0ba98e7 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -62,7 +62,6 @@ def __init__( # Initialize the important files self.main_nf: Path = Path(self.component_dir, "main.nf") self.meta_yml: Optional[Path] = Path(self.component_dir, "meta.yml") - self.process_name = "" self.environment_yml: Optional[Path] = Path(self.component_dir, "environment.yml") component_list = self.component_name.split("/") @@ -96,6 +95,8 @@ def __init__( self.test_yml = None self.test_main_nf = None + self.process_name: str = self._get_process_name() + def __repr__(self) -> str: return f"" @@ -169,6 +170,13 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st included_components.append(component) return included_components + def _get_process_name(self): + with open(self.main_nf) as fh: + for line in fh: + if re.search(r"^\s*process\s*\w*\s*{", line): + return re.search(r"^\s*process\s*(\w*)\s*{.*", line).group(1) or "" + return "" + def get_inputs_from_main_nf(self) -> None: """Collect all inputs from the main.nf file.""" inputs: Any = [] # Can be 'list[list[dict[str, dict[str, str]]]]' or 'list[str]' diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 848e17130e..ba3b87f79a 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -96,19 +96,19 @@ def main_nf( for line in iter_lines: if re.search(r"^\s*process\s*\w*\s*{", line) and state == "module": state = "process" - if re.search(r"input\s*:", line) and state in ["process"]: + if re.search(r"^\s*input\s*:", line) and state in ["process"]: state = "input" continue - if re.search(r"output\s*:", line) and state in ["input", "process"]: + if re.search(r"^\s*output\s*:", line) and state in ["input", "process"]: state = "output" continue - if re.search(r"when\s*:", line) and state in ["input", "output", "process"]: + if re.search(r"^\s*when\s*:", line) and state in ["input", "output", "process"]: state = "when" continue - if re.search(r"script\s*:", line) and state in ["input", "output", "when", "process"]: + if re.search(r"^\s*script\s*:", line) and state in ["input", "output", "when", "process"]: state = "script" continue - if re.search(r"shell\s*:", line) and state in ["input", "output", "when", "process"]: + if re.search(r"^\s*shell\s*:", line) and state in ["input", "output", "when", "process"]: state = "shell" continue @@ -256,7 +256,6 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): bioconda_packages = [] # Process name should be all capital letters - self.process_name = lines[0].split()[1] if all(x.upper() for x in self.process_name): self.passed.append(("process_capitals", "Process name is in capital letters", self.main_nf)) else: diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 37970c09e8..b08b43667f 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -64,9 +64,9 @@ These tests are run both with the latest available version of `Nextflow` and als :warning: Only in the unlikely and regretful event of a release happening with a bug. -- On your own fork, make a new branch `patch` based on `upstream/master`. 
+- On your own fork, make a new branch `patch` based on `upstream/main` or `upstream/master`. - Fix the bug, and bump version (X.Y.Z+1). -- Open a pull-request from `patch` to `master` with the changes. +- Open a pull-request from `patch` to `main`/`master` with the changes. {% if is_nfcore -%} diff --git a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml index 412f5bd3b3..f3624afc9c 100644 --- a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml @@ -11,7 +11,6 @@ body: - [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) - [{{ name }} pipeline documentation](https://nf-co.re/{{ short_name }}/usage) {%- endif %} - - type: textarea id: description attributes: diff --git a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md index c96f2dd4c2..0df95c0a40 100644 --- a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md +++ b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md @@ -8,14 +8,14 @@ These are the most common things requested on pull requests (PRs). Remember that PRs should be made against the dev branch, unless you're preparing a pipeline release. -Learn more about contributing: [CONTRIBUTING.md](https://github.com/{{ name }}/tree/master/.github/CONTRIBUTING.md) +Learn more about contributing: [CONTRIBUTING.md](https://github.com/{{ name }}/tree/{{ default_branch }}/.github/CONTRIBUTING.md) --> ## PR checklist - [ ] This comment contains a description of changes (with reason). - [ ] If you've fixed a bug or added code that should be tested, add tests! -- [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/{{ name }}/tree/master/.github/CONTRIBUTING.md) +- [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/{{ name }}/tree/{{ default_branch }}/.github/CONTRIBUTING.md) {%- if is_nfcore %} - [ ] If necessary, also make a PR on the {{ name }} _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. {%- endif %} diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml index 1ca2ac2c74..6805c83a27 100644 --- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml +++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml @@ -1,11 +1,12 @@ name: nf-core AWS full size tests -# This workflow is triggered on PRs opened against the master branch. +# This workflow is triggered on PRs opened against the main/master branch. # It can be additionally triggered manually with GitHub actions workflow dispatch button. 
# It runs the -profile 'test_full' on AWS batch on: pull_request: branches: + - main - master workflow_dispatch: pull_request_review: @@ -18,19 +19,30 @@ jobs: if: github.repository == '{{ name }}' && github.event.review.state == 'approved' && github.event.pull_request.base.ref == 'master' || github.event_name == 'workflow_dispatch' runs-on: ubuntu-latest steps: - - uses: octokit/request-action@v2.x + - name: Get PR reviews + uses: octokit/request-action@v2.x if: github.event_name != 'workflow_dispatch' id: check_approvals + continue-on-error: true with: route: GET /repos/{%- raw -%}${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews?per_page=100 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - id: test_variables + + - name: Check for approvals + if: ${{ failure() && github.event_name != 'workflow_dispatch' }} + run: | + echo "No review approvals found. At least 2 approvals are required to run this action automatically." + exit 1 + + - name: Check for enough approvals (>=2) + id: test_variables if: github.event_name != 'workflow_dispatch' run: | JSON_RESPONSE='${{ steps.check_approvals.outputs.data }}'{% endraw %} CURRENT_APPROVALS_COUNT=$(echo $JSON_RESPONSE | jq -c '[.[] | select(.state | contains("APPROVED")) ] | length') test $CURRENT_APPROVALS_COUNT -ge 2 || exit 1 # At least 2 approvals are required + - name: Launch workflow via Seqera Platform uses: seqeralabs/action-tower-launch@v2 # TODO nf-core: You can customise AWS full pipeline tests as required diff --git a/nf_core/pipeline-template/.github/workflows/branch.yml b/nf_core/pipeline-template/.github/workflows/branch.yml index df1a627b15..110b4a5f5a 100644 --- a/nf_core/pipeline-template/.github/workflows/branch.yml +++ b/nf_core/pipeline-template/.github/workflows/branch.yml @@ -1,15 +1,17 @@ name: nf-core branch protection -# This workflow is triggered on PRs to master branch on the repository -# It fails when someone tries to make a PR against the nf-core `master` branch instead of `dev` +# This workflow is triggered on PRs to `main`/`master` branch on the repository +# It fails when someone tries to make a PR against the nf-core `main`/`master` branch instead of `dev` on: pull_request_target: - branches: [master] + branches: + - main + - master jobs: test: runs-on: ubuntu-latest steps: - # PRs to the nf-core repo master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches + # PRs to the nf-core repo main/master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches - name: Check PRs if: github.repository == '{{ name }}' run: | @@ -22,7 +24,7 @@ jobs: uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 with: message: | - ## This PR is against the `master` branch :x: + ## This PR is against the `${{github.event.pull_request.base.ref}}` branch :x: * Do not close this PR * Click _Edit_ and change the `base` to `dev` @@ -32,9 +34,9 @@ jobs: Hi @${{ github.event.pull_request.user.login }}, - It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `master` branch. - The `master` branch on nf-core repositories should always contain code from the latest release. - Because of this, PRs to `master` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. 
+ It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) ${{github.event.pull_request.base.ref}} branch. + The ${{github.event.pull_request.base.ref}} branch on nf-core repositories should always contain code from the latest release. + Because of this, PRs to ${{github.event.pull_request.base.ref}} are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page. Note that even after this, the test will continue to show as failing until you push a new commit. diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index 1bc42469c4..f270dc5411 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -2,7 +2,7 @@ name: Test successful pipeline download with 'nf-core pipelines download' # Run the workflow when: # - dispatched manually -# - when a PR is opened or reopened to master branch +# - when a PR is opened or reopened to main/master branch # - the head branch of the pull request is updated, i.e. if fixes for a release are pushed last minute to dev. on: workflow_dispatch: @@ -17,19 +17,25 @@ on: - edited - synchronize branches: + - main - master pull_request_target: branches: + - main - master env: NXF_ANSI_LOG: false jobs: - download: - runs-on: ubuntu-latest + configure: + runs-on: ubuntu-latest{% raw %} + outputs: + REPO_LOWERCASE: ${{ steps.get_repo_properties.outputs.REPO_LOWERCASE }} + REPOTITLE_LOWERCASE: ${{ steps.get_repo_properties.outputs.REPOTITLE_LOWERCASE }} + REPO_BRANCH: ${{ steps.get_repo_properties.outputs.REPO_BRANCH }} steps: - - name: Install Nextflow + - name: Install Nextflow{% endraw %} uses: nf-core/setup-nextflow@v2 - name: Disk space cleanup @@ -51,22 +57,27 @@ jobs: pip install git+https://github.com/nf-core/tools.git@dev - name: Get the repository name and current branch set as environment variable + id: get_repo_properties run: | - echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV} - echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV} - echo "{% raw %}REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> ${GITHUB_ENV} + echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> "$GITHUB_OUTPUT" + echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> "$GITHUB_OUTPUT" + echo "{% raw %}REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> "$GITHUB_OUTPUT" - name: Make a cache directory for the container images run: | mkdir -p ./singularity_container_images + download: + runs-on: ubuntu-latest + needs: configure + steps: - name: Download the pipeline env: NXF_SINGULARITY_CACHEDIR: ./singularity_container_images run: | - nf-core pipelines download ${{ env.REPO_LOWERCASE }} \ - --revision ${{ env.REPO_BRANCH }} \ - --outdir ./${{ env.REPOTITLE_LOWERCASE }} \ + nf-core pipelines download ${{ needs.configure.outputs.REPO_LOWERCASE }} \ + --revision ${{ needs.configure.outputs.REPO_BRANCH }} \ + --outdir ./${{ needs.configure.outputs.REPOTITLE_LOWERCASE }} \ --compress "none" \ --container-system 'singularity' \ --container-library "quay.io" -l "docker.io" 
-l "community.wave.seqera.io/library/" \ @@ -74,14 +85,14 @@ jobs: --download-configuration 'yes' - name: Inspect download - run: tree ./${{ env.REPOTITLE_LOWERCASE }}{% endraw %}{% if test_config %}{% raw %} + run: tree ./${{ needs.configure.outputs.REPOTITLE_LOWERCASE }}{% endraw %}{% if test_config %}{% raw %} - name: Count the downloaded number of container images id: count_initial run: | image_count=$(ls -1 ./singularity_container_images | wc -l | xargs) echo "Initial container image count: $image_count" - echo "IMAGE_COUNT_INITIAL=$image_count" >> ${GITHUB_ENV} + echo "IMAGE_COUNT_INITIAL=$image_count" >> "$GITHUB_OUTPUT" - name: Run the downloaded pipeline (stub) id: stub_run_pipeline @@ -89,27 +100,27 @@ jobs: env: NXF_SINGULARITY_CACHEDIR: ./singularity_container_images NXF_SINGULARITY_HOME_MOUNT: true - run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results + run: nextflow run ./${{needs.configure.outputs.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ needs.configure.outputs.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results - name: Run the downloaded pipeline (stub run not supported) id: run_pipeline - if: ${{ job.steps.stub_run_pipeline.status == failure() }} + if: ${{ steps.stub_run_pipeline.outcome == 'failure' }} env: NXF_SINGULARITY_CACHEDIR: ./singularity_container_images NXF_SINGULARITY_HOME_MOUNT: true - run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results + run: nextflow run ./${{ needs.configure.outputs.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ needs.configure.outputs.REPO_BRANCH }}) -profile test,singularity --outdir ./results - name: Count the downloaded number of container images id: count_afterwards run: | image_count=$(ls -1 ./singularity_container_images | wc -l | xargs) echo "Post-pipeline run container image count: $image_count" - echo "IMAGE_COUNT_AFTER=$image_count" >> ${GITHUB_ENV} + echo "IMAGE_COUNT_AFTER=$image_count" >> "$GITHUB_OUTPUT" - name: Compare container image counts run: | - if [ "${{ env.IMAGE_COUNT_INITIAL }}" -ne "${{ env.IMAGE_COUNT_AFTER }}" ]; then - initial_count=${{ env.IMAGE_COUNT_INITIAL }} - final_count=${{ env.IMAGE_COUNT_AFTER }} + if [ "${{ steps.count_initial.outputs.IMAGE_COUNT_INITIAL }}" -ne "${{ steps.count_afterwards.outputs.IMAGE_COUNT_AFTER }}" ]; then + initial_count=${{ steps.count_initial.outputs.IMAGE_COUNT_INITIAL }} + final_count=${{ steps.count_afterwards.outputs.IMAGE_COUNT_AFTER }} difference=$((final_count - initial_count)) echo "$difference additional container images were \n downloaded at runtime . The pipeline has no support for offline runs!" 
tree ./singularity_container_images diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index 5907fb59c9..c6a2e40b87 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -3,16 +3,8 @@ tasks: - name: Update Nextflow and setup pre-commit command: | pre-commit install --install-hooks - nextflow self-update + nextflow self-update {%- if code_linters %} vscode: - extensions: # based on nf-core.nf-core-extensionpack - #{%- if code_linters -%} - - esbenp.prettier-vscode # Markdown/CommonMark linting and style checking for Visual Studio Code - - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files{% endif %} - - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar - - mechatroner.rainbow-csv # Highlight columns in csv files in different colors - - nextflow.nextflow # Nextflow syntax highlighting - - oderwat.indent-rainbow # Highlight indentation level - - streetsidesoftware.code-spell-checker # Spelling checker for source code - - charliermarsh.ruff # Code linter Ruff + extensions: + - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack{% endif %} diff --git a/nf_core/pipeline-template/.prettierignore b/nf_core/pipeline-template/.prettierignore index 7ecc9b61cb..02ba84c006 100644 --- a/nf_core/pipeline-template/.prettierignore +++ b/nf_core/pipeline-template/.prettierignore @@ -16,3 +16,6 @@ testing/ testing* *.pyc bin/ +{%- if rocrate %} +ro-crate-metadata.json +{%- endif %} diff --git a/nf_core/pipeline-template/CITATIONS.md b/nf_core/pipeline-template/CITATIONS.md index 16da9a4207..c355fd6129 100644 --- a/nf_core/pipeline-template/CITATIONS.md +++ b/nf_core/pipeline-template/CITATIONS.md @@ -18,7 +18,7 @@ {%- endif %} -{% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) +{%- if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) > Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index a8f2e60546..4cd41de368 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -7,7 +7,7 @@ -{% else %} +{%- else -%} # {{ name }} @@ -48,13 +48,13 @@ workflows use the "tube map" design for that. See https://nf-co.re/docs/contributing/design_guidelines#examples for examples. --> -{% if fastqc %}1. Read QC ([`FastQC`](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/)){% endif %} -{% if multiqc %}2. Present QC for raw reads ([`MultiQC`](http://multiqc.info/)){% endif %} +{%- if fastqc %}1. Read QC ([`FastQC`](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/)){% endif %} +{%- if multiqc %}2. Present QC for raw reads ([`MultiQC`](http://multiqc.info/)){% endif %} ## Usage > [!NOTE] -> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how to set-up Nextflow. 
{% if test_config %}Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) with `-profile test` before running the workflow on actual data.{% endif %} +> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how to set-up Nextflow. {%- if test_config %}Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) with `-profile test` before running the workflow on actual data.{% endif %} -{% if citations %} +{%- if citations %} An extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md) file. -{% endif %} +{%- endif %} + {% if is_nfcore -%} You can cite the `nf-core` publication as follows: diff --git a/nf_core/pipeline-template/assets/schema_input.json b/nf_core/pipeline-template/assets/schema_input.json index 28a468adaf..0dbc03df56 100644 --- a/nf_core/pipeline-template/assets/schema_input.json +++ b/nf_core/pipeline-template/assets/schema_input.json @@ -1,6 +1,6 @@ { "$schema": "https://json-schema.org/draft/2020-12/schema", - "$id": "https://raw.githubusercontent.com/{{ name }}/master/assets/schema_input.json", + "$id": "https://raw.githubusercontent.com/{{ name }}/{{ default_branch }}/assets/schema_input.json", "title": "{{ name }} pipeline - params.input schema", "description": "Schema for the file provided with params.input", "type": "array", diff --git a/nf_core/pipeline-template/docs/output.md b/nf_core/pipeline-template/docs/output.md index 83d5d23fe3..a9be6620e1 100644 --- a/nf_core/pipeline-template/docs/output.md +++ b/nf_core/pipeline-template/docs/output.md @@ -2,7 +2,7 @@ ## Introduction -This document describes the output produced by the pipeline. {% if multiqc %}Most of the plots are taken from the MultiQC report, which summarises results at the end of the pipeline.{% endif %} +This document describes the output produced by the pipeline.{% if multiqc %} Most of the plots are taken from the MultiQC report, which summarises results at the end of the pipeline.{% endif %} The directories listed below will be created in the results directory after the pipeline has finished. All paths are relative to the top-level results directory. @@ -14,9 +14,8 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d {% if fastqc -%} -- [FastQC](#fastqc) - Raw read QC - {%- endif %} - {%- if multiqc %} +- [FastQC](#fastqc) - Raw read QC{% endif %} + {%- if multiqc -%} - [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline {%- endif %} - [Pipeline information](#pipeline-information) - Report metrics generated during the workflow execution @@ -35,7 +34,7 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d [FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your sequenced reads. It provides information about the quality score distribution across your reads, per base sequence content (%A/T/G/C), adapter contamination and overrepresented sequences. For further reading and documentation see the [FastQC help pages](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/). -{%- endif %} +{%- endif -%} {% if multiqc -%} @@ -54,7 +53,7 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d [MultiQC](http://multiqc.info) is a visualization tool that generates a single HTML report summarising all samples in your project. 
Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory. Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQC. The pipeline has special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see . -{%- endif %} +{%- endif -%} ### Pipeline information diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index 16e6220aaf..bbc8a828c4 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -61,7 +61,7 @@ An [example samplesheet](../assets/samplesheet.csv) has been provided with the p The typical command for running the pipeline is as follows: ```bash -nextflow run {{ name }} --input ./samplesheet.csv --outdir ./results --genome GRCh37 -profile docker +nextflow run {{ name }} --input ./samplesheet.csv --outdir ./results {% if igenomes %}--genome GRCh37{% endif %} -profile docker ``` This will launch the pipeline with the `docker` configuration profile. See below for more information about profiles. @@ -93,7 +93,9 @@ with: ```yaml title="params.yaml" input: './samplesheet.csv' outdir: './results/' +{% if igenomes -%} genome: 'GRCh37' +{% endif -%} <...> ``` @@ -113,7 +115,7 @@ It is a good idea to specify the pipeline version when running the pipeline on y First, go to the [{{ name }} releases page](https://github.com/{{ name }}/releases) and find the latest pipeline version - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. Of course, you can switch to another version by changing the number after the `-r` flag. -This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. {% if multiqc %}For example, at the bottom of the MultiQC reports.{% endif %} +This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future.{% if multiqc %} For example, at the bottom of the MultiQC reports.{% endif %} To further assist in reproducibility, you can use share and reuse [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter. @@ -136,7 +138,7 @@ Several generic profiles are bundled with the pipeline which instruct the pipeli {%- if nf_core_configs %} -The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to check if your system is suported, please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). +The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to check if your system is supported, please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). {% else %} {% endif %} Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important! 
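Several of the template hunks above (README.md, docs/output.md, docs/usage.md, CITATIONS.md) swap `{% if %}` for `{%- if %}`. The leading dash is Jinja2 whitespace control: it strips the whitespace, including the preceding newline, immediately before the tag, which is what removes the stray blank lines from the rendered files when a feature is skipped. A small sketch with the `jinja2` package (the templates themselves are rendered by nf-core tools; this only demonstrates the operator):

```python
from jinja2 import Template

# "{%-" trims the whitespace (and the newline) to the left of the tag
plain = Template("a\n{% if x %}b{% endif %}")
trimmed = Template("a\n{%- if x %}b{% endif %}")

print(repr(plain.render(x=True)))     # 'a\nb'
print(repr(trimmed.render(x=True)))   # 'ab'
print(repr(plain.render(x=False)))    # 'a\n'  <- leftover blank line
print(repr(trimmed.render(x=False)))  # 'a'
```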
diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 90c5728d9a..7d2761d290 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -28,7 +28,7 @@ }, "utils_nfcore_pipeline": { "branch": "master", - "git_sha": "9a1e8bb6a5d205cf7807dcefca872a3314b2f3e6", + "git_sha": "51ae5406a030d4da1e49e4dab49756844fdd6c7a", "installed_by": ["subworkflows"] }{% if nf_schema %}, "utils_nfschema_plugin": { diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 21174bbdc5..3325af4e06 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -77,11 +77,6 @@ params { includeConfig 'conf/base.config' {%- else %} -{% if modules -%} -// Load modules.config for DSL2 module specific options -includeConfig 'conf/modules.config' -{%- endif %} - process { // TODO nf-core: Check the defaults for all processes cpus = { 1 * task.attempt } @@ -291,6 +286,7 @@ manifest { homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" mainScript = 'main.nf' + defaultBranch = '{{ default_branch }}' nextflowVersion = '!>=24.04.2' version = '{{ version }}' doi = '' @@ -310,7 +306,7 @@ validation { command = "nextflow run {{ name }} -profile --input samplesheet.csv --outdir " fullParameter = "help_full" showHiddenParameter = "show_hidden" - {% if is_nfcore -%} + {%- if is_nfcore %} beforeText = """ -\033[2m----------------------------------------------------\033[0m- \033[0;32m,--.\033[0;30m/\033[0;32m,-.\033[0m @@ -326,7 +322,7 @@ validation { https://doi.org/10.1038/s41587-020-0439-x * Software dependencies - https://github.com/{{ name }}/blob/master/CITATIONS.md + https://github.com/{{ name }}/blob/{{ default_branch }}/CITATIONS.md """{% endif %} }{% if is_nfcore %} summary { @@ -335,3 +331,8 @@ validation { }{% endif %} } {%- endif %} + +{% if modules -%} +// Load modules.config for DSL2 module specific options +includeConfig 'conf/modules.config' +{%- endif %} diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 3e59a8ba54..c28929b47d 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -1,6 +1,6 @@ { "$schema": "https://json-schema.org/draft/2020-12/schema", - "$id": "https://raw.githubusercontent.com/{{ name }}/master/nextflow_schema.json", + "$id": "https://raw.githubusercontent.com/{{ name }}/{{ default_branch }}/nextflow_schema.json", "title": "{{ name }} pipeline parameters", "description": "{{ description }}", "type": "object", @@ -243,10 +243,12 @@ { "$ref": "#/$defs/input_output_options" }, - {% if igenomes %}{ + {%- if igenomes %} + { "$ref": "#/$defs/reference_genome_options" },{% endif %} - {% if nf_core_configs %}{ + {%- if nf_core_configs %} + { "$ref": "#/$defs/institutional_config_options" },{% endif %} { diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index be5776b836..3d540600ba 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -140,6 +140,10 @@ workflow PIPELINE_COMPLETION { summary_params = [:] {%- endif %} + {%- if multiqc %} + def multiqc_reports = multiqc_report.toList() + {%- endif %} + // // Completion 
email and summary // @@ -153,7 +157,7 @@ workflow PIPELINE_COMPLETION { plaintext_email, outdir, monochrome_logs, - {% if multiqc %}multiqc_report.toList(){% else %}[]{% endif %} + {% if multiqc %}multiqc_reports.getVal(),{% else %}[]{% endif %} ) } {%- endif %} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index 228dbff897..bfd258760d 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -203,29 +203,24 @@ def logColours(monochrome_logs=true) { return colorcodes } +// Return a single report from an object that may be a Path or List // -// Attach the multiqc report to email -// -def attachMultiqcReport(multiqc_report) { - def mqc_report = null - try { - if (workflow.success) { - mqc_report = multiqc_report.getVal() - if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) { - if (mqc_report.size() > 1) { - log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one") - } - mqc_report = mqc_report[0] - } +def getSingleReport(multiqc_reports) { + if (multiqc_reports instanceof Path) { + return multiqc_reports + } else if (multiqc_reports instanceof List) { + if (multiqc_reports.size() == 0) { + log.warn("[${workflow.manifest.name}] No reports found from process 'MULTIQC'") + return null + } else if (multiqc_reports.size() == 1) { + return multiqc_reports.first() + } else { + log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one") + return multiqc_reports.first() } + } else { + return null } - catch (Exception msg) { - log.debug(msg) - if (multiqc_report) { - log.warn("[${workflow.manifest.name}] Could not attach MultiQC report to summary email") - } - } - return mqc_report } // @@ -279,7 +274,7 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi email_fields['summary'] = summary << misc_fields // On success try attach the multiqc report - def mqc_report = attachMultiqcReport(multiqc_report) + def mqc_report = getSingleReport(multiqc_report) // Check if we are only sending emails on failure def email_address = email @@ -310,7 +305,8 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi if (email_address) { try { if (plaintext_email) { -new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') } + new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') + } // Try to send HTML e-mail using sendmail def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") sendmail_tf.withWriter { w -> w << sendmail_html } @@ -318,7 +314,7 @@ new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (sendmail)-") } catch (Exception msg) { - log.debug(msg) + log.debug(msg.toString()) log.debug("Trying with mail instead of sendmail") // Catch failures and try with plaintext def mail_cmd = ['mail', '-s', subject, '--content-type=text/html', email_address] diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test index e43d208b1b..f117040cbd 100644 --- 
a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test @@ -79,4 +79,48 @@ nextflow_function { ) } } + + test("Test Function getSingleReport with a single file") { + function "getSingleReport" + + when { + function { + """ + input[0] = file(params.modules_testdata_base_path + '/generic/tsv/test.tsv', checkIfExists: true) + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert function.result.contains("test.tsv") } + ) + } + } + + test("Test Function getSingleReport with multiple files") { + function "getSingleReport" + + when { + function { + """ + input[0] = [ + file(params.modules_testdata_base_path + '/generic/tsv/test.tsv', checkIfExists: true), + file(params.modules_testdata_base_path + '/generic/tsv/network.tsv', checkIfExists: true), + file(params.modules_testdata_base_path + '/generic/tsv/expression.tsv', checkIfExists: true) + ] + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert function.result.contains("test.tsv") }, + { assert !function.result.contains("network.tsv") }, + { assert !function.result.contains("expression.tsv") } + ) + } + } } diff --git a/nf_core/pipelines/bump_version.py b/nf_core/pipelines/bump_version.py index 3190ed70d4..664d7a22a3 100644 --- a/nf_core/pipelines/bump_version.py +++ b/nf_core/pipelines/bump_version.py @@ -11,6 +11,7 @@ from ruamel.yaml import YAML import nf_core.utils +from nf_core.pipelines.rocrate import ROCrate from nf_core.utils import Pipeline log = logging.getLogger(__name__) @@ -127,6 +128,10 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: yaml_key=["template", "version"], ) + # update rocrate if ro-crate is present + if Path(pipeline_obj.wf_path, "ro-crate-metadata.json").exists(): + ROCrate(pipeline_obj.wf_path).update_rocrate() + def bump_nextflow_version(pipeline_obj: Pipeline, new_version: str) -> None: """Bumps the required Nextflow version number of a pipeline. 
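The `getSingleReport()` function introduced above replaces the old `attachMultiqcReport()` try/catch: `PIPELINE_COMPLETION` now calls `multiqc_report.toList()` outside the `onComplete` block, and the helper just normalises the result - a single `Path`, a list of paths, or nothing - down to at most one report. A rough Python paraphrase of that contract (the shipped implementation is the Groovy function above, not this):

```python
from pathlib import Path
from typing import List, Optional, Union

def get_single_report(reports: Union[Path, List[Path], None]) -> Optional[Path]:
    """Return at most one report, warning when the input is empty or ambiguous."""
    if isinstance(reports, Path):
        return reports
    if isinstance(reports, list):
        if not reports:
            print("warn: no reports found from process 'MULTIQC'")
            return None
        if len(reports) > 1:
            print("warn: found multiple reports from process 'MULTIQC', will use only one")
        return reports[0]
    return None  # anything else (e.g. null) yields no report

assert get_single_report(Path("multiqc_report.html")) == Path("multiqc_report.html")
assert get_single_report([]) is None
assert get_single_report([Path("a.html"), Path("b.html")]) == Path("a.html")
```

The new nf-test cases above pin down exactly this behaviour for the single-file and multiple-file inputs.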
diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 61e0b63ec3..4f90ca17f9 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -22,7 +22,7 @@ from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file from nf_core.pipelines.rocrate import ROCrate -from nf_core.utils import NFCoreTemplateConfig, NFCoreYamlLintConfig +from nf_core.utils import NFCoreTemplateConfig, NFCoreYamlLintConfig, custom_yaml_dumper log = logging.getLogger(__name__) @@ -57,7 +57,7 @@ def __init__( template_config: Optional[Union[CreateConfig, str, Path]] = None, organisation: str = "nf-core", from_config_file: bool = False, - default_branch: Optional[str] = None, + default_branch: str = "master", is_interactive: bool = False, ) -> None: if isinstance(template_config, CreateConfig): @@ -87,8 +87,17 @@ def __init__( # Read features yaml file self.template_features_yml = load_features_yaml() + # Set fields used by the class methods + self.no_git = no_git + self.default_branch = default_branch + self.is_interactive = is_interactive + if self.config.outdir is None: self.config.outdir = str(Path.cwd()) + + # Get the default branch name from the Git configuration + self.get_default_branch() + self.jinja_params, self.skip_areas = self.obtain_jinja_params_dict( self.config.skip_features or [], str(self.config.outdir) ) @@ -107,11 +116,6 @@ def __init__( # Set convenience variables self.name = self.config.name - - # Set fields used by the class methods - self.no_git = no_git - self.default_branch = default_branch - self.is_interactive = is_interactive self.force = self.config.force if self.config.outdir == ".": @@ -233,6 +237,7 @@ def obtain_jinja_params_dict( jinja_params["name_docker"] = jinja_params["name"].replace(jinja_params["org"], jinja_params["prefix_nodash"]) jinja_params["logo_light"] = f"{jinja_params['name_noslash']}_logo_light.png" jinja_params["logo_dark"] = f"{jinja_params['name_noslash']}_logo_dark.png" + jinja_params["default_branch"] = self.default_branch if config_yml is not None: if ( hasattr(config_yml, "lint") @@ -254,12 +259,21 @@ def obtain_jinja_params_dict( def init_pipeline(self): """Creates the nf-core pipeline.""" + # Make the new pipeline self.render_template() # Init the git repository and make the first commit if not self.no_git: self.git_init_pipeline() + # Run prettier on files + if self.config.skip_features is None or not ( + "code_linters" in self.config.skip_features or "github" in self.config.skip_features + ): + current_dir = Path.cwd() + os.chdir(self.outdir) + run_prettier_on_file([str(f) for f in self.outdir.glob("**/*")]) + os.chdir(current_dir) if self.config.is_nfcore and not self.is_interactive: log.info( @@ -292,7 +306,6 @@ def render_template(self) -> None: template_dir = Path(nf_core.__file__).parent / "pipeline-template" object_attrs = self.jinja_params object_attrs["nf_core_version"] = nf_core.__version__ - # Can't use glob.glob() as need recursive hidden dotfiles - https://stackoverflow.com/a/58126417/713980 template_files = list(Path(template_dir).glob("**/*")) template_files += list(Path(template_dir).glob("*")) @@ -357,7 +370,7 @@ def render_template(self) -> None: # Make a logo and save it, if it is a nf-core pipeline self.make_pipeline_logo() - if self.config.skip_features is None or "ro-crate" not in self.config.skip_features: + if self.config.skip_features is None or "rocrate" not in self.config.skip_features: # Create the RO-Crate 
metadata file rocrate_obj = ROCrate(self.outdir) rocrate_obj.create_rocrate(json_path=self.outdir / "ro-crate-metadata.json") @@ -370,12 +383,9 @@ def render_template(self) -> None: if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: config_yml.template = NFCoreTemplateConfig(**self.config.model_dump(exclude_none=True)) - yaml.safe_dump(config_yml.model_dump(exclude_none=True), fh) + yaml.dump(config_yml.model_dump(exclude_none=True), fh, Dumper=custom_yaml_dumper()) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") - # Run prettier on files - run_prettier_on_file([str(f) for f in self.outdir.glob("**/*")]) - def fix_linting(self): """ Updates the .nf-core.yml with linting configurations @@ -403,7 +413,13 @@ def fix_linting(self): if config_fn is not None and nf_core_yml is not None: nf_core_yml.lint = NFCoreYamlLintConfig(**lint_config) with open(self.outdir / config_fn, "w") as fh: - yaml.dump(nf_core_yml.model_dump(exclude_none=True), fh, default_flow_style=False, sort_keys=False) + yaml.dump( + nf_core_yml.model_dump(exclude_none=True), + fh, + sort_keys=False, + default_flow_style=False, + Dumper=custom_yaml_dumper(), + ) def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" @@ -421,20 +437,18 @@ def make_pipeline_logo(self): force=bool(self.force), ) - def git_init_pipeline(self) -> None: - """Initialises the new pipeline as a Git repository and submits first commit. - - Raises: - UserWarning: if Git default branch is set to 'dev' or 'TEMPLATE'. - """ - default_branch: Optional[str] = self.default_branch + def get_default_branch(self) -> None: + """Gets the default branch name from the Git configuration.""" try: - default_branch = default_branch or str(git.config.GitConfigParser().get_value("init", "defaultBranch")) + self.default_branch = ( + str(git.config.GitConfigParser().get_value("init", "defaultBranch")) or "master" + ) # default to master + log.debug(f"Default branch name: {self.default_branch}") except configparser.Error: log.debug("Could not read init.defaultBranch") - if default_branch in ["dev", "TEMPLATE"]: + if self.default_branch in ["dev", "TEMPLATE"]: raise UserWarning( - f"Your Git defaultBranch '{default_branch}' is incompatible with nf-core.\n" + f"Your Git defaultBranch '{self.default_branch}' is incompatible with nf-core.\n" "'dev' and 'TEMPLATE' can not be used as default branch name.\n" "Set the default branch name with " "[white on grey23] git config --global init.defaultBranch [/]\n" @@ -442,12 +456,19 @@ def git_init_pipeline(self) -> None: "Pipeline git repository will not be initialised." ) + def git_init_pipeline(self) -> None: + """Initialises the new pipeline as a Git repository and submits first commit. + + Raises: + UserWarning: if Git default branch is set to 'dev' or 'TEMPLATE'. 
+ """ + log.info("Initialising local pipeline git repository") repo = git.Repo.init(self.outdir) repo.git.add(A=True) repo.index.commit(f"initial template build from nf-core/tools, version {nf_core.__version__}") - if default_branch: - repo.active_branch.rename(default_branch) + if self.default_branch: + repo.active_branch.rename(self.default_branch) try: repo.git.branch("TEMPLATE") repo.git.branch("dev") diff --git a/nf_core/pipelines/create/template_features.yml b/nf_core/pipelines/create/template_features.yml index 9841879e83..fa24debffe 100644 --- a/nf_core/pipelines/create/template_features.yml +++ b/nf_core/pipelines/create/template_features.yml @@ -148,6 +148,10 @@ is_nfcore: - "docs/images/nf-core-{{short_name}}_logo_light.png" - "docs/images/nf-core-{{short_name}}_logo_dark.png" - ".github/ISSUE_TEMPLATE/bug_report.yml" + - ".github/CONTRIBUTING.md" + - ".github/PULL_REQUEST_TEMPLATE.md" + - "assets/email_template.txt" + - "docs/README.md" nextflow_config: - "manifest.name" - "manifest.homePage" @@ -445,6 +449,8 @@ rocrate: linting: files_warn: - "ro-crate-metadata.json" + files_unchanged: + - ".prettierignore" vscode: skippable_paths: - ".vscode" diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index d37dce86d1..11adebce2c 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1744,7 +1744,7 @@ def tidy_tags_and_branches(self): for tag in tags_to_remove: self.repo.delete_tag(tag) - # switch to a revision that should be kept, because deleting heads fails, if they are checked out (e.g. "master") + # switch to a revision that should be kept, because deleting heads fails, if they are checked out (e.g. "main") self.checkout(self.revision[0]) # delete unwanted heads/branches from repository diff --git a/nf_core/pipelines/lint/actions_awsfulltest.py b/nf_core/pipelines/lint/actions_awsfulltest.py index 7ea167f6c9..080ae3583e 100644 --- a/nf_core/pipelines/lint/actions_awsfulltest.py +++ b/nf_core/pipelines/lint/actions_awsfulltest.py @@ -42,7 +42,7 @@ def actions_awsfulltest(self) -> Dict[str, List[str]]: # Check that the action is only turned on for published releases try: - if wf[True]["pull_request"]["branches"] != ["master"]: + if wf[True]["pull_request"]["branches"] != ["main", "master"]: raise AssertionError() if wf[True]["pull_request_review"]["types"] != ["submitted"]: raise AssertionError() diff --git a/nf_core/pipelines/lint/version_consistency.py b/nf_core/pipelines/lint/version_consistency.py index 5fe24ed723..2f9cead83c 100644 --- a/nf_core/pipelines/lint/version_consistency.py +++ b/nf_core/pipelines/lint/version_consistency.py @@ -5,7 +5,7 @@ def version_consistency(self): """Pipeline and container version number consistency. .. note:: This test only runs when the ``--release`` flag is set for ``nf-core pipelines lint``, - or ``$GITHUB_REF`` is equal to ``master``. + or ``$GITHUB_REF`` is equal to ``main``. 
This lint fetches the pipeline version number from three possible locations: diff --git a/nf_core/pipelines/lint_utils.py b/nf_core/pipelines/lint_utils.py index b4c56c6007..a6b98b1899 100644 --- a/nf_core/pipelines/lint_utils.py +++ b/nf_core/pipelines/lint_utils.py @@ -97,7 +97,7 @@ def run_prettier_on_file(file: Union[Path, str, List[str]]) -> None: all_lines = [line for line in e.stdout.decode().split("\n")] files = "\n".join(all_lines[3:]) log.debug(f"The following files were modified by prettier:\n {files}") - elif e.stderr.decode(): + else: log.warning( "There was an error running the prettier pre-commit hook.\n" f"STDOUT: {e.stdout.decode()}\nSTDERR: {e.stderr.decode()}" diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index 915f203f00..bc868273c4 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -216,6 +216,9 @@ def set_main_entity(self, main_entity_filename: str): ) self.crate.mainEntity.append_to("version", self.version, compact=True) + # remove duplicate entries for version + self.crate.mainEntity["version"] = list(set(self.crate.mainEntity["version"])) + # get keywords from nf-core website remote_workflows = requests.get("https://nf-co.re/pipelines.json").json()["remote_workflows"] # go through all remote workflows and find the one that matches the pipeline name @@ -236,6 +239,9 @@ def set_main_entity(self, main_entity_filename: str): self.crate.mainEntity.append_to("license", self.crate.license) self.crate.mainEntity.append_to("name", self.crate.name) + # remove duplicate entries for name + self.crate.mainEntity["name"] = list(set(self.crate.mainEntity["name"])) + if "dev" in self.version: self.crate.creativeWorkStatus = "InProgress" else: @@ -261,14 +267,26 @@ def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None: # add author entity to crate try: - authors = self.pipeline_obj.nf_config["manifest.author"].split(",") - # remove spaces - authors = [a.strip() for a in authors] + authors = [] + if "manifest.author" in self.pipeline_obj.nf_config: + authors.extend([a.strip() for a in self.pipeline_obj.nf_config["manifest.author"].split(",")]) + if "manifest.contributor" in self.pipeline_obj.nf_config: + authors.extend( + [ + c.get("name", "").strip() + for c in self.pipeline_obj.nf_config["manifest.contributor"] + if "name" in c + ] + ) + if not authors: + raise KeyError("No authors found") # add manifest authors as maintainer to crate except KeyError: - log.error("No author field found in manifest of nextflow.config") + log.error("No author or contributor fields found in manifest of nextflow.config") return + # remove duplicates + authors = list(set(authors)) # look at git contributors for author names try: git_contributors: Set[str] = set() @@ -330,6 +348,25 @@ def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None: if author in authors: wf_file.append_to("maintainer", author_entitity) + def update_rocrate(self) -> bool: + """ + Update the rocrate file + """ + # check if we need to output a json file and/or a zip file based on the file extensions + # try to find a json file + json_path: Optional[Path] = None + potential_json_path = Path(self.pipeline_dir, "ro-crate-metadata.json") + if potential_json_path.exists(): + json_path = potential_json_path + + # try to find a zip file + zip_path: Optional[Path] = None + potential_zip_path = Path(self.pipeline_dir, "ro-crate.crate.zip") + if potential_zip_path.exists(): + zip_path = potential_zip_path + + return 
self.create_rocrate(json_path=json_path, zip_path=zip_path) + def get_orcid(name: str) -> Optional[str]: """ diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 61fd6bc2d7..a08dd0a2d0 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -517,11 +517,13 @@ def validate_schema_title_description(self, schema=None): if "title" not in self.schema: raise AssertionError("Schema missing top-level `title` attribute") # Validate that id, title and description match the pipeline manifest - id_attr = "https://raw.githubusercontent.com/{}/master/nextflow_schema.json".format( + id_attr = "https://raw.githubusercontent.com/{}/main/nextflow_schema.json".format( self.pipeline_manifest["name"].strip("\"'") ) - if self.schema["$id"] != id_attr: - raise AssertionError(f"Schema `$id` should be `{id_attr}`\n Found `{self.schema['$id']}`") + if self.schema["$id"] not in [id_attr, id_attr.replace("/main/", "/master/")]: + raise AssertionError( + f"Schema `$id` should be `{id_attr}` or {id_attr.replace('/main/', '/master/')}. \n Found `{self.schema['$id']}`" + ) title_attr = "{} pipeline parameters".format(self.pipeline_manifest["name"].strip("\"'")) if self.schema["title"] != title_attr: diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index 7ca825f04f..8e9e62430a 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -50,7 +50,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): subworkflow.warned.append( ( "test_dir_exists", - "nf-test directory is missing", + "Migrate pytest-workflow to nf-test", subworkflow.nftest_testdir, ) ) diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index efb7a8e03e..43f9b8046e 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -9,10 +9,7 @@ import git from git.exc import GitCommandError -from nf_core.components.constants import ( - NF_CORE_MODULES_NAME, - NF_CORE_MODULES_REMOTE, -) +from nf_core.components.constants import NF_CORE_MODULES_DEFAULT_BRANCH, NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.utils import load_tools_config log = logging.getLogger(__name__) @@ -186,7 +183,7 @@ def setup_branch(self, branch): if branch is None: # Don't bother fetching default branch if we're using nf-core if self.remote_url == NF_CORE_MODULES_REMOTE: - self.branch = "master" + self.branch = NF_CORE_MODULES_DEFAULT_BRANCH else: self.branch = self.get_default_branch() else: diff --git a/nf_core/utils.py b/nf_core/utils.py index 30b0743493..dc208c0a78 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1083,7 +1083,7 @@ def get_repo_releases_branches(pipeline, wfs): raise AssertionError(f"Not able to find pipeline '{pipeline}'") # Get branch information from github api - should be no need to check if the repo exists again - branch_response = gh_api.safe_get(f"https://api.github.com/repos/{pipeline}/branches") + branch_response = gh_api.safe_get(f"https://api.github.com/repos/{pipeline}/branches?per_page=100") for branch in branch_response.json(): if ( branch["name"] != "TEMPLATE" diff --git a/requirements-dev.txt b/requirements-dev.txt index aab9b1e5d7..04c6372d72 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,7 +6,7 @@ responses ruff Sphinx sphinx-rtd-theme -textual-dev==1.6.1 +textual-dev==1.5.1 types-PyYAML types-requests types-jsonschema diff --git a/setup.py b/setup.py index 11b3022494..b5c5de9a4f 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ 
from setuptools import find_packages, setup -version = "3.0.3dev" +version = "3.1.1dev" with open("README.md") as f: readme = f.read() diff --git a/tests/pipelines/lint/test_actions_awstest.py b/tests/pipelines/lint/test_actions_awstest.py index 51b55cb867..01dc9f6168 100644 --- a/tests/pipelines/lint/test_actions_awstest.py +++ b/tests/pipelines/lint/test_actions_awstest.py @@ -24,7 +24,7 @@ def test_actions_awstest_fail(self): new_pipeline = self._make_pipeline_copy() with open(Path(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = yaml.safe_load(fh) - awstest_yml[True]["push"] = ["master"] + awstest_yml[True]["push"] = ["main"] with open(Path(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: yaml.dump(awstest_yml, fh) diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index ca7353d50d..f33ac3a2ee 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -25,7 +25,7 @@ def setUp(self) -> None: ########################## class TestPipelinesLint(TestLint): def test_run_linting_function(self): - """Run the master run_linting() function in lint.py + """Run the run_linting() function in lint.py We don't really check any of this code as it's just a series of function calls and we're testing each of those individually. This is mostly to check for syntax errors.""" diff --git a/tests/pipelines/test_rocrate.py b/tests/pipelines/test_rocrate.py index 01a77ecd76..ac86e64bdf 100644 --- a/tests/pipelines/test_rocrate.py +++ b/tests/pipelines/test_rocrate.py @@ -1,5 +1,6 @@ """Test the nf-core pipelines rocrate command""" +import json import shutil import tempfile from pathlib import Path @@ -12,6 +13,7 @@ import nf_core.pipelines.create.create import nf_core.pipelines.rocrate import nf_core.utils +from nf_core.pipelines.bump_version import bump_pipeline_version from ..test_pipelines import TestPipelines @@ -125,3 +127,36 @@ def test_rocrate_creation_for_fetchngs(self): # Clean up shutil.rmtree(tmp_dir) + + def test_update_rocrate(self): + """Run the nf-core rocrate command with a zip output""" + + assert self.rocrate_obj.create_rocrate(json_path=self.pipeline_dir, zip_path=self.pipeline_dir) + + # read the crate json file + with open(Path(self.pipeline_dir, "ro-crate-metadata.json")) as f: + crate = json.load(f) + + # check the old version + self.assertEqual(crate["@graph"][2]["version"][0], "1.0.0dev") + # check creativeWorkStatus is InProgress + self.assertEqual(crate["@graph"][0]["creativeWorkStatus"], "InProgress") + + # bump version + bump_pipeline_version(self.pipeline_obj, "1.1.0") + + # Check that the crate was created + self.assertTrue(Path(self.pipeline_dir, "ro-crate.crate.zip").exists()) + + # Check that the crate was updated + self.assertTrue(Path(self.pipeline_dir, "ro-crate-metadata.json").exists()) + + # read the crate json file + with open(Path(self.pipeline_dir, "ro-crate-metadata.json")) as f: + crate = json.load(f) + + # check that the version was updated + self.assertEqual(crate["@graph"][2]["version"][0], "1.1.0") + + # check creativeWorkStatus is Stable + self.assertEqual(crate["@graph"][0]["creativeWorkStatus"], "Stable") diff --git a/tests/pipelines/test_sync.py b/tests/pipelines/test_sync.py index 8bf8a3c4ec..9959975e27 100644 --- a/tests/pipelines/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -43,14 +43,14 @@ def mocked_requests_get(url) -> MockResponse: { "state": "closed", "head": {"ref": "nf-core-template-merge-2"}, - "base": {"ref": "master"}, + "base": {"ref": "main"}, "html_url": 
"pr_url", } ] + [ { "state": "open", "head": {"ref": f"nf-core-template-merge-{branch_no}"}, - "base": {"ref": "master"}, + "base": {"ref": "main"}, "html_url": "pr_url", } for branch_no in range(3, 7) @@ -345,7 +345,7 @@ def test_close_open_pr(self, mock_patch, mock_post) -> None: pr: Dict[str, Union[str, Dict[str, str]]] = { "state": "open", "head": {"ref": "nf-core-template-merge-3"}, - "base": {"ref": "master"}, + "base": {"ref": "main"}, "html_url": "pr_html_url", "url": "url_to_update_pr", "comments_url": "pr_comments_url", @@ -368,7 +368,7 @@ def test_close_open_pr_fail(self, mock_patch, mock_post): pr = { "state": "open", "head": {"ref": "nf-core-template-merge-3"}, - "base": {"ref": "master"}, + "base": {"ref": "main"}, "html_url": "pr_html_url", "url": "bad_url_to_update_pr", "comments_url": "pr_comments_url", diff --git a/tests/utils.py b/tests/utils.py index cffe8ba103..1aa3750d8e 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -14,7 +14,7 @@ import nf_core.modules import nf_core.pipelines.create.create from nf_core import __version__ -from nf_core.utils import NFCoreTemplateConfig, NFCoreYamlConfig +from nf_core.utils import NFCoreTemplateConfig, NFCoreYamlConfig, custom_yaml_dumper TEST_DATA_DIR = Path(__file__).parent / "data" OLD_TRIMGALORE_SHA = "9b7a3bdefeaad5d42324aa7dd50f87bea1b04386" @@ -138,7 +138,7 @@ def create_tmp_pipeline(no_git: bool = False) -> Tuple[Path, Path, str, Path]: bump_version=None, ) with open(str(Path(pipeline_dir, ".nf-core.yml")), "w") as fh: - yaml.dump(nf_core_yml.model_dump(), fh) + yaml.dump(nf_core_yml.model_dump(), fh, Dumper=custom_yaml_dumper()) nf_core.pipelines.create.create.PipelineCreate( pipeline_name, "it is mine", "me", no_git=no_git, outdir=pipeline_dir, force=True From ec41bf2e6b8aa8cc5a01bcd5c6c3d04885d967f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Cavalcante?= Date: Mon, 16 Dec 2024 11:24:32 -0300 Subject: [PATCH 60/60] test: Use correct org name in remove (#23) --- tests/subworkflows/test_remove.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/subworkflows/test_remove.py b/tests/subworkflows/test_remove.py index a4ff6a76ee..a0ba6b525a 100644 --- a/tests/subworkflows/test_remove.py +++ b/tests/subworkflows/test_remove.py @@ -106,9 +106,9 @@ def test_subworkflows_remove_subworkflow_keep_installed_cross_org_module(self): self.subworkflow_install_cross_org.install("fastq_trim_fastp_fastqc") self.mods_install.install("fastqc") - subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "jvfe") + subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core-test") fastq_trim_fastp_fastqc_path = Path(subworkflow_path, "fastq_trim_fastp_fastqc") - fastqc_path = Path(self.subworkflow_install.directory, "modules", "jvfe", "fastqc") + fastqc_path = Path(self.subworkflow_install.directory, "modules", "nf-core-test", "fastqc") nfcore_fastqc_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "fastqc") mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json()