diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index a90c06f9a9..75da414db6 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -92,7 +92,7 @@ pre-commit run --all ## API Documentation We aim to write function docstrings according to the [Google Python style-guide](https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings). These are used to automatically generate package documentation on the nf-core website using Sphinx. -You can find this documentation here: [https://nf-co.re/tools-docs/](https://nf-co.re/tools-docs/) +You can find this documentation here: [https://nf-co.re/tools/docs/](https://nf-co.re/tools/docs/) If you would like to test the documentation, you can install Sphinx locally by following Sphinx's [installation instruction](https://www.sphinx-doc.org/en/master/usage/installation.html). Once done, you can run `make clean` and then `make html` in the `docs/api` directory of `nf-core tools`. diff --git a/.github/renovate.json5 b/.github/renovate.json5 new file mode 100644 index 0000000000..f9b377c615 --- /dev/null +++ b/.github/renovate.json5 @@ -0,0 +1,5 @@ +{ + $schema: "https://docs.renovatebot.com/renovate-schema.json", + extends: ["github>nf-core/ops//.github/renovate/default.json5"], + baseBranches: ["dev"], +} diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 8f6836f309..57dbe86d65 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -17,7 +17,7 @@ env: jobs: MakeTestWorkflow: - runs-on: ubuntu-latest + runs-on: self-hosted env: NXF_ANSI_LOG: false strategy: @@ -27,7 +27,7 @@ jobs: - "latest-everything" steps: # Get the repo code - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 name: Check out source-code repository # Set up nf-core/tools @@ -48,84 +48,101 @@ jobs: version: ${{ matrix.NXF_VER }} # Install the Prettier linting tools - - uses: actions/setup-node@v3 - - - name: Install Prettier - run: npm install -g prettier + - uses: actions/setup-node@v4 + with: + node-version: "20" - # Install the editorconfig linting tools - - name: Install editorconfig-checker - run: npm install -g editorconfig-checker + - name: Install Prettier and editorconfig-checker + run: npm install -g prettier editorconfig-checker # Build a pipeline from the template - name: nf-core create - run: nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain + run: | + mkdir create-lint-wf && cd create-lint-wf + export NXF_WORK=$(pwd) + nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain # Try syncing it before we change anything - name: nf-core sync run: nf-core --log-file log.txt sync --dir nf-core-testpipeline/ + working-directory: create-lint-wf # Build a module from the template - name: nf-core modules create run: nf-core --log-file log.txt modules create bpipe --dir nf-core-testpipeline --author @nf-core-bot --label process_low --meta + working-directory: create-lint-wf # Run code style linting - name: Run Prettier --check - run: prettier --check nf-core-testpipeline + run: prettier --check create-lint-wf/nf-core-testpipeline - name: Run ECLint check run: editorconfig-checker -exclude README.md $(find nf-core-testpipeline/.* -type f | grep -v '.git\|.py\|md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile') + working-directory: create-lint-wf # Update modules to the latest 
version - name: nf-core modules update run: nf-core --log-file log.txt modules update --dir nf-core-testpipeline --all --no-preview + working-directory: create-lint-wf # Remove TODO statements - name: remove TODO run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; + working-directory: create-lint-wf # Replace zenodo.XXXXXX to pass readme linting - name: replace zenodo.XXXXXX run: find nf-core-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; + working-directory: create-lint-wf # Run nf-core linting - name: nf-core lint run: nf-core --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned + working-directory: create-lint-wf # Run the other nf-core commands - name: nf-core list run: nf-core --log-file log.txt list + working-directory: create-lint-wf # - name: nf-core licences # run: nf-core --log-file log.txt licences nf-core-testpipeline - name: nf-core schema run: nf-core --log-file log.txt schema build --dir nf-core-testpipeline/ --no-prompts + working-directory: create-lint-wf - name: nf-core bump-version run: nf-core --log-file log.txt bump-version --dir nf-core-testpipeline/ 1.1 + working-directory: create-lint-wf - name: nf-core lint in release mode run: nf-core --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned --release + working-directory: create-lint-wf - name: nf-core modules install run: nf-core --log-file log.txt modules install fastqc --dir nf-core-testpipeline/ --force + working-directory: create-lint-wf - name: nf-core modules install gitlab run: nf-core --log-file log.txt modules --git-remote https://gitlab.com/nf-core/modules-test.git --branch branch-tester install fastp --dir nf-core-testpipeline/ + working-directory: create-lint-wf - name: nf-core modules list local run: nf-core --log-file log.txt modules list local --dir nf-core-testpipeline/ + working-directory: create-lint-wf - name: nf-core modules list remote run: nf-core --log-file log.txt modules list remote + working-directory: create-lint-wf - name: nf-core modules list remote gitlab run: nf-core --log-file log.txt modules --git-remote https://gitlab.com/nf-core/modules-test.git list remote + working-directory: create-lint-wf - name: Upload log file artifact if: ${{ always() }} uses: actions/upload-artifact@v3 with: name: nf-core-log-file - path: log.txt + path: create-lint-wf/log.txt diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 302399b3dc..37cbf65c7d 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -16,6 +16,7 @@ concurrency: env: NXF_ANSI_LOG: false + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} jobs: RunTestWorkflow: @@ -32,7 +33,7 @@ jobs: - "template_skip_nf_core_configs.yml" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 name: Check out source-code repository - name: Set up Python 3.11 @@ -51,7 +52,9 @@ jobs: version: latest-everything # Install the Prettier linting tools - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 + with: + node-version: "20" - name: Install Prettier run: npm install -g prettier @@ -63,75 +66,86 @@ jobs: # Create template files - name: Create template skip all (except github) run: | - printf "prefix: my-prefix\nskip: ['ci', 'github_badges', 'igenomes', 'nf_core_configs']" > template_skip_all.yml + mkdir create-test-lint-wf + export NXF_WORK=$(pwd) + printf "prefix: 
my-prefix\nskip: ['ci', 'github_badges', 'igenomes', 'nf_core_configs']" > create-test-lint-wf/template_skip_all.yml - name: Create template skip github_badges run: | - printf "prefix: my-prefix\nskip: github_badges" > template_skip_github_badges.yml + printf "prefix: my-prefix\nskip: github_badges" > create-test-lint-wf/template_skip_github_badges.yml - name: Create template skip igenomes run: | - printf "prefix: my-prefix\nskip: igenomes" > template_skip_igenomes.yml + printf "prefix: my-prefix\nskip: igenomes" > create-test-lint-wf/template_skip_igenomes.yml - name: Create template skip ci run: | - printf "prefix: my-prefix\nskip: ci" > template_skip_ci.yml + printf "prefix: my-prefix\nskip: ci" > create-test-lint-wf/template_skip_ci.yml - name: Create template skip nf_core_configs run: | - printf "prefix: my-prefix\nskip: nf_core_configs" > template_skip_nf_core_configs.yml + printf "prefix: my-prefix\nskip: nf_core_configs" > create-test-lint-wf/template_skip_nf_core_configs.yml # Create a pipeline from the template - name: create a pipeline from the template ${{ matrix.TEMPLATE }} run: | + cd create-test-lint-wf nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml ${{ matrix.TEMPLATE }} - name: run the pipeline run: | + cd create-test-lint-wf nextflow run my-prefix-testpipeline -profile test,docker --outdir ./results # Remove results folder before linting - name: remove results folder run: | - rm -rf ./results + rm -rf create-test-lint-wf/results # Try syncing it before we change anything - name: nf-core sync - run: nf-core --log-file log.txt sync --dir my-prefix-testpipeline/ + run: nf-core --log-file log.txt sync --dir create-test-lint-wf/my-prefix-testpipeline/ # Run code style linting - name: Run Prettier --check - run: prettier --check my-prefix-testpipeline + run: prettier --check create-test-lint-wf/my-prefix-testpipeline - name: Run ECLint check run: editorconfig-checker -exclude README.md $(find my-prefix-testpipeline/.* -type f | grep -v '.git\|.py\|md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile') + working-directory: create-test-lint-wf # Remove TODO statements - name: remove TODO run: find my-prefix-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; + working-directory: create-test-lint-wf # Replace zenodo.XXXXXX to pass readme linting - name: replace zenodo.XXXXXX run: find my-prefix-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; + working-directory: create-test-lint-wf # Run nf-core linting - name: nf-core lint run: nf-core --log-file log.txt --hide-progress lint --dir my-prefix-testpipeline --fail-warned + working-directory: create-test-lint-wf # Run bump-version - name: nf-core bump-version run: nf-core --log-file log.txt bump-version --dir my-prefix-testpipeline/ 1.1 + working-directory: create-test-lint-wf # Run nf-core linting in release mode - name: nf-core lint in release mode run: nf-core --log-file log.txt --hide-progress lint --dir my-prefix-testpipeline --fail-warned --release + working-directory: create-test-lint-wf - name: Tar files run: tar -cvf artifact_files.tar log.txt template_skip*.yml + working-directory: create-test-lint-wf - name: Upload log file artifact if: ${{ always() }} uses: actions/upload-artifact@v3 with: name: nf-core-log-file - path: artifact_files.tar + path: create-test-lint-wf/artifact_files.tar diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml 
index 5faa59772c..026b0a889b 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -26,7 +26,7 @@ jobs: - "23.04.0" - "latest-everything" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 name: Check out source-code repository - name: Set up Python 3.11 @@ -46,6 +46,8 @@ jobs: - name: Run nf-core/tools run: | + mkdir create-test-wf && cd create-test-wf + export NXF_WORK=$(pwd) nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain nextflow run nf-core-testpipeline -profile test,docker --outdir ./results @@ -54,4 +56,4 @@ jobs: uses: actions/upload-artifact@v3 with: name: nf-core-log-file - path: log.txt + path: create-test-wf/log.txt diff --git a/.github/workflows/deploy-pypi.yml b/.github/workflows/deploy-pypi.yml index 1f539fe09a..62c53508d8 100644 --- a/.github/workflows/deploy-pypi.yml +++ b/.github/workflows/deploy-pypi.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 name: Check out source-code repository - name: Set up Python 3.11 diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml index ed2314046a..4184bc5e59 100644 --- a/.github/workflows/fix-linting.yml +++ b/.github/workflows/fix-linting.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: # Use the @nf-core-bot token to check out so we can push later - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: token: ${{ secrets.nf_core_bot_auth_token }} @@ -24,7 +24,9 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 + with: + node-version: "20" - name: Install Prettier run: npm install -g prettier @prettier/plugin-php diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml index 045f393f42..23972c56f6 100644 --- a/.github/workflows/lint-code.yml +++ b/.github/workflows/lint-code.yml @@ -14,11 +14,13 @@ concurrency: jobs: EditorConfig: - runs-on: ubuntu-latest + runs-on: ["self-hosted"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 + with: + node-version: "20" - name: Install editorconfig-checker run: npm install -g editorconfig-checker @@ -28,11 +30,13 @@ jobs: run: editorconfig-checker -exclude README.md $(git ls-files | grep -v 'test\|.py\|md\|json\|yml\|yaml\|html\|css\|Makefile') Prettier: - runs-on: ubuntu-latest + runs-on: ["self-hosted"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 + with: + node-version: "20" - name: Install Prettier run: npm install -g prettier @@ -41,9 +45,9 @@ jobs: run: prettier --check ${GITHUB_WORKSPACE} PythonBlack: - runs-on: ubuntu-latest + runs-on: ["self-hosted"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Check code lints with Black uses: psf/black@stable @@ -71,10 +75,10 @@ jobs: allow-repeats: false isort: - runs-on: ubuntu-latest + runs-on: ["self-hosted"] steps: - name: Check out source-code repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python 3.11 uses: actions/setup-python@v4 @@ -85,3 +89,43 @@ jobs: with: isortVersion: "latest" requirementsFiles: "requirements.txt requirements-dev.txt" + + static-type-check: + runs-on: ["self-hosted"] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v4 + with: + 
python-version: 3.11 + cache: "pip" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip -r requirements-dev.txt + pip install -e . + + - name: Cache nf-test installation + id: cache-software + uses: actions/cache@v3 + with: + path: | + /usr/local/bin/nf-test + /home/runner/.nf-test/nf-test.jar + key: ${{ runner.os }}-${{ env.NFTEST_VER }}-nftest + + - name: Install nf-test + if: steps.cache-software.outputs.cache-hit != 'true' + run: | + wget -qO- https://code.askimed.com/install/nf-test | bash + sudo mv nf-test /usr/local/bin/ + + - name: Get Python changed files + id: changed-py-files + uses: tj-actions/changed-files@v23 + with: + files: | + *.py + **/*.py + - name: Run if any of the listed files above is changed + if: steps.changed-py-files.outputs.any_changed == 'true' + run: mypy ${{ steps.changed-py-files.outputs.all_changed_files }} diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml index dea28cdd35..1230bfc9d3 100644 --- a/.github/workflows/push_dockerhub_dev.yml +++ b/.github/workflows/push_dockerhub_dev.yml @@ -13,7 +13,7 @@ concurrency: jobs: push_dockerhub: name: Push new Docker image to Docker Hub (dev) - runs-on: ubuntu-latest + runs-on: self-hosted # Only run for the nf-core repo, for releases and merged PRs if: ${{ github.repository == 'nf-core/tools' }} env: @@ -23,7 +23,7 @@ jobs: fail-fast: false steps: - name: Check out code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Build nfcore/tools:dev docker image run: docker build --no-cache . -t nfcore/tools:dev diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml index 857b241022..49ce17dd84 100644 --- a/.github/workflows/push_dockerhub_release.yml +++ b/.github/workflows/push_dockerhub_release.yml @@ -23,7 +23,7 @@ jobs: fail-fast: false steps: - name: Check out code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Build nfcore/tools:latest docker image run: docker build --no-cache . 
-t nfcore/tools:latest diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 01067848cf..b6f3592165 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -21,46 +21,80 @@ concurrency: cancel-in-progress: true env: - GITHUB_TOKEN: ${{ github.token }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} jobs: - pytest: - runs-on: ${{ matrix.runner }} + setup: + runs-on: ["ubuntu-latest"] strategy: matrix: python-version: ["3.8", "3.11"] runner: ["ubuntu-latest"] include: - - runner: "ubuntu-20.04" - python-version: "3.8" + - python-version: "3.8" + runner: "ubuntu-20.04" steps: - - uses: actions/checkout@v3 + - name: Check conditions + id: conditions + run: echo "run-tests=${{ github.ref == 'refs/heads/master' || (matrix.runner == 'ubuntu-20.04' && matrix.python-version == '3.8') }}" >> "$GITHUB_OUTPUT" + + outputs: + python-version: ${{ matrix.python-version }} + runner: ${{ matrix.runner }} + run-tests: ${{ steps.conditions.outputs.run-tests }} + + test: + name: Test with Python ${{ needs.setup.outputs.python-version }} on ${{ needs.setup.outputs.runner }} + needs: setup + if: ${{ needs.setup.outputs.run-tests }} + runs-on: ${{ needs.setup.outputs.runner }} + steps: + - uses: actions/checkout@v4 name: Check out source-code repository - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + - name: Set up Python ${{ needs.setup.outputs.python-version }} + uses: actions/setup-python@v4 with: - python-version: ${{ matrix.python-version }} + python-version: ${{ needs.setup.outputs.python-version }} + cache: "pip" - - name: Install python dependencies + - name: Install dependencies run: | python -m pip install --upgrade pip -r requirements-dev.txt pip install -e . - name: Downgrade git to the Ubuntu official repository's version - if: ${{ matrix.runner == 'ubuntu-20.04' && matrix.python-version == '3.8' }} + if: ${{ needs.setup.outputs.runner == 'ubuntu-20.04' && needs.setup.outputs.python-version == '3.8' }} run: | sudo apt update - sudo apt remove git git-man + sudo apt remove -y git git-man sudo add-apt-repository --remove ppa:git-core/ppa - sudo apt install git + sudo apt install -y git + - name: Get current date + id: date + run: echo "date=$(date +'%Y-%m')" >> $GITHUB_ENV - name: Install Nextflow uses: nf-core/setup-nextflow@v1 with: version: "latest-everything" + - name: Cache nf-test installation + id: cache-software + uses: actions/cache@v3 + with: + path: | + /usr/local/bin/nf-test + /home/runner/.nf-test/nf-test.jar + key: ${{ runner.os }}-nftest-${{ env.date }} + + - name: Install nf-test + if: steps.cache-software.outputs.cache-hit != 'true' + run: | + wget -qO- https://code.askimed.com/install/nf-test | bash + sudo mv nf-test /usr/local/bin/ + - name: Test with pytest run: python3 -m pytest tests/ --color=yes --cov-report=xml --cov-config=.github/.coveragerc --cov=nf_core diff --git a/.github/workflows/rich-codex.yml b/.github/workflows/rich-codex.yml index 54aaf240df..8368255390 100644 --- a/.github/workflows/rich-codex.yml +++ b/.github/workflows/rich-codex.yml @@ -6,7 +6,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out the repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 with: diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index fbbdacc8ab..94f8ee54e2 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -31,15 +31,15 @@ jobs: sync: needs: get-pipelines - runs-on: ubuntu-latest + runs-on: 
self-hosted strategy: matrix: ${{fromJson(needs.get-pipelines.outputs.matrix)}} fail-fast: false steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 name: Check out nf-core/tools - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 name: Check out nf-core/${{ matrix.pipeline }} with: repository: nf-core/${{ matrix.pipeline }} diff --git a/.github/workflows/tools-api-docs-dev.yml b/.github/workflows/tools-api-docs-dev.yml index add939aba1..51c25fa250 100644 --- a/.github/workflows/tools-api-docs-dev.yml +++ b/.github/workflows/tools-api-docs-dev.yml @@ -20,11 +20,11 @@ concurrency: jobs: api-docs: name: Build & push Sphinx API docs - runs-on: ubuntu-latest + runs-on: self-hosted steps: - name: Check out source-code repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python 3.11 uses: actions/setup-python@v4 diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml index f049d74ca0..b0869190d9 100644 --- a/.github/workflows/tools-api-docs-release.yml +++ b/.github/workflows/tools-api-docs-release.yml @@ -19,7 +19,7 @@ jobs: - ${{ github.event.release.tag_name }} steps: - name: Check out source-code repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python 3.11 uses: actions/setup-python@v4 diff --git a/.gitpod.yml b/.gitpod.yml index 0cc1006299..899f58e556 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -6,6 +6,9 @@ tasks: python -m pip install -r requirements-dev.txt pre-commit install --install-hooks nextflow self-update + - name: unset JAVA_TOOL_OPTIONS + command: | + unset JAVA_TOOL_OPTIONS vscode: extensions: # based on nf-core.nf-core-extensionpack - codezombiech.gitignore # Language support for .gitignore files diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b7aeeb5bc9..ad23a3c895 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,3 +11,18 @@ repos: rev: "v2.7.1" hooks: - id: prettier + - repo: https://github.com/asottile/pyupgrade + rev: v3.15.0 + hooks: + - id: pyupgrade + args: [--py38-plus] + - repo: https://github.com/pre-commit/mirrors-mypy + rev: "v1.7.1" # Use the sha / tag you want to point at + hooks: + - id: mypy + additional_dependencies: + - types-PyYAML + - types-requests + - types-jsonschema + - types-Markdown + - types-setuptools diff --git a/.prettierignore b/.prettierignore index 4cd77bb4ed..344cafca6e 100644 --- a/.prettierignore +++ b/.prettierignore @@ -3,5 +3,6 @@ adaptivecard.json slackreport.json docs/api/_build testing -nf_core/module-template/modules/meta.yml -nf_core/module-template/tests/test.yml +nf_core/module-template/meta.yml +nf_core/module-template/tests/tags.yml +nf_core/subworkflow-template/tests/tags.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index 9311f19785..9804e9e062 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,61 @@ # nf-core/tools: Changelog +# [v2.11 - Magnesium Dragon](https://github.com/nf-core/tools/releases/tag/2.11) + [2023-12-19] + +### Template + +- Fix writing files to a remote outdir in the NfcoreTemplate helper functions ([#2465](https://github.com/nf-core/tools/pull/2465)) +- Fancier syntax highlighting for example samplesheets in the usage.md template ([#2503](https://github.com/nf-core/tools/pull/2503)) +- Use closure for multiqc ext.args ([#2509](https://github.com/nf-core/tools/pull/2509)) +- Fix how the modules template references the conda environment file ([#2540](https://github.com/nf-core/tools/pull/2540)) +- Unset env variable 
JAVA_TOOL_OPTIONS in gitpod ([#2569](https://github.com/nf-core/tools/pull/2569)) +- Pin the version of nf-validation ([#2579](https://github.com/nf-core/tools/pull/2579)) +- Disable process selector warnings by default ([#2161](https://github.com/nf-core/tools/issues/2161)) +- Remove `docker.userEmulation` from nextflow.config in pipeline template ([#2580](https://github.com/nf-core/tools/pull/2580)) + +### Download + +- Add `docker://` prefix for absolute container URIs as well ([#2576](https://github.com/nf-core/tools/pull/2576)). +- Bugfix for AttributeError: `ContainerError` object has no attribute `absoluteURI` ([#2543](https://github.com/nf-core/tools/pull/2543)). + +### Linting + +- Fix incorrectly failing linting if 'modules' was not found in meta.yml ([#2447](https://github.com/nf-core/tools/pull/2447)) +- Correctly pass subworkflow linting test if `COMPONENT.out.versions` is used in the script ([#2448](https://github.com/nf-core/tools/pull/2448)) +- Add pyupgrade to pre-commit config and dev requirements as mentioned in [#2200](https://github.com/nf-core/tools/issues/2200) +- Check for spaces in modules container URLs ([#2452](https://github.com/nf-core/tools/issues/2452)) +- Correctly ignore `timeline.enabled`, `report.enabled`, `trace.enabled`, `dag.enabled` variables when linting a pipeline. ([#2507](https://github.com/nf-core/tools/pull/2507)) +- Lint that nf-test main.nf.test tags include all used components in chained tests ([#2572](https://github.com/nf-core/tools/pull/2572)) +- Don't fail linting if md5sums for empty files are found in a stub test ([#2571](https://github.com/nf-core/tools/pull/2571)) +- Check for existence of test profile ([#2478](https://github.com/nf-core/tools/pull/2478)) + +### Modules + +- Added stub test creation to `create_test_yml` ([#2476](https://github.com/nf-core/tools/pull/2476)) +- Replace ModulePatch with ComponentPatch ([#2482](https://github.com/nf-core/tools/pull/2482)) +- Fixed `nf-core modules lint` to work with new module structure for nf-test ([#2494](https://github.com/nf-core/tools/pull/2494)) +- Add option `--migrate-pytest` to create a module with nf-test, taking into account an existing module ([#2549](https://github.com/nf-core/tools/pull/2549)) +- When installing modules and subworkflows, automatically create the `./modules` directory if it doesn't exist ([#2563](https://github.com/nf-core/tools/issues/2563)) +- When `.nf-core.yml` is not found, create it in the current directory instead of the root filesystem ([#2237](https://github.com/nf-core/tools/issues/2237)) +- Modules `--migrate-pytest` copies template scripts ([#2568](https://github.com/nf-core/tools/pull/2568)) + +### Subworkflows + +- Added stub test creation to `create_test_yml` ([#2476](https://github.com/nf-core/tools/pull/2476)) +- Fixed `nf-core subworkflows lint` to work with new module structure for nf-test ([#2494](https://github.com/nf-core/tools/pull/2494)) +- Add option `--migrate-pytest` to create a subworkflow with nf-test, taking into account an existing subworkflow ([#2549](https://github.com/nf-core/tools/pull/2549)) + +### General + +- Update `schema build` functionality to automatically update defaults which have changed in the `nextflow.config` ([#2479](https://github.com/nf-core/tools/pull/2479)) +- Change testing framework for modules and subworkflows from pytest to nf-test ([#2490](https://github.com/nf-core/tools/pull/2490)) +- `bump_version` now keeps the indentation level of the updated version entries 
([#2514](https://github.com/nf-core/tools/pull/2514)) +- Add mypy to pre-commit config for the tools repo ([#2545](https://github.com/nf-core/tools/pull/2545)) +- Use Path objects for ComponentCreate and update the structure of components templates ([#2551](https://github.com/nf-core/tools/pull/2551)). +- GitPod base image: swap tool installation back to `conda` from `mamba` ([#2566](https://github.com/nf-core/tools/pull/2566)). +- Sort the `installed_by` list in `modules.json` ([#2570](https://github.com/nf-core/tools/pull/2570)). +- Unset env variable JAVA_TOOL_OPTIONS in gitpod ([#2569](https://github.com/nf-core/tools/pull/2569)) + # [v2.10 - Nickel Ostrich](https://github.com/nf-core/tools/releases/tag/2.10) + [2023-09-25] ### Template diff --git a/Dockerfile b/Dockerfile index b148c4b544..95d544b26f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -31,6 +31,11 @@ ENV NXF_VER ${NXF_VER} RUN curl -s https://get.nextflow.io | bash \ && mv nextflow /usr/local/bin \ && chmod a+rx /usr/local/bin/nextflow +# Install nf-test +RUN curl -fsSL https://code.askimed.com/install/nf-test | bash \ + && mv nf-test /usr/local/bin \ + && chmod a+rx /usr/local/bin/nf-test + # Add the nf-core source files to the image COPY . /usr/src/nf_core WORKDIR /usr/src/nf_core diff --git a/README.md b/README.md index c9bcd25398..ec58d668bd 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,6 @@ A python package with helper tools for the nf-core community. - [`modules remove` - Remove a module from a pipeline](#remove-a-module-from-a-pipeline) - [`modules patch` - Create a patch file for a module](#create-a-patch-file-for-a-module) - [`modules create` - Create a module from the template](#create-a-new-module) - - [`modules create-test-yml` - Create the `test.yml` file for a module](#create-a-module-test-config-file) - [`modules lint` - Check a module against nf-core guidelines](#check-a-module-against-nf-core-guidelines) - [`modules test` - Run the tests for a module](#run-the-tests-for-a-module-using-pytest) - [`modules bump-versions` - Bump software versions of modules](#bump-bioconda-and-container-versions-of-modules-in) @@ -53,13 +52,12 @@ A python package with helper tools for the nf-core community. - [`subworkflows update` - Update subworkflows in a pipeline](#update-subworkflows-in-a-pipeline) - [`subworkflows remove` - Remove a subworkflow from a pipeline](#remove-a-subworkflow-from-a-pipeline) - [`subworkflows create` - Create a subworkflow from the template](#create-a-new-subworkflow) - - [`subworkflows create-test-yml` - Create the `test.yml` file for a subworkflow](#create-a-subworkflow-test-config-file) - [`subworkflows lint` - Check a subworkflow against nf-core guidelines](#check-a-subworkflow-against-nf-core-guidelines) - [`subworkflows test` - Run the tests for a subworkflow](#run-the-tests-for-a-subworkflow-using-pytest) - [Citation](#citation) The nf-core tools package is written in Python and can be imported and used within other packages. -For documentation of the internal Python functions, please refer to the [Tools Python API docs](https://nf-co.re/tools-docs/). +For documentation of the internal Python functions, please refer to the [Tools Python API docs](https://nf-co.re/tools/docs/). ## Installation @@ -188,7 +186,7 @@ for wf in wfs.remote_workflows: print(wf.full_name) ``` -Please see [https://nf-co.re/tools-docs/](https://nf-co.re/tools-docs/) for the function documentation. +Please see [https://nf-co.re/tools/docs/](https://nf-co.re/tools/docs/) for the function documentation. 
### Automatic version check @@ -227,14 +225,12 @@ Auto-completion for the `nf-core` command is available for bash, zsh and fish. T After a restart of the shell session you should have auto-completion for the `nf-core` command and all its sub-commands and options. -:::note -The added line will run the command `nf-core` (which will also slow down startup time of your shell). You should therefore either have the nf-core/tools installed globally. -You can also wrap it inside `if type nf-core > /dev/null; then ` \ `fi` for bash and zsh or `if command -v nf-core &> /dev/null eval (env _NF_CORE_COMPLETE=fish_source nf-core) end` for fish. You need to then source the config in your environment for the completions to be activated. -::: +> [!NOTE] +> The added line will run the command `nf-core` (which will also slow down the startup time of your shell). You should therefore either have nf-core/tools installed globally, or wrap the line inside `if type nf-core > /dev/null; then ` \ `fi` for bash and zsh, or inside `if command -v nf-core &> /dev/null eval (env _NF_CORE_COMPLETE=fish_source nf-core) end` for fish. +> You then need to source the config in your environment for the completions to be activated. -:::info -If you see the error `command not found compdef` , be sure that your config file contains the line `autoload -Uz compinit && compinit` before the eval line. -::: +> [!TIP] +> If you see the error `command not found compdef`, be sure that your config file contains the line `autoload -Uz compinit && compinit` before the eval line. ## Listing pipelines @@ -365,18 +361,16 @@ You can run the pipeline by simply providing the directory path for the `workflo nextflow run /path/to/download/nf-core-rnaseq-dev/workflow/ --input mydata.csv --outdir results # usual parameters here ``` -:::note -If you downloaded Singularity container images, you will need to use `-profile singularity` or have it enabled in your config file. -::: +> [!NOTE] +> If you downloaded Singularity container images, you will need to use `-profile singularity` or have it enabled in your config file. ### Downloaded nf-core configs The pipeline files are automatically updated (`params.custom_config_base` is set to `../configs`), so that the local copy of institutional configs are available when running the pipeline. So using `-profile ` should work if available within [nf-core/configs](https://github.com/nf-core/configs). -:::warning -This option is not available when downloading a pipeline for use with [Nextflow Tower](#adapting-downloads-to-nextflow-tower) because the application manages all configurations separately. -::: +> [!WARNING] +> This option is not available when downloading a pipeline for use with [Nextflow Tower](#adapting-downloads-to-nextflow-tower) because the application manages all configurations separately. ### Downloading Apptainer containers @@ -434,18 +428,16 @@ If the download speeds are much slower than your internet connection is capable Subsequently, the `*.git` folder can be moved to it's final destination and linked with a pipeline in _Tower_ using the `file:/` prefix. -:::tip -Also without access to Tower, pipelines downloaded with the `--tower` flag can be run: `nextflow run -r 2.5 file:/path/to/pipelinedownload.git`. Downloads in this format allow you to include multiple revisions of a pipeline in a single file, but require that the revision (e.g. `-r 2.5`) is always explicitly specified. 
-::: +> [!TIP] +> Even without access to Tower, pipelines downloaded with the `--tower` flag can be run if the _absolute_ path is specified: `nextflow run -r 2.5 file:/path/to/pipelinedownload.git`. Downloads in this format allow you to include multiple revisions of a pipeline in a single file, but require that the revision (e.g. `-r 2.5`) is always explicitly specified. ## Pipeline software licences Sometimes it's useful to see the software licences of the tools used in a pipeline. You can use the `licences` subcommand to fetch and print the software licence from each conda / PyPI package used in an nf-core pipeline. -:::warning -This command does not currently work for newer DSL2 pipelines. This will hopefully be addressed [soon](https://github.com/nf-core/tools/issues/1155). -::: +> [!WARNING] +> This command does not currently work for newer DSL2 pipelines. This will hopefully be addressed [soon](https://github.com/nf-core/tools/issues/1155). - -![`nf-core modules create-test-yml fastqc --no-prompts --force`](docs/images/nf-core-modules-create-test.svg) - ### Check a module against nf-core guidelines Run the `nf-core modules lint` command to check modules in the current working directory (pipeline or nf-core/modules clone) against nf-core guidelines. @@ -974,15 +951,12 @@ before_command: sed 's/1.13a/1.10/g' modules/multiqc/main.nf > modules/multiqc/m ![`nf-core modules lint multiqc`](docs/images/nf-core-modules-lint.svg) -### Run the tests for a module using pytest +### Create a test for a module -To run unit tests of a module that you have installed or the test created by the command [`nf-core modules create-test-yml`](#create-a-module-test-config-file), you can use `nf-core modules test` command. This command runs the tests specified in `modules/tests/software///test.yml` file using [pytest](https://pytest-workflow.readthedocs.io/en/stable/). +All modules on [nf-core/modules](https://github.com/nf-core/modules) have a strict requirement of being unit tested using minimal test data. We use [nf-test](https://code.askimed.com/nf-test/) as our testing framework. +Each module already comes with a template for the test file in `test/main.nf.test`. Replace the placeholder code in that file with your specific input, output and process. In order to generate the corresponding snapshot after writing your test, you can use the `nf-core modules test` command. This command will run `nf-test test` twice, to also check for snapshot stability, i.e. that the same snapshot is generated on multiple runs. -:::info -This command uses the pytest argument `--git-aware` to avoid copying the whole `.git` directory and files ignored by `git`. This means that it will only include files listed by `git ls-files`. Remember to **commit your changes** after adding a new module to add the new files to your git index. -::: - -You can specify the module name in the form TOOL/SUBTOOL in command line or provide it later by prompts. +You can specify the module name in the form TOOL/SUBTOOL in the command or provide it later through interactive prompts. -![`nf-core modules test samtools/view --no-prompts`](docs/images/nf-core-modules-test.svg) +![`nf-core modules test fastqc --no-prompts --force`](docs/images/nf-core-modules-test.svg) + +If you changed something in the test and want to update the snapshot, run + +```bash +nf-core modules test --update +``` + +If you want to run the test only once without checking for snapshot stability, you can use the `--once` flag. 
### Bump bioconda and container versions of modules in @@ -1221,19 +1203,29 @@ fake_command: nf-core subworkflows create bam_stats_samtools --author @nf-core-b ![`nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force`](docs/images/nf-core-subworkflows-create.svg) -### Create a subworkflow test config file +### Create a test for a subworkflow + +All subworkflows on [nf-core/modules](https://github.com/nf-core/modules) have a strict requirement of being unit tested using minimal test data. We use [nf-test](https://code.askimed.com/nf-test/) as our testing framework. +Each subworkflow already comes with a template for the test file in `test/main.nf.test`. Replace the placeholder code in that file with your specific input, output and process. In order to generate the corresponding snapshot after writing your test, you can use the `nf-core subworkflows test` command. This command will run `nf-test test` twice, to also check for snapshot stability, i.e. that the same snapshot is generated on multiple runs. -All subworkflows on [nf-core/modules](https://github.com/nf-core/modules) have a strict requirement of being unit tested using minimal test data. -To help developers build new subworkflows, the `nf-core subworkflows create-test-yml` command automates the creation of the yaml file required to document the output file `md5sum` and other information generated by the testing. -After you have written a minimal Nextflow script to test your subworkflow in `/tests/subworkflow//main.nf`, this command will run the tests for you and create the `/tests/subworkflow///test.yml` file. +You can specify the subworkflow name in the command or provide it later through interactive prompts. -![`nf-core subworkflows create-test-yml bam_stats_samtools --no-prompts --force`](docs/images/nf-core-subworkflows-create-test.svg) +![`nf-core subworkflows test bam_rseqc --no-prompts`](docs/images/nf-core-subworkflows-test.svg) + +If you changed something in the test and want to update the snapshot, run + +```bash
nf-core subworkflows test --update
``` + +If you want to run the test only once without checking for snapshot stability, you can use the `--once` flag. ### Check a subworkflow against nf-core guidelines @@ -1249,25 +1241,6 @@ extra_env: ![`nf-core subworkflows lint bam_stats_samtools`](docs/images/nf-core-subworkflows-lint.svg) -### Run the tests for a subworkflow using pytest - -To run unit tests of a subworkflow that you have installed or the test created by the command [`nf-core subworkflow create-test-yml`](#create-a-subworkflow-test-config-file), you can use `nf-core subworkflows test` command. This command runs the tests specified in `tests/subworkflows//test.yml` file using [pytest](https://pytest-workflow.readthedocs.io/en/stable/). - -:::info -This command uses the pytest argument `--git-aware` to avoid copying the whole `.git` directory and files ignored by `git`. This means that it will only include files listed by `git ls-files`. Remember to **commit your changes** after adding a new subworkflow to add the new files to your git index. -::: - -You can specify the subworkflow name in the form TOOL/SUBTOOL in command line or provide it later by prompts. 
- - - -![`nf-core subworkflows test bam_rseqc --no-prompts`](docs/images/nf-core-subworkflows-test.svg) - ## Citation If you use `nf-core tools` in your work, please cite the `nf-core` publication as follows: diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index 4d8ae661d5..27eaf9bcb3 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -14,6 +14,7 @@ # import os import sys +from typing import Dict sys.path.insert(0, os.path.abspath("../../../nf_core")) import nf_core @@ -58,7 +59,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language: str = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -113,7 +114,7 @@ # -- Options for LaTeX output ------------------------------------------------ -latex_elements = { +latex_elements: Dict[str, str] = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000000..c48aa5884b --- /dev/null +++ b/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +warn_unused_configs = True +ignore_missing_imports = true diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 72762ff026..8cfacf7399 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -44,7 +44,7 @@ }, { "name": "Developing new modules", - "commands": ["create", "create-test-yml", "lint", "bump-versions", "test"], + "commands": ["create", "lint", "bump-versions", "test"], }, ], "nf-core subworkflows": [ @@ -54,7 +54,7 @@ }, { "name": "Developing new subworkflows", - "commands": ["create", "create-test-yml"], + "commands": ["create", "test", "lint"], }, ], } @@ -151,7 +151,7 @@ def nf_core_cli(ctx, verbose, hide_progress, log_file): # nf-core list -@nf_core_cli.command() +@nf_core_cli.command("list") @click.argument("keywords", required=False, nargs=-1, metavar="") @click.option( "-s", @@ -162,7 +162,7 @@ def nf_core_cli(ctx, verbose, hide_progress, log_file): ) @click.option("--json", is_flag=True, default=False, help="Print full output as JSON") @click.option("--show-archived", is_flag=True, default=False, help="Print archived workflows") -def list(keywords, sort, json, show_archived): +def list_pipelines(keywords, sort, json, show_archived): """ List available nf-core pipelines with local info. @@ -448,7 +448,7 @@ def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, ma Runs a large number of automated tests to ensure that the supplied pipeline meets the nf-core guidelines. Documentation of all lint tests can be found - on the nf-core website: [link=https://nf-co.re/tools-docs/]https://nf-co.re/tools-docs/[/] + on the nf-core website: [link=https://nf-co.re/tools/docs/]https://nf-co.re/tools/docs/[/] You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. See the documentation for details. @@ -553,9 +553,9 @@ def subworkflows(ctx, git_remote, branch, no_pull): # nf-core modules list subcommands -@modules.group() +@modules.group("list") @click.pass_context -def list(ctx): +def modules_list(ctx): """ List modules in a local pipeline or remote repository. 
""" @@ -563,11 +563,11 @@ def list(ctx): # nf-core modules list remote -@list.command() +@modules_list.command("remote") @click.pass_context @click.argument("keywords", required=False, nargs=-1, metavar="") @click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") -def remote(ctx, keywords, json): +def modules_list_remote(ctx, keywords, json): """ List modules in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. """ @@ -588,7 +588,7 @@ def remote(ctx, keywords, json): # nf-core modules list local -@list.command() +@modules_list.command("local") @click.pass_context @click.argument("keywords", required=False, nargs=-1, metavar="") @click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") @@ -599,7 +599,7 @@ def remote(ctx, keywords, json): default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin """ List modules installed locally in a pipeline """ @@ -620,7 +620,7 @@ def local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin # nf-core modules install -@modules.command() +@modules.command("install") @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") @click.option( @@ -633,7 +633,7 @@ def local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin @click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the module") @click.option("-f", "--force", is_flag=True, default=False, help="Force reinstallation of module if it already exists") @click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") -def install(ctx, tool, dir, prompt, force, sha): +def modules_install(ctx, tool, dir, prompt, force, sha): """ Install DSL2 modules within a pipeline. @@ -652,7 +652,7 @@ def install(ctx, tool, dir, prompt, force, sha): ctx.obj["modules_repo_no_pull"], ) exit_status = module_install.install(tool) - if not exit_status and all: + if not exit_status: sys.exit(1) except (UserWarning, LookupError) as e: log.error(e) @@ -660,12 +660,13 @@ def install(ctx, tool, dir, prompt, force, sha): # nf-core modules update -@modules.command() +@modules.command("update") @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", @@ -673,7 +674,9 @@ def install(ctx, tool, dir, prompt, force, sha): @click.option("-f", "--force", is_flag=True, default=False, help="Force update of module") @click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the module") @click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") -@click.option("-a", "--all", is_flag=True, default=False, help="Update all modules installed in pipeline") +@click.option( + "-a", "--all", "install_all", is_flag=True, default=False, help="Update all modules installed in pipeline" +) @click.option( "-x/-y", "--preview/--no-preview", @@ -696,7 +699,7 @@ def install(ctx, tool, dir, prompt, force, sha): default=False, help="Automatically update all linked modules and subworkflows without asking for confirmation", ) -def update(ctx, tool, dir, force, prompt, sha, all, preview, save_diff, update_deps): +def modules_update(ctx, tool, directory, force, prompt, sha, install_all, preview, save_diff, update_deps): """ Update DSL2 modules within a pipeline. @@ -706,11 +709,11 @@ def update(ctx, tool, dir, force, prompt, sha, all, preview, save_diff, update_d try: module_install = ModuleUpdate( - dir, + directory, force, prompt, sha, - all, + install_all, preview, save_diff, update_deps, @@ -719,7 +722,7 @@ def update(ctx, tool, dir, force, prompt, sha, all, preview, save_diff, update_d ctx.obj["modules_repo_no_pull"], ) exit_status = module_install.update(tool) - if not exit_status and all: + if not exit_status and install_all: sys.exit(1) except (UserWarning, LookupError) as e: log.error(e) @@ -764,7 +767,7 @@ def patch(ctx, tool, dir, remove): # nf-core modules remove -@modules.command() +@modules.command("remove") @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") @click.option( @@ -774,7 +777,7 @@ def patch(ctx, tool, dir, remove): default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -def remove(ctx, dir, tool): +def modules_remove(ctx, dir, tool): """ Remove a module from a pipeline. """ @@ -812,8 +815,20 @@ def remove(ctx, dir, tool): default=False, help="Create a module from the template without TODOs or examples", ) +@click.option("--migrate-pytest", is_flag=True, default=False, help="Migrate a module with pytest tests to nf-test") def create_module( - ctx, tool, dir, author, label, meta, no_meta, force, conda_name, conda_package_version, empty_template + ctx, + tool, + dir, + author, + label, + meta, + no_meta, + force, + conda_name, + conda_package_version, + empty_template, + migrate_pytest, ): """ Create a new DSL2 module from the nf-core template. 
@@ -838,7 +853,7 @@ def create_module( # Run function try: module_create = ModuleCreate( - dir, tool, author, label, has_meta, force, conda_name, conda_package_version, empty_template + dir, tool, author, label, has_meta, force, conda_name, conda_package_version, empty_template, migrate_pytest ) module_create.create() except UserWarning as e: @@ -849,41 +864,42 @@ def create_module( sys.exit(1) -# nf-core modules create-test-yml -@modules.command("create-test-yml") +# nf-core modules test +@modules.command("test") @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") -@click.option("-t", "--run-tests", is_flag=True, default=False, help="Run the test workflows") -@click.option("-o", "--output", type=str, help="Path for output YAML file") -@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output YAML file if it already exists") +@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") @click.option("-p", "--no-prompts", is_flag=True, default=False, help="Use defaults without prompting") -def create_test_yml(ctx, tool, run_tests, output, force, no_prompts): +@click.option("-u", "--update", is_flag=True, default=False, help="Update existing snapshots") +@click.option("-o", "--once", is_flag=True, default=False, help="Run tests only once. Don't check snapshot stability") +def test_module(ctx, tool, dir, no_prompts, update, once): """ - Auto-generate a test.yml file for a new module. + Run nf-test for a module. - Given the name of a module, runs the Nextflow test command and automatically generate - the required `test.yml` file based on the output files. + Given the name of a module, runs the nf-test command to test the module and generate snapshots. """ - from nf_core.modules import ModulesTestYmlBuilder + from nf_core.components.components_test import ComponentsTest try: - meta_builder = ModulesTestYmlBuilder( - module_name=tool, - run_tests=run_tests, - test_yml_output_path=output, - force_overwrite=force, + module_tester = ComponentsTest( + component_type="modules", + component_name=tool, + directory=dir, no_prompts=no_prompts, + update=update, + once=once, remote_url=ctx.obj["modules_repo_url"], branch=ctx.obj["modules_repo_branch"], + verbose=ctx.obj["verbose"], ) - meta_builder.run() + module_tester.run() except (UserWarning, LookupError) as e: log.critical(e) sys.exit(1) # nf-core modules lint -@modules.command() +@modules.command("lint") @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") @click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") @@ -908,9 +924,7 @@ def create_test_yml(ctx, tool, run_tests, output, force, no_prompts): show_default=True, ) @click.option("--fix-version", is_flag=True, help="Fix the module version if a newer version is available") -def lint( - ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version -): # pylint: disable=redefined-outer-name +def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): """ Lint one or more modules in a directory. @@ -955,7 +969,7 @@ def lint( # nf-core modules info -@modules.command() +@modules.command("info") @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") @click.option( @@ -965,7 +979,7 @@ def lint( default=".", help=r"Pipeline directory. 
[dim]\[default: Current working directory][/]", ) -def info(ctx, tool, dir): +def modules_info(ctx, tool, dir): """ Show developer usage information about a given module. @@ -1024,28 +1038,6 @@ def bump_versions(ctx, tool, dir, all, show_all): sys.exit(1) -# nf-core modules test -@modules.command("test") -@click.pass_context -@click.argument("tool", type=str, required=False, metavar=" or ") -@click.option("-p", "--no-prompts", is_flag=True, default=False, help="Use defaults without prompting") -@click.option("-a", "--pytest_args", type=str, required=False, multiple=True, help="Additional pytest arguments") -def test_module(ctx, tool, no_prompts, pytest_args): - """ - Run module tests locally. - - Given the name of a module, runs the Nextflow test command. - """ - from nf_core.modules import ModulesTest - - try: - meta_builder = ModulesTest(tool, no_prompts, pytest_args) - meta_builder.run() - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) - - # nf-core subworkflows create @subworkflows.command("create") @click.pass_context @@ -1053,7 +1045,8 @@ def test_module(ctx, tool, no_prompts, pytest_args): @click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") @click.option("-a", "--author", type=str, metavar="", help="Module author's GitHub username prefixed with '@'") @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite any files if they already exist") -def create_subworkflow(ctx, subworkflow, dir, author, force): +@click.option("--migrate-pytest", is_flag=True, default=False, help="Migrate a module with pytest tests to nf-test") +def create_subworkflow(ctx, subworkflow, dir, author, force, migrate_pytest): """ Create a new subworkflow from the nf-core template. @@ -1067,7 +1060,7 @@ def create_subworkflow(ctx, subworkflow, dir, author, force): # Run function try: - subworkflow_create = SubworkflowCreate(dir, subworkflow, author, force) + subworkflow_create = SubworkflowCreate(dir, subworkflow, author, force, migrate_pytest) subworkflow_create.create() except UserWarning as e: log.critical(e) @@ -1077,43 +1070,44 @@ def create_subworkflow(ctx, subworkflow, dir, author, force): sys.exit(1) -# nf-core subworkflows create-test-yml -@subworkflows.command("create-test-yml") +# nf-core subworkflows test +@subworkflows.command("test") @click.pass_context @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") -@click.option("-t", "--run-tests", is_flag=True, default=False, help="Run the test workflows") -@click.option("-o", "--output", type=str, help="Path for output YAML file") -@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output YAML file if it already exists") +@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") @click.option("-p", "--no-prompts", is_flag=True, default=False, help="Use defaults without prompting") -def create_test_yml(ctx, subworkflow, run_tests, output, force, no_prompts): +@click.option("-u", "--update", is_flag=True, default=False, help="Update existing snapshots") +@click.option("-o", "--once", is_flag=True, default=False, help="Run tests only once. Don't check snapshot stability") +def test_subworkflow(ctx, subworkflow, dir, no_prompts, update, once): """ - Auto-generate a test.yml file for a new subworkflow. + Run nf-test for a subworkflow. - Given the name of a module, runs the Nextflow test command and automatically generate - the required `test.yml` file based on the output files. 
+ Given the name of a subworkflow, runs the nf-test command to test the subworkflow and generate snapshots. """ - from nf_core.subworkflows import SubworkflowTestYmlBuilder + from nf_core.components.components_test import ComponentsTest try: - meta_builder = SubworkflowTestYmlBuilder( - subworkflow=subworkflow, - run_tests=run_tests, - test_yml_output_path=output, - force_overwrite=force, + sw_tester = ComponentsTest( + component_type="subworkflows", + component_name=subworkflow, + directory=dir, no_prompts=no_prompts, + update=update, + once=once, remote_url=ctx.obj["modules_repo_url"], branch=ctx.obj["modules_repo_branch"], + verbose=ctx.obj["verbose"], ) - meta_builder.run() + sw_tester.run() except (UserWarning, LookupError) as e: log.critical(e) sys.exit(1) # nf-core subworkflows list subcommands -@subworkflows.group() +@subworkflows.group("list") @click.pass_context -def list(ctx): +def subworkflows_list(ctx): """ List subworkflows in a local pipeline or remote repository. """ @@ -1121,11 +1115,11 @@ def list(ctx): # nf-core subworkflows list remote -@list.command() +@subworkflows_list.command("remote") @click.pass_context @click.argument("keywords", required=False, nargs=-1, metavar="") @click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") -def remote(ctx, keywords, json): +def subworkflows_list_remote(ctx, keywords, json): """ List subworkflows in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. """ @@ -1147,7 +1141,7 @@ def remote(ctx, keywords, json): # nf-core subworkflows list local -@list.command() +@subworkflows_list.command("local") @click.pass_context @click.argument("keywords", required=False, nargs=-1, metavar="") @click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") @@ -1158,7 +1152,7 @@ def remote(ctx, keywords, json): default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin """ List subworkflows installed locally in a pipeline """ @@ -1179,7 +1173,7 @@ def local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin # nf-core subworkflows lint -@subworkflows.command() +@subworkflows.command("lint") @click.pass_context @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") @click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") @@ -1203,9 +1197,7 @@ def local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin help="Sort lint output by subworkflow or test name.", show_default=True, ) -def lint( - ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by -): # pylint: disable=redefined-outer-name +def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by): """ Lint one or more subworkflows in a directory. @@ -1249,7 +1241,7 @@ def lint( # nf-core subworkflows info -@subworkflows.command() +@subworkflows.command("info") @click.pass_context @click.argument("tool", type=str, required=False, metavar="subworkflow name") @click.option( @@ -1259,7 +1251,7 @@ def lint( default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def info(ctx, tool, dir): +def subworkflows_info(ctx, tool, dir): """ Show developer usage information about a given subworkflow. 
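A related detail in the option hunks above: `click.option` accepts an extra bare string that sets the name of the Python parameter, which is how `--all` is bound to `install_all` and `--dir` to `directory` instead of shadowing the `all` builtin. A small sketch of that mechanism (the command and its behaviour are invented for illustration):

```python
import click


@click.command()
# The extra "install_all" string sets the Python parameter name for --all,
# so the builtin all() is not shadowed inside the callback.
@click.option("-a", "--all", "install_all", is_flag=True, default=False)
@click.option("-d", "--dir", "directory", type=click.Path(exists=True), default=".")
def update(install_all, directory):
    click.echo(f"update everything: {install_all} (in {directory})")


if __name__ == "__main__":
    update()
```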
@@ -1287,30 +1279,8 @@ def info(ctx, tool, dir): sys.exit(1) -# nf-core subworkflows test -@subworkflows.command("test") -@click.pass_context -@click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") -@click.option("-p", "--no-prompts", is_flag=True, default=False, help="Use defaults without prompting") -@click.option("-a", "--pytest_args", type=str, required=False, multiple=True, help="Additional pytest arguments") -def test_subworkflow(ctx, subworkflow, no_prompts, pytest_args): - """ - Run subworkflow tests locally. - - Given the name of a subworkflow, runs the Nextflow test command. - """ - from nf_core.subworkflows import SubworkflowsTest - - try: - meta_builder = SubworkflowsTest(subworkflow, no_prompts, pytest_args) - meta_builder.run() - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) - - # nf-core subworkflows install -@subworkflows.command() +@subworkflows.command("install") @click.pass_context @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") @click.option( @@ -1325,7 +1295,7 @@ def test_subworkflow(ctx, subworkflow, no_prompts, pytest_args): "-f", "--force", is_flag=True, default=False, help="Force reinstallation of subworkflow if it already exists" ) @click.option("-s", "--sha", type=str, metavar="", help="Install subworkflow at commit SHA") -def install(ctx, subworkflow, dir, prompt, force, sha): +def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): """ Install DSL2 subworkflow within a pipeline. @@ -1344,16 +1314,15 @@ def install(ctx, subworkflow, dir, prompt, force, sha): ctx.obj["modules_repo_no_pull"], ) exit_status = subworkflow_install.install(subworkflow) - if not exit_status and all: + if not exit_status: sys.exit(1) except (UserWarning, LookupError) as e: log.error(e) - raise sys.exit(1) # nf-core subworkflows remove -@subworkflows.command() +@subworkflows.command("remove") @click.pass_context @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") @click.option( @@ -1363,7 +1332,7 @@ def install(ctx, subworkflow, dir, prompt, force, sha): default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -def remove(ctx, dir, subworkflow): +def subworkflows_remove(ctx, dir, subworkflow): """ Remove a subworkflow from a pipeline. 
""" @@ -1383,7 +1352,7 @@ def remove(ctx, dir, subworkflow): # nf-core subworkflows update -@subworkflows.command() +@subworkflows.command("update") @click.pass_context @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") @click.option( @@ -1396,7 +1365,9 @@ def remove(ctx, dir, subworkflow): @click.option("-f", "--force", is_flag=True, default=False, help="Force update of subworkflow") @click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the subworkflow") @click.option("-s", "--sha", type=str, metavar="", help="Install subworkflow at commit SHA") -@click.option("-a", "--all", is_flag=True, default=False, help="Update all subworkflow installed in pipeline") +@click.option( + "-a", "--all", "install_all", is_flag=True, default=False, help="Update all subworkflow installed in pipeline" +) @click.option( "-x/-y", "--preview/--no-preview", @@ -1419,7 +1390,7 @@ def remove(ctx, dir, subworkflow): default=False, help="Automatically update all linked modules and subworkflows without asking for confirmation", ) -def update(ctx, subworkflow, dir, force, prompt, sha, all, preview, save_diff, update_deps): +def subworkflows_update(ctx, subworkflow, dir, force, prompt, sha, install_all, preview, save_diff, update_deps): """ Update DSL2 subworkflow within a pipeline. @@ -1433,7 +1404,7 @@ def update(ctx, subworkflow, dir, force, prompt, sha, all, preview, save_diff, u force, prompt, sha, - all, + install_all, preview, save_diff, update_deps, @@ -1442,7 +1413,7 @@ def update(ctx, subworkflow, dir, force, prompt, sha, all, preview, save_diff, u ctx.obj["modules_repo_no_pull"], ) exit_status = subworkflow_install.update(subworkflow) - if not exit_status and all: + if not exit_status and install_all: sys.exit(1) except (UserWarning, LookupError) as e: log.error(e) @@ -1533,11 +1504,11 @@ def build(dir, no_prompts, web_only, url): # nf-core schema lint -@schema.command() +@schema.command("lint") @click.argument( "schema_path", type=click.Path(exists=True), default="nextflow_schema.json", metavar="" ) -def lint(schema_path): +def schema_lint(schema_path): """ Check that a given pipeline schema is valid. diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index ada7f0b994..40c8f8984f 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -5,16 +5,18 @@ import logging import re from pathlib import Path +from typing import List, Tuple, Union import rich.console import nf_core.utils +from nf_core.utils import Pipeline log = logging.getLogger(__name__) stderr = rich.console.Console(stderr=True, force_terminal=nf_core.utils.rich_force_colors()) -def bump_pipeline_version(pipeline_obj, new_version): +def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: """Bumps a pipeline version number. 
Args: @@ -30,7 +32,8 @@ def bump_pipeline_version(pipeline_obj, new_version): new_version = new_version[1:] if not current_version: raise UserWarning("Could not find config variable 'manifest.version'") - + if current_version == new_version: + raise UserWarning(f"Current version is already: {current_version}") log.info(f"Changing version number from '{current_version}' to '{new_version}'") # nextflow.config - workflow manifest version @@ -39,24 +42,54 @@ def bump_pipeline_version(pipeline_obj, new_version): pipeline_obj, [ ( - rf"version\s*=\s*[\'\"]?{re.escape(current_version)}[\'\"]?", - f"version = '{new_version}'", + rf"(version\s*=\s*['\"]){re.escape(current_version)}(['\"])", + rf"\g<1>{new_version}\g<2>", ) ], ) # multiqc_config.yaml multiqc_new_version = "dev" if "dev" in new_version else new_version + multiqc_current_version = "dev" if "dev" in current_version else current_version + if multiqc_current_version != "dev" and multiqc_new_version != "dev": + update_file_version( + Path("assets", "multiqc_config.yml"), + pipeline_obj, + [ + ( + f"/releases/tag/{current_version}", + f"/releases/tag/{new_version}", + ) + ], + ) + if multiqc_current_version != "dev" and multiqc_new_version == "dev": + update_file_version( + Path("assets", "multiqc_config.yml"), + pipeline_obj, + [ + ( + f"/releases/tag/{current_version}", + f"/tree/dev", + ) + ], + ) + if multiqc_current_version == "dev" and multiqc_new_version != "dev": + update_file_version( + Path("assets", "multiqc_config.yml"), + pipeline_obj, + [ + ( + f"/tree/dev", + f"/releases/tag/{multiqc_new_version}", + ) + ], + ) update_file_version( Path("assets", "multiqc_config.yml"), pipeline_obj, [ ( - "/dev", - f"/{multiqc_new_version}", - ), - ( - rf"{re.escape(current_version)}", - f"{multiqc_new_version}", + f"/{multiqc_current_version}/", + f"/{multiqc_new_version}/", ), ], ) @@ -76,7 +109,7 @@ def bump_pipeline_version(pipeline_obj, new_version): ) -def bump_nextflow_version(pipeline_obj, new_version): +def bump_nextflow_version(pipeline_obj: Pipeline, new_version: str) -> None: """Bumps the required Nextflow version number of a pipeline. Args: @@ -99,8 +132,8 @@ def bump_nextflow_version(pipeline_obj, new_version): pipeline_obj, [ ( - rf"nextflowVersion\s*=\s*[\'\"]?!>={re.escape(current_version)}[\'\"]?", - f"nextflowVersion = '!>={new_version}'", + rf"(nextflowVersion\s*=\s*[\'\"]?!>=\s*)({re.escape(current_version)})([\'\"]?)", + rf"\g<1>{new_version}\g<3>", ) ], ) @@ -114,7 +147,7 @@ def bump_nextflow_version(pipeline_obj, new_version): # example: # NXF_VER: # - "20.04.0" - rf"- [\"]{re.escape(current_version)}[\"]", + rf"- \"{re.escape(current_version)}\"", f'- "{new_version}"', ) ], @@ -138,7 +171,7 @@ def bump_nextflow_version(pipeline_obj, new_version): ) -def update_file_version(filename, pipeline_obj, patterns): +def update_file_version(filename: Union[str, Path], pipeline_obj: Pipeline, patterns: List[Tuple[str, str]]) -> None: """Updates the version number in a requested file. Args: @@ -146,7 +179,6 @@ def update_file_version(filename, pipeline_obj, patterns): pipeline_obj (nf_core.lint.PipelineLint): A PipelineLint object that holds information about the pipeline contents and build files. pattern (str): Regex pattern to apply. - newstr (str): The replaced string. Raises: ValueError, if the version number cannot be found. 
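The rewritten patterns above deliberately capture the surrounding text instead of re-emitting it, so a bump preserves whatever quoting style the pipeline used. A self-contained sketch of the same `re.sub` back-reference trick (the version strings here are made up):

```python
import re

current_version = "1.0.0"
new_version = "1.1.0dev"

# Group 1 keeps everything up to and including the opening quote; group 2
# keeps the closing quote. Only the version number between them is replaced.
pattern = rf"(version\s*=\s*['\"]){re.escape(current_version)}(['\"])"
replacement = rf"\g<1>{new_version}\g<2>"

print(re.sub(pattern, replacement, "version = '1.0.0'"))  # version = '1.1.0dev'
print(re.sub(pattern, replacement, 'version = "1.0.0"'))  # version = "1.1.0dev"
```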
@@ -186,7 +218,7 @@ def update_file_version(filename, pipeline_obj, patterns): if found_match: content = "\n".join(newcontent) + "\n" else: - log.error(f"Could not find version number in {filename}: '{pattern}'") + log.error(f"Could not find version number in {filename}: `{pattern}`") log.info(f"Updated version in '{filename}'") for replacement in replacements: diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 775b205cf5..44924a2704 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -3,8 +3,7 @@ import os import shutil from pathlib import Path - -import yaml +from typing import Dict, List, Optional, Union import nf_core.utils from nf_core.modules.modules_json import ModulesJson @@ -20,7 +19,16 @@ class ComponentCommand: Base class for the 'nf-core modules' and 'nf-core subworkflows' commands """ - def __init__(self, component_type, dir, remote_url=None, branch=None, no_pull=False, hide_progress=False): + def __init__( + self, + component_type: str, + dir: str, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + hide_progress: bool = False, + no_prompts: bool = False, + ) -> None: """ Initialise the ComponentClass object """ @@ -28,9 +36,10 @@ def __init__(self, component_type, dir, remote_url=None, branch=None, no_pull=Fa self.dir = dir self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) self.hide_progress = hide_progress + self.no_prompts = no_prompts self._configure_repo_and_paths() - def _configure_repo_and_paths(self, nf_dir_req=True): + def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ Determine the repo type and set some default paths. If this is a modules repo, determine the org_path too. @@ -38,9 +47,10 @@ def _configure_repo_and_paths(self, nf_dir_req=True): Args: nf_dir_req (bool, optional): Whether this command requires being run in the nf-core modules repo or a nf-core pipeline repository. Defaults to True. 
""" + try: if self.dir: - self.dir, self.repo_type, self.org = get_repo_info(self.dir, use_prompt=nf_dir_req) + self.dir, self.repo_type, self.org = get_repo_info(self.dir, use_prompt=not self.no_prompts) else: self.repo_type = None self.org = "" @@ -54,7 +64,7 @@ def _configure_repo_and_paths(self, nf_dir_req=True): self.default_subworkflows_path = Path("subworkflows", self.org) self.default_subworkflows_tests_path = Path("tests", "subworkflows", self.org) - def get_local_components(self): + def get_local_components(self) -> List[str]: """ Get the local modules/subworkflows in a pipeline """ @@ -63,7 +73,7 @@ def get_local_components(self): str(path.relative_to(local_component_dir)) for path in local_component_dir.iterdir() if path.suffix == ".nf" ] - def get_components_clone_modules(self): + def get_components_clone_modules(self) -> List[str]: """ Get the modules/subworkflows repository available in a clone of nf-core/modules """ @@ -77,7 +87,7 @@ def get_components_clone_modules(self): if "main.nf" in files ] - def has_valid_directory(self): + def has_valid_directory(self) -> bool: """Check that we were given a pipeline or clone of nf-core/modules""" if self.repo_type == "modules": return True @@ -92,14 +102,14 @@ def has_valid_directory(self): log.warning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.dir}'") return True - def has_modules_file(self): + def has_modules_file(self) -> None: """Checks whether a module.json file has been created and creates one if it is missing""" modules_json_path = os.path.join(self.dir, "modules.json") if not os.path.exists(modules_json_path): log.info("Creating missing 'module.json' file.") ModulesJson(self.dir).create() - def clear_component_dir(self, component_name, component_dir): + def clear_component_dir(self, component_name: str, component_dir: str) -> bool: """ Removes all files in the module/subworkflow directory @@ -127,7 +137,7 @@ def clear_component_dir(self, component_name, component_dir): log.error(f"Could not remove {self.component_type[:-1]} {component_name}: {e}") return False - def components_from_repo(self, install_dir): + def components_from_repo(self, install_dir: str) -> List[str]: """ Gets the modules/subworkflows installed from a certain repository @@ -145,7 +155,9 @@ def components_from_repo(self, install_dir): str(Path(dir_path).relative_to(repo_dir)) for dir_path, _, files in os.walk(repo_dir) if "main.nf" in files ] - def install_component_files(self, component_name, component_version, modules_repo, install_dir): + def install_component_files( + self, component_name: str, component_version: str, modules_repo: ModulesRepo, install_dir: str + ) -> bool: """ Installs a module/subworkflow into the given directory @@ -160,7 +172,7 @@ def install_component_files(self, component_name, component_version, modules_rep """ return modules_repo.install_component(component_name, install_dir, component_version, self.component_type) - def load_lint_config(self): + def load_lint_config(self) -> None: """Parse a pipeline lint config file. 
Load the '.nf-core.yml' config file and extract @@ -171,7 +183,7 @@ def load_lint_config(self): _, tools_config = nf_core.utils.load_tools_config(self.dir) self.lint_config = tools_config.get("lint", {}) - def check_modules_structure(self): + def check_modules_structure(self) -> None: """ Check that the structure of the modules directory in a pipeline is the correct one: modules/nf-core/TOOL/SUBTOOL @@ -180,7 +192,7 @@ def check_modules_structure(self): modules/nf-core/modules/TOOL/SUBTOOL """ if self.repo_type == "pipeline": - wrong_location_modules = [] + wrong_location_modules: List[Path] = [] for directory, _, files in os.walk(Path(self.dir, "modules")): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.dir, "modules")) @@ -201,14 +213,14 @@ def check_modules_structure(self): modules_dir = Path("modules").resolve() correct_dir = Path(modules_dir, self.modules_repo.repo_path, Path(*module.parts[2:])) wrong_dir = Path(modules_dir, module) - shutil.move(wrong_dir, correct_dir) + shutil.move(str(wrong_dir), str(correct_dir)) log.info(f"Moved {wrong_dir} to {correct_dir}.") shutil.rmtree(Path(self.dir, "modules", self.modules_repo.repo_path, "modules")) # Regenerate modules.json file modules_json = ModulesJson(self.dir) modules_json.check_up_to_date() - def check_patch_paths(self, patch_path, module_name): + def check_patch_paths(self, patch_path: Path, module_name: str) -> None: """ Check that paths in patch files are updated to the new modules path """ @@ -239,7 +251,7 @@ def check_patch_paths(self, patch_path, module_name): ][module_name]["patch"] = str(patch_path.relative_to(Path(self.dir).resolve())) modules_json.dump() - def check_if_in_include_stmts(self, component_path): + def check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[str, Union[int, str]]]]: """ Checks for include statements in the main.nf file of the pipeline and a list of line numbers where the component is included Args: @@ -248,7 +260,7 @@ def check_if_in_include_stmts(self, component_path): Returns: (list): A list of dictionaries, with the workflow file and the line number where the component is included """ - include_stmts = {} + include_stmts: Dict[str, List[Dict[str, Union[int, str]]]] = {} if self.repo_type == "pipeline": workflow_files = Path(self.dir, "workflows").glob("*.nf") for workflow_file in workflow_files: diff --git a/nf_core/components/components_test.py b/nf_core/components/components_test.py index 37e275aea8..3294c2878b 100644 --- a/nf_core/components/components_test.py +++ b/nf_core/components/components_test.py @@ -1,89 +1,108 @@ +""" +The ComponentsTest class handles the generation and testing of nf-test snapshots. +""" + + import logging import os -import sys +import re from pathlib import Path -from shutil import which +from typing import List, Optional -import pytest import questionary -import rich -from git import InvalidGitRepositoryError, Repo +from rich import print +from rich.panel import Panel +from rich.prompt import Confirm +from rich.syntax import Syntax +from rich.text import Text -import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand -from nf_core.modules.modules_json import ModulesJson log = logging.getLogger(__name__) -class ComponentsTest(ComponentCommand): +class ComponentsTest(ComponentCommand): # type: ignore[misc] """ - Class to run module and subworkflow pytests. + Class to generate and test nf-test snapshots for modules. ... 
Attributes ---------- + component_type : str + type of component to test (modules or subworkflows) component_name : str name of the tool to run tests for + directory : str + path to modules repository directory no_prompts : bool flag indicating if prompts are used - pytest_args : tuple - additional arguments passed to pytest command + remote_url : str + URL of the remote repository + branch : str + branch of the remote repository + verbose : bool + flag indicating if verbose output should be used + update : bool + flag indicating if the existing snapshot should be updated + once : bool + flag indicating if the test should be run only once Methods ------- run(): Run test steps - _check_inputs(): + check_inputs(): Check inputs. Ask for component_name if not provided and check that the directory exists - _set_profile(): - Set software profile - _run_pytests(self): - Run pytest + generate_snapshot(): + Generate the nf-test snapshot using `nf-test test` command + check_snapshot_stability(): + Run the nf-test twice and check if the snapshot changes """ def __init__( self, - component_type, - component_name=None, - no_prompts=False, - pytest_args="", - remote_url=None, - branch=None, - no_pull=False, + component_type: str, + component_name: Optional[str] = None, + directory: str = ".", + no_prompts: bool = False, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + verbose: bool = False, + update: bool = False, + once: bool = False, ): - super().__init__(component_type=component_type, dir=".", remote_url=remote_url, branch=branch, no_pull=no_pull) + super().__init__(component_type, directory, remote_url, branch, no_prompts=no_prompts) self.component_name = component_name - self.no_prompts = no_prompts - self.pytest_args = pytest_args - - def run(self): - """Run test steps""" - if not self.no_prompts: - log.info( - "[yellow]Press enter to use default values [cyan bold](shown in brackets) [yellow]or type your own responses" - ) - self._check_inputs() - self._set_profile() - self._check_profile() - self._run_pytests() + self.remote_url = remote_url + self.branch = branch + self.errors: List[str] = [] + self.verbose = verbose + self.obsolete_snapshots: bool = False + self.update = update + self.once = once + + def run(self) -> None: + """Run test steps""" + self.check_inputs() + os.environ["NFT_DIFF"] = "pdiff" # set nf-test differ to pdiff to get a better diff output + os.environ[ + "NFT_DIFF_ARGS" + ] = "--line-numbers --expand-tabs=2" # taken from https://code.askimed.com/nf-test/docs/assertions/snapshots/#snapshot-differences + with nf_core.utils.set_wd(Path(self.dir)): + self.check_snapshot_stability() + if len(self.errors) > 0: + errors = "\n - ".join(self.errors) + raise UserWarning(f"Ran, but found errors:\n - {errors}") + else: + log.info("All tests passed!") - def _check_inputs(self): + def check_inputs(self) -> None: """Do more complex checks about supplied flags.""" # Check modules directory structure - self.check_modules_structure() - - # Retrieving installed modules - if self.repo_type == "modules": - installed_components = self.get_components_clone_modules() - else: - modules_json = ModulesJson(self.dir) - modules_json.check_up_to_date() - installed_components = modules_json.get_all_components(self.component_type).get( - self.modules_repo.remote_url - ) + if self.component_type == "modules": + self.check_modules_structure() # Get the component name if not specified if self.component_name is None: @@ -91,110 +110,139 @@ def _check_inputs(self): raise UserWarning(
f"{self.component_type[:-1].title()} name not provided and prompts deactivated. Please provide the {self.component_type[:-1]} name{' as TOOL/SUBTOOL or TOOL' if self.component_type == 'modules' else ''}." ) - if not installed_components: - if self.component_type == "modules": - dir_structure_message = f"modules/{self.modules_repo.repo_path}/TOOL/SUBTOOL/ and tests/modules/{self.modules_repo.repo_path}/TOOLS/SUBTOOL/" - elif self.component_type == "subworkflows": - dir_structure_message = f"subworkflows/{self.modules_repo.repo_path}/SUBWORKFLOW/ and tests/subworkflows/{self.modules_repo.repo_path}/SUBWORKFLOW/" - raise UserWarning( - f"No installed {self.component_type} were found from '{self.modules_repo.remote_url}'.\n" - f"Are you running the tests inside the repository root directory?\n" - f"Make sure that the directory structure is {dir_structure_message}" - ) - self.component_name = questionary.autocomplete( - f"{self.component_type[:-1]} name:", - choices=installed_components, - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - - # Sanity check that the module directory exists - self._validate_folder_structure() - - def _validate_folder_structure(self): - """Validate that the modules follow the correct folder structure to run the tests: - - modules/nf-core/TOOL/SUBTOOL/ - - tests/modules/nf-core/TOOL/SUBTOOL/ - or - - subworkflows/nf-core/SUBWORKFLOW/ - - tests/subworkflows/nf-core/SUBWORKFLOW/ - """ - if self.component_type == "modules": - component_path = Path(self.default_modules_path) / self.component_name - test_path = Path(self.default_tests_path) / self.component_name - elif self.component_type == "subworkflows": - component_path = Path(self.default_subworkflows_path) / self.component_name - test_path = Path(self.default_subworkflows_tests_path) / self.component_name - - if not (self.dir / component_path).is_dir(): - raise UserWarning( - f"Cannot find directory '{component_path}'. Should be {'TOOL/SUBTOOL or TOOL' if self.component_type == 'modules' else 'SUBWORKFLOW'}. Are you running the tests inside the modules repository root directory?" - ) - if not (self.dir / test_path).is_dir(): + else: + try: + self.component_name = questionary.autocomplete( + "Tool name:" if self.component_type == "modules" else "Subworkflow name:", + choices=self.components_from_repo(self.org), + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + except LookupError: + raise + + self.component_dir = Path(self.component_type, self.modules_repo.repo_path, *self.component_name.split("/")) + + # First, sanity check that the module directory exists + if not Path(self.dir, self.component_dir).is_dir(): raise UserWarning( - f"Cannot find directory '{test_path}'. Should be {'TOOL/SUBTOOL or TOOL' if self.component_type == 'modules' else 'SUBWORKFLOW'}. " - "Are you running the tests inside the modules repository root directory? " - "Do you have tests for the specified module?" + f"Cannot find directory '{self.component_dir}'.{' Should be TOOL/SUBTOOL or TOOL' if self.component_type == 'modules' else ''}" ) - def _set_profile(self): - """Set $PROFILE env variable. - The config expects $PROFILE and Nextflow fails if it's not set. - """ + # Check container software to use if os.environ.get("PROFILE") is None: os.environ["PROFILE"] = "" if self.no_prompts: log.info( - "Setting environment variable '$PROFILE' to an empty string as not set.\n" - "Tests will run with Docker by default. 
" + "Setting environment variable '$PROFILE' to Docker as not set otherwise.\n" "To use Singularity set 'export PROFILE=singularity' in your shell before running this command." ) + os.environ["PROFILE"] = "docker" else: question = { "type": "list", "name": "profile", - "message": "Choose software profile", + "message": "Choose container software to run the test with", "choices": ["Docker", "Singularity", "Conda"], } answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style) profile = answer["profile"].lower() os.environ["PROFILE"] = profile - log.info(f"Setting environment variable '$PROFILE' to '{profile}'") - - def _check_profile(self): - """Check if profile is available""" - profile = os.environ.get("PROFILE") - # Make sure the profile read from the environment is a valid Nextflow profile. - valid_nextflow_profiles = ["docker", "singularity", "conda"] - if profile in valid_nextflow_profiles: - if not which(profile): - raise UserWarning(f"Command '{profile}' not found - is it installed?") - else: - raise UserWarning( - f"The PROFILE '{profile}' set in the shell environment is not valid.\n" - f"Valid Nextflow profiles are '{', '.join(valid_nextflow_profiles)}'." - ) - def _run_pytests(self): - """Given a module/subworkflow name, run tests.""" - # Print nice divider line - console = rich.console.Console() - console.rule(self.component_name, style="black") - - # Check uncommitted changed - try: - repo = Repo(self.dir) - if repo.is_dirty(): - log.warning("You have uncommitted changes. Make sure to commit last changes before running the tests.") - except InvalidGitRepositoryError: - pass - - # Set pytest arguments - tag = self.component_name - if self.component_type == "subworkflows": - tag = "subworkflows/" + tag - command_args = ["--tag", f"{tag}", "--symlink", "--keep-workflow-wd", "--git-aware"] - command_args += self.pytest_args - - # Run pytest - log.info(f"Running pytest for {self.component_type[:-1]} '{self.component_name}'") - sys.exit(pytest.main(command_args)) + def display_nftest_output(self, nftest_out: bytes, nftest_err: bytes) -> None: + nftest_output = Text.from_ansi(nftest_out.decode()) + print(Panel(nftest_output, title="nf-test output")) + if nftest_err: + syntax = Syntax(nftest_err.decode(), "diff", theme="ansi_dark") + print(Panel(syntax, title="nf-test error")) + if "Different Snapshot:" in nftest_err.decode(): + log.error("nf-test failed due to differences in the snapshots") + # prompt to update snapshot + if self.no_prompts: + log.info("Updating snapshot") + self.update = True + elif self.update is None: + answer = Confirm.ask( + "[bold][blue]?[/] nf-test found differences in the snapshot. 
Do you want to update it?", + default=True, + ) + if answer: + log.info("Updating snapshot") + self.update = True + else: + log.debug("Snapshot not updated") + if self.update: + # update snapshot using nf-test --update-snapshot + self.generate_snapshot() + + else: + self.errors.append("nf-test failed") + + def generate_snapshot(self) -> bool: + """Generate the nf-test snapshot using `nf-test test` command + + returns True if the test was successful, False otherwise + """ + + log.debug("Running nf-test test") + + # set verbose flag if self.verbose is True + verbose = "--verbose --debug" if self.verbose else "" + update = "--update-snapshot" if self.update else "" + self.update = False # reset self.update to False to test if the new snapshot is stable + tag = f"subworkflows/{self.component_name}" if self.component_type == "subworkflows" else self.component_name + + result = nf_core.utils.run_cmd( + "nf-test", + f"test --tag {tag} --profile {os.environ['PROFILE']} {verbose} {update}", + ) + if result is not None: + nftest_out, nftest_err = result + self.display_nftest_output(nftest_out, nftest_err) + # check if nftest_out contains obsolete snapshots + pattern = r"Snapshot Summary:.*?(\d+)\s+obsolete" + compiled_pattern = re.compile(pattern, re.DOTALL) # re.DOTALL to allow . to match newlines + obsolete_snapshots = compiled_pattern.search(nftest_out.decode()) + if obsolete_snapshots: + self.obsolete_snapshots = True + + # check if nf-test was successful + if "Assertion failed:" in nftest_out.decode(): + return False + elif "no valid tests found." in nftest_out.decode(): + log.error("Test file 'main.nf.test' not found") + self.errors.append("Test file 'main.nf.test' not found") + return False + else: + log.debug("nf-test successful") + return True + else: + log.error("nf-test failed") + self.errors.append("nf-test failed") + return False + + def check_snapshot_stability(self) -> bool: + """Run the nf-test twice and check if the snapshot changes""" + log.info("Generating nf-test snapshot") + if not self.generate_snapshot(): + return False # stop here if the first run failed + elif self.once: + return True # stop here if the test should be run only once + log.info("Generating nf-test snapshot again to check stability") + if not self.generate_snapshot(): + log.error("nf-test snapshot is not stable") + self.errors.append("nf-test snapshot is not stable") + return False + else: + if self.obsolete_snapshots: + # ask if the user wants to remove obsolete snapshots using nf-test --clean-snapshot + if self.no_prompts or Confirm.ask( + "nf-test found obsolete snapshots. 
Do you want to remove them?", default=True + ): + log.info("Removing obsolete snapshots") + nf_core.utils.run_cmd( + "nf-test", + f"test --tag {self.component_name} --profile {os.environ['PROFILE']} --clean-snapshot", + ) + else: + log.debug("Obsolete snapshots not removed") + return True diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 73378d12ca..eec533ce60 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -1,31 +1,33 @@ import logging -import os import re from pathlib import Path +from typing import List, Optional, Tuple import questionary import rich.prompt import nf_core.utils +from nf_core.modules.modules_repo import ModulesRepo log = logging.getLogger(__name__) -def get_repo_info(directory, use_prompt=True): +def get_repo_info(directory: str, use_prompt: Optional[bool] = True) -> Tuple[str, Optional[str], str]: """ Determine whether this is a pipeline repository or a clone of nf-core/modules """ + # Verify that the pipeline dir exists if directory is None or not Path(directory).is_dir(): raise UserWarning(f"Could not find directory: {directory}") # Try to find the root directory - base_dir = nf_core.utils.determine_base_dir(directory) + base_dir: str = nf_core.utils.determine_base_dir(directory) # Figure out the repository type from the .nf-core.yml config file if we can config_fn, tools_config = nf_core.utils.load_tools_config(base_dir) - repo_type = tools_config.get("repository_type", None) + repo_type: Optional[str] = tools_config.get("repository_type", None) # If not set, prompt the user if not repo_type and use_prompt: @@ -55,7 +57,6 @@ def get_repo_info(directory, use_prompt=True): raise UserWarning(f"Invalid repository type: '{repo_type}'") # Check for org if modules repo - org = None if repo_type == "pipeline": org = "" elif repo_type == "modules": @@ -77,10 +78,12 @@ def get_repo_info(directory, use_prompt=True): raise UserWarning("Organisation path could not be established") # It was set on the command line, return what we were given - return [base_dir, repo_type, org] + return (base_dir, repo_type, org) -def prompt_component_version_sha(component_name, component_type, modules_repo, installed_sha=None): +def prompt_component_version_sha( + component_name: str, component_type: str, modules_repo: ModulesRepo, installed_sha: Optional[str] = None +) -> str: """ Creates an interactive questionary prompt for selecting the module/subworkflow version Args: @@ -107,17 +110,20 @@ def prompt_component_version_sha(component_name, component_type, modules_repo, i next_page_commits = [next(all_commits, None) for _ in range(10)] next_page_commits = [commit for commit in next_page_commits if commit is not None] if all(commit is None for commit in next_page_commits): - next_page_commits = None + next_page_commits = [] choices = [] - for title, sha in map(lambda commit: (commit["trunc_message"], commit["git_sha"]), commits): - display_color = "fg:ansiblue" if sha != installed_sha else "fg:ansired" - message = f"{title} {sha}" - if installed_sha == sha: - message += " (installed version)" - commit_display = [(display_color, message), ("class:choice-default", "")] - choices.append(questionary.Choice(title=commit_display, value=sha)) - if next_page_commits is not None: + for commit in commits: + if commit: + title = commit["trunc_message"] + sha = commit["git_sha"] + display_color = "fg:ansiblue" if sha != installed_sha else "fg:ansired" + message = f"{title} {sha}" + if installed_sha == sha: + 
message += " (installed version)" + commit_display = [(display_color, message), ("class:choice-default", "")] + choices.append(questionary.Choice(title=commit_display, value=sha)) + if next_page_commits: choices += [older_commits_choice] git_sha = questionary.select( f"Select '{component_name}' commit:", choices=choices, style=nf_core.utils.nfcore_question_style @@ -126,7 +132,7 @@ def prompt_component_version_sha(component_name, component_type, modules_repo, i return git_sha -def get_components_to_install(subworkflow_dir): +def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str]]: """ Parse the subworkflow main.nf file to retrieve all imported modules and subworkflows. """ diff --git a/nf_core/components/create.py b/nf_core/components/create.py index e626de4aaa..568ca22af5 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -7,9 +7,11 @@ import glob import json import logging -import os import re +import shutil import subprocess +from pathlib import Path +from typing import Dict, Optional import jinja2 import questionary @@ -20,6 +22,7 @@ import nf_core import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.lint_utils import run_prettier_on_file log = logging.getLogger(__name__) @@ -27,16 +30,17 @@ class ComponentCreate(ComponentCommand): def __init__( self, - component_type, - directory=".", - component="", - author=None, - process_label=None, - has_meta=None, - force=False, - conda_name=None, - conda_version=None, - empty_template=False, + component_type: str, + directory: str = ".", + component: str = "", + author: Optional[str] = None, + process_label: Optional[str] = None, + has_meta: Optional[str] = None, + force: bool = False, + conda_name: Optional[str] = None, + conda_version: Optional[str] = None, + empty_template: bool = False, + migrate_pytest: bool = False, ): super().__init__(component_type, directory) self.directory = directory @@ -48,7 +52,6 @@ def __init__( self.subtool = None self.tool_conda_name = conda_name self.tool_conda_version = conda_version - self.tool_licence = None self.tool_licence = "" self.tool_description = "" self.tool_doc_url = "" @@ -56,8 +59,9 @@ def __init__( self.bioconda = None self.singularity_container = None self.docker_container = None - self.file_paths = {} + self.file_paths: Dict[str, Path] = {} self.not_empty_template = not empty_template + self.migrate_pytest = migrate_pytest def create(self): """ @@ -71,36 +75,38 @@ def create(self): If is a pipeline, this function creates a file called: '/modules/local/tool.nf' - OR + OR '/modules/local/tool_subtool.nf' - OR for subworkflows + OR for subworkflows '/subworkflows/local/subworkflow_name.nf' If is a clone of nf-core/modules, it creates or modifies the following files: For modules: - modules/modules/nf-core/tool/subtool/ - * main.nf - * meta.yml - modules/tests/modules/nf-core/tool/subtool/ - * main.nf - * test.yml - * nextflow.config - tests/config/pytest_modules.yml + ```tree + modules/nf-core/tool/subtool/ + ├── main.nf + ├── meta.yml + ├── environment.yml + └── tests + ├── main.nf.test + └── tags.yml + ``` The function will attempt to automatically find a Bioconda package called and matching Docker / Singularity images from BioContainers. 
For subworkflows: - subworkflows/nf-core/subworkflow_name/ - * main.nf - * meta.yml - tests/subworkflows/nf-core/subworkflow_name/ - * main.nf - * test.yml - * nextflow.config - tests/config/pytest_modules.yml + + ```tree + subworkflows/nf-core/tool/subtool/ + ├── main.nf + ├── meta.yml + └── tests + ├── main.nf.test + └── tags.yml + ``` """ @@ -123,57 +129,48 @@ def create(self): # Determine the component name self.component_name = self.component - self.component_dir = self.component + self.component_dir = Path(self.component) if self.subtool: self.component_name = f"{self.component}/{self.subtool}" - self.component_dir = os.path.join(self.component, self.subtool) + self.component_dir = Path(self.component, self.subtool) self.component_name_underscore = self.component_name.replace("/", "_") # Check existence of directories early for fast-fail self.file_paths = self._get_component_dirs() - if self.component_type == "modules": - # Try to find a bioconda package for 'component' - self._get_bioconda_tool() + if self.migrate_pytest: + # Rename the component directory to old + component_old_dir = Path(str(self.component_dir) + "_old") + component_parent_path = Path(self.directory, self.component_type, self.org) + component_old_path = component_parent_path / component_old_dir + component_path = component_parent_path / self.component_dir - # Prompt for GitHub username - self._get_username() + component_path.rename(component_old_path) + else: + if self.component_type == "modules": + # Try to find a bioconda package for 'component' + self._get_bioconda_tool() - if self.component_type == "modules": - self._get_module_structure_components() + # Prompt for GitHub username + self._get_username() + + if self.component_type == "modules": + self._get_module_structure_components() # Create component template with jinja2 self._render_template() + log.info(f"Created component template: '{self.component_name}'") - if self.repo_type == "modules": - # Add entry to pytest_modules.yml - try: - with open(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"), "r") as fh: - pytest_modules_yml = yaml.safe_load(fh) - if self.subtool: - pytest_modules_yml[self.component_name] = [ - f"modules/{self.org}/{self.component}/{self.subtool}/**", - f"tests/modules/{self.org}/{self.component}/{self.subtool}/**", - ] - else: - pytest_modules_yml[ - ("" if self.component_type == "modules" else self.component_type + "/") + self.component_name - ] = [ - f"{self.component_type}/{self.org}/{self.component}/**", - f"tests/{self.component_type}/{self.org}/{self.component}/**", - ] - pytest_modules_yml = dict(sorted(pytest_modules_yml.items())) - with open(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"), "w") as fh: - yaml.dump(pytest_modules_yml, fh, sort_keys=True, Dumper=nf_core.utils.custom_yaml_dumper()) - except FileNotFoundError: - raise UserWarning("Could not open 'tests/config/pytest_modules.yml' file!") - - new_files = list(self.file_paths.values()) - if self.repo_type == "modules": - new_files.append(os.path.join(self.directory, "tests", "config", "pytest_modules.yml")) - log.info("Created / edited following files:\n " + "\n ".join(new_files)) + if self.migrate_pytest: + self._copy_old_files(component_old_path) + log.info("Migrating pytest tests: copied original module files to the new module") + shutil.rmtree(component_old_path) + self._print_and_delete_pytest_files() + + new_files = [str(path) for path in self.file_paths.values()] + log.info("Created the following files:\n " + "\n
".join(new_files)) def _get_bioconda_tool(self): """ @@ -273,19 +270,24 @@ def _render_template(self): log.debug(f"Rendering template file: '{template_fn}'") j_template = env.get_template(template_fn) object_attrs["nf_core_version"] = nf_core.__version__ - rendered_output = j_template.render(object_attrs) + try: + rendered_output = j_template.render(object_attrs) + except Exception as e: + log.error(f"Could not render template file '{template_fn}':\n{e}") + raise e # Write output to the target file - os.makedirs(os.path.dirname(dest_fn), exist_ok=True) + log.debug(f"Writing output to: '{dest_fn}'") + dest_fn.parent.mkdir(exist_ok=True, parents=True) with open(dest_fn, "w") as fh: log.debug(f"Writing output to: '{dest_fn}'") fh.write(rendered_output) # Mirror file permissions - template_stat = os.stat( - os.path.join(os.path.dirname(nf_core.__file__), f"{self.component_type[:-1]}-template", template_fn) - ) - os.chmod(dest_fn, template_stat.st_mode) + template_stat = ( + Path(nf_core.__file__).parent / f"{self.component_type[:-1]}-template" / template_fn + ).stat() + dest_fn.chmod(template_stat.st_mode) def _collect_name_prompt(self): """ @@ -336,17 +338,17 @@ def _get_component_dirs(self): """ file_paths = {} if self.repo_type == "pipeline": - local_component_dir = os.path.join(self.directory, self.component_type, "local") + local_component_dir = Path(self.directory, self.component_type, "local") # Check whether component file already exists - component_file = os.path.join(local_component_dir, f"{self.component_name}.nf") - if os.path.exists(component_file) and not self.force_overwrite: + component_file = local_component_dir / f"{self.component_name}.nf" + if component_file.exists() and not self.force_overwrite: raise UserWarning( f"{self.component_type[:-1].title()} file exists already: '{component_file}'. Use '--force' to overwrite" ) if self.component_type == "modules": # If a subtool, check if there is a module called the base tool name already - if self.subtool and os.path.exists(os.path.join(local_component_dir, f"{self.component}.nf")): + if self.subtool and (local_component_dir / f"{self.component}.nf").exists(): raise UserWarning( f"Module '{self.component}' exists already, cannot make subtool '{self.component_name}'" ) @@ -359,55 +361,42 @@ def _get_component_dirs(self): ) # Set file paths - file_paths[os.path.join(self.component_type, "main.nf")] = component_file + file_paths["main.nf"] = component_file if self.repo_type == "modules": - software_dir = os.path.join(self.directory, self.component_type, self.org, self.component_dir) - test_dir = os.path.join(self.directory, "tests", self.component_type, self.org, self.component_dir) + component_dir = Path(self.directory, self.component_type, self.org, self.component_dir) # Check if module/subworkflow directories exist already - if os.path.exists(software_dir) and not self.force_overwrite: - raise UserWarning( - f"{self.component_type[:-1]} directory exists: '{software_dir}'. Use '--force' to overwrite" - ) - if os.path.exists(test_dir) and not self.force_overwrite: + if component_dir.exists() and not self.force_overwrite and not self.migrate_pytest: raise UserWarning( - f"{self.component_type[:-1]} test directory exists: '{test_dir}'. Use '--force' to overwrite" + f"{self.component_type[:-1]} directory exists: '{component_dir}'. 
Use '--force' to overwrite" ) if self.component_type == "modules": # If a subtool, check if there is a module called the base tool name already - parent_tool_main_nf = os.path.join( - self.directory, self.component_type, self.org, self.component, "main.nf" - ) - parent_tool_test_nf = os.path.join( - self.directory, self.component_type, self.org, self.component, "main.nf" - ) - if self.subtool and os.path.exists(parent_tool_main_nf): + parent_tool_main_nf = Path(self.directory, self.component_type, self.org, self.component, "main.nf") + if self.subtool and parent_tool_main_nf.exists() and not self.migrate_pytest: raise UserWarning( f"Module '{parent_tool_main_nf}' exists already, cannot make subtool '{self.component_name}'" ) - if self.subtool and os.path.exists(parent_tool_test_nf): - raise UserWarning( - f"Module '{parent_tool_test_nf}' exists already, cannot make subtool '{self.component_name}'" - ) # If no subtool, check that there isn't already a tool/subtool tool_glob = glob.glob( - f"{os.path.join(self.directory, self.component_type, self.org, self.component)}/*/main.nf" + f"{Path(self.directory, self.component_type, self.org, self.component)}/*/main.nf" ) - if not self.subtool and tool_glob: + if not self.subtool and tool_glob and not self.migrate_pytest: raise UserWarning( f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{self.component_name}'" ) # Set file paths # For modules - can be tool/ or tool/subtool/ so can't do in template directory structure - file_paths[os.path.join(self.component_type, "main.nf")] = os.path.join(software_dir, "main.nf") - file_paths[os.path.join(self.component_type, "meta.yml")] = os.path.join(software_dir, "meta.yml") - file_paths[os.path.join("tests", "main.nf")] = os.path.join(test_dir, "main.nf") - file_paths[os.path.join("tests", "test.yml")] = os.path.join(test_dir, "test.yml") - file_paths[os.path.join("tests", "nextflow.config")] = os.path.join(test_dir, "nextflow.config") + file_paths["main.nf"] = component_dir / "main.nf" + file_paths["meta.yml"] = component_dir / "meta.yml" + if self.component_type == "modules": + file_paths["environment.yml"] = component_dir / "environment.yml" + file_paths["tests/tags.yml"] = component_dir / "tests" / "tags.yml" + file_paths["tests/main.nf.test"] = component_dir / "tests" / "main.nf.test" return file_paths @@ -418,8 +407,7 @@ def _get_username(self): # Try to guess the current user if `gh` is installed author_default = None try: - with open(os.devnull, "w") as devnull: - gh_auth_user = json.loads(subprocess.check_output(["gh", "api", "/user"], stderr=devnull)) + gh_auth_user = json.loads(subprocess.check_output(["gh", "api", "/user"], stderr=subprocess.DEVNULL)) author_default = f"@{gh_auth_user['login']}" except Exception as e: log.debug(f"Could not find GitHub username using 'gh' cli command: [red]{e}") @@ -433,3 +421,65 @@ def _get_username(self): f"[violet]GitHub Username:[/]{' (@author)' if author_default is None else ''}", default=author_default, ) + + def _copy_old_files(self, component_old_path): + """Copy files from old module to new module""" + log.debug("Copying original main.nf file") + shutil.copyfile(component_old_path / "main.nf", self.file_paths["main.nf"]) + log.debug("Copying original meta.yml file") + shutil.copyfile(component_old_path / "meta.yml", self.file_paths["meta.yml"]) + if self.component_type == "modules": + log.debug("Copying original environment.yml file") + shutil.copyfile(component_old_path / "environment.yml", self.file_paths["environment.yml"]) + if 
(component_old_path / "templates").is_dir(): + log.debug("Copying original templates directory") + shutil.copytree( + component_old_path / "templates", self.file_paths["environment.yml"].parent / "templates" + ) + # Create a nextflow.config file if the pytest config contains information other than publishDir + pytest_dir = Path(self.directory, "tests", self.component_type, self.org, self.component_dir) + nextflow_config = pytest_dir / "nextflow.config" + if nextflow_config.is_file(): + with open(nextflow_config, "r") as fh: + config_lines = "" + for line in fh: + if "publishDir" not in line: + config_lines += line + if len(config_lines) > 0: + log.debug("Copying nextflow.config file from pytest tests") + with open( + Path(self.directory, self.component_type, self.org, self.component_dir, "tests", "nextflow.config"), + "w+", + ) as ofh: + ofh.write(config_lines) + + def _print_and_delete_pytest_files(self): + """Prompt whether to delete the pytest files, printing 'main.nf' to stdout so the tests can be migrated to nf-test manually""" + pytest_dir = Path(self.directory, "tests", self.component_type, self.org, self.component_dir) + if rich.prompt.Confirm.ask( + "[violet]Do you want to delete the pytest files?[/]\nPytest file 'main.nf' will be printed to standard output to allow migrating the tests manually to 'main.nf.test'.", + default=False, + ): + with open(pytest_dir / "main.nf", "r") as fh: + log.info(fh.read()) + shutil.rmtree(pytest_dir) + log.info( + "[yellow]Please convert the pytest tests to nf-test in 'main.nf.test'.[/]\n" + "You can find more information about nf-test [link=https://nf-co.re/docs/contributing/modules#migrating-from-pytest-to-nf-test]on the nf-core website[/link]." + ) + else: + log.info( + "[yellow]Please migrate the pytest tests to nf-test in 'main.nf.test'.[/]\n" + "You can find more information about nf-test [link=https://nf-co.re/docs/contributing/modules#migrating-from-pytest-to-nf-test]on the nf-core website[/link].\n" + f"Once done, make sure to delete the module pytest files to avoid linting errors: {pytest_dir}" + ) + # Delete tags from pytest_modules.yml + modules_yml = Path(self.directory, "tests", "config", "pytest_modules.yml") + with open(modules_yml, "r") as fh: + yml_file = yaml.safe_load(fh) + yml_key = str(self.component_dir) if self.component_type == "modules" else f"subworkflows/{self.component_dir}" + if yml_key in yml_file: + del yml_file[yml_key] + with open(modules_yml, "w") as fh: + yaml.dump(yml_file, fh) + run_prettier_on_file(modules_yml) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index f7a5fe6680..6385ee4092 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -127,6 +127,8 @@ def install(self, component, silent=False): self.install_included_components(component_dir) if not silent: + modules_json.load() + modules_json.dump(run_prettier=True) # Print include statement component_name = "_".join(component.upper().split("/")) log.info(f"Use the following statement to include this {self.component_type[:-1]}:") diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index ed0730d7b4..efffc28e85 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -10,7 +10,9 @@ import os from pathlib import Path -import rich +import rich.box +import rich.console +import rich.panel from rich.markdown import Markdown from rich.table import Table @@ -144,6 +146,7 @@ def __init__( def get_all_module_lint_tests(is_pipeline): if is_pipeline: return [ + "environment_yml", "module_patch", "module_version",
"main_nf", @@ -153,7 +156,7 @@ def get_all_module_lint_tests(is_pipeline): "module_changes", ] else: - return ["main_nf", "meta_yml", "module_todos", "module_deprecations", "module_tests"] + return ["environment_yml", "main_nf", "meta_yml", "module_todos", "module_deprecations", "module_tests"] @staticmethod def get_all_subworkflow_lint_tests(is_pipeline): @@ -208,7 +211,7 @@ def _print_results(self, show_passed=False, sort_by="test"): self.failed.sort(key=operator.attrgetter(*sort_order)) # Find maximum module name length - max_name_len = 40 + max_name_len = len(self.component_type[:-1] + " name") for tests in [self.passed, self.warned, self.failed]: try: for lint_result in tests: @@ -263,7 +266,7 @@ def format_result(test_results, table): table = Table(style="yellow", box=rich.box.MINIMAL, pad_edge=False, border_style="dim") table.add_column(f"{self.component_type[:-1].title()} name", width=max_name_len) table.add_column("File path") - table.add_column("Test message") + table.add_column("Test message", overflow="fold") table = format_result(self.warned, table) console.print( rich.panel.Panel( @@ -277,10 +280,15 @@ def format_result(test_results, table): # Table of failing tests if len(self.failed) > 0: - table = Table(style="red", box=rich.box.MINIMAL, pad_edge=False, border_style="dim") + table = Table( + style="red", + box=rich.box.MINIMAL, + pad_edge=False, + border_style="dim", + ) table.add_column(f"{self.component_type[:-1].title()} name", width=max_name_len) table.add_column("File path") - table.add_column("Test message") + table.add_column("Test message", overflow="fold") table = format_result(self.failed, table) console.print( rich.panel.Panel( diff --git a/nf_core/components/list.py b/nf_core/components/list.py index 1b12615dec..47c0eaad62 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -1,7 +1,8 @@ import json import logging +from typing import Dict, List, Optional, Tuple, Union, cast -import rich +import rich.table from nf_core.components.components_command import ComponentCommand from nf_core.modules.modules_json import ModulesJson @@ -11,11 +12,19 @@ class ComponentList(ComponentCommand): - def __init__(self, component_type, pipeline_dir, remote=True, remote_url=None, branch=None, no_pull=False): + def __init__( + self, + component_type: str, + pipeline_dir: str, + remote: bool = True, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + ) -> None: super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.remote = remote - def list_components(self, keywords=None, print_json=False): + def list_components(self, keywords: Optional[List[str]] = None, print_json=False) -> Union[rich.table.Table, str]: keywords = keywords or [] """ Get available modules/subworkflows names from GitHub tree for repo @@ -25,11 +34,11 @@ def list_components(self, keywords=None, print_json=False): # self.check_component_structure(self.component_type) # Initialise rich table - table = rich.table.Table() + table: rich.table.Table = rich.table.Table() table.add_column(f"{self.component_type[:-1].capitalize()} Name") - components = [] + components: List[str] = [] - def pattern_msg(keywords): + def pattern_msg(keywords: List[str]) -> str: if len(keywords) == 0: return "" if len(keywords) == 1: @@ -78,11 +87,11 @@ def pattern_msg(keywords): return "" # Verify that 'modules.json' is consistent with the installed modules - modules_json = ModulesJson(self.dir) + modules_json: ModulesJson = ModulesJson(self.dir) 
modules_json.check_up_to_date() # Filter by keywords - repos_with_comps = { + repos_with_comps: Dict[str, List[Tuple[str, str]]] = { repo_url: [comp for comp in components if all(k in comp[1] for k in keywords)] for repo_url, components in modules_json.get_all_components(self.component_type).items() } @@ -98,34 +107,40 @@ def pattern_msg(keywords): table.add_column("Date") # Load 'modules.json' - modules_json = modules_json.modules_json + modules_json_file = modules_json.modules_json for repo_url, component_with_dir in sorted(repos_with_comps.items()): - repo_entry = modules_json["repos"].get(repo_url, {}) - for install_dir, component in sorted(component_with_dir): - repo_modules = repo_entry.get(self.component_type) - component_entry = repo_modules.get(install_dir).get(component) - - if component_entry: - version_sha = component_entry["git_sha"] - try: - # pass repo_name to get info on modules even outside nf-core/modules - message, date = ModulesRepo( - remote_url=repo_url, - branch=component_entry["branch"], - ).get_commit_info(version_sha) - except LookupError as e: - log.warning(e) + repo_entry: Dict[str, Dict[str, Dict[str, Dict[str, Union[str, List[str]]]]]] + if modules_json_file is None: + log.warning(f"Modules JSON file '{modules_json.modules_json_path}' is missing. ") + continue + else: + repo_entry = modules_json_file["repos"].get(repo_url, {}) + for install_dir, component in sorted(component_with_dir): + # Use cast() to predict the return type of recursive get():s + repo_modules = cast(dict, repo_entry.get(self.component_type)) + component_entry = cast(dict, cast(dict, repo_modules.get(install_dir)).get(component)) + + if component_entry: + version_sha = component_entry["git_sha"] + try: + # pass repo_name to get info on modules even outside nf-core/modules + message, date = ModulesRepo( + remote_url=repo_url, + branch=component_entry["branch"], + ).get_commit_info(version_sha) + except LookupError as e: + log.warning(e) + date = "[red]Not Available" + message = "[red]Not Available" + else: + log.warning( + f"Commit SHA for {self.component_type[:-1]} '{install_dir}/{self.component_type}' is missing from 'modules.json'" + ) + version_sha = "[red]Not Available" date = "[red]Not Available" message = "[red]Not Available" - else: - log.warning( - f"Commit SHA for {self.component_type[:-1]} '{install_dir}/{self.component_type}' is missing from 'modules.json'" - ) - version_sha = "[red]Not Available" - date = "[red]Not Available" - message = "[red]Not Available" - table.add_row(component, repo_url, version_sha, message, date) + table.add_row(component, repo_url, version_sha, message, date) if print_json: return json.dumps(components, sort_keys=True, indent=4) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 4413854128..874fa570bc 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -1,7 +1,12 @@ """ The NFCoreComponent class holds information and utility functions for a single module or subworkflow """ +import logging +import re from pathlib import Path +from typing import Union + +log = logging.getLogger(__name__) class NFCoreComponent: @@ -44,14 +49,16 @@ def __init__( if remote_component: # Initialize the important files - self.main_nf = self.component_dir / "main.nf" - self.meta_yml = self.component_dir / "meta.yml" + self.main_nf = Path(self.component_dir, "main.nf") + self.meta_yml = Path(self.component_dir, "meta.yml") + self.process_name = "" + self.environment_yml = 
Path(self.component_dir, "environment.yml") repo_dir = self.component_dir.parts[: self.component_dir.parts.index(self.component_name.split("/")[0])][-1] self.org = repo_dir - self.test_dir = Path(self.base_dir, "tests", component_type, repo_dir, self.component_name) - self.test_yml = self.test_dir / "test.yml" - self.test_main_nf = self.test_dir / "main.nf" + self.nftest_testdir = Path(self.component_dir, "tests") + self.nftest_main_nf = Path(self.nftest_testdir, "main.nf.test") + self.tags_yml = Path(self.nftest_testdir, "tags.yml") if self.repo_type == "pipeline": patch_fn = f"{self.component_name.replace('/', '-')}.diff" @@ -65,7 +72,122 @@ def __init__( self.component_name = self.component_dir.stem # These attributes are only used by nf-core modules # so just initialize them to empty values or None - self.meta_yml = None + self.meta_yml = "" + self.environment_yml = "" self.test_dir = None self.test_yml = None self.test_main_nf = None + + def _get_main_nf_tags(self, test_main_nf: Union[Path, str]): + """Collect all tags from the main.nf.test file.""" + tags = [] + with open(test_main_nf, "r") as fh: + for line in fh: + if line.strip().startswith("tag"): + tags.append(line.strip().split()[1].strip('"')) + return tags + + def _get_included_components(self, main_nf: Union[Path, str]): + """Collect all included components from the main.nf file.""" + included_components = [] + with open(main_nf, "r") as fh: + for line in fh: + if line.strip().startswith("include"): + # get tool/subtool or subworkflow name from include statement, can be in the form + #'../../../modules/nf-core/hisat2/align/main' + #'../bam_sort_stats_samtools/main' + #'../subworkflows/nf-core/bam_sort_stats_samtools/main' + #'plugin/nf-validation' + component = line.strip().split()[-1].split(self.org)[-1].split("main")[0].strip("/") + component = component.replace("'../", "subworkflows/") + component = component.replace("'", "") + included_components.append(component) + return included_components + + def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, str]): + """Collect all included components from the main.nf.test file.""" + included_components = [] + with open(main_nf_test, "r") as fh: + for line in fh: + if line.strip().startswith("script"): + # get tool/subtool or subworkflow name from script statement, can be: + # if the component is a module TOOL/SUBTOOL: + # '../../SUBTOOL/main.nf' + # '../../../TOOL/SUBTOOL/main.nf' + # '../../../TOOL/main.nf' + # if the component is a module TOOL: + # '../../TOOL/main.nf' + # '../../TOOL/SUBTOOL/main.nf' + # if the component uses full paths or is a subworkflow: + # '(../../)modules/nf-core/TOOL/(SUBTOOL/)main.nf' + # '(../../)subworkflows/nf-core/TOOL/(SUBTOOL/)main.nf' + # the line which uses the current component script: + # '../main.nf' + component = ( + line.strip() + .split("../")[-1] + .split(self.org)[-1] + .split("main.nf")[0] + .strip("'") + .strip('"') + .strip("/") + ) + if ( + "/" in self.component_name + and "/" not in component + and line.count("../") == 2 + and self.org not in line + and component != "" + ): + # Add the current component name "TOOL" to the tag + component = f"{self.component_name.split('/')[0]}/{component}" + if "subworkflows" in line: + # Add the subworkflows prefix to the tag + component = f"subworkflows/{component}" + if component != "": + included_components.append(component) + return included_components + + def get_inputs_from_main_nf(self): + """Collect all inputs from the main.nf file.""" + inputs = [] + with open(self.main_nf, "r") as f:
+ data = f.read() + # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or as plain val foo / path bar declarations + # regex matches: + # val(foo) + # path(bar) + # val foo + # path bar + # only the text between "input:" and "output:" is searched + if "input:" not in data: + log.info(f"Could not find any inputs in {self.main_nf}") + return inputs + input_data = data.split("input:")[1].split("output:")[0] + regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + matches = re.finditer(regex, input_data, re.MULTILINE) + for match in matches: + if match.group(3): + inputs.append(match.group(3)) + elif match.group(4): + inputs.append(match.group(4)) + log.info(f"Found {len(inputs)} inputs in {self.main_nf}") + self.inputs = inputs + + def get_outputs_from_main_nf(self): + outputs = [] + with open(self.main_nf, "r") as f: + data = f.read() + # get output values from main.nf after "output:". The names are always after "emit:" + if "output:" not in data: + log.info(f"Could not find any outputs in {self.main_nf}") + return outputs + output_data = data.split("output:")[1].split("when:")[0] + regex = r"emit:\s*([^)\s,]+)" + matches = re.finditer(regex, output_data, re.MULTILINE) + for match in matches: + outputs.append(match.group(1)) + log.info(f"Found {len(outputs)} outputs in {self.main_nf}") + self.outputs = outputs diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py new file mode 100644 index 0000000000..28f2f886b1 --- /dev/null +++ b/nf_core/components/patch.py @@ -0,0 +1,224 @@ +import logging +import os +import shutil +import tempfile +from pathlib import Path + +import questionary + +import nf_core.utils +from nf_core.components.components_command import ComponentCommand +from nf_core.modules.modules_differ import ModulesDiffer +from nf_core.modules.modules_json import ModulesJson + +log = logging.getLogger(__name__) + + +class ComponentPatch(ComponentCommand): + def __init__(self, pipeline_dir, component_type, remote_url=None, branch=None, no_pull=False, installed_by=False): + super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) + + self.modules_json = ModulesJson(pipeline_dir) + + def _parameter_checks(self, component): + """Checks the compatibility of the supplied parameters. + + Raises: + UserWarning: if any checks fail.
+ """ + if not self.has_valid_directory(): + raise UserWarning("The command was not run in a valid pipeline directory.") + + components = self.modules_json.get_all_components(self.component_type).get(self.modules_repo.remote_url) + component_names = [component for _, component in components] + + if component is not None and component not in component_names: + component_dir = [dir for dir, m in components if m == component][0] + raise UserWarning( + f"{self.component_type[:-1].title()} '{Path(self.component_type, component_dir, module)}' does not exist in the pipeline" + ) + + def patch(self, component=None): + # Check modules directory structure + self.check_modules_structure() + + # Verify that 'modules.json' is consistent with the installed modules + self.modules_json.check_up_to_date() + self._parameter_checks(component) + components = self.modules_json.get_all_components(self.component_type).get(self.modules_repo.remote_url) + + if component is None: + choices = [ + component if directory == self.modules_repo.repo_path else f"{directory}/{component}" + for directory, component in components + ] + component = questionary.autocomplete( + f"{self.component_type[:-1].title()} name:", + choices=sorted(choices), + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + component_dir = [dir for dir, m in components if m == component][0] + component_fullname = str(Path(self.component_type, self.modules_repo.repo_path, component)) + + # Verify that the component has an entry in the modules.json file + if not self.modules_json.module_present(component, self.modules_repo.remote_url, component_dir): + raise UserWarning( + f"The '{component_fullname}' {self.component_type[:-1]} does not have an entry in the 'modules.json' file. Cannot compute patch" + ) + + component_version = self.modules_json.get_component_version( + self.component_type, component, self.modules_repo.remote_url, self.modules_repo.repo_path + ) + if component_version is None: + raise UserWarning( + f"The '{component_fullname}' {self.component_type[:-1]} does not have a valid version in the 'modules.json' file. Cannot compute patch" + ) + # Get the component branch and reset it in the ModulesRepo object + component_branch = self.modules_json.get_component_branch( + self.component_type, component, self.modules_repo.remote_url, component_dir + ) + if component_branch != self.modules_repo.branch: + self.modules_repo.setup_branch(component_branch) + + # Set the diff filename based on the module name + patch_filename = f"{component.replace('/', '-')}.diff" + component_relpath = Path(self.component_type, component_dir, component) + patch_relpath = Path(component_relpath, patch_filename) + component_current_dir = Path(self.dir, component_relpath) + patch_path = Path(self.dir, patch_relpath) + + if patch_path.exists(): + remove = questionary.confirm( + f"Patch exists for {self.component_type[:-1]} '{component_fullname}'. Do you want to regenerate it?", + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + if remove: + os.remove(patch_path) + else: + return + + # Create a temporary directory for storing the unchanged version of the module + install_dir = tempfile.mkdtemp() + component_install_dir = Path(install_dir, component) + if not self.install_component_files(component, component_version, self.modules_repo, install_dir): + raise UserWarning( + f"Failed to install files of {self.component_type[:-1]} '{component}' from remote ({self.modules_repo.remote_url})." 
+ ) + + # Write the patch to a temporary location (otherwise it is printed to the screen later) + patch_temp_path = tempfile.mktemp() + try: + ModulesDiffer.write_diff_file( + patch_temp_path, + component, + self.modules_repo.repo_path, + component_install_dir, + component_current_dir, + for_git=False, + dsp_from_dir=component_relpath, + dsp_to_dir=component_relpath, + ) + log.debug(f"Patch file written to a temporary directory {patch_temp_path}") + except UserWarning: + raise UserWarning(f"{self.component_type[:-1]} '{component_fullname}' is unchanged. No patch to compute") + + # Write changes to modules.json + self.modules_json.add_patch_entry(component, self.modules_repo.remote_url, component_dir, patch_relpath) + log.debug(f"Wrote patch path for {self.component_type[:-1]} {component} to modules.json") + + # Show the changes made to the module + ModulesDiffer.print_diff( + component, + self.modules_repo.repo_path, + component_install_dir, + component_current_dir, + dsp_from_dir=component_current_dir, + dsp_to_dir=component_current_dir, + ) + + # Finally move the created patch file to its final location + shutil.move(patch_temp_path, patch_path) + log.info(f"Patch file of '{component_fullname}' written to '{patch_path}'") + + def remove(self, component): + # Check modules directory structure + self.check_modules_structure() + + self.modules_json.check_up_to_date() + self._parameter_checks(component) + components = self.modules_json.get_all_components(self.component_type).get(self.modules_repo.remote_url) + + if component is None: + choices = [ + component if directory == self.modules_repo.repo_path else f"{directory}/{component}" + for directory, component in components + ] + component = questionary.autocomplete( + f"{self.component_type[:-1]} name:", + choices, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + component_dir = [dir for dir, m in components if m == component][0] + component_fullname = str(Path(self.component_type, component_dir, component)) + + # Verify that the component has an entry in the modules.json file + if not self.modules_json.module_present(component, self.modules_repo.remote_url, component_dir): + raise UserWarning( + f"The '{component_fullname}' {self.component_type[:-1]} does not have an entry in the 'modules.json' file. Cannot compute patch" + ) + + component_version = self.modules_json.get_component_version( + self.component_type, component, self.modules_repo.remote_url, self.modules_repo.repo_path + ) + if component_version is None: + raise UserWarning( + f"The '{component_fullname}' {self.component_type[:-1]} does not have a valid version in the 'modules.json' file. Cannot compute patch" + ) + # Get the module branch and reset it in the ModulesRepo object + component_branch = self.modules_json.get_component_branch( + self.component_type, component, self.modules_repo.remote_url, component_dir + ) + if component_branch != self.modules_repo.branch: + self.modules_repo.setup_branch(component_branch) + + # Set the diff filename based on the component name + patch_filename = f"{component.replace('/', '-')}.diff" + component_relpath = Path(self.component_type, component_dir, component) + patch_relpath = Path(component_relpath, patch_filename) + patch_path = Path(self.dir, patch_relpath) + component_path = Path(self.dir, component_relpath) + + if patch_path.exists(): + remove = questionary.confirm( + f"Patch exists for {self.component_type[:-1]} '{component_fullname}'.
Are you sure you want to remove?", + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + if not remove: + return + + # Try to apply the patch in reverse and move resulting files to module dir + temp_component_dir = self.modules_json.try_apply_patch_reverse( + component, self.modules_repo.repo_path, patch_relpath, component_path + ) + try: + for file in Path(temp_component_dir).glob("*"): + file.rename(component_path.joinpath(file.name)) + os.rmdir(temp_component_dir) + except Exception as err: + raise UserWarning(f"There was a problem reverting the patched file: {err}") + + log.info(f"Patch for {component} reverted!") + # Remove patch file if we could revert the patch + patch_path.unlink() + # Write changes to modules.json + self.modules_json.remove_patch_entry(component, self.modules_repo.remote_url, component_dir) + + if not all( + self.modules_repo.component_files_identical( + component, component_path, component_version, self.component_type + ).values() + ): + log.error( + f"Module files do not appear to match the remote for the commit SHA in the 'modules.json': {component_version}\n" + f"Recommend reinstalling with 'nf-core modules install --force --sha {component_version} {component}' " + ) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 6fc6e03544..077cb2b840 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -288,20 +288,21 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr updated.append(component) recursive_update = True modules_to_update, subworkflows_to_update = self.get_components_to_update(component) - if not silent and not self.update_all and len(modules_to_update + subworkflows_to_update) > 0: - log.warning( - f"All modules and subworkflows linked to the updated {self.component_type[:-1]} will be {'asked for update' if self.show_diff else 'automatically updated'}.\n" - "It is advised to keep all your modules and subworkflows up to date.\n" - "It is not guaranteed that a subworkflow will continue working as expected if all modules/subworkflows used in it are not up to date.\n" - ) - if self.update_deps: - recursive_update = True - else: - recursive_update = questionary.confirm( - "Would you like to continue updating all modules and subworkflows?", - default=True, - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() + if not silent and len(modules_to_update + subworkflows_to_update) > 0: + if not self.update_all: + log.warning( + f"All modules and subworkflows linked to the updated {self.component_type[:-1]} will be {'asked for update' if self.show_diff else 'automatically updated'}.\n" + "It is advised to keep all your modules and subworkflows up to date.\n" + "It is not guaranteed that a subworkflow will continue working as expected if all modules/subworkflows used in it are not up to date.\n" + ) + if self.update_deps: + recursive_update = True + else: + recursive_update = questionary.confirm( + "Would you like to continue updating all modules and subworkflows?", + default=True, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() if recursive_update and len(modules_to_update + subworkflows_to_update) > 0: # Update linked components self.update_linked_components(modules_to_update, subworkflows_to_update, updated) @@ -323,8 +324,12 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr ) elif not all_patches_successful and not silent: log.info(f"Updates complete.
Please apply failed patch{plural_es(components_info)} manually.") + self.modules_json.load() + self.modules_json.dump(run_prettier=True) elif not silent: log.info("Updates complete :sparkles:") + self.modules_json.load() + self.modules_json.dump(run_prettier=True) return exit_value diff --git a/nf_core/create.py b/nf_core/create.py index 470623f551..56d0912a07 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -11,7 +11,7 @@ import time from pathlib import Path -import filetype +import filetype # type: ignore import git import jinja2 import questionary diff --git a/nf_core/download.py b/nf_core/download.py index 9ca786b5e3..08bef935ba 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -493,7 +493,7 @@ def prompt_singularity_cachedir_utilization(self): ): stderr.print( "\nIf you are working on the same system where you will run Nextflow, you can amend the downloaded images to the ones in the" - "[blue not bold]$NXF_SINGULARITY_CACHEDIR[/] folder, Nextflow will automatically find them." + "[blue not bold]$NXF_SINGULARITY_CACHEDIR[/] folder; Nextflow will automatically find them. " "However if you will transfer the downloaded files to a different system then they should be copied to the target folder." ) self.container_cache_utilisation = questionary.select( @@ -1081,7 +1081,7 @@ def get_singularity_images(self, current_revision=""): continue except ContainerError.ImageNotFound as e: # Try other registries - if e.error_log.absoluteURI: + if e.error_log.absolute_URI: break # there is no point in trying other registries if absolute URI was specified. else: continue @@ -1092,7 +1092,7 @@ def get_singularity_images(self, current_revision=""): # Try other registries log.error(e.message) log.error(e.helpmessage) - if e.error_log.absoluteURI: + if e.error_log.absolute_URI: break # there is no point in trying other registries if absolute URI was specified. else: continue @@ -1247,7 +1247,7 @@ def singularity_pull_image(self, container, out_path, cache_path, library, progr # Thus, if an explicit registry is specified, the provided -l value is ignored. container_parts = container.split("/") if len(container_parts) > 2: - address = container + address = f"docker://{container}" absolute_URI = True else: address = f"docker://{library}/{container.replace('docker://', '')}" diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index 47b27bb514..e721f210d0 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -1,3 +1,6 @@ +# Test build locally before making a PR +# docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile .
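# --- Editor's illustration (not part of the patch) ---------------------------
# Hedged sketch of the address rule changed in the download.py hunk above: a
# container with an explicit registry (more than two '/'-separated parts) is
# now used verbatim behind a docker:// prefix and treated as an absolute URI,
# so no other registries are tried; otherwise the library/registry is
# prepended. `resolve_address` is a hypothetical helper, not part of
# nf-core/tools.
def resolve_address(container: str, library: str) -> tuple:
    container_parts = container.split("/")
    if len(container_parts) > 2:
        return f"docker://{container}", True  # absolute URI: stop after this registry
    return f"docker://{library}/{container.replace('docker://', '')}", False

assert resolve_address("quay.io/biocontainers/fastqc:0.11.9--0", "x")[1] is True
assert resolve_address("fastqc:0.11.9--0", "biocontainers")[1] is False
# -----------------------------------------------------------------------------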
+ FROM gitpod/workspace-base USER root @@ -43,16 +46,17 @@ RUN conda config --add channels defaults && \ conda config --add channels bioconda && \ conda config --add channels conda-forge && \ conda config --set channel_priority strict && \ - conda install --quiet --yes --name base mamba && \ - mamba install --quiet --yes --name base \ + conda install --quiet --yes --name base \ + mamba \ nextflow \ nf-core \ nf-test \ black \ prettier \ pre-commit \ + openjdk \ pytest-workflow && \ - mamba clean --all -f -y + conda clean --all --force-pkgs-dirs --yes # Update Nextflow RUN nextflow self-update diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py index 70f7ea925f..797ebbcc91 100644 --- a/nf_core/lint/__init__.py +++ b/nf_core/lint/__init__.py @@ -164,26 +164,30 @@ class PipelineLint(nf_core.utils.Pipeline): warned (list): A list of tuples of the form: ``(, )`` """ - from .actions_awsfulltest import actions_awsfulltest - from .actions_awstest import actions_awstest - from .actions_ci import actions_ci - from .actions_schema_validation import actions_schema_validation - from .files_exist import files_exist - from .files_unchanged import files_unchanged - from .merge_markers import merge_markers - from .modules_json import modules_json - from .modules_structure import modules_structure - from .multiqc_config import multiqc_config - from .nextflow_config import nextflow_config - from .pipeline_name_conventions import pipeline_name_conventions - from .pipeline_todos import pipeline_todos - from .readme import readme - from .schema_description import schema_description - from .schema_lint import schema_lint - from .schema_params import schema_params - from .system_exit import system_exit - from .template_strings import template_strings - from .version_consistency import version_consistency + from .actions_awsfulltest import actions_awsfulltest # type: ignore[misc] + from .actions_awstest import actions_awstest # type: ignore[misc] + from .actions_ci import actions_ci # type: ignore[misc] + from .actions_schema_validation import ( # type: ignore[misc] + actions_schema_validation, + ) + from .files_exist import files_exist # type: ignore[misc] + from .files_unchanged import files_unchanged # type: ignore[misc] + from .merge_markers import merge_markers # type: ignore[misc] + from .modules_json import modules_json # type: ignore[misc] + from .modules_structure import modules_structure # type: ignore[misc] + from .multiqc_config import multiqc_config # type: ignore[misc] + from .nextflow_config import nextflow_config # type: ignore[misc] + from .pipeline_name_conventions import ( # type: ignore[misc] + pipeline_name_conventions, + ) + from .pipeline_todos import pipeline_todos # type: ignore[misc] + from .readme import readme # type: ignore[misc] + from .schema_description import schema_description # type: ignore[misc] + from .schema_lint import schema_lint # type: ignore[misc] + from .schema_params import schema_params # type: ignore[misc] + from .system_exit import system_exit # type: ignore[misc] + from .template_strings import template_strings # type: ignore[misc] + from .version_consistency import version_consistency # type: ignore[misc] def __init__( self, wf_path, release_mode=False, fix=(), key=None, fail_ignored=False, fail_warned=False, hide_progress=False @@ -367,10 +371,10 @@ def format_result(test_results): Given an list of error message IDs and the message texts, return a nicely formatted string for the terminal with appropriate ASCII colours. 
""" + tools_version = __version__ + if "dev" in __version__: + tools_version = "latest" for eid, msg in test_results: - tools_version = __version__ - if "dev" in __version__: - tools_version = "latest" yield Markdown( f"[{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html): {msg}" ) @@ -462,6 +466,10 @@ def _get_results_md(self): if len(self.failed) > 0: overall_result = "Failed :x:" + tools_version = __version__ + if "dev" in __version__: + tools_version = "latest" + # List of tests for details test_failure_count = "" test_failures = "" @@ -470,7 +478,7 @@ def _get_results_md(self): test_failures = "### :x: Test failures:\n\n{}\n\n".format( "\n".join( [ - f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html) - " + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html) - " f"{strip_ansi_codes(msg, '`')}" for eid, msg in self.failed ] @@ -484,7 +492,7 @@ def _get_results_md(self): test_ignored = "### :grey_question: Tests ignored:\n\n{}\n\n".format( "\n".join( [ - f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html) - " + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html) - " f"{strip_ansi_codes(msg, '`')}" for eid, msg in self.ignored ] @@ -498,7 +506,7 @@ def _get_results_md(self): test_fixed = "### :grey_question: Tests fixed:\n\n{}\n\n".format( "\n".join( [ - f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html) - " + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html) - " f"{strip_ansi_codes(msg, '`')}" for eid, msg in self.fixed ] @@ -512,7 +520,7 @@ def _get_results_md(self): test_warnings = "### :heavy_exclamation_mark: Test warnings:\n\n{}\n\n".format( "\n".join( [ - f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html) - " + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html) - " f"{strip_ansi_codes(msg, '`')}" for eid, msg in self.warned ] @@ -527,7 +535,7 @@ def _get_results_md(self): "\n".join( [ ( - f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html)" + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html)" f" - {strip_ansi_codes(msg, '`')}" ) for eid, msg in self.passed diff --git a/nf_core/lint/actions_ci.py b/nf_core/lint/actions_ci.py index 9aa18135d9..e669eceb8c 100644 --- a/nf_core/lint/actions_ci.py +++ b/nf_core/lint/actions_ci.py @@ -25,7 +25,7 @@ def actions_ci(self): release: types: [published] - * The minimum Nextflow version specified in the pipeline's ``nextflow.config`` matches that defined by ``nxf_ver`` in the test matrix: + * The minimum Nextflow version specified in the pipeline's ``nextflow.config`` matches that defined by ``NXF_VER`` in the test matrix: .. code-block:: yaml :emphasize-lines: 4 @@ -33,9 +33,9 @@ def actions_ci(self): strategy: matrix: # Nextflow versions: check pipeline minimum and current latest - nxf_ver: ['19.10.0', ''] + NXF_VER: ['19.10.0', ''] - .. note:: These ``matrix`` variables run the test workflow twice, varying the ``nxf_ver`` variable each time. + .. note:: These ``matrix`` variables run the test workflow twice, varying the ``NXF_VER`` variable each time. This is used in the ``nextflow run`` commands to test the pipeline with both the latest available version of the pipeline (``''``) and the stated minimum required version. 
""" diff --git a/nf_core/lint/multiqc_config.py b/nf_core/lint/multiqc_config.py index a1b831ae2d..cbbeae07a8 100644 --- a/nf_core/lint/multiqc_config.py +++ b/nf_core/lint/multiqc_config.py @@ -1,9 +1,10 @@ import os +from typing import Dict, List import yaml -def multiqc_config(self): +def multiqc_config(self) -> Dict[str, List[str]]: """Make sure basic multiQC plugins are installed and plots are exported Basic template: @@ -20,8 +21,8 @@ def multiqc_config(self): export_plots: true """ - passed = [] - failed = [] + passed: List[str] = [] + failed: List[str] = [] # Remove field that should be ignored according to the linting config ignore_configs = self.lint_config.get("multiqc_config", []) @@ -38,10 +39,16 @@ def multiqc_config(self): except Exception as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} - # Check that the report_comment exists and matches + # check if requried sections are present + required_sections = ["report_section_order", "export_plots", "report_comment"] + for section in required_sections: + if section not in mqc_yml and section not in ignore_configs: + failed.append(f"'assets/multiqc_config.yml' does not contain `{section}`") + return {"passed": passed, "failed": failed} + else: + passed.append(f"'assets/multiqc_config.yml' contains `{section}`") + try: - if "report_section_order" not in mqc_yml: - raise AssertionError() orders = {} summary_plugin_name = f"{self.pipeline_prefix}-{self.pipeline_name}-summary" min_plugins = ["software_versions", summary_plugin_name] @@ -70,27 +77,37 @@ def multiqc_config(self): if "report_comment" not in ignore_configs: # Check that the minimum plugins exist and are coming first in the summary - try: - version = self.nf_config.get("manifest.version", "").strip(" '\"") - version = "dev" if "dev" in version else version - if "report_comment" not in mqc_yml: - raise AssertionError() - if mqc_yml["report_comment"].strip() != ( - f'This report has been generated by the nf-core/{self.pipeline_name} analysis pipeline. For information about how to ' - f'interpret these results, please see the documentation.' - ): - raise AssertionError() - except (AssertionError, KeyError, TypeError): - failed.append("'assets/multiqc_config.yml' does not contain a matching 'report_comment'.") + version = self.nf_config.get("manifest.version", "").strip(" '\"") + if "dev" in version: + version = "dev" + report_comments = ( + f'This report has been generated by the nf-core/{self.pipeline_name}' + f" analysis pipeline. For information about how to interpret these results, please see the " + f'documentation.' + ) + + else: + report_comments = ( + f'This report has been generated by the nf-core/{self.pipeline_name}' + f" analysis pipeline. For information about how to interpret these results, please see the " + f'documentation.' + ) + + if mqc_yml["report_comment"].strip() != report_comments: + # find where the report_comment is wrong and give it as a hint + hint = report_comments + failed.append( + f"'assets/multiqc_config.yml' does not contain a matching 'report_comment'. 
\n" + f"The expected comment is: \n" + f"```{hint}``` \n" + f"The current comment is: \n" + f"```{ mqc_yml['report_comment'].strip()}```" + ) else: passed.append("'assets/multiqc_config.yml' contains a matching 'report_comment'.") # Check that export_plots is activated try: - if "export_plots" not in mqc_yml: - raise AssertionError() if not mqc_yml["export_plots"]: raise AssertionError() except (AssertionError, KeyError, TypeError): diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index d22fa944ed..24f1e5c12f 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -108,6 +108,11 @@ def nextflow_config(self): lint: nextflow_config: False + + **The configuration should contain the following or the test will fail:** + + * A ``test`` configuration profile should exist. + """ passed = [] warned = [] @@ -198,6 +203,8 @@ def nextflow_config(self): # Check the variables that should be set to 'true' for k in ["timeline.enabled", "report.enabled", "trace.enabled", "dag.enabled"]: + if k in ignore_configs: + continue if self.nf_config.get(k) == "true": passed.append(f"Config ``{k}`` had correct value: ``{self.nf_config.get(k)}``") else: @@ -312,4 +319,32 @@ def nextflow_config(self): ) ) + # Check for the availability of the "test" configuration profile by parsing nextflow.config + with open(os.path.join(self.wf_path, "nextflow.config"), "r") as f: + content = f.read() + + # Remove comments + cleaned_content = re.sub(r"//.*", "", content) + cleaned_content = re.sub(r"/\*.*?\*/", "", content, flags=re.DOTALL) + + match = re.search(r"\bprofiles\s*{", cleaned_content) + if not match: + failed.append("nextflow.config does not contain `profiles` scope, but `test` profile is required") + else: + # Extract profiles scope content and check for test profile + start = match.end() + end = start + brace_count = 1 + while brace_count > 0 and end < len(content): + if cleaned_content[end] == "{": + brace_count += 1 + elif cleaned_content[end] == "}": + brace_count -= 1 + end += 1 + profiles_content = cleaned_content[start : end - 1].strip() + if re.search(r"\btest\s*{", profiles_content): + passed.append("nextflow.config contains configuration profile `test`") + else: + failed.append("nextflow.config does not contain configuration profile `test`") + return {"passed": passed, "warned": warned, "failed": failed, "ignored": ignored} diff --git a/nf_core/list.py b/nf_core/list.py index 77a9ac3919..94d9d8e043 100644 --- a/nf_core/list.py +++ b/nf_core/list.py @@ -64,10 +64,10 @@ def get_local_wf(workflow, revision=None): # Wasn't local, fetch it log.info(f"Downloading workflow: {workflow} ({revision})") - pull_cmd = f"nextflow pull {workflow}" + pull_cmd = f"pull {workflow}" if revision is not None: pull_cmd += f" -r {revision}" - nf_core.utils.nextflow_cmd(pull_cmd) + nf_core.utils.run_cmd("nextflow", pull_cmd) local_wf = LocalWorkflow(workflow) local_wf.get_local_nf_workflow_details() return local_wf.local_path @@ -128,12 +128,14 @@ def get_local_nf_workflows(self): # Fetch details about local cached pipelines with `nextflow list` else: log.debug("Getting list of local nextflow workflows") - nflist_raw = nf_core.utils.nextflow_cmd("nextflow list") - for wf_name in nflist_raw.splitlines(): - if not str(wf_name).startswith("nf-core/"): - self.local_unmatched.append(wf_name) - else: - self.local_workflows.append(LocalWorkflow(wf_name)) + result = nf_core.utils.run_cmd("nextflow", "list") + if result is not None: + nflist_raw, _ = result + for wf_name in 
nflist_raw.splitlines(): + if not str(wf_name).startswith("nf-core/"): + self.local_unmatched.append(wf_name) + else: + self.local_workflows.append(LocalWorkflow(wf_name)) # Find additional information about each workflow by checking its git history log.debug(f"Fetching extra info about {len(self.local_workflows)} local workflows") @@ -225,9 +227,10 @@ def sort_pulled_date(wf): for wf in filtered_workflows: wf_name = f"[bold][link=https://nf-co.re/{wf.name}]{wf.name}[/link]" version = "[yellow]dev" + published = "[dim]-" if len(wf.releases) > 0: - version = f"[blue]{wf.releases[-1]['tag_name']}" - published = wf.releases[-1]["published_at_pretty"] if len(wf.releases) > 0 else "[dim]-" + version = f"[blue]{wf.releases[0]['tag_name']}" + published = wf.releases[0]["published_at_pretty"] pulled = wf.local_wf.last_pull_pretty if wf.local_wf is not None else "[dim]-" if wf.local_wf is not None: revision = "" @@ -341,12 +344,14 @@ def get_local_nf_workflow_details(self): # Use `nextflow info` to get more details about the workflow else: - nfinfo_raw = str(nf_core.utils.nextflow_cmd(f"nextflow info -d {self.full_name}")) - re_patterns = {"repository": r"repository\s*: (.*)", "local_path": r"local path\s*: (.*)"} - for key, pattern in re_patterns.items(): - m = re.search(pattern, nfinfo_raw) - if m: - setattr(self, key, m.group(1)) + result = nf_core.utils.run_cmd("nextflow", f"info -d {self.full_name}") + if result is not None: + nfinfo_raw, _ = result + re_patterns = {"repository": r"repository\s*: (.*)", "local_path": r"local path\s*: (.*)"} + for key, pattern in re_patterns.items(): + m = re.search(pattern, str(nfinfo_raw)) + if m: + setattr(self, key, m.group(1)) # Pull information from the local git repository if self.local_path is not None: diff --git a/nf_core/module-template/environment.yml b/nf_core/module-template/environment.yml new file mode 100644 index 0000000000..dcf510affb --- /dev/null +++ b/nf_core/module-template/environment.yml @@ -0,0 +1,9 @@ +--- +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json +name: "{{ component_name_underscore }}" +channels: + - conda-forge + - bioconda + - defaults +dependencies: + - "{{ bioconda if bioconda else 'YOUR-TOOL-HERE' }}" diff --git a/nf_core/module-template/modules/main.nf b/nf_core/module-template/main.nf similarity index 95% rename from nf_core/module-template/modules/main.nf rename to nf_core/module-template/main.nf index 404d38094d..5258403e8f 100644 --- a/nf_core/module-template/modules/main.nf +++ b/nf_core/module-template/main.nf @@ -27,7 +27,7 @@ process {{ component_name_underscore|upper }} { // For Conda, the build (i.e. "h9402c20_2") must be EXCLUDED to support installation on different operating systems. // TODO nf-core: See section in main README for further information regarding finding and adding container addresses to the section below. {% endif -%} - conda "{{ bioconda if bioconda else 'YOUR-TOOL-HERE' }}" + conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'{{ singularity_container if singularity_container else 'https://depot.galaxyproject.org/singularity/YOUR-TOOL-HERE' }}': '{{ docker_container if docker_container else 'biocontainers/YOUR-TOOL-HERE' }}' }" @@ -91,7 +91,7 @@ process {{ component_name_underscore|upper }} { cat <<-END_VERSIONS > versions.yml "${task.process}": - {{ tool }}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' )) + {{ component }}: \$(samtools --version |& sed '1!d ; s/samtools //') END_VERSIONS """ @@ -113,7 +113,7 @@ process {{ component_name_underscore|upper }} { cat <<-END_VERSIONS > versions.yml "${task.process}": - {{ tool }}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' )) + {{ component }}: \$(samtools --version |& sed '1!d ; s/samtools //') END_VERSIONS """ } diff --git a/nf_core/module-template/modules/meta.yml b/nf_core/module-template/meta.yml similarity index 90% rename from nf_core/module-template/modules/meta.yml rename to nf_core/module-template/meta.yml index aea3c36aa3..9d3f3c1c12 100644 --- a/nf_core/module-template/modules/meta.yml +++ b/nf_core/module-template/meta.yml @@ -1,5 +1,5 @@ --- -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json name: "{{ component_name_underscore }}" {% if not_empty_template -%} ## TODO nf-core: Add a description of the module and list keywords @@ -19,7 +19,7 @@ tools: documentation: "{{ tool_doc_url }}" tool_dev_url: "{{ tool_dev_url }}" doi: "" - licence: "{{ tool_licence }}" + licence: {{ tool_licence }} {% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as input @@ -30,7 +30,7 @@ input: type: map description: | Groovy Map containing sample information - e.g. `[ id:'test', single_end:false ]` + e.g. `[ id:'sample1', single_end:false ]` {% endif %} {% if not_empty_template -%} ## TODO nf-core: Delete / customise this example input @@ -49,7 +49,7 @@ output: type: map description: | Groovy Map containing sample information - e.g. `[ id:'test', single_end:false ]` + e.g. 
`[ id:'sample1', single_end:false ]` {% endif %} - versions: type: file @@ -65,3 +65,5 @@ output: authors: - "{{ author }}" +maintainers: + - "{{ author }}" diff --git a/nf_core/module-template/tests/main.nf b/nf_core/module-template/tests/main.nf deleted file mode 100644 index fcb7195fe4..0000000000 --- a/nf_core/module-template/tests/main.nf +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env nextflow - -nextflow.enable.dsl = 2 - -include { {{ component_name_underscore|upper }} } from '../../../../{{ "../" if subtool else "" }}modules/{{ org }}/{{ component_dir }}/main.nf' - -workflow test_{{ component_name_underscore }} { - {% if has_meta %} - input = [ - [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) - ] - {%- else %} - input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) - {%- endif %} - - {{ component_name_underscore|upper }} ( input ) -} diff --git a/nf_core/module-template/tests/main.nf.test b/nf_core/module-template/tests/main.nf.test new file mode 100644 index 0000000000..5a2e6cdc63 --- /dev/null +++ b/nf_core/module-template/tests/main.nf.test @@ -0,0 +1,82 @@ +// TODO nf-core: Once you have added the required tests, please run the following command to build this file: +// nf-core modules test {{ component_name }} +nextflow_process { + + name "Test Process {{ component_name_underscore|upper }}" + script "../main.nf" + process "{{ component_name_underscore|upper }}" + + tag "modules" + tag "modules_nfcore" + {%- if subtool %} + tag "{{ component }}" + {%- endif %} + tag "{{ component_name }}" + + // TODO nf-core: Change the test name, preferably indicating the test-data and file-format used + test("sarscov2 - bam") { + + // TODO nf-core: If you are creating a test for a chained module + // (the module requires running more than one process to generate the required output) + // add the 'setup' method here. + // You can find more information about how to use a 'setup' method in the docs (https://nf-co.re/docs/contributing/modules#steps-for-creating-nf-test-for-chained-modules). + + when { + process { + """ + // TODO nf-core: define inputs of the process here. Example: + {% if has_meta %} + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + {%- else %} + input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + {%- endif %} + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + //TODO nf-core: Add all required assertions to verify the test output. + // See https://nf-co.re/docs/contributing/tutorials/nf-test_assertions for more information and examples. + ) + } + + } + + // TODO nf-core: Change the test name, preferably indicating the test-data and file-format used but keep the " - stub" suffix. + test("sarscov2 - bam - stub") { + + options "-stub" + + when { + process { + """ + // TODO nf-core: define inputs of the process here.
Example: + {% if has_meta %} + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + {%- else %} + input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + {%- endif %} + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + //TODO nf-core: Add all required assertions to verify the test output. + ) + } + + } + +} diff --git a/nf_core/module-template/tests/nextflow.config b/nf_core/module-template/tests/nextflow.config deleted file mode 100644 index 50f50a7a35..0000000000 --- a/nf_core/module-template/tests/nextflow.config +++ /dev/null @@ -1,5 +0,0 @@ -process { - - publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - -} \ No newline at end of file diff --git a/nf_core/module-template/tests/tags.yml b/nf_core/module-template/tests/tags.yml new file mode 100644 index 0000000000..e7fac9f5b9 --- /dev/null +++ b/nf_core/module-template/tests/tags.yml @@ -0,0 +1,2 @@ +{{ component_dir }}: + - "modules/{{ org }}/{{ component_dir }}/**" diff --git a/nf_core/module-template/tests/test.yml b/nf_core/module-template/tests/test.yml deleted file mode 100644 index a2cedb73d3..0000000000 --- a/nf_core/module-template/tests/test.yml +++ /dev/null @@ -1,18 +0,0 @@ -{%- if not_empty_template -%} -## TODO nf-core: Please run the following command to build this file: -# nf-core modules create-test-yml {{ tool }}{%- if subtool %}/{{ subtool }}{%- endif %} -{% endif -%} -- name: "{{ component }}{{ ' '+subtool if subtool else '' }}" - command: nextflow run ./tests/modules/{{ org }}/{{ component_dir }} -entry test_{{ component_name_underscore }} -c ./tests/config/nextflow.config - tags: - - "{{ component }}{% if subtool -%}" - - "{{ component }}/{{ subtool }}{%- endif %}" - files: - {% if not_empty_template -%} - - path: "output/{{ component }}/test.bam" - md5sum: e667c7caad0bc4b7ac383fd023c654fc - - path: "output/{{ component }}/versions.yml" - md5sum: a01fe51bc4c6a3a6226fbf77b2c7cf3b - {% else -%} - - path: "" - {%- endif %} diff --git a/nf_core/modules/__init__.py b/nf_core/modules/__init__.py index 47af637d02..4b36f302bd 100644 --- a/nf_core/modules/__init__.py +++ b/nf_core/modules/__init__.py @@ -6,9 +6,7 @@ from .list import ModuleList from .modules_json import ModulesJson from .modules_repo import ModulesRepo -from .modules_test import ModulesTest from .modules_utils import ModuleException from .patch import ModulePatch from .remove import ModuleRemove -from .test_yml_builder import ModulesTestYmlBuilder from .update import ModuleUpdate diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index e8c5060c89..25259f1a16 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -9,33 +9,48 @@ import logging import os import re +from typing import Any, Dict, List, Optional, Tuple, Union import questionary -import rich +import yaml +from rich.box import ROUNDED from rich.console import Console from rich.markdown import Markdown +from rich.panel import Panel +from rich.progress import BarColumn, Progress from rich.table import Table import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.components.nfcore_component import NFCoreComponent +from nf_core.utils import custom_yaml_dumper from nf_core.utils import plural_s as _s 
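# --- Editor's illustration (not part of the patch) ---------------------------
# Sketch of the environment.yml round-trip that bump_module_version performs
# further below: load the YAML, replace the pinned bioconda version, and write
# it back. The tool and versions here are made up, and nf-core's
# custom_yaml_dumper is replaced by plain yaml.dump so the snippet is
# self-contained.
import re
import yaml

env_yml = yaml.safe_load("name: fastqc\ndependencies:\n  - bioconda::fastqc=0.11.9\n")
env_yml["dependencies"] = [
    re.sub(r"=.*$", "=0.12.1", dep) if isinstance(dep, str) and dep.startswith("bioconda::") else dep
    for dep in env_yml["dependencies"]
]
print(yaml.dump(env_yml, default_flow_style=False))  # - bioconda::fastqc=0.12.1
# -----------------------------------------------------------------------------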
from nf_core.utils import rich_force_colors log = logging.getLogger(__name__) -class ModuleVersionBumper(ComponentCommand): - def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False): +class ModuleVersionBumper(ComponentCommand): # type: ignore[misc] + def __init__( + self, + pipeline_dir: str, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + ): super().__init__("modules", pipeline_dir, remote_url, branch, no_pull) - self.up_to_date = None - self.updated = None - self.failed = None - self.show_up_to_date = None - self.tools_config = {} + self.up_to_date: List[Tuple[str, str]] = [] + self.updated: List[Tuple[str, str]] = [] + self.failed: List[Tuple[str, str]] = [] + self.ignored: List[Tuple[str, str]] = [] + self.show_up_to_date: Optional[bool] = None + self.tools_config: Dict[str, Any] = {} - def bump_versions(self, module=None, all_modules=False, show_uptodate=False): + def bump_versions( + self, module: Union[str, None] = None, all_modules: bool = False, show_uptodate: bool = False + ) -> None: """ Bump the container and conda version of a single module or all modules @@ -97,9 +112,9 @@ def bump_versions(self, module=None, all_modules=False, show_uptodate=False): if len(nfcore_modules) == 0: raise nf_core.modules.modules_utils.ModuleException(f"Could not find the specified module: '{module}'") - progress_bar = rich.progress.Progress( + progress_bar = Progress( "[bold blue]{task.description}", - rich.progress.BarColumn(bar_width=None), + BarColumn(bar_width=None), "[magenta]{task.completed} of {task.total}[reset] » [bold yellow]{task.fields[test_name]}", transient=True, disable=os.environ.get("HIDE_PROGRESS", None) is not None, @@ -116,7 +131,7 @@ def bump_versions(self, module=None, all_modules=False, show_uptodate=False): self._print_results() - def bump_module_version(self, module): + def bump_module_version(self, module: NFCoreComponent) -> bool: """ Bump the bioconda and container version of a single NFCoreComponent @@ -124,8 +139,21 @@ def bump_module_version(self, module): module: NFCoreComponent """ config_version = None - # Extract bioconda version from `main.nf` - bioconda_packages = self.get_bioconda_version(module) + bioconda_packages = [] + try: + # Extract bioconda version from `environment.yml` + bioconda_packages = self.get_bioconda_version(module) + except FileNotFoundError: + # try it in the main.nf instead + try: + with open(module.main_nf, "r") as fh: + for l in fh: + if "bioconda::" in l: + bioconda_packages = [b for b in l.split() if "bioconda::" in b] + except FileNotFoundError: + log.error( + f"Neither `environment.yml` nor `main.nf` of {module.component_name} module could be read to get the bioconda version of the tools used." + ) # If multiple versions - don't update!
(can't update mulled containers) if not bioconda_packages or len(bioconda_packages) > 1: @@ -173,7 +201,6 @@ def bump_module_version(self, module): return False patterns = [ - (bioconda_packages[0], f"'bioconda::{bioconda_tool_name}={last_ver}'"), (rf"biocontainers/{bioconda_tool_name}:[^'\"\s]+", docker_img), ( rf"https://depot.galaxyproject.org/singularity/{bioconda_tool_name}:[^'\"\s]+", @@ -213,6 +240,13 @@ def bump_module_version(self, module): with open(module.main_nf, "w") as fh: fh.write(content) + # change version in environment.yml + with open(module.environment_yml, "r") as fh: + env_yml = yaml.safe_load(fh) + env_yml["dependencies"] = [ + re.sub(re.escape(bioconda_packages[0]), f"bioconda::{bioconda_tool_name}={last_ver}", dep) if isinstance(dep, str) else dep + for dep in env_yml["dependencies"] + ] + with open(module.environment_yml, "w") as fh: + yaml.dump(env_yml, fh, default_flow_style=False, Dumper=custom_yaml_dumper()) + self.updated.append( ( f"Module updated: {bioconda_version} --> {last_ver}", @@ -225,23 +259,22 @@ def bump_module_version(self, module): self.up_to_date.append((f"Module version up to date: {module.component_name}", module.component_name)) return True - def get_bioconda_version(self, module): + def get_bioconda_version(self, module: NFCoreComponent) -> List[str]: """ Extract the bioconda version from a module """ # Check whether file exists and load it - bioconda_packages = False + bioconda_packages = [] try: - with open(module.main_nf, "r") as fh: - for l in fh: - if "bioconda::" in l: - bioconda_packages = [b for b in l.split() if "bioconda::" in b] + with open(module.environment_yml, "r") as fh: + env_yml = yaml.safe_load(fh) + bioconda_packages = env_yml.get("dependencies", []) except FileNotFoundError: - log.error(f"Could not read `main.nf` of {module.component_name} module.") + log.error(f"Could not read `environment.yml` of {module.component_name} module.") return bioconda_packages - def _print_results(self): + def _print_results(self) -> None: """ Print the results for the bump_versions command Uses the ``rich`` library to print a set of formatted tables to the command line @@ -259,13 +292,13 @@ def _print_results(self): except: pass - def format_result(module_updates, table): + def format_result(module_updates: List[Tuple[str, str]], table: Table) -> Table: """ Create rows for module updates """ # TODO: Row styles don't work currently as table-level style overrides. # I'd like to make an issue about this on the rich repo so leaving here in case there is a future fix - last_modname = False + last_modname = "" row_style = None for module_update in module_updates: if last_modname and module_update[1] != last_modname: @@ -284,12 +317,12 @@ def format_result(module_updates, table): # Table of up to date modules if len(self.up_to_date) > 0 and self.show_up_to_date: console.print( - rich.panel.Panel( + Panel( rf"[!] {len(self.up_to_date)} Module{_s(self.up_to_date)} version{_s(self.up_to_date)} up to date.", style="bold green", ) ) - table = Table(style="green", box=rich.box.ROUNDED) + table = Table(style="green", box=ROUNDED) table.add_column("Module name", width=max_mod_name_len) table.add_column("Update Message") table = format_result(self.up_to_date, table) @@ -297,10 +330,8 @@ def format_result(module_updates, table): # Table of updated modules if len(self.updated) > 0: - console.print( - rich.panel.Panel(rf"[!] {len(self.updated)} Module{_s(self.updated)} updated", style="bold yellow") - ) - table = Table(style="yellow", box=rich.box.ROUNDED) + console.print(Panel(rf"[!]
{len(self.updated)} Module{_s(self.updated)} updated", style="bold yellow")) + table = Table(style="yellow", box=ROUNDED) table.add_column("Module name", width=max_mod_name_len) table.add_column("Update message") table = format_result(self.updated, table) @@ -308,10 +339,8 @@ def format_result(module_updates, table): # Table of modules that couldn't be updated if len(self.failed) > 0: - console.print( - rich.panel.Panel(rf"[!] {len(self.failed)} Module update{_s(self.failed)} failed", style="bold red") - ) - table = Table(style="red", box=rich.box.ROUNDED) + console.print(Panel(rf"[!] {len(self.failed)} Module update{_s(self.failed)} failed", style="bold red")) + table = Table(style="red", box=ROUNDED) table.add_column("Module name", width=max_mod_name_len) table.add_column("Update message") table = format_result(self.failed, table) @@ -319,10 +348,8 @@ def format_result(module_updates, table): # Table of modules ignored due to `.nf-core.yml` if len(self.ignored) > 0: - console.print( - rich.panel.Panel(rf"[!] {len(self.ignored)} Module update{_s(self.ignored)} ignored", style="grey58") - ) - table = Table(style="grey58", box=rich.box.ROUNDED) + console.print(Panel(rf"[!] {len(self.ignored)} Module update{_s(self.ignored)} ignored", style="grey58")) + table = Table(style="grey58", box=ROUNDED) table.add_column("Module name", width=max_mod_name_len) table.add_column("Update message") table = format_result(self.ignored, table) diff --git a/nf_core/modules/create.py b/nf_core/modules/create.py index b5368130ce..a5e0795a9f 100644 --- a/nf_core/modules/create.py +++ b/nf_core/modules/create.py @@ -17,6 +17,7 @@ def __init__( conda_name=None, conda_version=None, empty_template=False, + migrate_pytest=False, ): super().__init__( "modules", @@ -29,4 +30,5 @@ def __init__( conda_name, conda_version, empty_template, + migrate_pytest, ) diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 83d583dffb..68a38cc0cd 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -29,14 +29,15 @@ class ModuleLint(ComponentLint): """ # Import lint functions - from .main_nf import main_nf - from .meta_yml import meta_yml - from .module_changes import module_changes - from .module_deprecations import module_deprecations - from .module_patch import module_patch - from .module_tests import module_tests - from .module_todos import module_todos - from .module_version import module_version + from .environment_yml import environment_yml # type: ignore[misc] + from .main_nf import main_nf # type: ignore[misc] + from .meta_yml import meta_yml # type: ignore[misc] + from .module_changes import module_changes # type: ignore[misc] + from .module_deprecations import module_deprecations # type: ignore[misc] + from .module_patch import module_patch # type: ignore[misc] + from .module_tests import module_tests # type: ignore[misc] + from .module_todos import module_todos # type: ignore[misc] + from .module_version import module_version # type: ignore[misc] def __init__( self, diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py new file mode 100644 index 0000000000..a052425539 --- /dev/null +++ b/nf_core/modules/lint/environment_yml.py @@ -0,0 +1,113 @@ +import json +import logging +from pathlib import Path + +import yaml +from jsonschema import exceptions, validators + +from nf_core.components.lint import ComponentLint +from nf_core.components.nfcore_component import NFCoreComponent +from nf_core.utils import 
custom_yaml_dumper + +log = logging.getLogger(__name__) + + +def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None: + """ + Lint an ``environment.yml`` file. + + The lint test checks that the ``environment.yml`` file exists, + that it is valid YAML following the JSON schema in ``modules/environment-schema.json``, + and that its ``dependencies`` section is sorted alphabetically. + """ + env_yml = None + # load the environment.yml file + try: + with open(Path(module.component_dir, "environment.yml"), "r") as fh: + env_yml = yaml.safe_load(fh) + + module.passed.append(("environment_yml_exists", "Module's `environment.yml` exists", module.environment_yml)) + + except FileNotFoundError: + # check if the module's main.nf requires a conda environment + with open(Path(module.component_dir, "main.nf"), "r") as fh: + main_nf = fh.read() + if 'conda "${moduleDir}/environment.yml"' in main_nf: + module.failed.append( + ("environment_yml_exists", "Module's `environment.yml` does not exist", module.environment_yml) + ) + else: + module.passed.append( + ( + "environment_yml_exists", + "Module's `environment.yml` does not exist, but it is also not referenced in the main.nf", + module.environment_yml, + ) + ) + + # Confirm that the environment.yml file is valid according to the JSON schema + if env_yml: + valid_env_yml = False + try: + with open( + Path(module_lint_object.modules_repo.local_repo_dir, "modules/environment-schema.json"), "r" + ) as fh: + schema = json.load(fh) + validators.validate(instance=env_yml, schema=schema) + module.passed.append( + ("environment_yml_valid", "Module's `environment.yml` is valid", module.environment_yml) + ) + valid_env_yml = True + except exceptions.ValidationError as e: + hint = "" + if len(e.path) > 0: + hint = f"\nCheck the entry for `{e.path[0]}`." + if e.schema.get("message"): + e.message = e.schema["message"] + module.failed.append( + ( + "environment_yml_valid", + f"The `environment.yml` of the module {module.component_name} is not valid: {e.message}.{hint}", + module.environment_yml, + ) + ) + + if valid_env_yml: + # Check that the dependencies section is sorted alphabetically + if sorted(env_yml["dependencies"]) == env_yml["dependencies"]: + module.passed.append( + ( + "environment_yml_sorted", + "The dependencies in the module's `environment.yml` are sorted alphabetically", + module.environment_yml, + ) + ) + else: + # sort it and write it back to the file + log.info( + f"Dependencies in {module.component_name}'s environment.yml were not sorted alphabetically. Sorting them now."
+                )
+                env_yml["dependencies"].sort()
+                with open(Path(module.component_dir, "environment.yml"), "w") as fh:
+                    yaml.dump(env_yml, fh, Dumper=custom_yaml_dumper())
+
+        # Check that the name in the environment.yml file matches the name in the meta.yml file
+        with open(Path(module.component_dir, "meta.yml"), "r") as fh:
+            meta_yml = yaml.safe_load(fh)
+
+        if env_yml["name"] == meta_yml["name"]:
+            module.passed.append(
+                (
+                    "environment_yml_name",
+                    "The module's `environment.yml` name matches the name in `meta.yml`",
+                    module.environment_yml,
+                )
+            )
+        else:
+            module.failed.append(
+                (
+                    "environment_yml_name",
+                    f"Conflicting process name between environment.yml (`{env_yml['name']}`) and meta.yml (`{meta_yml['name']}`)",
+                    module.environment_yml,
+                )
+            )
diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py
index deb47b5799..56a9e99925 100644
--- a/nf_core/modules/lint/main_nf.py
+++ b/nf_core/modules/lint/main_nf.py
@@ -9,6 +9,7 @@
 from urllib.parse import urlparse, urlunparse

 import requests
+import yaml

 import nf_core
 import nf_core.modules.modules_utils
@@ -246,16 +247,17 @@ def check_process_section(self, lines, registry, fix_version, progress_bar):
     check_process_labels(self, lines)

     # Deprecated enable_conda
-    for i, l in enumerate(lines):
+    for i, raw_line in enumerate(lines):
         url = None
-        l = l.strip(" \n'\"}:")
+        l = raw_line.strip(" \n'\"}:")

         # Catch preceding "container "
         if l.startswith("container"):
             l = l.replace("container", "").strip(" \n'\"}:")

         if _container_type(l) == "conda":
-            bioconda_packages = [b for b in l.split() if "bioconda::" in b]
+            if "bioconda::" in l:
+                bioconda_packages = [b for b in l.split() if "bioconda::" in b]
             match = re.search(r"params\.enable_conda", l)
             if match is None:
                 self.passed.append(
@@ -314,34 +316,9 @@ def check_process_section(self, lines, registry, fix_version, progress_bar):
             l = "/".join([registry, l]).replace("//", "/")
             url = urlparse(l.split("'")[0])

-        # lint double quotes
         if l.startswith("container") or _container_type(l) == "docker" or _container_type(l) == "singularity":
-            if l.count('"') > 2:
-                self.failed.append(
-                    (
-                        "container_links",
-                        f"Too many double quotes found when specifying container: {l.lstrip('container ')}",
-                        self.main_nf,
-                    )
-                )
-            else:
-                self.passed.append(
-                    (
-                        "container_links",
-                        f"Correct number of double quotes found when specifying container: {l.lstrip('container ')}",
-                        self.main_nf,
-                    )
-                )
+            check_container_link_line(self, raw_line, registry)

-        # lint more than one container in the same line
-        if ("https://containers" in l or "https://depot" in l) and ("biocontainers/" in l or l.startswith(registry)):
-            self.warned.append(
-                (
-                    "container_links",
-                    "Docker and Singularity containers specified in the same line. 
Only first one checked.", - self.main_nf, - ) - ) # Try to connect to container URLs if url is None: continue @@ -369,6 +346,17 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): ) ) + # Get bioconda packages from environment.yml + try: + with open(Path(self.component_dir, "environment.yml"), "r") as fh: + env_yml = yaml.safe_load(fh) + if "dependencies" in env_yml: + bioconda_packages = [x for x in env_yml["dependencies"] if isinstance(x, str) and "bioconda::" in x] + except FileNotFoundError: + pass + except NotADirectoryError: + pass + # Check that all bioconda packages have build numbers # Also check for newer versions for bp in bioconda_packages: @@ -484,6 +472,68 @@ def check_process_labels(self, lines): self.warned.append(("process_standard_label", "Process label not specified", self.main_nf)) +def check_container_link_line(self, raw_line, registry): + """Look for common problems in the container name / URL, for docker and singularity.""" + + l = raw_line.strip(" \n'\"}:") + + # lint double quotes + if l.count('"') > 2: + self.failed.append( + ( + "container_links", + f"Too many double quotes found when specifying container: {l.lstrip('container ')}", + self.main_nf, + ) + ) + else: + self.passed.append( + ( + "container_links", + f"Correct number of double quotes found when specifying container: {l.lstrip('container ')}", + self.main_nf, + ) + ) + + # Check for spaces in url + single_quoted_items = raw_line.split("'") + double_quoted_items = raw_line.split('"') + # Look for container link as single item surrounded by quotes + # (if there are multiple links, this will be warned in the next check) + container_link = None + if len(single_quoted_items) == 3: + container_link = single_quoted_items[1] + elif len(double_quoted_items) == 3: + container_link = double_quoted_items[1] + if container_link: + if " " in container_link: + self.failed.append( + ( + "container_links", + f"Space character found in container: '{container_link}'", + self.main_nf, + ) + ) + else: + self.passed.append( + ( + "container_links", + f"No space characters found in container: '{container_link}'", + self.main_nf, + ) + ) + + # lint more than one container in the same line + if ("https://containers" in l or "https://depot" in l) and ("biocontainers/" in l or l.startswith(registry)): + self.warned.append( + ( + "container_links", + "Docker and Singularity containers specified in the same line. Only first one checked.", + self.main_nf, + ) + ) + + def _parse_input(self, line_raw): """ Return list of input channel names from an input line. diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 446cf6dbb8..7552c1ceae 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -1,26 +1,45 @@ import json from pathlib import Path -import jsonschema.validators import yaml +from jsonschema import exceptions, validators +from nf_core.components.lint import ComponentLint +from nf_core.components.nfcore_component import NFCoreComponent from nf_core.modules.modules_differ import ModulesDiffer -def meta_yml(module_lint_object, module): +def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None: """ Lint a ``meta.yml`` file The lint test checks that the module has a ``meta.yml`` file and that it follows the - JSON schema defined in the ``modules/yaml-schema.json`` + JSON schema defined in the ``modules/meta-schema.json`` file in the nf-core/modules repository. 
In addition it checks that the module name and module inputs are consistent between the
    ``meta.yml`` and the ``main.nf``.
+
+    If the module has inputs or outputs, they are expected to be
+    formatted as:
+
+    .. code-block::
+
+        tuple val(foo) path(bar)
+        val foo
+        path foo
+
+    or permutations of the above.
+
+    Args:
+        module_lint_object (ComponentLint): The lint object for the module
+        module (NFCoreComponent): The module to lint
    """
+
+    module.get_inputs_from_main_nf()
+    module.get_outputs_from_main_nf()

    # Check if we have a patch file, get original file in that case
    meta_yaml = None
    if module.is_patched:
@@ -43,19 +62,26 @@ def meta_yml(module_lint_object, module):
            return

    # Confirm that the meta.yml file is valid according to the JSON schema
-    valid_meta_yml = True
+    valid_meta_yml = False
    try:
-        with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/yaml-schema.json"), "r") as fh:
+        with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/meta-schema.json"), "r") as fh:
            schema = json.load(fh)
-            jsonschema.validators.validate(instance=meta_yaml, schema=schema)
+            validators.validate(instance=meta_yaml, schema=schema)
            module.passed.append(("meta_yml_valid", "Module `meta.yml` is valid", module.meta_yml))
-    except jsonschema.exceptions.ValidationError as e:
-        valid_meta_yml = False
+        valid_meta_yml = True
+    except exceptions.ValidationError as e:
        hint = ""
        if len(e.path) > 0:
            hint = f"\nCheck the entry for `{e.path[0]}`."
        if e.message.startswith("None is not of type 'object'") and len(e.path) > 2:
-            hint = f"\nCheck that the child entries of {e.path[0]+'.'+e.path[2]} are indented correctly."
+            hint = f"\nCheck that the child entries of {str(e.path[0])+'.'+str(e.path[2])} are indented correctly."
+        if e.schema.get("message"):
+            e.message = e.schema["message"]
+        incorrect_value = meta_yaml
+        for key in e.path:
+            incorrect_value = incorrect_value[key]
+
+        hint = hint + f"\nThe current value is `{incorrect_value}`. 
module.failed.append( ( "meta_yml_valid", @@ -63,7 +89,6 @@ def meta_yml(module_lint_object, module): module.meta_yml, ) ) - return # Confirm that all input and output channels are specified if valid_meta_yml: @@ -71,26 +96,87 @@ def meta_yml(module_lint_object, module): meta_input = [list(x.keys())[0] for x in meta_yaml["input"]] for input in module.inputs: if input in meta_input: - module.passed.append(("meta_input", f"`{input}` specified", module.meta_yml)) + module.passed.append(("meta_input_main_only", f"`{input}` specified", module.meta_yml)) + else: + module.warned.append( + ( + "meta_input_main_only", + f"`{input}` is present as an input in the `main.nf`, but missing in `meta.yml`", + module.meta_yml, + ) + ) + # check if there are any inputs in meta.yml that are not in main.nf + for input in meta_input: + if input in module.inputs: + module.passed.append( + ( + "meta_input_meta_only", + f"`{input}` is present as an input in `meta.yml` and `main.nf`", + module.meta_yml, + ) + ) else: - module.failed.append(("meta_input", f"`{input}` missing in `meta.yml`", module.meta_yml)) + module.warned.append( + ( + "meta_input_meta_only", + f"`{input}` is present as an input in `meta.yml` but not in `main.nf`", + module.meta_yml, + ) + ) - if "output" in meta_yaml: + if "output" in meta_yaml and meta_yaml["output"] is not None: meta_output = [list(x.keys())[0] for x in meta_yaml["output"]] for output in module.outputs: if output in meta_output: - module.passed.append(("meta_output", f"`{output}` specified", module.meta_yml)) + module.passed.append(("meta_output_main_only", f"`{output}` specified", module.meta_yml)) else: - module.failed.append(("meta_output", f"`{output}` missing in `meta.yml`", module.meta_yml)) - + module.warned.append( + ( + "meta_output_main_only", + f"`{output}` is present as an output in the `main.nf`, but missing in `meta.yml`", + module.meta_yml, + ) + ) + # check if there are any outputs in meta.yml that are not in main.nf + for output in meta_output: + if output in module.outputs: + module.passed.append( + ( + "meta_output_meta_only", + f"`{output}` is present as an output in `meta.yml` and `main.nf`", + module.meta_yml, + ) + ) + elif output == "meta": + module.passed.append( + ( + "meta_output_meta_only", + f"`{output}` is skipped for `meta.yml` outputs", + module.meta_yml, + ) + ) + else: + module.warned.append( + ( + "meta_output_meta_only", + f"`{output}` is present as an output in `meta.yml` but not in `main.nf`", + module.meta_yml, + ) + ) # confirm that the name matches the process name in main.nf if meta_yaml["name"].upper() == module.process_name: - module.passed.append(("meta_name", "Correct name specified in `meta.yml`", module.meta_yml)) + module.passed.append( + ( + "meta_name", + "Correct name specified in `meta.yml`.", + module.meta_yml, + ) + ) else: module.failed.append( ( "meta_name", - f"Conflicting process name between meta.yml (`{meta_yaml['name']}`) and main.nf (`{module.process_name}`)", + f"Conflicting `process` name between meta.yml (`{meta_yaml['name']}`) and main.nf (`{module.process_name}`)", module.meta_yml, ) ) diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index 212b378748..87033e3f49 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -1,87 +1,208 @@ """ Lint the tests of a module in nf-core/modules """ +import json import logging -import os +from pathlib import Path import yaml +from nf_core.components.nfcore_component import NFCoreComponent + log = 
logging.getLogger(__name__) -def module_tests(_, module): +def module_tests(_, module: NFCoreComponent): """ Lint the tests of a module in ``nf-core/modules`` It verifies that the test directory exists - and contains a ``main.nf`` and a ``test.yml``, - and that the module is present in the ``pytest_modules.yml`` - file. + and contains a ``main.nf.test`` a ``main.nf.test.snap`` and ``tags.yml``. """ - - if os.path.exists(module.test_dir): - module.passed.append(("test_dir_exists", "Test directory exists", module.test_dir)) + repo_dir = module.component_dir.parts[: module.component_dir.parts.index(module.component_name.split("/")[0])][-1] + test_dir = Path(module.base_dir, "tests", "modules", repo_dir, module.component_name) + pytest_main_nf = Path(test_dir, "main.nf") + is_pytest = pytest_main_nf.is_file() + if module.nftest_testdir.is_dir(): + module.passed.append(("test_dir_exists", "nf-test test directory exists", module.nftest_testdir)) else: - module.failed.append(("test_dir_exists", "Test directory is missing", module.test_dir)) + if is_pytest: + module.warned.append(("test_dir_exists", "nf-test directory is missing", module.nftest_testdir)) + else: + module.failed.append(("test_dir_exists", "nf-test directory is missing", module.nftest_testdir)) return # Lint the test main.nf file - test_main_nf = os.path.join(module.test_dir, "main.nf") - if os.path.exists(test_main_nf): - module.passed.append(("test_main_exists", "test `main.nf` exists", module.test_main_nf)) + if module.nftest_main_nf.is_file(): + module.passed.append(("test_main_nf_exists", "test `main.nf.test` exists", module.nftest_main_nf)) else: - module.failed.append(("test_main_exists", "test `main.nf` does not exist", module.test_main_nf)) + if is_pytest: + module.warned.append(("test_main_nf_exists", "test `main.nf.test` does not exist", module.nftest_main_nf)) + else: + module.failed.append(("test_main_nf_exists", "test `main.nf.test` does not exist", module.nftest_main_nf)) - # Check that entry in pytest_modules.yml exists - try: - pytest_yml_path = os.path.join(module.base_dir, "tests", "config", "pytest_modules.yml") - with open(pytest_yml_path, "r") as fh: - pytest_yml = yaml.safe_load(fh) - if module.component_name in pytest_yml.keys(): - module.passed.append(("test_pytest_yml", "correct entry in pytest_modules.yml", pytest_yml_path)) + if module.nftest_main_nf.is_file(): + # Check if main.nf.test.snap file exists, if 'snap(' is inside main.nf.test + with open(module.nftest_main_nf, "r") as fh: + if "snapshot(" in fh.read(): + snap_file = module.nftest_testdir / "main.nf.test.snap" + if snap_file.is_file(): + module.passed.append( + ("test_snapshot_exists", "snapshot file `main.nf.test.snap` exists", snap_file) + ) + # Validate no empty files + with open(snap_file, "r") as snap_fh: + try: + snap_content = json.load(snap_fh) + for test_name in snap_content.keys(): + if "d41d8cd98f00b204e9800998ecf8427e" in str(snap_content[test_name]): + if "stub" not in test_name: + module.failed.append( + ( + "test_snap_md5sum", + "md5sum for empty file found: d41d8cd98f00b204e9800998ecf8427e", + snap_file, + ) + ) + else: + module.passed.append( + ( + "test_snap_md5sum", + "md5sum for empty file found, but it is a stub test", + snap_file, + ) + ) + else: + module.passed.append( + ( + "test_snap_md5sum", + "no md5sum for empty file found", + snap_file, + ) + ) + if "7029066c27ac6f5ef18d660d5741979a" in str(snap_content[test_name]): + if "stub" not in test_name: + module.failed.append( + ( + "test_snap_md5sum", + "md5sum for 
compressed empty file found: 7029066c27ac6f5ef18d660d5741979a", + snap_file, + ) + ) + else: + module.passed.append( + ( + "test_snap_md5sum", + "md5sum for compressed empty file found, but it is a stub test", + snap_file, + ) + ) + else: + module.passed.append( + ( + "test_snap_md5sum", + "no md5sum for compressed empty file found", + snap_file, + ) + ) + except json.decoder.JSONDecodeError as e: + module.failed.append( + ( + "test_snapshot_exists", + f"snapshot file `main.nf.test.snap` can't be read: {e}", + snap_file, + ) + ) + else: + module.failed.append( + ("test_snapshot_exists", "snapshot file `main.nf.test.snap` does not exist", snap_file) + ) + # Verify that tags are correct. + main_nf_tags = module._get_main_nf_tags(module.nftest_main_nf) + required_tags = ["modules", "modules_nfcore", module.component_name] + if module.component_name.count("/") == 1: + required_tags.append(module.component_name.split("/")[0]) + chained_components_tags = module._get_included_components_in_chained_tests(module.nftest_main_nf) + missing_tags = [] + log.debug(f"Required tags: {required_tags}") + log.debug(f"Included components for chained nf-tests: {chained_components_tags}") + for tag in set(required_tags + chained_components_tags): + if tag not in main_nf_tags: + missing_tags.append(tag) + if len(missing_tags) == 0: + module.passed.append(("test_main_tags", "Tags adhere to guidelines", module.nftest_main_nf)) else: - module.failed.append(("test_pytest_yml", "missing entry in pytest_modules.yml", pytest_yml_path)) - except FileNotFoundError: - module.failed.append(("test_pytest_yml", "Could not open pytest_modules.yml file", pytest_yml_path)) - - # Lint the test.yml file - try: - with open(module.test_yml, "r") as fh: - test_yml = yaml.safe_load(fh) - - # Verify that tags are correct - all_tags_correct = True - for test in test_yml: - for tag in test["tags"]: - if not tag in [module.component_name, module.component_name.split("/")[0]]: - all_tags_correct = False + module.failed.append( + ( + "test_main_tags", + f"Tags do not adhere to guidelines. 
Tags missing in `main.nf.test`: `{','.join(missing_tags)}`", + module.nftest_main_nf, + ) + ) - # Look for md5sums of empty files - for tfile in test.get("files", []): - if tfile.get("md5sum") == "d41d8cd98f00b204e9800998ecf8427e": - module.failed.append( - ( - "test_yml_md5sum", - "md5sum for empty file found: d41d8cd98f00b204e9800998ecf8427e", - module.test_yml, - ) - ) - if tfile.get("md5sum") == "7029066c27ac6f5ef18d660d5741979a": - module.failed.append( - ( - "test_yml_md5sum", - "md5sum for compressed empty file found: 7029066c27ac6f5ef18d660d5741979a", - module.test_yml, - ) + # Check pytest_modules.yml does not contain entries for modules with nf-test + pytest_yml_path = module.base_dir / "tests" / "config" / "pytest_modules.yml" + if pytest_yml_path.is_file() and not is_pytest: + try: + with open(pytest_yml_path, "r") as fh: + pytest_yml = yaml.safe_load(fh) + if module.component_name in pytest_yml.keys(): + module.failed.append( + ( + "test_pytest_yml", + "module with nf-test should not be listed in pytest_modules.yml", + pytest_yml_path, ) + ) + else: + module.passed.append( + ("test_pytest_yml", "module with nf-test not in pytest_modules.yml", pytest_yml_path) + ) + except FileNotFoundError: + module.warned.append(("test_pytest_yml", "Could not open pytest_modules.yml file", pytest_yml_path)) - if all_tags_correct: - module.passed.append(("test_yml_tags", "tags adhere to guidelines", module.test_yml)) + if module.tags_yml.is_file(): + # Check that tags.yml exists and it has the correct entry + module.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", module.tags_yml)) + with open(module.tags_yml, "r") as fh: + tags_yml = yaml.safe_load(fh) + if module.component_name in tags_yml.keys(): + module.passed.append(("test_tags_yml", "correct entry in tags.yml", module.tags_yml)) + if f"modules/{module.org}/{module.component_name}/**" in tags_yml[module.component_name]: + module.passed.append(("test_tags_yml", "correct path in tags.yml", module.tags_yml)) + else: + module.failed.append( + ( + "test_tags_yml", + f"incorrect path in tags.yml, expected `modules/{module.org}/{module.component_name}/**`, got `{tags_yml[module.component_name][0]}`", + module.tags_yml, + ) + ) else: - module.failed.append(("test_yml_tags", "tags do not adhere to guidelines", module.test_yml)) + module.failed.append( + ( + "test_tags_yml", + f"incorrect key in tags.yml, should be `{module.component_name}`, got `{list(tags_yml.keys())[0]}`.", + module.tags_yml, + ) + ) + else: + if is_pytest: + module.warned.append(("test_tags_yml_exists", "file `tags.yml` does not exist", module.tags_yml)) + else: + module.failed.append(("test_tags_yml_exists", "file `tags.yml` does not exist", module.tags_yml)) - # Test that the file exists - module.passed.append(("test_yml_exists", "Test `test.yml` exists", module.test_yml)) - except FileNotFoundError: - module.failed.append(("test_yml_exists", "Test `test.yml` does not exist", module.test_yml)) + # Check that the old test directory does not exist + if not is_pytest: + old_test_dir = Path(module.base_dir, "tests", "modules", module.component_name) + if old_test_dir.is_dir(): + module.failed.append( + ( + "test_old_test_dir", + f"Pytest files are still present at `{Path('tests', 'modules', module.component_name)}`. 
Please remove this directory and its contents.",
+                    old_test_dir,
+                )
+            )
+        else:
+            module.passed.append(("test_old_test_dir", "Old pytests don't exist for this module", old_test_dir))
diff --git a/nf_core/modules/lint/module_todos.py b/nf_core/modules/lint/module_todos.py
index ee12307512..c9c90ec3dc 100644
--- a/nf_core/modules/lint/module_todos.py
+++ b/nf_core/modules/lint/module_todos.py
@@ -38,10 +38,3 @@ def module_todos(_, module):
            module.warned.append(("module_todo", warning, mod_results["file_paths"][i]))
        for i, passed in enumerate(mod_results["passed"]):
            module.passed.append(("module_todo", passed, module.component_dir))
-
-    # Module tests directory
-    test_results = pipeline_todos(None, root_dir=module.test_dir)
-    for i, warning in enumerate(test_results["warned"]):
-        module.warned.append(("module_todo", warning, test_results["file_paths"][i]))
-    for i, passed in enumerate(test_results["passed"]):
-        module.passed.append(("module_todo", passed, module.test_dir))
diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py
index 9c3d1ae9b1..ee912843b6 100644
--- a/nf_core/modules/modules_json.py
+++ b/nf_core/modules/modules_json.py
@@ -6,9 +6,11 @@
 import shutil
 import tempfile
 from pathlib import Path
+from typing import Union

 import git
 import questionary
+import rich.prompt
 from git.exc import GitCommandError

 import nf_core.utils
@@ -41,7 +43,7 @@ def __init__(self, pipeline_dir):
        self.modules_dir = Path(self.dir, "modules")
        self.subworkflows_dir = Path(self.dir, "subworkflows")
        self.modules_json_path = Path(self.dir, "modules.json")
-        self.modules_json = None
+        self.modules_json: Union[dict, None] = None
        self.pipeline_modules = None
        self.pipeline_subworkflows = None
        self.pipeline_components = None
@@ -67,7 +69,13 @@ def create(self):
        new_modules_json = {"name": pipeline_name.strip("'"), "homePage": pipeline_url.strip("'"), "repos": {}}

        if not self.modules_dir.exists():
-            raise UserWarning("Can't find a ./modules directory. Is this a DSL2 pipeline?")
+            if rich.prompt.Confirm.ask(
+                "[bold][blue]?[/] Can't find a ./modules directory. 
Would you like me to create one?", default=True + ): + log.info(f"Creating ./modules directory in '{self.dir}'") + self.modules_dir.mkdir() + else: + raise UserWarning("Cannot proceed without a ./modules directory.") # Get repositories repos, _ = self.get_pipeline_module_repositories("modules", self.modules_dir) @@ -678,7 +686,7 @@ def update( repo_component_entry[component_name]["installed_by"] = [installed_by] finally: new_installed_by = repo_component_entry[component_name]["installed_by"] + list(installed_by_log) - repo_component_entry[component_name]["installed_by"] = [*set(new_installed_by)] + repo_component_entry[component_name]["installed_by"] = sorted([*set(new_installed_by)]) # Sort the 'modules.json' repo entries self.modules_json["repos"] = nf_core.utils.sort_dictionary(self.modules_json["repos"]) @@ -1035,13 +1043,17 @@ def get_component_branch(self, component_type, component, repo_url, install_dir) ) return branch - def dump(self): + def dump(self, run_prettier: bool = False): """ Sort the modules.json, and write it to file """ # Sort the modules.json self.modules_json["repos"] = nf_core.utils.sort_dictionary(self.modules_json["repos"]) - dump_json_with_prettier(self.modules_json_path, self.modules_json) + if run_prettier: + dump_json_with_prettier(self.modules_json_path, self.modules_json) + else: + with open(self.modules_json_path, "w") as fh: + json.dump(self.modules_json, fh, indent=4) def resolve_missing_installation(self, missing_installation, component_type): missing_but_in_mod_json = [ diff --git a/nf_core/modules/modules_test.py b/nf_core/modules/modules_test.py deleted file mode 100644 index d1f47dcff9..0000000000 --- a/nf_core/modules/modules_test.py +++ /dev/null @@ -1,30 +0,0 @@ -""" -The ModulesTest class runs the tests locally -""" - -from nf_core.components.components_test import ComponentsTest - - -class ModulesTest(ComponentsTest): - """ - Class to run module pytests. 
- """ - - def __init__( - self, - module_name=None, - no_prompts=False, - pytest_args="", - remote_url=None, - branch=None, - no_pull=False, - ): - super().__init__( - component_type="modules", - component_name=module_name, - no_prompts=no_prompts, - pytest_args=pytest_args, - remote_url=remote_url, - branch=branch, - no_pull=no_pull, - ) diff --git a/nf_core/modules/modules_utils.py b/nf_core/modules/modules_utils.py index a5af06b996..3ae01e9eef 100644 --- a/nf_core/modules/modules_utils.py +++ b/nf_core/modules/modules_utils.py @@ -1,7 +1,8 @@ import logging import os -import urllib from pathlib import Path +from typing import List, Optional, Tuple +from urllib.parse import urlparse from ..components.nfcore_component import NFCoreComponent @@ -14,31 +15,29 @@ class ModuleException(Exception): pass -def repo_full_name_from_remote(remote_url): +def repo_full_name_from_remote(remote_url: str) -> str: """ Extracts the path from the remote URL See https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-clone.html#URLS for the possible URL patterns """ # Check whether we have a https or ssh url if remote_url.startswith("https"): - path = urllib.parse.urlparse(remote_url) - path = path.path + path = urlparse(remote_url).path # Remove the intial '/' path = path[1:] # Remove extension path = os.path.splitext(path)[0] else: # Remove the initial `git@`` - path = remote_url.split("@") - path = path[-1] if len(path) > 1 else path[0] - path = urllib.parse.urlparse(path) - path = path.path + split_path: list = remote_url.split("@") + path = split_path[-1] if len(split_path) > 1 else split_path[0] + path = urlparse(path).path # Remove extension path = os.path.splitext(path)[0] return path -def get_installed_modules(dir, repo_type="modules"): +def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], List[NFCoreComponent]]: """ Make a list of all modules installed in this repository @@ -52,9 +51,9 @@ def get_installed_modules(dir, repo_type="modules"): returns (local_modules, nfcore_modules) """ # initialize lists - local_modules = [] - nfcore_modules = [] - local_modules_dir = None + local_modules: List[str] = [] + nfcore_modules_names: List[str] = [] + local_modules_dir: Optional[str] = None nfcore_modules_dir = os.path.join(dir, "modules", "nf-core") # Get local modules @@ -77,12 +76,14 @@ def get_installed_modules(dir, repo_type="modules"): # Not a module, but contains sub-modules if not "main.nf" in m_content: for tool in m_content: - nfcore_modules.append(os.path.join(m, tool)) + nfcore_modules_names.append(os.path.join(m, tool)) else: - nfcore_modules.append(m) + nfcore_modules_names.append(m) # Make full (relative) file paths and create NFCoreComponent objects - local_modules = [os.path.join(local_modules_dir, m) for m in local_modules] + if local_modules_dir: + local_modules = [os.path.join(local_modules_dir, m) for m in local_modules] + nfcore_modules = [ NFCoreComponent( m, @@ -92,7 +93,7 @@ def get_installed_modules(dir, repo_type="modules"): base_dir=Path(dir), component_type="modules", ) - for m in nfcore_modules + for m in nfcore_modules_names ] return local_modules, nfcore_modules diff --git a/nf_core/modules/patch.py b/nf_core/modules/patch.py index 198bb70de5..b4e86f2d19 100644 --- a/nf_core/modules/patch.py +++ b/nf_core/modules/patch.py @@ -1,211 +1,10 @@ import logging -import os -import shutil -import tempfile -from pathlib import Path -import questionary - -import nf_core.utils -from nf_core.components.components_command import ComponentCommand - -from 
.modules_differ import ModulesDiffer -from .modules_json import ModulesJson +from nf_core.components.patch import ComponentPatch log = logging.getLogger(__name__) -class ModulePatch(ComponentCommand): - def __init__(self, dir, remote_url=None, branch=None, no_pull=False): - super().__init__("modules", dir, remote_url, branch, no_pull) - - self.modules_json = ModulesJson(dir) - - def param_check(self, module): - if not self.has_valid_directory(): - raise UserWarning() - - modules = self.modules_json.get_all_components(self.component_type)[self.modules_repo.remote_url] - module_names = [module for _, module in modules] - - if module is not None and module not in module_names: - module_dir = [dir for dir, m in modules if m == module][0] - raise UserWarning(f"Module '{Path('modules', module_dir, module)}' does not exist in the pipeline") - - def patch(self, module=None): - # Check modules directory structure - self.check_modules_structure() - - self.modules_json.check_up_to_date() - self.param_check(module) - modules = self.modules_json.get_all_components(self.component_type)[self.modules_repo.remote_url] - - if module is None: - choices = [ - module if directory == self.modules_repo.repo_path else f"{directory}/{module}" - for directory, module in modules - ] - module = questionary.autocomplete( - "Tool:", - choices, - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - module_dir = [dir for dir, m in modules if m == module][0] - module_fullname = str(Path("modules", module_dir, module)) - - # Verify that the module has an entry in the modules.json file - if not self.modules_json.module_present(module, self.modules_repo.remote_url, module_dir): - raise UserWarning( - f"The '{module_fullname}' module does not have an entry in the 'modules.json' file. Cannot compute patch" - ) - - module_version = self.modules_json.get_module_version(module, self.modules_repo.remote_url, module_dir) - if module_version is None: - raise UserWarning( - f"The '{module_fullname}' module does not have a valid version in the 'modules.json' file. Cannot compute patch" - ) - # Get the module branch and reset it in the ModulesRepo object - module_branch = self.modules_json.get_component_branch( - self.component_type, module, self.modules_repo.remote_url, module_dir - ) - if module_branch != self.modules_repo.branch: - self.modules_repo.setup_branch(module_branch) - - # Set the diff filename based on the module name - patch_filename = f"{module.replace('/', '-')}.diff" - module_relpath = Path("modules", module_dir, module) - patch_relpath = Path(module_relpath, patch_filename) - module_current_dir = Path(self.dir, module_relpath) - patch_path = Path(self.dir, patch_relpath) - - if patch_path.exists(): - remove = questionary.confirm( - f"Patch exists for module '{module_fullname}'. Do you want to regenerate it?", - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - if remove: - os.remove(patch_path) - else: - return - - # Create a temporary directory for storing the unchanged version of the module - install_dir = tempfile.mkdtemp() - module_install_dir = Path(install_dir, module) - if not self.install_component_files(module, module_version, self.modules_repo, install_dir): - raise UserWarning( - f"Failed to install files of module '{module}' from remote ({self.modules_repo.remote_url})." 
- ) - - # Write the patch to a temporary location (otherwise it is printed to the screen later) - patch_temp_path = tempfile.mktemp() - try: - ModulesDiffer.write_diff_file( - patch_temp_path, - module, - self.modules_repo.repo_path, - module_install_dir, - module_current_dir, - for_git=False, - dsp_from_dir=module_relpath, - dsp_to_dir=module_relpath, - ) - log.debug(f"Patch file wrote to a temporary directory {patch_temp_path}") - except UserWarning: - raise UserWarning(f"Module '{module_fullname}' is unchanged. No patch to compute") - - # Write changes to modules.json - self.modules_json.add_patch_entry(module, self.modules_repo.remote_url, module_dir, patch_relpath) - log.debug(f"Wrote patch path for module {module} to modules.json") - - # Show the changes made to the module - ModulesDiffer.print_diff( - module, - self.modules_repo.repo_path, - module_install_dir, - module_current_dir, - dsp_from_dir=module_current_dir, - dsp_to_dir=module_current_dir, - ) - - # Finally move the created patch file to its final location - shutil.move(patch_temp_path, patch_path) - log.info(f"Patch file of '{module_fullname}' written to '{patch_path}'") - - def remove(self, module): - # Check modules directory structure - self.check_modules_structure() - - self.modules_json.check_up_to_date() - self.param_check(module) - modules = self.modules_json.get_all_components(self.component_type)[self.modules_repo.remote_url] - - if module is None: - choices = [ - module if directory == self.modules_repo.repo_path else f"{directory}/{module}" - for directory, module in modules - ] - module = questionary.autocomplete( - "Tool:", - choices, - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - module_dir = [dir for dir, m in modules if m == module][0] - module_fullname = str(Path("modules", module_dir, module)) - - # Verify that the module has an entry in the modules.json file - if not self.modules_json.module_present(module, self.modules_repo.remote_url, module_dir): - raise UserWarning( - f"The '{module_fullname}' module does not have an entry in the 'modules.json' file. Cannot compute patch" - ) - - module_version = self.modules_json.get_module_version(module, self.modules_repo.remote_url, module_dir) - if module_version is None: - raise UserWarning( - f"The '{module_fullname}' module does not have a valid version in the 'modules.json' file. Cannot compute patch" - ) - # Get the module branch and reset it in the ModulesRepo object - module_branch = self.modules_json.get_component_branch( - self.component_type, module, self.modules_repo.remote_url, module_dir - ) - if module_branch != self.modules_repo.branch: - self.modules_repo.setup_branch(module_branch) - - # Set the diff filename based on the module name - patch_filename = f"{module.replace('/', '-')}.diff" - module_relpath = Path("modules", module_dir, module) - patch_relpath = Path(module_relpath, patch_filename) - patch_path = Path(self.dir, patch_relpath) - module_path = Path(self.dir, module_relpath) - - if patch_path.exists(): - remove = questionary.confirm( - f"Patch exists for module '{module_fullname}'. 
Are you sure you want to remove?", - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - if not remove: - return - - # Try to apply the patch in reverse and move resulting files to module dir - temp_module_dir = self.modules_json.try_apply_patch_reverse( - module, self.modules_repo.repo_path, patch_relpath, module_path - ) - try: - for file in Path(temp_module_dir).glob("*"): - file.rename(module_path.joinpath(file.name)) - os.rmdir(temp_module_dir) - except Exception as err: - raise UserWarning(f"There was a problem reverting the patched file: {err}") - - log.info(f"Patch for {module} reverted!") - # Remove patch file if we could revert the patch - patch_path.unlink() - # Write changes to module.json - self.modules_json.remove_patch_entry(module, self.modules_repo.remote_url, module_dir) - - if not all( - self.modules_repo.component_files_identical(module, module_path, module_version, "modules").values() - ): - log.error( - f"Module files do not appear to match the remote for the commit sha in the 'module.json': {module_version}\n" - f"Recommend reinstalling with 'nf-core modules install --force --sha {module_version} {module}' " - ) +class ModulePatch(ComponentPatch): + def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False, installed_by=False): + super().__init__(pipeline_dir, "modules", remote_url, branch, no_pull, installed_by) diff --git a/nf_core/modules/test_yml_builder.py b/nf_core/modules/test_yml_builder.py deleted file mode 100644 index 3c23416fd4..0000000000 --- a/nf_core/modules/test_yml_builder.py +++ /dev/null @@ -1,373 +0,0 @@ -""" -The ModulesTestYmlBuilder class handles automatic generation of the modules test.yml file -along with running the tests and creating md5 sums -""" - -from __future__ import print_function - -import errno -import gzip -import hashlib -import io -import logging -import operator -import os -import re -import shlex -import subprocess -import tempfile - -import questionary -import rich -import yaml -from rich.syntax import Syntax - -import nf_core.utils -from nf_core.components.components_command import ComponentCommand - -from ..lint_utils import run_prettier_on_file -from .modules_repo import ModulesRepo - -log = logging.getLogger(__name__) - - -class ModulesTestYmlBuilder(ComponentCommand): - def __init__( - self, - module_name=None, - directory=".", - run_tests=False, - test_yml_output_path=None, - force_overwrite=False, - no_prompts=False, - remote_url=None, - branch=None, - ): - super().__init__("modules", directory, remote_url, branch) - self.module_name = module_name - self.remote_url = remote_url - self.branch = branch - self.run_tests = run_tests - self.test_yml_output_path = test_yml_output_path - self.force_overwrite = force_overwrite - self.no_prompts = no_prompts - self.module_dir = None - self.module_test_main = None - self.entry_points = [] - self.tests = [] - self.errors = [] - - def run(self): - """Run build steps""" - if not self.no_prompts: - log.info( - "[yellow]Press enter to use default values " - "[cyan bold](shown in brackets) [yellow]or type your own responses" - ) - self.check_inputs() - self.scrape_workflow_entry_points() - self.build_all_tests() - self.print_test_yml() - if len(self.errors) > 0: - errors = "\n - ".join(self.errors) - raise UserWarning(f"Ran, but found errors:\n - {errors}") - - def check_inputs(self): - """Do more complex checks about supplied flags.""" - # Check modules directory structure - self.check_modules_structure() - - # Get the tool name if not specified - if 
self.module_name is None: - self.module_name = questionary.autocomplete( - "Tool name:", - choices=self.components_from_repo(self.org), - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - self.module_dir = os.path.join(self.default_modules_path, *self.module_name.split("/")) - self.module_test_main = os.path.join(self.default_tests_path, *self.module_name.split("/"), "main.nf") - - # First, sanity check that the module directory exists - if not os.path.isdir(self.module_dir): - raise UserWarning(f"Cannot find directory '{self.module_dir}'. Should be TOOL/SUBTOOL or TOOL") - if not os.path.exists(self.module_test_main): - raise UserWarning(f"Cannot find module test workflow '{self.module_test_main}'") - - # Check that we're running tests if no prompts - if not self.run_tests and self.no_prompts: - log.debug("Setting run_tests to True as running without prompts") - self.run_tests = True - - # Get the output YAML file / check it does not already exist - while self.test_yml_output_path is None: - default_val = f"tests/modules/{self.org}/{self.module_name}/test.yml" - if self.no_prompts: - self.test_yml_output_path = default_val - else: - self.test_yml_output_path = rich.prompt.Prompt.ask( - "[violet]Test YAML output path[/] (- for stdout)", default=default_val - ).strip() - if self.test_yml_output_path == "": - self.test_yml_output_path = None - # Check that the output YAML file does not already exist - if ( - self.test_yml_output_path is not None - and self.test_yml_output_path != "-" - and os.path.exists(self.test_yml_output_path) - and not self.force_overwrite - ): - if rich.prompt.Confirm.ask( - f"[red]File exists! [green]'{self.test_yml_output_path}' [violet]Overwrite?" - ): - self.force_overwrite = True - else: - self.test_yml_output_path = None - if os.path.exists(self.test_yml_output_path) and not self.force_overwrite: - raise UserWarning( - f"Test YAML file already exists! '{self.test_yml_output_path}'. Use '--force' to overwrite." - ) - - def scrape_workflow_entry_points(self): - """Find the test workflow entry points from main.nf""" - log.info(f"Looking for test workflow entry points: '{self.module_test_main}'") - with open(self.module_test_main, "r") as fh: - for line in fh: - match = re.match(r"workflow\s+(\S+)\s+{", line) - if match: - self.entry_points.append(match.group(1)) - if len(self.entry_points) == 0: - raise UserWarning("No workflow entry points found in 'self.module_test_main'") - - def build_all_tests(self): - """ - Go over each entry point and build structure - """ - for entry_point in self.entry_points: - ep_test = self.build_single_test(entry_point) - if ep_test: - self.tests.append(ep_test) - - def build_single_test(self, entry_point): - """Given the supplied cli flags, prompt for any that are missing. - - Returns: Test command - """ - ep_test = { - "name": "", - "command": "", - "tags": [], - "files": [], - } - - # Print nice divider line - console = rich.console.Console() - console.print("[black]" + "─" * console.width) - - log.info(f"Building test meta for entry point '{entry_point}'") - - while ep_test["name"] == "": - default_val = f"{self.module_name.replace('/', ' ')} {entry_point}" - if self.no_prompts: - ep_test["name"] = default_val - else: - ep_test["name"] = rich.prompt.Prompt.ask("[violet]Test name", default=default_val).strip() - - while ep_test["command"] == "": - # Don't think we need the last `-c` flag, but keeping to avoid having to update 100s modules. 
- # See https://github.com/nf-core/tools/issues/1562 - default_val = ( - f"nextflow run ./tests/modules/{self.org}/{self.module_name} -entry {entry_point} " - f"-c ./tests/config/nextflow.config" - ) - if self.no_prompts: - ep_test["command"] = default_val - else: - ep_test["command"] = rich.prompt.Prompt.ask("[violet]Test command", default=default_val).strip() - - while len(ep_test["tags"]) == 0: - mod_name_parts = self.module_name.split("/") - tag_defaults = [] - for idx in range(0, len(mod_name_parts)): - tag_defaults.append("/".join(mod_name_parts[: idx + 1])) - # Remove duplicates - tag_defaults = list(set(tag_defaults)) - if self.no_prompts: - ep_test["tags"] = tag_defaults - else: - while len(ep_test["tags"]) == 0: - prompt_tags = rich.prompt.Prompt.ask( - "[violet]Test tags[/] (comma separated)", default=",".join(tag_defaults) - ).strip() - ep_test["tags"] = [t.strip() for t in prompt_tags.split(",")] - - ep_test["files"] = self.get_md5_sums(ep_test["command"]) - - return ep_test - - def check_if_empty_file(self, fname): - """Check if the file is empty, or compressed empty""" - if os.path.getsize(fname) == 0: - return True - try: - with open(fname, "rb") as fh: - g_f = gzip.GzipFile(fileobj=fh, mode="rb") - if g_f.read() == b"": - return True - except gzip.BadGzipFile: - pass - - return False - - def _md5(self, fname): - """Generate md5 sum for file""" - hash_md5 = hashlib.md5() - with open(fname, "rb") as f: - for chunk in iter(lambda: f.read(io.DEFAULT_BUFFER_SIZE), b""): - hash_md5.update(chunk) - md5sum = hash_md5.hexdigest() - return md5sum - - def create_test_file_dict(self, results_dir, is_repeat=False): - """Walk through directory and collect md5 sums""" - test_files = [] - for root, _, files in os.walk(results_dir, followlinks=True): - for filename in files: - file_path = os.path.join(root, filename) - # add the key here so that it comes first in the dict - test_file = {"path": file_path} - # Check that this isn't an empty file - if self.check_if_empty_file(file_path): - if not is_repeat: - self.errors.append(f"Empty file found! '{os.path.basename(file_path)}'") - # Add the md5 anyway, linting should fail later and can be manually removed if needed. - # Originally we skipped this if empty, but then it's too easy to miss the warning. - # Equally, if a file is legitimately empty we don't want to prevent this from working. 
- if filename != "versions.yml": - # Only add md5sum if the file is not versions.yml - file_md5 = self._md5(file_path) - test_file["md5sum"] = file_md5 - # Switch out the results directory path with the expected 'output' directory - test_file["path"] = file_path.replace(results_dir, "output") - test_files.append(test_file) - - test_files = sorted(test_files, key=operator.itemgetter("path")) - - return test_files - - def get_md5_sums(self, command, results_dir=None, results_dir_repeat=None): - """ - Recursively go through directories and subdirectories - and generate tuples of (, ) - returns: list of tuples - """ - - run_this_test = False - while results_dir is None: - if self.run_tests or run_this_test: - results_dir, results_dir_repeat = self.run_tests_workflow(command) - else: - results_dir = rich.prompt.Prompt.ask( - "[violet]Test output folder with results[/] (leave blank to run test)" - ) - if results_dir == "": - results_dir = None - run_this_test = True - elif not os.path.isdir(results_dir): - log.error(f"Directory '{results_dir}' does not exist") - results_dir = None - - test_files = self.create_test_file_dict(results_dir=results_dir) - - # If test was repeated, compare the md5 sums - if results_dir_repeat: - test_files_repeat = self.create_test_file_dict(results_dir=results_dir_repeat, is_repeat=True) - - # Compare both test.yml files - for i in range(len(test_files)): - if test_files[i].get("md5sum") and not test_files[i].get("md5sum") == test_files_repeat[i]["md5sum"]: - test_files[i].pop("md5sum") - test_files[i]["contains"] = [ - "# TODO nf-core: file md5sum was variable, please replace this text with a string found in the file instead " - ] - - if len(test_files) == 0: - raise UserWarning(f"Could not find any test result files in '{results_dir}'") - - return test_files - - def run_tests_workflow(self, command): - """Given a test workflow and an entry point, run the test workflow""" - - # The config expects $PROFILE and Nextflow fails if it's not set - if os.environ.get("PROFILE") is None: - os.environ["PROFILE"] = "" - if self.no_prompts: - log.info( - "Setting env var '$PROFILE' to an empty string as not set.\n" - "Tests will run with Docker by default. " - "To use Singularity set 'export PROFILE=singularity' in your shell before running this command." - ) - else: - question = { - "type": "list", - "name": "profile", - "message": "Choose software profile", - "choices": ["Docker", "Singularity", "Conda"], - } - answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style) - profile = answer["profile"].lower() - if profile in ["singularity", "conda"]: - os.environ["PROFILE"] = profile - log.info(f"Setting env var '$PROFILE' to '{profile}'") - - tmp_dir = tempfile.mkdtemp() - tmp_dir_repeat = tempfile.mkdtemp() - work_dir = tempfile.mkdtemp() - command_repeat = command + f" --outdir {tmp_dir_repeat} -work-dir {work_dir}" - command += f" --outdir {tmp_dir} -work-dir {work_dir}" - - log.info(f"Running '{self.module_name}' test with command:\n[violet]{command}") - try: - nfconfig_raw = subprocess.check_output(shlex.split(command)) - log.info("Repeating test ...") - nfconfig_raw = subprocess.check_output(shlex.split(command_repeat)) - - except OSError as e: - if e.errno == errno.ENOENT and command.strip().startswith("nextflow "): - raise AssertionError( - "It looks like Nextflow is not installed. It is required for most nf-core functions." 
- ) - except subprocess.CalledProcessError as e: - output = rich.markup.escape(e.output.decode()) - raise UserWarning(f"Error running test workflow (exit code {e.returncode})\n[red]{output}") - except Exception as e: - raise UserWarning(f"Error running test workflow: {e}") - else: - log.info("Test workflow finished!") - try: - log.debug(rich.markup.escape(nfconfig_raw)) - except TypeError: - log.debug(rich.markup.escape(nfconfig_raw.decode("utf-8"))) - - return tmp_dir, tmp_dir_repeat - - def print_test_yml(self): - """ - Generate the test yml file. - """ - with tempfile.NamedTemporaryFile(mode="w+") as fh: - yaml.dump(self.tests, fh, Dumper=nf_core.utils.custom_yaml_dumper(), width=10000000) - run_prettier_on_file(fh.name) - fh.seek(0) - prettified_yml = fh.read() - - if self.test_yml_output_path == "-": - console = rich.console.Console() - console.print("\n", Syntax(prettified_yml, "yaml"), "\n") - else: - try: - log.info(f"Writing to '{self.test_yml_output_path}'") - with open(self.test_yml_output_path, "w") as fh: - fh.write(prettified_yml) - except FileNotFoundError as e: - raise UserWarning(f"Could not create test.yml file: '{e}'") diff --git a/nf_core/params_file.py b/nf_core/params_file.py index 39986b95c2..5c50c53fb9 100644 --- a/nf_core/params_file.py +++ b/nf_core/params_file.py @@ -89,7 +89,7 @@ def __init__( self, pipeline=None, revision=None, - ): + ) -> None: """Initialise the ParamFileBuilder class Args: diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 6a1b9a9176..881ecbb022 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -31,6 +31,9 @@ If you're not used to this workflow with git, you can start with some [docs from ## Tests +You can optionally test your changes by running the pipeline locally. Then it is recommended to use the `debug` profile to +receive warnings about process selectors and other debug info. Example: `nextflow run . -profile debug,test,docker --outdir `. + When you create a pull request with changes, [GitHub Actions](https://github.com/features/actions) will run automatic tests. Typically, pull-requests are only fully reviewed when these tests are passing, though of course we can help out before then. diff --git a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md index 0f81ebaa4c..4f01a97993 100644 --- a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md +++ b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md @@ -21,6 +21,7 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/{{ name }}/t {%- endif %} - [ ] Make sure your code lints (`nf-core lint`). - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker --outdir `). +- [ ] Check for unexpected warnings in debug mode (`nextflow run . -profile debug,test,docker --outdir `). - [ ] Usage Documentation in `docs/usage.md` is updated. - [ ] Output Documentation in `docs/output.md` is updated. - [ ] `CHANGELOG.md` is updated. 
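For context on the `test_snap_md5sum` checks added in `nf_core/modules/lint/module_tests.py` above: the two hard-coded digests are the md5 sums of an empty file and of a gzip-compressed empty file, which is why the lint test flags them in non-stub snapshots. A minimal sketch of where they come from (plain Python standard library; illustrative only, not part of this diff):

```python
import hashlib

# d41d8cd98f00b204e9800998ecf8427e is the md5 of zero bytes, i.e. what a
# completely empty output file hashes to
assert hashlib.md5(b"").hexdigest() == "d41d8cd98f00b204e9800998ecf8427e"

# 7029066c27ac6f5ef18d660d5741979a is the conventional md5 of an empty
# gzip stream (e.g. `gzip -n` over empty input); the exact bytes depend
# on the gzip header fields, so it is a convention rather than something
# derivable from hashlib alone
EMPTY_GZIP_MD5 = "7029066c27ac6f5ef18d660d5741979a"
```

A snapshot containing either digest in a non-stub test almost always means a process produced an empty (or compressed-empty) file, so the lint test treats it as a failure rather than a reproducible result.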
diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 521f3e664a..3edd49f09d 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -28,7 +28,7 @@ jobs: - "latest-everything" steps: - name: Check out pipeline code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 diff --git a/nf_core/pipeline-template/.github/workflows/fix-linting.yml b/nf_core/pipeline-template/.github/workflows/fix-linting.yml index f3dc3e50fe..31e8cd2b36 100644 --- a/nf_core/pipeline-template/.github/workflows/fix-linting.yml +++ b/nf_core/pipeline-template/.github/workflows/fix-linting.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: # Use the @nf-core-bot token to check out so we can push later - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: token: ${{ secrets.nf_core_bot_auth_token }} @@ -24,7 +24,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 - name: Install Prettier run: npm install -g prettier @prettier/plugin-php diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index 29fc466ed6..94aa5278be 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -14,9 +14,9 @@ jobs: EditorConfig: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 - name: Install editorconfig-checker run: npm install -g editorconfig-checker @@ -27,9 +27,9 @@ jobs: Prettier: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 - name: Install Prettier run: npm install -g prettier @@ -40,7 +40,7 @@ jobs: PythonBlack: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Check code lints with Black uses: psf/black@stable @@ -71,7 +71,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index 25488dcc08..acf7269536 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -4,7 +4,9 @@ tasks: command: | pre-commit install --install-hooks nextflow self-update - + - name: unset JAVA_TOOL_OPTIONS + command: | + unset JAVA_TOOL_OPTIONS vscode: extensions: # based on nf-core.nf-core-extensionpack - codezombiech.gitignore # Language support for .gitignore files diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index cddf8d13c6..c874090b51 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -43,11 +43,8 @@ ## Usage -:::note -If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how -to set-up Nextflow. Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) -with `-profile test` before running the workflow on actual data. 
-::: +> [!NOTE] +> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how to set-up Nextflow. Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) with `-profile test` before running the workflow on actual data. - + diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml index 9ab59067a1..39943ffe49 100644 --- a/nf_core/pipeline-template/assets/multiqc_config.yml +++ b/nf_core/pipeline-template/assets/multiqc_config.yml @@ -1,6 +1,6 @@ report_comment: > {% if 'dev' in version -%} - This report has been generated by the {{ name }} + This report has been generated by the {{ name }} analysis pipeline.{% if branded %} For information about how to interpret these results, please see the documentation.{% endif %} {%- else %} diff --git a/nf_core/pipeline-template/assets/slackreport.json b/nf_core/pipeline-template/assets/slackreport.json index ec03b3968a..96d2cb8afc 100644 --- a/nf_core/pipeline-template/assets/slackreport.json +++ b/nf_core/pipeline-template/assets/slackreport.json @@ -3,7 +3,7 @@ { "fallback": "Plain-text summary of the attachment.", "color": "<% if (success) { %>good<% } else { %>danger<%} %>", - "author_name": "{{ name }} v${version} - ${runName}", + "author_name": "{{ name }} ${version} - ${runName}", "author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico", "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>", "fields": [ diff --git a/nf_core/pipeline-template/conf/modules.config b/nf_core/pipeline-template/conf/modules.config index 39e8138653..d91c6aba0b 100644 --- a/nf_core/pipeline-template/conf/modules.config +++ b/nf_core/pipeline-template/conf/modules.config @@ -39,7 +39,7 @@ process { } withName: 'MULTIQC' { - ext.args = params.multiqc_title ? "--title \"$params.multiqc_title\"" : '' + ext.args = { params.multiqc_title ? "--title \"$params.multiqc_title\"" : '' } publishDir = [ path: { "${params.outdir}/multiqc" }, mode: params.publish_dir_mode, diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index 6dba3032a4..c908d3d38c 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -24,7 +24,7 @@ You will need to create a samplesheet with information about the samples you wou The `sample` identifiers have to be the same when you have re-sequenced the same sample more than once e.g. to increase sequencing depth. The pipeline will concatenate the raw reads before performing any downstream analysis. Below is an example for the same sample sequenced across 3 lanes: -```console +```csv title="samplesheet.csv" sample,fastq_1,fastq_2 CONTROL_REP1,AEG588A1_S1_L002_R1_001.fastq.gz,AEG588A1_S1_L002_R2_001.fastq.gz CONTROL_REP1,AEG588A1_S1_L003_R1_001.fastq.gz,AEG588A1_S1_L003_R2_001.fastq.gz @@ -37,7 +37,7 @@ The pipeline will auto-detect whether a sample is single- or paired-end using th A final samplesheet file consisting of both single- and paired-end data may look something like the one below. This is for 6 samples, where `TREATMENT_REP3` has been sequenced twice. 
-```console +```csv title="samplesheet.csv" sample,fastq_1,fastq_2 CONTROL_REP1,AEG588A1_S1_L002_R1_001.fastq.gz,AEG588A1_S1_L002_R2_001.fastq.gz CONTROL_REP2,AEG588A2_S2_L002_R1_001.fastq.gz,AEG588A2_S2_L002_R2_001.fastq.gz diff --git a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy index ff05f37a5d..0f8d021a03 100755 --- a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy +++ b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy @@ -4,6 +4,7 @@ import org.yaml.snakeyaml.Yaml import groovy.json.JsonOutput +import nextflow.extension.FilesEx class NfcoreTemplate { @@ -141,12 +142,14 @@ class NfcoreTemplate { try { if (params.plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') } // Try to send HTML e-mail using sendmail + def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") + sendmail_tf.withWriter { w -> w << sendmail_html } [ 'sendmail', '-t' ].execute() << sendmail_html log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-" } catch (all) { // Catch failures and try with plaintext def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ] - if ( mqc_report.size() <= max_multiqc_email_size.toBytes() ) { + if ( mqc_report != null && mqc_report.size() <= max_multiqc_email_size.toBytes() ) { mail_cmd += [ '-A', mqc_report ] } mail_cmd.execute() << email_html @@ -155,14 +158,16 @@ class NfcoreTemplate { } // Write summary e-mail HTML to a file - def output_d = new File("${params.outdir}/pipeline_info/") - if (!output_d.exists()) { - output_d.mkdirs() - } - def output_hf = new File(output_d, "pipeline_report.html") + def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html") output_hf.withWriter { w -> w << email_html } - def output_tf = new File(output_d, "pipeline_report.txt") + FilesEx.copyTo(output_hf.toPath(), "${params.outdir}/pipeline_info/pipeline_report.html"); + output_hf.delete() + + // Write summary e-mail TXT to a file + def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt") output_tf.withWriter { w -> w << email_txt } + FilesEx.copyTo(output_tf.toPath(), "${params.outdir}/pipeline_info/pipeline_report.txt"); + output_tf.delete() } // @@ -227,15 +232,14 @@ class NfcoreTemplate { // Dump pipeline parameters in a json file // public static void dump_parameters(workflow, params) { - def output_d = new File("${params.outdir}/pipeline_info/") - if (!output_d.exists()) { - output_d.mkdirs() - } - def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') - def output_pf = new File(output_d, "params_${timestamp}.json") + def filename = "params_${timestamp}.json" + def temp_pf = new File(workflow.launchDir.toString(), ".${filename}") def jsonStr = JsonOutput.toJson(params) - output_pf.text = JsonOutput.prettyPrint(jsonStr) + temp_pf.text = JsonOutput.prettyPrint(jsonStr) + + FilesEx.copyTo(temp_pf.toPath(), "${params.outdir}/pipeline_info/params_${timestamp}.json") + temp_pf.delete() } // diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 2154873150..223bcacc9c 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -7,17 +7,17 @@ "nf-core": { "custom/dumpsoftwareversions": { "branch": "master", - "git_sha": "911696ea0b62df80e900ef244d7867d177971f73", + "git_sha": "bba7e362e4afead70653f84d8700588ea28d0f9e", "installed_by": ["modules"] }, "fastqc": 
{ "branch": "master", - "git_sha": "bd8092b67b5103bdd52e300f75889442275c3117", + "git_sha": "65ad3e0b9a4099592e1102e92e10455dc661cf53", "installed_by": ["modules"] }, "multiqc": { "branch": "master", - "git_sha": "911696ea0b62df80e900ef244d7867d177971f73", + "git_sha": "4ab13872435962dadc239979554d13709e20bf29", "installed_by": ["modules"] } } diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/environment.yml b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/environment.yml new file mode 100644 index 0000000000..f0c63f6984 --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/environment.yml @@ -0,0 +1,7 @@ +name: custom_dumpsoftwareversions +channels: + - conda-forge + - bioconda + - defaults +dependencies: + - bioconda::multiqc=1.17 diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf index ebc8727339..7685b33cde 100644 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf @@ -2,10 +2,10 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { label 'process_single' // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container - conda "bioconda::multiqc=1.14" + conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/multiqc:1.14--pyhdfd78af_0' : - 'biocontainers/multiqc:1.14--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.17--pyhdfd78af_0' : + 'biocontainers/multiqc:1.17--pyhdfd78af_0' }" input: path versions diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml index c32657de7a..5f15a5fde0 100644 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml @@ -1,4 +1,4 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json name: custom_dumpsoftwareversions description: Custom module used to dump software versions within the nf-core pipeline template keywords: @@ -16,7 +16,6 @@ input: type: file description: YML file containing software versions pattern: "*.yml" - output: - yml: type: file @@ -30,7 +29,9 @@ output: type: file description: File containing software versions pattern: "versions.yml" - authors: - "@drpatelh" - "@grst" +maintainers: + - "@drpatelh" + - "@grst" diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test new file mode 100644 index 0000000000..eec1db10a2 --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test @@ -0,0 +1,38 @@ +nextflow_process { + + name "Test Process CUSTOM_DUMPSOFTWAREVERSIONS" + script "../main.nf" + process "CUSTOM_DUMPSOFTWAREVERSIONS" + tag "modules" + tag "modules_nfcore" + tag "custom" + tag "dumpsoftwareversions" + tag "custom/dumpsoftwareversions" + + test("Should 
run without failures") { + when { + process { + """ + def tool1_version = ''' + TOOL1: + tool1: 0.11.9 + '''.stripIndent() + + def tool2_version = ''' + TOOL2: + tool2: 1.9 + '''.stripIndent() + + input[0] = Channel.of(tool1_version, tool2_version).collectFile() + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } +} diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap new file mode 100644 index 0000000000..4274ed57aa --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap @@ -0,0 +1,27 @@ +{ + "Should run without failures": { + "content": [ + { + "0": [ + "software_versions.yml:md5,1c851188476409cda5752ce971b20b58" + ], + "1": [ + "software_versions_mqc.yml:md5,2570f4ba271ad08357b0d3d32a9cf84d" + ], + "2": [ + "versions.yml:md5,3843ac526e762117eedf8825b40683df" + ], + "mqc_yml": [ + "software_versions_mqc.yml:md5,2570f4ba271ad08357b0d3d32a9cf84d" + ], + "versions": [ + "versions.yml:md5,3843ac526e762117eedf8825b40683df" + ], + "yml": [ + "software_versions.yml:md5,1c851188476409cda5752ce971b20b58" + ] + } + ], + "timestamp": "2023-11-03T14:43:22.157011" + } +} diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml new file mode 100644 index 0000000000..405aa24ae3 --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml @@ -0,0 +1,2 @@ +custom/dumpsoftwareversions: + - modules/nf-core/custom/dumpsoftwareversions/** diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml new file mode 100644 index 0000000000..1787b38a9a --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml @@ -0,0 +1,7 @@ +name: fastqc +channels: + - conda-forge + - bioconda + - defaults +dependencies: + - bioconda::fastqc=0.12.1 diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf index 249f90644d..9e19a74c56 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf @@ -2,10 +2,10 @@ process FASTQC { tag "$meta.id" label 'process_medium' - conda "bioconda::fastqc=0.11.9" + conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' : - 'biocontainers/fastqc:0.11.9--0' }" + 'https://depot.galaxyproject.org/singularity/fastqc:0.12.1--hdfd78af_0' : + 'biocontainers/fastqc:0.12.1--hdfd78af_0' }" input: tuple val(meta), path(reads) @@ -37,7 +37,7 @@ process FASTQC { cat <<-END_VERSIONS > versions.yml "${task.process}": - fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + fastqc: \$( fastqc --version | sed '/FastQC v/!d; s/.*v//' ) END_VERSIONS """ @@ -49,7 +49,7 @@ process FASTQC { cat <<-END_VERSIONS > versions.yml "${task.process}": - fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + fastqc: \$( fastqc --version | sed '/FastQC v/!d; s/.*v//' ) END_VERSIONS """ } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml index 4da5bb5a06..ee5507e06b 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml @@ -50,3 +50,8 @@ authors: - "@grst" - "@ewels" - "@FelixKrueger" +maintainers: + - "@drpatelh" + - "@grst" + - "@ewels" + - "@FelixKrueger" diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test new file mode 100644 index 0000000000..b9e8f926eb --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test @@ -0,0 +1,109 @@ +nextflow_process { + + name "Test Process FASTQC" + script "../main.nf" + process "FASTQC" + tag "modules" + tag "modules_nfcore" + tag "fastqc" + + test("Single-Read") { + + when { + params { + outdir = "$outputDir" + } + process { + """ + input[0] = [ + [ id: 'test', single_end:true ], + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] + """ + } + } + + then { + assertAll ( + { assert process.success }, + // NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it. + // looks like this:
<div id="header_filename">Mon 2 Oct 2023<br/>test.gz</div>
+ // https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039 + { assert process.out.html.get(0).get(1) ==~ ".*/test_fastqc.html" }, + { assert path(process.out.html.get(0).get(1)).getText().contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, + { assert snapshot(process.out.versions).match("versions") }, + { assert process.out.zip.get(0).get(1) ==~ ".*/test_fastqc.zip" } + ) + } + } +// TODO +// // +// // Test with paired-end data +// // +// workflow test_fastqc_paired_end { +// input = [ +// [id: 'test', single_end: false], // meta map +// [ +// file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), +// file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) +// ] +// ] + +// FASTQC ( input ) +// } + +// // +// // Test with interleaved data +// // +// workflow test_fastqc_interleaved { +// input = [ +// [id: 'test', single_end: false], // meta map +// file(params.test_data['sarscov2']['illumina']['test_interleaved_fastq_gz'], checkIfExists: true) +// ] + +// FASTQC ( input ) +// } + +// // +// // Test with bam data +// // +// workflow test_fastqc_bam { +// input = [ +// [id: 'test', single_end: false], // meta map +// file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) +// ] + +// FASTQC ( input ) +// } + +// // +// // Test with multiple samples +// // +// workflow test_fastqc_multiple { +// input = [ +// [id: 'test', single_end: false], // meta map +// [ +// file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), +// file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true), +// file(params.test_data['sarscov2']['illumina']['test2_1_fastq_gz'], checkIfExists: true), +// file(params.test_data['sarscov2']['illumina']['test2_2_fastq_gz'], checkIfExists: true) +// ] +// ] + +// FASTQC ( input ) +// } + +// // +// // Test with custom prefix +// // +// workflow test_fastqc_custom_prefix { +// input = [ +// [ id:'mysample', single_end:true ], // meta map +// file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) +// ] + +// FASTQC ( input ) +// } +} diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap new file mode 100644 index 0000000000..636a32cead --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap @@ -0,0 +1,10 @@ +{ + "versions": { + "content": [ + [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ] + ], + "timestamp": "2023-10-09T23:40:54+0000" + } +} \ No newline at end of file diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/tags.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/tags.yml new file mode 100644 index 0000000000..7834294ba0 --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/tags.yml @@ -0,0 +1,2 @@ +fastqc: + - modules/nf-core/fastqc/** diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml new file mode 100644 index 0000000000..bc0bdb5b68 --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml @@ -0,0 +1,7 @@ +name: multiqc +channels: + - conda-forge + - bioconda + - defaults +dependencies: + - bioconda::multiqc=1.18 diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf 
b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf index 1fc387beed..00cc48d275 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf @@ -1,10 +1,10 @@ process MULTIQC { label 'process_single' - conda "bioconda::multiqc=1.14" + conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/multiqc:1.14--pyhdfd78af_0' : - 'biocontainers/multiqc:1.14--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.18--pyhdfd78af_0' : + 'biocontainers/multiqc:1.18--pyhdfd78af_0' }" input: path multiqc_files, stageAs: "?/*" @@ -25,12 +25,14 @@ process MULTIQC { def args = task.ext.args ?: '' def config = multiqc_config ? "--config $multiqc_config" : '' def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : '' + def logo = multiqc_logo ? /--cl-config 'custom_logo: "${multiqc_logo}"'/ : '' """ multiqc \\ --force \\ $args \\ $config \\ $extra_config \\ + $logo \\ . cat <<-END_VERSIONS > versions.yml diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml index f93b5ee519..f1aa660eb7 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml @@ -1,5 +1,5 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json -name: MultiQC +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json +name: multiqc description: Aggregate results from bioinformatics analyses across many samples into a single report keywords: - QC @@ -13,7 +13,6 @@ tools: homepage: https://multiqc.info/ documentation: https://multiqc.info/docs/ licence: ["GPL-3.0-or-later"] - input: - multiqc_files: type: file @@ -31,7 +30,6 @@ input: type: file description: Optional logo file for MultiQC pattern: "*.{png}" - output: - report: type: file @@ -54,3 +52,8 @@ authors: - "@bunop" - "@drpatelh" - "@jfy133" +maintainers: + - "@abhi18av" + - "@bunop" + - "@drpatelh" + - "@jfy133" diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test new file mode 100644 index 0000000000..c2dad217c4 --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test @@ -0,0 +1,63 @@ +nextflow_process { + + name "Test Process MULTIQC" + script "../main.nf" + process "MULTIQC" + tag "modules" + tag "modules_nfcore" + tag "multiqc" + + test("MULTIQC: FASTQC") { + + when { + params { + outdir = "$outputDir" + } + process { + """ + input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)]) + input[1] = [] + input[2] = [] + input[3] = [] + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert path(process.out.report.get(0)).exists() }, + { assert path(process.out.data.get(0)).exists() }, + { assert path(process.out.versions.get(0)).getText().contains("multiqc") } + ) + } + + } + + test("MULTIQC: FASTQC and a config file") { + + when { + params { + outdir = "$outputDir" + } + process { + """ + input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)]) + input[1] = 
Channel.of(file("https://github.com/nf-core/tools/raw/dev/nf_core/pipeline-template/assets/multiqc_config.yml", checkIfExists: true)) + input[2] = [] + input[3] = [] + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert path(process.out.report.get(0)).exists() }, + { assert path(process.out.data.get(0)).exists() }, + { assert path(process.out.versions.get(0)).getText().contains("multiqc") } + ) + } + + } +} diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/tags.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/tags.yml new file mode 100644 index 0000000000..bea6c0d37f --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/tags.yml @@ -0,0 +1,2 @@ +multiqc: + - modules/nf-core/multiqc/** diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 31d6d90ccb..b4402c46a6 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -16,7 +16,7 @@ params { {%- if igenomes %} // References genome = null - igenomes_base = 's3://ngi-igenomes/igenomes' + igenomes_base = 's3://ngi-igenomes/igenomes/' igenomes_ignore = false {% else %} fasta = null @@ -91,6 +91,7 @@ profiles { dumpHashes = true process.beforeScript = 'echo $HOSTNAME' cleanup = false + nextflow.enable.configProcessNamesValidation = true } conda { conda.enabled = true @@ -113,13 +114,13 @@ profiles { } docker { docker.enabled = true - docker.userEmulation = true conda.enabled = false singularity.enabled = false podman.enabled = false shifter.enabled = false charliecloud.enabled = false apptainer.enabled = false + runOptions = '-u $(id -u):$(id -g)' } arm { docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' @@ -190,7 +191,7 @@ singularity.registry = 'quay.io' // Nextflow plugins plugins { - id 'nf-validation' // Validation of pipeline parameters and creation of an input channel from a sample sheet + id 'nf-validation@1.1.3' // Validation of pipeline parameters and creation of an input channel from a sample sheet } {% if igenomes -%} @@ -216,6 +217,9 @@ env { // Capture exit codes from upstream processes when piping process.shell = ['/bin/bash', '-euo', 'pipefail'] +// Disable process selector warnings by default. Use debug profile to enable warnings. 
+nextflow.enable.configProcessNamesValidation = false + def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') timeline { enabled = true diff --git a/nf_core/schema.py b/nf_core/schema.py index b00697334b..7e4726f189 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -35,7 +35,7 @@ def __init__(self): self.pipeline_dir = None self.schema_filename = None self.schema_defaults = {} - self.schema_params = [] + self.schema_params = {} self.input_params = {} self.pipeline_params = {} self.invalid_nextflow_config_default_parameters = {} @@ -110,7 +110,7 @@ def load_schema(self): with open(self.schema_filename, "r") as fh: self.schema = json.load(fh) self.schema_defaults = {} - self.schema_params = [] + self.schema_params = {} log.debug(f"JSON file loaded: {self.schema_filename}") def sanitise_param_default(self, param): @@ -141,6 +141,9 @@ def sanitise_param_default(self, param): param["default"] = float(param["default"]) return param + if param["default"] is None: + return param + # Strings param["default"] = str(param["default"]) return param @@ -154,18 +157,20 @@ def get_schema_defaults(self): """ # Top level schema-properties (ungrouped) for p_key, param in self.schema.get("properties", {}).items(): - self.schema_params.append(p_key) + self.schema_params[p_key] = ("properties", p_key) if "default" in param: param = self.sanitise_param_default(param) - self.schema_defaults[p_key] = param["default"] + if param["default"] is not None: + self.schema_defaults[p_key] = param["default"] # Grouped schema properties in subschema definitions - for _, definition in self.schema.get("definitions", {}).items(): + for defn_name, definition in self.schema.get("definitions", {}).items(): for p_key, param in definition.get("properties", {}).items(): - self.schema_params.append(p_key) + self.schema_params[p_key] = ("definitions", defn_name, "properties", p_key) if "default" in param: param = self.sanitise_param_default(param) - self.schema_defaults[p_key] = param["default"] + if param["default"] is not None: + self.schema_defaults[p_key] = param["default"] def save_schema(self, suppress_logging=False): """Save a pipeline schema to a file""" @@ -239,9 +244,9 @@ def validate_default_params(self): except jsonschema.exceptions.ValidationError as e: raise AssertionError(f"Default parameters are invalid: {e.message}") for param, default in self.schema_defaults.items(): - if default in ("null", "", None, "None"): + if default in ("null", "", None, "None") or default is False: log.warning( - f"[yellow][!] Default parameter '{param}' is empty or null. It is advisable to remove the default from the schema" + f"[yellow][!] Default parameter '{param}' is empty, null, or False. 
It is advisable to remove the default from the schema" ) log.info("[green][✓] Default parameters match schema validation") @@ -762,12 +767,15 @@ def prompt_remove_schema_notfound_config(self, p_key): def add_schema_found_configs(self): """ Add anything that's found in the Nextflow params that's missing in the pipeline schema + Update defaults if they have changed """ params_added = [] params_ignore = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",") params_ignore.append("validationSchemaIgnoreParams") for p_key, p_val in self.pipeline_params.items(): + s_key = self.schema_params.get(p_key) # Check if key is in schema parameters + # Key is in pipeline but not in schema or ignored from schema if p_key not in self.schema_params and p_key not in params_ignore: if ( self.no_prompts @@ -782,7 +790,35 @@ def add_schema_found_configs(self): self.schema["properties"][p_key] = self.build_schema_param(p_val) log.debug(f"Adding '{p_key}' to pipeline schema") params_added.append(p_key) - + # Param has a default that does not match the schema + elif p_key in self.schema_defaults and (s_def := self.schema_defaults[p_key]) != ( + p_def := self.build_schema_param(p_val).get("default") + ): + if self.no_prompts or Confirm.ask( + f":sparkles: Default for [bold]'params.{p_key}'[/] in the pipeline config does not match schema. (schema: '{s_def}' | config: '{p_def}'). " + "[blue]Update pipeline schema?" + ): + s_key_def = s_key + ("default",) + if p_def is None: + nf_core.utils.nested_delitem(self.schema, s_key_def) + log.debug(f"Removed '{p_key}' default from pipeline schema") + else: + nf_core.utils.nested_setitem(self.schema, s_key_def, p_def) + log.debug(f"Updating '{p_key}' default to '{p_def}' in pipeline schema") + # There is no default in schema but now there is a default to write + elif ( + s_key + and (p_key not in self.schema_defaults) + and (p_key not in params_ignore) + and (p_def := self.build_schema_param(p_val).get("default")) + ): + if self.no_prompts or Confirm.ask( + f":sparkles: Default for [bold]'params.{p_key}'[/] is not in schema (def='{p_def}'). " + "[blue]Update pipeline schema?" 
+ ): + s_key_def = s_key + ("default",) + nf_core.utils.nested_setitem(self.schema, s_key_def, p_def) + log.debug(f"Updating '{p_key}' default to '{p_def}' in pipeline schema") return params_added def build_schema_param(self, p_val): @@ -806,13 +842,15 @@ def build_schema_param(self, p_val): p_val = None # Booleans - if p_val in ["True", "False"]: - p_val = p_val == "True" # Convert to bool + if p_val in ["true", "false", "True", "False"]: + p_val = p_val in ["true", "True"] # Convert to bool p_type = "boolean" - p_schema = {"type": p_type, "default": p_val} + # Don't return a default for anything false-y except 0 + if not p_val and not (p_val == 0 and p_val is not False): + return {"type": p_type} - return p_schema + return {"type": p_type, "default": p_val} def launch_web_builder(self): """ diff --git a/nf_core/subworkflow-template/subworkflows/main.nf b/nf_core/subworkflow-template/main.nf similarity index 100% rename from nf_core/subworkflow-template/subworkflows/main.nf rename to nf_core/subworkflow-template/main.nf diff --git a/nf_core/subworkflow-template/subworkflows/meta.yml b/nf_core/subworkflow-template/meta.yml similarity index 97% rename from nf_core/subworkflow-template/subworkflows/meta.yml rename to nf_core/subworkflow-template/meta.yml index cb77dc23a3..7c83b3c490 100644 --- a/nf_core/subworkflow-template/subworkflows/meta.yml +++ b/nf_core/subworkflow-template/meta.yml @@ -47,3 +47,5 @@ output: pattern: "versions.yml" authors: - "{{ author }}" +maintainers: + - "{{ author }}" diff --git a/nf_core/subworkflow-template/tests/main.nf b/nf_core/subworkflow-template/tests/main.nf deleted file mode 100644 index e09cc50af8..0000000000 --- a/nf_core/subworkflow-template/tests/main.nf +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env nextflow - -nextflow.enable.dsl = 2 - -include { {{ component_name_underscore|upper }} } from '../../../../subworkflows/{{ org }}/{{ subworkflow_dir }}/main.nf' - -workflow test_{{ component_name_underscore }} { - {% if has_meta %} - input = [ - [ id:'test' ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) - ] - {%- else %} - input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) - {%- endif %} - - {{ component_name_underscore|upper }} ( input ) -} diff --git a/nf_core/subworkflow-template/tests/main.nf.test b/nf_core/subworkflow-template/tests/main.nf.test new file mode 100644 index 0000000000..c44e19a4e4 --- /dev/null +++ b/nf_core/subworkflow-template/tests/main.nf.test @@ -0,0 +1,43 @@ +// TODO nf-core: Once you have added the required tests, please run the following command to build this file: +// nf-core subworkflows test {{ component_name }} +nextflow_workflow { + + name "Test Subworkflow {{ component_name_underscore|upper }}" + script "../main.nf" + workflow "{{ component_name_underscore|upper }}" + + tag "subworkflows" + tag "subworkflows_nfcore" + tag "subworkflows/{{ component_name }}" + // TODO nf-core: Add tags for all modules used within this subworkflow. Example: + tag "samtools" + tag "samtools/sort" + tag "samtools/index" + + + // TODO nf-core: Change the test name preferably indicating the test-data and file-format used + test("sarscov2 - bam - single_end") { + + when { + workflow { + """ + // TODO nf-core: define inputs of the workflow here. 
Example: + input[0] = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + ] + input[1] = [ [ id:'genome' ], + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + ] + """ + } + } + + then { + assertAll( + { assert workflow.success}, + { assert snapshot(workflow.out).match()} + //TODO nf-core: Add all required assertions to verify the test output. + ) + } + } +} diff --git a/nf_core/subworkflow-template/tests/nextflow.config b/nf_core/subworkflow-template/tests/nextflow.config deleted file mode 100644 index 8730f1c4b9..0000000000 --- a/nf_core/subworkflow-template/tests/nextflow.config +++ /dev/null @@ -1,5 +0,0 @@ -process { - - publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - -} diff --git a/nf_core/subworkflow-template/tests/tags.yml b/nf_core/subworkflow-template/tests/tags.yml new file mode 100644 index 0000000000..35cad36785 --- /dev/null +++ b/nf_core/subworkflow-template/tests/tags.yml @@ -0,0 +1,2 @@ +subworkflows/{{ component_name_underscore }}: + - subworkflows/{{ org }}/{{ component_dir }}/** diff --git a/nf_core/subworkflow-template/tests/test.yml b/nf_core/subworkflow-template/tests/test.yml deleted file mode 100644 index 7bca1616f3..0000000000 --- a/nf_core/subworkflow-template/tests/test.yml +++ /dev/null @@ -1,12 +0,0 @@ -## TODO nf-core: Please run the following command to build this file: -# nf-core subworkflows create-test-yml {{ component_name_underscore }} -- name: "{{ component_name_underscore }}" - command: nextflow run ./tests/subworkflows/{{ org }}/{{ subworkflow_dir }} -entry test_{{ component_name_underscore }} -c ./tests/config/nextflow.config - tags: - - "subworkflows" - - "subworkflows/{{ component_name_underscore }}" - files: - - path: "output/{{ component_name_underscore }}/test.bam" - md5sum: e667c7caad0bc4b7ac383fd023c654fc - - path: output/{{ component_name_underscore }}/versions.yml - md5sum: a01fe51bc4c6a3a6226fbf77b2c7cf3b diff --git a/nf_core/subworkflows/__init__.py b/nf_core/subworkflows/__init__.py index 1ceccd021f..88e8a09388 100644 --- a/nf_core/subworkflows/__init__.py +++ b/nf_core/subworkflows/__init__.py @@ -4,6 +4,4 @@ from .lint import SubworkflowLint from .list import SubworkflowList from .remove import SubworkflowRemove -from .subworkflows_test import SubworkflowsTest -from .test_yml_builder import SubworkflowTestYmlBuilder from .update import SubworkflowUpdate diff --git a/nf_core/subworkflows/create.py b/nf_core/subworkflows/create.py index 963076455e..93e9f271be 100644 --- a/nf_core/subworkflows/create.py +++ b/nf_core/subworkflows/create.py @@ -12,6 +12,7 @@ def __init__( component="", author=None, force=False, + migrate_pytest=False, ): super().__init__( "subworkflows", @@ -19,4 +20,5 @@ def __init__( component, author, force=force, + migrate_pytest=migrate_pytest, ) diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index 44c7c21a37..ffba41f9da 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -29,12 +29,12 @@ class SubworkflowLint(ComponentLint): """ # Import lint functions - from .main_nf import main_nf - from .meta_yml import meta_yml - from .subworkflow_changes import subworkflow_changes - from .subworkflow_tests import subworkflow_tests - from .subworkflow_todos import subworkflow_todos - from .subworkflow_version import subworkflow_version + from .main_nf import 
main_nf # type: ignore[misc] + from .meta_yml import meta_yml # type: ignore[misc] + from .subworkflow_changes import subworkflow_changes # type: ignore[misc] + from .subworkflow_tests import subworkflow_tests # type: ignore[misc] + from .subworkflow_todos import subworkflow_todos # type: ignore[misc] + from .subworkflow_version import subworkflow_version # type: ignore[misc] def __init__( self, diff --git a/nf_core/subworkflows/lint/main_nf.py b/nf_core/subworkflows/lint/main_nf.py index fbdef593cb..c7ce77490d 100644 --- a/nf_core/subworkflows/lint/main_nf.py +++ b/nf_core/subworkflows/lint/main_nf.py @@ -134,7 +134,7 @@ def check_main_section(self, lines, included_components): self.main_nf, ) ) - if component + ".out.versions" not in script: + if component + ".out.versions" in script: self.passed.append( ( "main_nf_include_versions", diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py index 2710c632e2..4944b26188 100644 --- a/nf_core/subworkflows/lint/meta_yml.py +++ b/nf_core/subworkflows/lint/meta_yml.py @@ -123,7 +123,7 @@ def meta_yml(subworkflow_lint_object, subworkflow): ) ) else: - subworkflow.failed.append( + subworkflow.passed.append( ( "meta_modules_deprecated", f"Deprecated section 'modules' not found in `meta.yml`", diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index 834876f44e..1ebced6d42 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -1,126 +1,212 @@ """ Lint the tests of a subworkflow in nf-core/modules """ +import json import logging -import os from pathlib import Path import yaml -import nf_core.subworkflows +from nf_core.components.nfcore_component import NFCoreComponent log = logging.getLogger(__name__) -def subworkflow_tests(_, subworkflow): +def subworkflow_tests(_, subworkflow: NFCoreComponent): """ Lint the tests of a subworkflow in ``nf-core/modules`` It verifies that the test directory exists - and contains a ``main.nf`` and a ``test.yml``, - and that the subworkflow is present in the ``pytest_modules.yml`` - file. + and contains a ``main.nf.test`` a ``main.nf.test.snap`` and ``tags.yml``. 
Additionally, checks that all included components in test ``main.nf`` are specified in ``test.yml`` """ - if os.path.exists(subworkflow.test_dir): - subworkflow.passed.append(("test_dir_exists", "Test directory exists", subworkflow.test_dir)) + repo_dir = subworkflow.component_dir.parts[ + : subworkflow.component_dir.parts.index(subworkflow.component_name.split("/")[0]) + ][-1] + test_dir = Path(subworkflow.base_dir, "tests", "subworkflows", repo_dir, subworkflow.component_name) + pytest_main_nf = Path(test_dir, "main.nf") + is_pytest = pytest_main_nf.is_file() + log.debug(f"{pytest_main_nf} is pytest: {is_pytest}") + if subworkflow.nftest_testdir.is_dir(): + subworkflow.passed.append(("test_dir_exists", "nf-test test directory exists", subworkflow.nftest_testdir)) else: - subworkflow.failed.append(("test_dir_exists", "Test directory is missing", subworkflow.test_dir)) + if is_pytest: + subworkflow.warned.append(("test_dir_exists", "nf-test directory is missing", subworkflow.nftest_testdir)) + else: + subworkflow.failed.append(("test_dir_exists", "nf-test directory is missing", subworkflow.nftest_testdir)) return # Lint the test main.nf file - test_main_nf = os.path.join(subworkflow.test_dir, "main.nf") - if os.path.exists(test_main_nf): - subworkflow.passed.append(("test_main_exists", "test `main.nf` exists", subworkflow.test_main_nf)) + if subworkflow.nftest_main_nf.is_file(): + subworkflow.passed.append(("test_main_nf_exists", "test `main.nf.test` exists", subworkflow.nftest_main_nf)) else: - subworkflow.failed.append(("test_main_exists", "test `main.nf` does not exist", subworkflow.test_main_nf)) + if is_pytest: + subworkflow.warned.append( + ("test_main_nf_exists", "test `main.nf.test` does not exist", subworkflow.nftest_main_nf) + ) + else: + subworkflow.failed.append( + ("test_main_nf_exists", "test `main.nf.test` does not exist", subworkflow.nftest_main_nf) + ) - # Check that entry in pytest_modules.yml exists - try: - pytest_yml_path = os.path.join(subworkflow.base_dir, "tests", "config", "pytest_modules.yml") - with open(pytest_yml_path, "r") as fh: - pytest_yml = yaml.safe_load(fh) - if "subworkflows/" + subworkflow.component_name in pytest_yml.keys(): - subworkflow.passed.append(("test_pytest_yml", "correct entry in pytest_modules.yml", pytest_yml_path)) + if subworkflow.nftest_main_nf.is_file(): + with open(subworkflow.nftest_main_nf, "r") as fh: + # Check if main.nf.test.snap file exists, if 'snap(' is inside main.nf.test + if "snapshot(" in fh.read(): + snap_file = subworkflow.nftest_testdir / "main.nf.test.snap" + if snap_file.is_file(): + subworkflow.passed.append(("test_snapshot_exists", "test `main.nf.test.snap` exists", snap_file)) + # Validate no empty files + with open(snap_file, "r") as snap_fh: + try: + snap_content = json.load(snap_fh) + for test_name in snap_content.keys(): + if "d41d8cd98f00b204e9800998ecf8427e" in str(snap_content[test_name]): + if "stub" not in test_name: + subworkflow.failed.append( + ( + "test_snap_md5sum", + "md5sum for empty file found: d41d8cd98f00b204e9800998ecf8427e", + snap_file, + ) + ) + else: + subworkflow.passed.append( + ( + "test_snap_md5sum", + "md5sum for empty file found, but it is a stub test", + snap_file, + ) + ) + else: + subworkflow.passed.append( + ( + "test_snap_md5sum", + "no md5sum for empty file found", + snap_file, + ) + ) + if "7029066c27ac6f5ef18d660d5741979a" in str(snap_content[test_name]): + if "stub" not in test_name: + subworkflow.failed.append( + ( + "test_snap_md5sum", + "md5sum for compressed empty file 
found: 7029066c27ac6f5ef18d660d5741979a", + snap_file, + ) + ) + else: + subworkflow.passed.append( + ( + "test_snap_md5sum", + "md5sum for compressed empty file found, but it is a stub test", + snap_file, + ) + ) + else: + subworkflow.passed.append( + ( + "test_snap_md5sum", + "no md5sum for compressed empty file found", + snap_file, + ) + ) + except json.decoder.JSONDecodeError as e: + subworkflow.failed.append( + ( + "test_snapshot_exists", + f"snapshot file `main.nf.test.snap` can't be read: {e}", + snap_file, + ) + ) + else: + subworkflow.failed.append( + ("test_snapshot_exists", "test `main.nf.test.snap` does not exist", snap_file) + ) + # Verify that tags are correct. + main_nf_tags = subworkflow._get_main_nf_tags(subworkflow.nftest_main_nf) + required_tags = [ + "subworkflows", + f"subworkflows/{subworkflow.component_name}", + "subworkflows_nfcore", + ] + included_components = [] + if subworkflow.main_nf.is_file(): + included_components = subworkflow._get_included_components(subworkflow.main_nf) + chained_components_tags = subworkflow._get_included_components_in_chained_tests(subworkflow.nftest_main_nf) + log.debug(f"Included components: {included_components}") + log.debug(f"Required tags: {required_tags}") + log.debug(f"Included components for chained nf-tests: {chained_components_tags}") + missing_tags = [] + for tag in set(required_tags + included_components + chained_components_tags): + if tag not in main_nf_tags: + missing_tags.append(tag) + if len(missing_tags) == 0: + subworkflow.passed.append(("test_main_tags", "Tags adhere to guidelines", subworkflow.nftest_main_nf)) else: - subworkflow.failed.append(("test_pytest_yml", "missing entry in pytest_modules.yml", pytest_yml_path)) - except FileNotFoundError: - subworkflow.failed.append(("test_pytest_yml", "Could not open pytest_modules.yml file", pytest_yml_path)) + subworkflow.failed.append( + ( + "test_main_tags", + f"Tags do not adhere to guidelines. Tags missing in `main.nf.test`: {missing_tags}", + subworkflow.nftest_main_nf, + ) + ) - # Lint the test.yml file - try: - with open(subworkflow.test_yml, "r") as fh: - test_yml = yaml.safe_load(fh) - - # Verify that tags are correct. 
All included components in test main.nf should be specified in test.yml so pytests run for all of them - included_components = nf_core.subworkflows.SubworkflowTestYmlBuilder.parse_module_tags( - subworkflow, subworkflow.component_dir - ) - for test in test_yml: - for component in set(included_components): - if component in test["tags"]: - subworkflow.passed.append( - ( - "test_yml_tags", - f"Included module/subworkflow `{component}` specified in `test.yml`", - subworkflow.test_yml, - ) - ) - else: - subworkflow.failed.append( - ( - "test_yml_tags", - f"Included module/subworkflow `{component}` missing in `test.yml`", - subworkflow.test_yml, - ) + # Check pytest_modules.yml does not contain entries for subworkflows with nf-test + pytest_yml_path = subworkflow.base_dir / "tests" / "config" / "pytest_modules.yml" + if pytest_yml_path.is_file() and not is_pytest: + try: + with open(pytest_yml_path, "r") as fh: + pytest_yml = yaml.safe_load(fh) + if "subworkflows/" + subworkflow.component_name in pytest_yml.keys(): + subworkflow.failed.append( + ( + "test_pytest_yml", + "subworkflow with nf-test should not be listed in pytest_modules.yml", + pytest_yml_path, ) - if component.startswith("subworkflows/"): - included_components += nf_core.subworkflows.SubworkflowTestYmlBuilder.parse_module_tags( - _, - Path(subworkflow.component_dir).parent.joinpath(component.replace("subworkflows/", "")), - ) - included_components = list(set(included_components)) + ) + else: + subworkflow.passed.append( + ("test_pytest_yml", "subworkflow with nf-test not in pytest_modules.yml", pytest_yml_path) + ) + except FileNotFoundError: + subworkflow.warned.append(("test_pytest_yml", "Could not open pytest_modules.yml file", pytest_yml_path)) - # Look for md5sums of empty files - for tfile in test.get("files", []): - if tfile.get("md5sum") == "d41d8cd98f00b204e9800998ecf8427e": - subworkflow.failed.append( - ( - "test_yml_md5sum", - "md5sum for empty file found: d41d8cd98f00b204e9800998ecf8427e", - subworkflow.test_yml, - ) - ) - else: - subworkflow.passed.append( - ( - "test_yml_md5sum", - "no md5sum for empty file found", - subworkflow.test_yml, - ) - ) - if tfile.get("md5sum") == "7029066c27ac6f5ef18d660d5741979a": - subworkflow.failed.append( - ( - "test_yml_md5sum", - "md5sum for compressed empty file found: 7029066c27ac6f5ef18d660d5741979a", - subworkflow.test_yml, - ) - ) - else: - subworkflow.passed.append( - ( - "test_yml_md5sum", - "no md5sum for compressed empty file found", - subworkflow.test_yml, - ) - ) + if subworkflow.tags_yml.is_file(): + # Check tags.yml exists and it has the correct entry + subworkflow.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", subworkflow.tags_yml)) + with open(subworkflow.tags_yml, "r") as fh: + tags_yml = yaml.safe_load(fh) + if "subworkflows/" + subworkflow.component_name in tags_yml.keys(): + subworkflow.passed.append(("test_tags_yml", "correct entry in tags.yml", subworkflow.tags_yml)) + if ( + f"subworkflows/{subworkflow.org}/{subworkflow.component_name}/**" + in tags_yml["subworkflows/" + subworkflow.component_name] + ): + subworkflow.passed.append(("test_tags_yml", "correct path in tags.yml", subworkflow.tags_yml)) + else: + subworkflow.failed.append(("test_tags_yml", "incorrect path in tags.yml", subworkflow.tags_yml)) + else: + subworkflow.failed.append( + ( + "test_tags_yml", + "incorrect entry in tags.yml, should be 'subworkflows/'", + subworkflow.tags_yml, + ) + ) + else: + if is_pytest: + subworkflow.warned.append(("test_tags_yml_exists", "file 
`tags.yml` does not exist", subworkflow.tags_yml)) + else: + subworkflow.failed.append(("test_tags_yml_exists", "file `tags.yml` does not exist", subworkflow.tags_yml)) - # Test that the file exists - subworkflow.passed.append(("test_yml_exists", "Test `test.yml` exists", subworkflow.test_yml)) - except FileNotFoundError: - subworkflow.failed.append(("test_yml_exists", "Test `test.yml` does not exist", subworkflow.test_yml)) - subworkflow.failed.append(("test_yml_exists", "Test `test.yml` does not exist", subworkflow.test_yml)) - subworkflow.failed.append(("test_yml_exists", "Test `test.yml` does not exist", subworkflow.test_yml)) + # Check that the old test directory does not exist + if not is_pytest: + old_test_dir = Path(subworkflow.base_dir, "tests", "subworkflows", subworkflow.component_name) + if old_test_dir.is_dir(): + subworkflow.failed.append(("test_old_test_dir", "old test directory exists", old_test_dir)) + else: + subworkflow.passed.append(("test_old_test_dir", "old test directory does not exist", old_test_dir)) diff --git a/nf_core/subworkflows/lint/subworkflow_todos.py b/nf_core/subworkflows/lint/subworkflow_todos.py index 1de02b18e2..91f9f55b0b 100644 --- a/nf_core/subworkflows/lint/subworkflow_todos.py +++ b/nf_core/subworkflows/lint/subworkflow_todos.py @@ -38,10 +38,3 @@ def subworkflow_todos(_, subworkflow): subworkflow.warned.append(("subworkflow_todo", warning, swf_results["file_paths"][i])) for i, passed in enumerate(swf_results["passed"]): subworkflow.passed.append(("subworkflow_todo", passed, subworkflow.component_dir)) - - # Module tests directory - test_results = pipeline_todos(None, root_dir=subworkflow.test_dir) - for i, warning in enumerate(test_results["warned"]): - subworkflow.warned.append(("subworkflow_todo", warning, test_results["file_paths"][i])) - for i, passed in enumerate(test_results["passed"]): - subworkflow.passed.append(("subworkflow_todo", passed, subworkflow.test_dir)) diff --git a/nf_core/subworkflows/subworkflows_test.py b/nf_core/subworkflows/subworkflows_test.py deleted file mode 100644 index d072ff678a..0000000000 --- a/nf_core/subworkflows/subworkflows_test.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python -""" -The SubworkflowsTest class runs the tests locally -""" - -from nf_core.components.components_test import ComponentsTest - - -class SubworkflowsTest(ComponentsTest): - """ - Class to run module pytests. 
- """ - - def __init__( - self, - subworkflow_name=None, - no_prompts=False, - pytest_args="", - remote_url=None, - branch=None, - no_pull=False, - ): - super().__init__( - component_type="subworkflows", - component_name=subworkflow_name, - no_prompts=no_prompts, - pytest_args=pytest_args, - remote_url=remote_url, - branch=branch, - no_pull=no_pull, - ) diff --git a/nf_core/subworkflows/test_yml_builder.py b/nf_core/subworkflows/test_yml_builder.py deleted file mode 100644 index 468465c9df..0000000000 --- a/nf_core/subworkflows/test_yml_builder.py +++ /dev/null @@ -1,391 +0,0 @@ -""" -The ModulesTestYmlBuilder class handles automatic generation of the modules test.yml file -along with running the tests and creating md5 sums -""" - -from __future__ import print_function - -import errno -import gzip -import hashlib -import io -import logging -import operator -import os -import re -import shlex -import subprocess -import tempfile -from pathlib import Path - -import questionary -import rich -import yaml -from rich.syntax import Syntax - -import nf_core.utils -from nf_core.components.components_command import ComponentCommand -from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import ModulesRepo - -from ..lint_utils import run_prettier_on_file - -log = logging.getLogger(__name__) - - -class SubworkflowTestYmlBuilder(ComponentCommand): - def __init__( - self, - subworkflow=None, - directory=".", - run_tests=False, - test_yml_output_path=None, - force_overwrite=False, - no_prompts=False, - remote_url=None, - branch=None, - ): - super().__init__("subworkflows", directory) - self.dir = directory - self.subworkflow = subworkflow - self.remote_url = remote_url - self.branch = branch - self.run_tests = run_tests - self.test_yml_output_path = test_yml_output_path - self.force_overwrite = force_overwrite - self.no_prompts = no_prompts - self.subworkflow_dir = None - self.subworkflow_test_main = None - self.entry_points = [] - self.tests = [] - self.errors = [] - self.modules_repo = ModulesRepo(remote_url=self.remote_url, branch=self.branch) - self.modules_json = ModulesJson(self.dir) - - def run(self): - """Run build steps""" - if not self.no_prompts: - log.info( - "[yellow]Press enter to use default values [cyan bold](shown in brackets) [yellow]or type your own responses" - ) - self.check_inputs() - self.scrape_workflow_entry_points() - self.build_all_tests() - self.print_test_yml() - if len(self.errors) > 0: - errors = "\n - ".join(self.errors) - raise UserWarning(f"Ran, but found errors:\n - {errors}") - - def check_inputs(self): - """Do more complex checks about supplied flags.""" - # Get the tool name if not specified - if self.subworkflow is None: - self.subworkflow = questionary.autocomplete( - "Subworkflow name:", - choices=self.components_from_repo(self.org), - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - self.subworkflow_dir = os.path.join("subworkflows", self.modules_repo.repo_path, self.subworkflow) - self.subworkflow_test_main = os.path.join( - "tests", "subworkflows", self.modules_repo.repo_path, self.subworkflow, "main.nf" - ) - - # First, sanity check that the module directory exists - if not os.path.isdir(self.subworkflow_dir): - raise UserWarning(f"Cannot find directory '{self.subworkflow_dir}'.") - if not os.path.exists(self.subworkflow_test_main): - raise UserWarning(f"Cannot find module test workflow '{self.subworkflow_test_main}'") - - # Check that we're running tests if no prompts - if not self.run_tests and self.no_prompts: - 
log.debug("Setting run_tests to True as running without prompts") - self.run_tests = True - - # Get the output YAML file / check it does not already exist - while self.test_yml_output_path is None: - default_val = f"tests/subworkflows/{self.modules_repo.repo_path}/{self.subworkflow}/test.yml" - if self.no_prompts: - self.test_yml_output_path = default_val - else: - self.test_yml_output_path = rich.prompt.Prompt.ask( - "[violet]Test YAML output path[/] (- for stdout)", default=default_val - ).strip() - if self.test_yml_output_path == "": - self.test_yml_output_path = None - # Check that the output YAML file does not already exist - if ( - self.test_yml_output_path is not None - and self.test_yml_output_path != "-" - and os.path.exists(self.test_yml_output_path) - and not self.force_overwrite - ): - if rich.prompt.Confirm.ask( - f"[red]File exists! [green]'{self.test_yml_output_path}' [violet]Overwrite?" - ): - self.force_overwrite = True - else: - self.test_yml_output_path = None - if os.path.exists(self.test_yml_output_path) and not self.force_overwrite: - raise UserWarning( - f"Test YAML file already exists! '{self.test_yml_output_path}'. Use '--force' to overwrite." - ) - - def scrape_workflow_entry_points(self): - """Find the test workflow entry points from main.nf""" - log.info(f"Looking for test workflow entry points: '{self.subworkflow_test_main}'") - with open(self.subworkflow_test_main, "r") as fh: - for line in fh: - match = re.match(r"workflow\s+(\S+)\s+{", line) - if match: - self.entry_points.append(match.group(1)) - if len(self.entry_points) == 0: - raise UserWarning(f"No workflow entry points found in '{self.subworkflow_test_main}'") - - def build_all_tests(self): - """ - Go over each entry point and build structure - """ - for entry_point in self.entry_points: - ep_test = self.build_single_test(entry_point) - if ep_test: - self.tests.append(ep_test) - - def build_single_test(self, entry_point): - """Given the supplied cli flags, prompt for any that are missing. 
- - Returns: Test command - """ - ep_test = { - "name": "", - "command": "", - "tags": [], - "files": [], - } - - # Print nice divider line - console = rich.console.Console() - console.print("[black]" + "─" * console.width) - - log.info(f"Building test meta for entry point '{entry_point}'") - - while ep_test["name"] == "": - default_val = f"{self.subworkflow} {entry_point}" - if self.no_prompts: - ep_test["name"] = default_val - else: - ep_test["name"] = rich.prompt.Prompt.ask("[violet]Test name", default=default_val).strip() - - while ep_test["command"] == "": - default_val = f"nextflow run ./tests/subworkflows/{self.modules_repo.repo_path}/{self.subworkflow} -entry {entry_point} -c ./tests/config/nextflow.config" - if self.no_prompts: - ep_test["command"] = default_val - else: - ep_test["command"] = rich.prompt.Prompt.ask("[violet]Test command", default=default_val).strip() - - while len(ep_test["tags"]) == 0: - tag_defaults = ["subworkflows"] - tag_defaults.append("subworkflows/" + self.subworkflow) - tag_defaults += self.parse_module_tags(self.subworkflow_dir) - if self.no_prompts: - ep_test["tags"] = sorted(tag_defaults) - else: - while len(ep_test["tags"]) == 0: - prompt_tags = rich.prompt.Prompt.ask( - "[violet]Test tags[/] (comma separated)", default=",".join(sorted(tag_defaults)) - ).strip() - ep_test["tags"] = [t.strip() for t in prompt_tags.split(",")] - - ep_test["files"] = self.get_md5_sums(ep_test["command"]) - - return ep_test - - def parse_module_tags(self, subworkflow_dir): - """ - Parse the subworkflow main.nf file to retrieve all imported modules for adding tags. - """ - tags = [] - with open(Path(subworkflow_dir, "main.nf"), "r") as fh: - for line in fh: - regex = re.compile( - r"include(?: *{ *)([a-zA-Z\_0-9]*)(?: *as *)?(?:[a-zA-Z\_0-9]*)?(?: *})(?: *from *)(?:'|\")(.*)(?:'|\")" - ) - match = regex.match(line) - if match and len(match.groups()) == 2: - name, link = match.groups() - if link.startswith("../../../"): - name_split = name.lower().split("_") - tags.append("/".join(name_split)) - if len(name_split) > 1: - tags.append(name_split[0]) - elif link.startswith("../"): - tags.append("subworkflows/" + name.lower()) - return list(set(tags)) - - def check_if_empty_file(self, fname): - """Check if the file is empty, or compressed empty""" - if os.path.getsize(fname) == 0: - return True - try: - with open(fname, "rb") as fh: - g_f = gzip.GzipFile(fileobj=fh, mode="rb") - if g_f.read() == b"": - return True - except gzip.BadGzipFile: - pass - - return False - - def _md5(self, fname): - """Generate md5 sum for file""" - hash_md5 = hashlib.md5() - with open(fname, "rb") as f: - for chunk in iter(lambda: f.read(io.DEFAULT_BUFFER_SIZE), b""): - hash_md5.update(chunk) - md5sum = hash_md5.hexdigest() - return md5sum - - def create_test_file_dict(self, results_dir, is_repeat=False): - """Walk through directory and collect md5 sums""" - test_files = [] - for root, _, files in os.walk(results_dir, followlinks=True): - for filename in files: - # Check that the file is not versions.yml - if filename == "versions.yml": - continue - file_path = os.path.join(root, filename) - # add the key here so that it comes first in the dict - test_file = {"path": file_path} - # Check that this isn't an empty file - if self.check_if_empty_file(file_path): - if not is_repeat: - self.errors.append(f"Empty file found! '{os.path.basename(file_path)}'") - # Add the md5 anyway, linting should fail later and can be manually removed if needed. 
- # Originally we skipped this if empty, but then it's too easy to miss the warning. - # Equally, if a file is legitimately empty we don't want to prevent this from working. - file_md5 = self._md5(file_path) - test_file["md5sum"] = file_md5 - # Switch out the results directory path with the expected 'output' directory - test_file["path"] = file_path.replace(results_dir, "output") - test_files.append(test_file) - - test_files = sorted(test_files, key=operator.itemgetter("path")) - - return test_files - - def get_md5_sums(self, command, results_dir=None, results_dir_repeat=None): - """ - Recursively go through directories and subdirectories - and generate tuples of (, ) - returns: list of tuples - """ - - run_this_test = False - while results_dir is None: - if self.run_tests or run_this_test: - results_dir, results_dir_repeat = self.run_tests_workflow(command) - else: - results_dir = rich.prompt.Prompt.ask( - "[violet]Test output folder with results[/] (leave blank to run test)" - ) - if results_dir == "": - results_dir = None - run_this_test = True - elif not os.path.isdir(results_dir): - log.error(f"Directory '{results_dir}' does not exist") - results_dir = None - - test_files = self.create_test_file_dict(results_dir=results_dir) - - # If test was repeated, compare the md5 sums - if results_dir_repeat: - test_files_repeat = self.create_test_file_dict(results_dir=results_dir_repeat, is_repeat=True) - - # Compare both test.yml files - for i in range(len(test_files)): - if test_files[i].get("md5sum") and not test_files[i].get("md5sum") == test_files_repeat[i]["md5sum"]: - test_files[i].pop("md5sum") - test_files[i]["contains"] = [ - " # TODO nf-core: file md5sum was variable, please replace this text with a string found in the file instead " - ] - - if len(test_files) == 0: - raise UserWarning(f"Could not find any test result files in '{results_dir}'") - - return test_files - - def run_tests_workflow(self, command): - """Given a test workflow and an entry point, run the test workflow""" - - # The config expects $PROFILE and Nextflow fails if it's not set - if os.environ.get("PROFILE") is None: - os.environ["PROFILE"] = "" - if self.no_prompts: - log.info( - "Setting env var '$PROFILE' to an empty string as not set.\n" - "Tests will run with Docker by default. " - "To use Singularity set 'export PROFILE=singularity' in your shell before running this command." - ) - else: - question = { - "type": "list", - "name": "profile", - "message": "Choose software profile", - "choices": ["Docker", "Singularity", "Conda"], - } - answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style) - profile = answer["profile"].lower() - if profile in ["singularity", "conda"]: - os.environ["PROFILE"] = profile - log.info(f"Setting env var '$PROFILE' to '{profile}'") - - tmp_dir = tempfile.mkdtemp() - tmp_dir_repeat = tempfile.mkdtemp() - work_dir = tempfile.mkdtemp() - command_repeat = command + f" --outdir {tmp_dir_repeat} -work-dir {work_dir}" - command += f" --outdir {tmp_dir} -work-dir {work_dir}" - - log.info(f"Running '{self.subworkflow}' test with command:\n[violet]{command}") - try: - nfconfig_raw = subprocess.check_output(shlex.split(command)) - log.info("Repeating test ...") - nfconfig_raw = subprocess.check_output(shlex.split(command_repeat)) - - except OSError as e: - if e.errno == errno.ENOENT and command.strip().startswith("nextflow "): - raise AssertionError( - "It looks like Nextflow is not installed. It is required for most nf-core functions." 
-                )
-        except subprocess.CalledProcessError as e:
-            output = rich.markup.escape(e.output.decode())
-            raise UserWarning(f"Error running test workflow (exit code {e.returncode})\n[red]{output}")
-        except Exception as e:
-            raise UserWarning(f"Error running test workflow: {e}")
-        else:
-            log.info("Test workflow finished!")
-            try:
-                log.debug(rich.markup.escape(nfconfig_raw))
-            except TypeError:
-                log.debug(rich.markup.escape(nfconfig_raw.decode("utf-8")))
-
-        return tmp_dir, tmp_dir_repeat
-
-    def print_test_yml(self):
-        """
-        Generate the test yml file.
-        """
-        with tempfile.NamedTemporaryFile(mode="w+") as fh:
-            yaml.dump(self.tests, fh, Dumper=nf_core.utils.custom_yaml_dumper(), width=10000000)
-            run_prettier_on_file(fh.name)
-            fh.seek(0)
-            prettified_yml = fh.read()
-
-        if self.test_yml_output_path == "-":
-            console = rich.console.Console()
-            console.print("\n", Syntax(prettified_yml, "yaml"), "\n")
-        else:
-            try:
-                log.info(f"Writing to '{self.test_yml_output_path}'")
-                with open(self.test_yml_output_path, "w") as fh:
-                    fh.write(prettified_yml)
-            except FileNotFoundError as e:
-                raise UserWarning(f"Could not create test.yml file: '{e}'")
diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py
index 41e0853f2e..a2107f633c 100644
--- a/nf_core/synced_repo.py
+++ b/nf_core/synced_repo.py
@@ -3,9 +3,9 @@
 import os
 import shutil
 from pathlib import Path
+from typing import Dict
 
 import git
-import rich
 import rich.progress
 from git.exc import GitCommandError
 
@@ -61,7 +61,7 @@ class SyncedRepo:
     An object to store details about a locally cached code repository.
     """
 
-    local_repo_statuses = {}
+    local_repo_statuses: Dict[str, bool] = {}
     no_pull_global = False
 
     @staticmethod
diff --git a/nf_core/utils.py b/nf_core/utils.py
index 8b73a10921..bcc8faa3fd 100644
--- a/nf_core/utils.py
+++ b/nf_core/utils.py
@@ -16,7 +16,9 @@
 import subprocess
 import sys
 import time
+from contextlib import contextmanager
 from pathlib import Path
+from typing import Generator, Tuple, Union
 
 import git
 import prompt_toolkit
@@ -53,10 +55,10 @@
 )
 
 NFCORE_CACHE_DIR = os.path.join(
-    os.environ.get("XDG_CACHE_HOME", os.path.join(os.getenv("HOME"), ".cache")),
+    os.environ.get("XDG_CACHE_HOME", os.path.join(os.getenv("HOME") or "", ".cache")),
     "nfcore",
 )
-NFCORE_DIR = os.path.join(os.environ.get("XDG_CONFIG_HOME", os.path.join(os.getenv("HOME"), ".config")), "nfcore")
+NFCORE_DIR = os.path.join(os.environ.get("XDG_CONFIG_HOME", os.path.join(os.getenv("HOME") or "", ".config")), "nfcore")
 
 
 def fetch_remote_version(source_url):
@@ -269,14 +271,16 @@ def fetch_wf_config(wf_path, cache_config=True):
         log.debug("No config cache found")
 
     # Call `nextflow config`
-    nfconfig_raw = nextflow_cmd(f"nextflow config -flat {wf_path}")
-    for l in nfconfig_raw.splitlines():
-        ul = l.decode("utf-8")
-        try:
-            k, v = ul.split(" = ", 1)
-            config[k] = v.strip("'\"")
-        except ValueError:
-            log.debug(f"Couldn't find key=value config pair:\n {ul}")
+    result = run_cmd("nextflow", f"config -flat {wf_path}")
+    if result is not None:
+        nfconfig_raw, _ = result
+        for l in nfconfig_raw.splitlines():
+            ul = l.decode("utf-8")
+            try:
+                k, v = ul.split(" = ", 1)
+                config[k] = v.strip("'\"")
+            except ValueError:
+                log.debug(f"Couldn't find key=value config pair:\n {ul}")
 
     # Scrape main.nf for additional parameter declarations
     # Values in this file are likely to be complex, so don't bother trying to capture them. Just get the param name.
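The hunk above generalises the old Nextflow-only `nextflow_cmd` into `run_cmd(executable, cmd)`, which returns an optional `(stdout, stderr)` tuple, and `fetch_wf_config` now unpacks that tuple before parsing. A minimal standalone sketch of the same pattern is shown below; the helper name `parse_flat_config` is illustrative and does not appear in the diff:

```python
import errno
import shlex
import subprocess
from typing import Dict, Optional, Tuple


def run_cmd(executable: str, cmd: str) -> Optional[Tuple[bytes, bytes]]:
    """Run '<executable> <cmd>' and capture output, mirroring the helper in the diff."""
    try:
        proc = subprocess.run(
            shlex.split(f"{executable} {cmd}"),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            check=True,
        )
        return (proc.stdout, proc.stderr)
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise RuntimeError(f"It looks like {executable} is not installed.")
        return None


def parse_flat_config(nfconfig_raw: bytes) -> Dict[str, str]:
    """Illustrative helper (not in the diff): parse 'nextflow config -flat' output
    into a dict using the same 'key = value' splitting as fetch_wf_config above."""
    config: Dict[str, str] = {}
    for line in nfconfig_raw.splitlines():
        try:
            k, v = line.decode("utf-8").split(" = ", 1)
            config[k] = v.strip("'\"")  # drop the surrounding quotes Nextflow prints
        except ValueError:
            pass  # not a 'key = value' line, skip it
    return config


# Example: parse_flat_config(b"params.outdir = './results'")
# returns {'params.outdir': './results'}
```

Returning `None` for non-ENOENT `OSError`s keeps callers like `fetch_wf_config` tolerant of transient failures, which is why the parsing step is guarded by `if result is not None`.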
@@ -303,18 +307,28 @@ def fetch_wf_config(wf_path, cache_config=True): return config -def nextflow_cmd(cmd): - """Run a Nextflow command and capture the output. Handle errors nicely""" +def run_cmd(executable: str, cmd: str) -> Union[Tuple[bytes, bytes], None]: + """Run a specified command and capture the output. Handle errors nicely.""" + full_cmd = f"{executable} {cmd}" + log.debug(f"Running command: {full_cmd}") try: - nf_proc = subprocess.run(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) - return nf_proc.stdout + proc = subprocess.run(shlex.split(full_cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) + return (proc.stdout, proc.stderr) except OSError as e: if e.errno == errno.ENOENT: - raise AssertionError("It looks like Nextflow is not installed. It is required for most nf-core functions.") + raise RuntimeError( + f"It looks like {executable} is not installed. Please ensure it is available in your PATH." + ) + else: + return None except subprocess.CalledProcessError as e: - raise AssertionError( - f"Command '{cmd}' returned non-zero error code '{e.returncode}':\n[red]> {e.stderr.decode()}{e.stdout.decode()}" - ) + log.debug(f"Command '{full_cmd}' returned non-zero error code '{e.returncode}':\n[red]> {e.stderr.decode()}") + if executable == "nf-test": + return (e.stdout, e.stderr) + else: + raise RuntimeError( + f"Command '{full_cmd}' returned non-zero error code '{e.returncode}':\n[red]> {e.stderr.decode()}{e.stdout.decode()}" + ) def setup_nfcore_dir(): @@ -324,6 +338,7 @@ def setup_nfcore_dir(): """ if not os.path.exists(NFCORE_DIR): os.makedirs(NFCORE_DIR) + return True def setup_requests_cachedir(): @@ -480,6 +495,8 @@ def __call__(self, r): if os.environ.get("GITHUB_TOKEN") is not None and self.auth is None: self.auth_mode = "Bearer token with GITHUB_TOKEN" self.auth = BearerAuth(os.environ["GITHUB_TOKEN"]) + else: + log.warning("Could not find GitHub authentication token. 
Some API requests may fail.") log.debug(f"Using GitHub auth: {self.auth_mode}") @@ -510,9 +527,16 @@ def safe_get(self, url): if not self.has_init: self.lazy_init() request = self.get(url) - if request.status_code not in self.return_ok: - self.log_content_headers(request) - raise AssertionError(f"GitHub API PR failed - got return code {request.status_code} from {url}") + if request.status_code in self.return_retry: + stderr = rich.console.Console(stderr=True, force_terminal=rich_force_colors()) + try: + r = self.request_retry(url) + except Exception as e: + stderr.print_exception() + raise e + else: + return r + return request def get(self, url, **kwargs): @@ -826,7 +850,7 @@ def prompt_remote_pipeline_name(wfs): # Non nf-core repo on GitHub if pipeline.count("/") == 1: try: - gh_api.get(f"https://api.github.com/repos/{pipeline}") + gh_api.safe_get(f"https://api.github.com/repos/{pipeline}") except Exception: # No repo found - pass and raise error at the end pass @@ -975,7 +999,7 @@ def get_repo_releases_branches(pipeline, wfs): DEPRECATED_CONFIG_PATHS = [".nf-core-lint.yml", ".nf-core-lint.yaml"] -def load_tools_config(directory="."): +def load_tools_config(directory: Union[str, Path] = "."): """ Parse the nf-core.yml configuration file @@ -1013,12 +1037,12 @@ def load_tools_config(directory="."): def determine_base_dir(directory="."): base_dir = start_dir = Path(directory).absolute() - while not get_first_available_path(base_dir, CONFIG_PATHS) and base_dir != base_dir.parent: + while base_dir != base_dir.parent: base_dir = base_dir.parent config_fn = get_first_available_path(base_dir, CONFIG_PATHS) if config_fn: - break - return directory if base_dir == start_dir else base_dir + return directory if base_dir == start_dir else base_dir + return directory def get_first_available_path(directory, paths): @@ -1124,3 +1148,48 @@ def validate_file_md5(file_name, expected_md5hex): raise IOError(f"{file_name} md5 does not match remote: {expected_md5hex} - {file_md5hex}") return True + + +def nested_setitem(d, keys, value): + """Sets the value in a nested dict using a list of keys to traverse + + Args: + d (dict): the nested dictionary to traverse + keys (list[Any]): A list of keys to iteratively traverse + value (Any): The value to be set for the last key in the chain + """ + current = d + for k in keys[:-1]: + current = current[k] + current[keys[-1]] = value + + +def nested_delitem(d, keys): + """Deletes a key from a nested dictionary + + Args: + d (dict): the nested dictionary to traverse + keys (list[Any]): A list of keys to iteratively traverse, deleting the final one + """ + current = d + for k in keys[:-1]: + current = current[k] + del current[keys[-1]] + + +@contextmanager +def set_wd(path: Path) -> Generator[None, None, None]: + """Sets the working directory for this context. + + Arguments + --------- + + path : Path + Path to the working directory to be used inside this context. 
+ """ + start_wd = Path().absolute() + os.chdir(Path(path).resolve()) + try: + yield + finally: + os.chdir(start_wd) diff --git a/requirements-dev.txt b/requirements-dev.txt index 360f6ff87f..13dba6f30d 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,3 +6,10 @@ pytest-datafiles responses Sphinx sphinx-rtd-theme +mypy +types-PyYAML +pyupgrade +types-requests +types-jsonschema +types-Markdown +types-setuptools diff --git a/requirements.txt b/requirements.txt index 9cc7fc6be5..add52f4bc6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -17,3 +17,4 @@ requests_cache rich-click>=1.6.1 rich>=13.3.1 tabulate +pdiff diff --git a/setup.py b/setup.py index c04d9fd0b1..da6f0ffa61 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "2.10" +version = "2.11" with open("README.md") as f: readme = f.read() diff --git a/tests/components/generate_snapshot.py b/tests/components/generate_snapshot.py new file mode 100644 index 0000000000..46fd63fe3f --- /dev/null +++ b/tests/components/generate_snapshot.py @@ -0,0 +1,139 @@ +"""Test generate a snapshot""" +import json +from pathlib import Path +from unittest.mock import MagicMock + +import pytest + +from nf_core.components.components_test import ComponentsTest +from nf_core.utils import set_wd + +from ..utils import GITLAB_NFTEST_BRANCH, GITLAB_URL + + +def test_generate_snapshot_module(self): + """Generate the snapshot for a module in nf-core/modules clone""" + with set_wd(self.nfcore_modules): + snap_generator = ComponentsTest( + component_type="modules", + component_name="fastqc", + no_prompts=True, + remote_url=GITLAB_URL, + branch=GITLAB_NFTEST_BRANCH, + ) + snap_generator.run() + + snap_path = Path("modules", "nf-core-test", "fastqc", "tests", "main.nf.test.snap") + assert snap_path.exists() + + with open(snap_path, "r") as fh: + snap_content = json.load(fh) + assert "versions" in snap_content + assert "content" in snap_content["versions"] + assert "versions.yml:md5,e1cc25ca8af856014824abd842e93978" in snap_content["versions"]["content"][0] + + +def test_generate_snapshot_subworkflow(self): + """Generate the snapshot for a subworkflows in nf-core/modules clone""" + with set_wd(self.nfcore_modules): + snap_generator = ComponentsTest( + component_type="subworkflows", + component_name="bam_sort_stats_samtools", + no_prompts=True, + remote_url=GITLAB_URL, + branch=GITLAB_NFTEST_BRANCH, + ) + snap_generator.run() + + snap_path = Path("subworkflows", "nf-core-test", "bam_sort_stats_samtools", "tests", "main.nf.test.snap") + assert snap_path.exists() + + with open(snap_path, "r") as fh: + snap_content = json.load(fh) + assert "test_bam_sort_stats_samtools_paired_end_flagstats" in snap_content + assert ( + "test.flagstat:md5,4f7ffd1e6a5e85524d443209ac97d783" + in snap_content["test_bam_sort_stats_samtools_paired_end_flagstats"]["content"][0][0] + ) + assert "test_bam_sort_stats_samtools_paired_end_idxstats" in snap_content + assert ( + "test.idxstats:md5,df60a8c8d6621100d05178c93fb053a2" + in snap_content["test_bam_sort_stats_samtools_paired_end_idxstats"]["content"][0][0] + ) + + +def test_generate_snapshot_once( + self, +): + """Generate the snapshot for a module in nf-core/modules clone only once""" + with set_wd(self.nfcore_modules): + snap_generator = ComponentsTest( + component_type="modules", + component_name="fastqc", + once=True, + no_prompts=True, + remote_url=GITLAB_URL, + branch=GITLAB_NFTEST_BRANCH, + ) + snap_generator.repo_type = "modules" + 
snap_generator.generate_snapshot = MagicMock() + snap_generator.run() + snap_generator.generate_snapshot.assert_called_once() + + +def test_update_snapshot_module(self): + """Update the snapshot of a module in nf-core/modules clone""" + + with set_wd(self.nfcore_modules): + snap_path = Path("modules", "nf-core-test", "bwa", "mem", "tests", "main.nf.test.snap") + with open(snap_path, "r") as fh: + snap_content = json.load(fh) + original_timestamp = snap_content["Single-End"]["timestamp"] + # delete the timestamp in json + snap_content["Single-End"]["content"][0]["0"][0][1] = "" + with open(snap_path, "w") as fh: + json.dump(snap_content, fh) + snap_generator = ComponentsTest( + component_type="modules", + component_name="bwa/mem", + no_prompts=True, + remote_url=GITLAB_URL, + branch=GITLAB_NFTEST_BRANCH, + update=True, + ) + snap_generator.run() + + with open(snap_path, "r") as fh: + snap_content = json.load(fh) + assert "Single-End" in snap_content + assert snap_content["Single-End"]["timestamp"] != original_timestamp + + +def test_test_not_found(self): + """Generate the snapshot for a module in nf-core/modules clone which doesn't contain tests""" + with set_wd(self.nfcore_modules): + snap_generator = ComponentsTest( + component_type="modules", + component_name="fastp", + no_prompts=True, + remote_url=GITLAB_URL, + branch=GITLAB_NFTEST_BRANCH, + ) + with pytest.raises(UserWarning) as e: + snap_generator.run() + assert "Test file 'main.nf.test' not found" in str(e.value) + + +def test_unstable_snapshot(self): + """Generate the snapshot for a module in nf-core/modules clone with unstable snapshots""" + with set_wd(self.nfcore_modules): + snap_generator = ComponentsTest( + component_type="modules", + component_name="kallisto/quant", + no_prompts=True, + remote_url=GITLAB_URL, + branch=GITLAB_NFTEST_BRANCH, + ) + with pytest.raises(UserWarning) as e: + snap_generator.run() + assert "nf-test snapshot is not stable" in str(e.value) diff --git a/tests/modules/modules_test.py b/tests/components/snapshot_test.py similarity index 50% rename from tests/modules/modules_test.py rename to tests/components/snapshot_test.py index eb207fa28b..d774618476 100644 --- a/tests/modules/modules_test.py +++ b/tests/components/snapshot_test.py @@ -1,41 +1,39 @@ -"""Test the 'modules test' command which runs module pytests.""" -import os +"""Test the 'modules test' or 'subworkflows test' command which runs nf-test test.""" import shutil from pathlib import Path import pytest -import nf_core.modules +from nf_core.components.components_test import ComponentsTest +from nf_core.utils import set_wd -from ..utils import set_wd - -def test_modules_test_check_inputs(self): +def test_components_test_check_inputs(self): """Test the check_inputs() function - raise UserWarning because module doesn't exist""" with set_wd(self.nfcore_modules): - meta_builder = nf_core.modules.ModulesTest("none", True, "") + meta_builder = ComponentsTest(component_type="modules", component_name="none", no_prompts=True) with pytest.raises(UserWarning) as excinfo: - meta_builder._check_inputs() + meta_builder.check_inputs() assert "Cannot find directory" in str(excinfo.value) -def test_modules_test_no_name_no_prompts(self): +def test_components_test_no_name_no_prompts(self): """Test the check_inputs() function - raise UserWarning prompts are deactivated and module name is not provided.""" with set_wd(self.nfcore_modules): - meta_builder = nf_core.modules.ModulesTest(None, True, "") + meta_builder = ComponentsTest(component_type="modules", 
component_name=None, no_prompts=True) with pytest.raises(UserWarning) as excinfo: - meta_builder._check_inputs() + meta_builder.check_inputs() assert "Module name not provided and prompts deactivated." in str(excinfo.value) -def test_modules_test_no_installed_modules(self): +def test_components_test_no_installed_modules(self): """Test the check_inputs() function - raise UserWarning because installed modules were not found""" with set_wd(self.nfcore_modules): module_dir = Path(self.nfcore_modules, "modules") shutil.rmtree(module_dir) module_dir.mkdir() - meta_builder = nf_core.modules.ModulesTest(None, False, "") + meta_builder = ComponentsTest(component_type="modules", component_name=None, no_prompts=False) meta_builder.repo_type = "modules" - with pytest.raises(UserWarning) as excinfo: - meta_builder._check_inputs() - assert "No installed modules were found" in str(excinfo.value) + with pytest.raises(LookupError) as excinfo: + meta_builder.check_inputs() + assert "Nothing installed from" in str(excinfo.value) diff --git a/tests/lint/multiqc_config.py b/tests/lint/multiqc_config.py new file mode 100644 index 0000000000..446b4378b0 --- /dev/null +++ b/tests/lint/multiqc_config.py @@ -0,0 +1,106 @@ +from pathlib import Path + +import yaml + +import nf_core.lint + + +def test_multiqc_config_exists_ignore(self): + """Test that linting fails if the multiqc_config.yml file is missing""" + # Delete the file + new_pipeline = self._make_pipeline_copy() + Path(Path(new_pipeline, "assets", "multiqc_config.yml")).unlink() + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + result = lint_obj.multiqc_config() + assert result["ignored"] == ["'assets/multiqc_config.yml' not found"] + + +def test_multiqc_config_missing_report_section_order(self): + """Test that linting fails if the multiqc_config.yml file is missing the report_section_order""" + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + mqc_yml = yaml.safe_load(fh) + mqc_yml_tmp = mqc_yml + mqc_yml.pop("report_section_order") + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml, fh) + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + # Reset the file + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml_tmp, fh) + assert result["failed"] == ["'assets/multiqc_config.yml' does not contain `report_section_order`"] + + +def test_multiqc_incorrect_export_plots(self): + """Test that linting fails if the multiqc_config.yml file has an incorrect value for export_plots""" + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + mqc_yml = yaml.safe_load(fh) + mqc_yml_tmp = mqc_yml + mqc_yml["export_plots"] = False + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml, fh) + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + # Reset the file + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml_tmp, fh) + assert result["failed"] == ["'assets/multiqc_config.yml' does not contain 'export_plots: true'."] + + +def test_multiqc_config_report_comment_fail(self): + """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment""" + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, 
"assets", "multiqc_config.yml"), "r") as fh: + mqc_yml = yaml.safe_load(fh) + mqc_yml_tmp = mqc_yml + mqc_yml["report_comment"] = "This is a test" + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml, fh) + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + # Reset the file + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml_tmp, fh) + assert len(result["failed"]) == 1 + assert result["failed"][0].startswith("'assets/multiqc_config.yml' does not contain a matching 'report_comment'.") + + +def test_multiqc_config_report_comment_release_fail(self): + """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment for a release version""" + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + mqc_yml = yaml.safe_load(fh) + mqc_yml_tmp = mqc_yml + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml, fh) + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + # bump version + lint_obj.nf_config["manifest.version"] = "1.0" + result = lint_obj.multiqc_config() + # Reset the file + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml_tmp, fh) + assert len(result["failed"]) == 1 + assert result["failed"][0].startswith("'assets/multiqc_config.yml' does not contain a matching 'report_comment'.") + + +def test_multiqc_config_report_comment_release_succeed(self): + """Test that linting fails if the multiqc_config.yml file has a correct report_comment for a release version""" + + import nf_core.bump_version + + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + # bump version using the bump_version function + nf_core.bump_version.bump_pipeline_version(lint_obj, "1.0") + # lint again + lint_obj._load() + result = lint_obj.multiqc_config() + assert "'assets/multiqc_config.yml' contains a matching 'report_comment'." 
in result["passed"] diff --git a/tests/lint/nextflow_config.py b/tests/lint/nextflow_config.py index f53765dce9..1542b8cf65 100644 --- a/tests/lint/nextflow_config.py +++ b/tests/lint/nextflow_config.py @@ -1,3 +1,6 @@ +import os +import re + import nf_core.create import nf_core.lint @@ -33,3 +36,20 @@ def test_nextflow_config_dev_in_release_mode_failed(self): result = lint_obj.nextflow_config() assert len(result["failed"]) > 0 assert len(result["warned"]) == 0 + + +def test_nextflow_config_missing_test_profile_failed(self): + """Test failure if config file does not contain `test` profile.""" + new_pipeline = self._make_pipeline_copy() + # Change the name of the test profile so there is no such profile + nf_conf_file = os.path.join(new_pipeline, "nextflow.config") + with open(nf_conf_file, "r") as f: + content = f.read() + fail_content = re.sub(r"\btest\b", "testfail", content) + with open(nf_conf_file, "w") as f: + f.write(fail_content) + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load_pipeline_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) > 0 + assert len(result["warned"]) == 0 diff --git a/tests/modules/bump_versions.py b/tests/modules/bump_versions.py index 65569efd51..3c19041f63 100644 --- a/tests/modules/bump_versions.py +++ b/tests/modules/bump_versions.py @@ -2,6 +2,7 @@ import re import pytest +import yaml import nf_core.modules from nf_core.modules.modules_utils import ModuleException @@ -10,11 +11,11 @@ def test_modules_bump_versions_single_module(self): """Test updating a single module""" # Change the bpipe/test version to an older version - main_nf_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf") - with open(main_nf_path, "r") as fh: + env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") + with open(env_yml_path, "r") as fh: content = fh.read() new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content) - with open(main_nf_path, "w") as fh: + with open(env_yml_path, "w") as fh: fh.write(new_content) version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) version_bumper.bump_versions(module="bpipe/test") @@ -39,11 +40,11 @@ def test_modules_bump_versions_fail(self): def test_modules_bump_versions_fail_unknown_version(self): """Fail because of an unknown version""" # Change the bpipe/test version to an older version - main_nf_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf") - with open(main_nf_path, "r") as fh: + env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") + with open(env_yml_path, "r") as fh: content = fh.read() new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content) - with open(main_nf_path, "w") as fh: + with open(env_yml_path, "w") as fh: fh.write(new_content) version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) version_bumper.bump_versions(module="bpipe/test") diff --git a/tests/modules/create.py b/tests/modules/create.py index 98e498c1b0..74e5ec3896 100644 --- a/tests/modules/create.py +++ b/tests/modules/create.py @@ -1,11 +1,22 @@ +import filecmp import os +import shutil +from pathlib import Path +from unittest import mock import pytest import requests_cache import responses +import yaml +from git.repo import Repo import nf_core.modules -from tests.utils import mock_anaconda_api_calls, 
mock_biocontainers_api_calls +from tests.utils import ( + GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, + GITLAB_URL, + mock_anaconda_api_calls, + mock_biocontainers_api_calls, +) def test_modules_create_succeed(self): @@ -48,7 +59,7 @@ def test_modules_create_nfcore_modules(self): with requests_cache.disabled(): module_create.create() assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "main.nf")) - assert os.path.exists(os.path.join(self.nfcore_modules, "tests", "modules", "nf-core", "fastqc", "main.nf")) + assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "tests", "main.nf.test")) def test_modules_create_nfcore_modules_subtool(self): @@ -62,4 +73,69 @@ def test_modules_create_nfcore_modules_subtool(self): with requests_cache.disabled(): module_create.create() assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "main.nf")) - assert os.path.exists(os.path.join(self.nfcore_modules, "tests", "modules", "nf-core", "star", "index", "main.nf")) + assert os.path.exists( + os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "tests", "main.nf.test") + ) + + +@mock.patch("rich.prompt.Confirm.ask") +def test_modules_migrate(self, mock_rich_ask): + """Create a module with the --migrate-pytest option to convert pytest to nf-test""" + pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") + module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + with open(module_dir / "main.nf", "r") as fh: + old_main_nf = fh.read() + with open(module_dir / "meta.yml", "r") as fh: + old_meta_yml = fh.read() + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = True + module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create.create() + + with open(module_dir / "main.nf", "r") as fh: + new_main_nf = fh.read() + with open(module_dir / "meta.yml", "r") as fh: + new_meta_yml = fh.read() + nextflow_config = module_dir / "tests" / "nextflow.config" + + # Check that old files have been copied to the new module + assert old_main_nf == new_main_nf + assert old_meta_yml == new_meta_yml + assert nextflow_config.is_file() + + # Check that pytest folder is deleted + assert not pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "samtools/sort" not in modules_yml.keys() + + +@mock.patch("rich.prompt.Confirm.ask") +def test_modules_migrate_no_delete(self, mock_rich_ask): + """Create a module with the --migrate-pytest option to convert pytest to nf-test. 
+ Test that pytest directory is not deleted.""" + pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = False + module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create.create() + + # Check that pytest folder is not deleted + assert pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "samtools/sort" not in modules_yml.keys() diff --git a/tests/modules/create_test_yml.py b/tests/modules/create_test_yml.py deleted file mode 100644 index 243378af78..0000000000 --- a/tests/modules/create_test_yml.py +++ /dev/null @@ -1,60 +0,0 @@ -import os -from pathlib import Path - -import pytest - -import nf_core.modules - -from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL, with_temporary_folder - - -@with_temporary_folder -def test_modules_custom_yml_dumper(self, out_dir): - """Try to create a yml file with the custom yml dumper""" - yml_output_path = Path(out_dir, "test.yml") - meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", self.pipeline_dir, False, "./", False, True) - meta_builder.test_yml_output_path = yml_output_path - meta_builder.tests = [{"testname": "myname"}] - meta_builder.print_test_yml() - assert Path(yml_output_path).is_file() - - -@with_temporary_folder -def test_modules_test_file_dict(self, test_file_dir): - """Create dict of test files and create md5 sums""" - meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", self.pipeline_dir, False, "./", False, True) - with open(Path(test_file_dir, "test_file.txt"), "w") as fh: - fh.write("this line is just for testing") - test_files = meta_builder.create_test_file_dict(test_file_dir) - assert len(test_files) == 1 - assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" - - -@with_temporary_folder -def test_modules_create_test_yml_get_md5(self, test_file_dir): - """Get md5 sums from a dummy output""" - meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", self.pipeline_dir, False, "./", False, True) - with open(Path(test_file_dir, "test_file.txt"), "w") as fh: - fh.write("this line is just for testing") - test_files = meta_builder.get_md5_sums(command="dummy", results_dir=test_file_dir, results_dir_repeat=test_file_dir) - assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" - - -def test_modules_create_test_yml_entry_points(self): - """Test extracting test entry points from a main.nf file""" - meta_builder = nf_core.modules.ModulesTestYmlBuilder("bpipe/test", self.pipeline_dir, False, "./", False, True) - meta_builder.module_test_main = Path(self.nfcore_modules, "tests", "modules", "nf-core", "bpipe", "test", "main.nf") - meta_builder.scrape_workflow_entry_points() - assert meta_builder.entry_points[0] == "test_bpipe_test" - - -def test_modules_create_test_yml_check_inputs(self): - """Test the check_inputs() function - raise UserWarning because test.yml exists""" - cwd = os.getcwd() - os.chdir(self.nfcore_modules) - meta_builder = nf_core.modules.ModulesTestYmlBuilder("bpipe/test", ".", False, "./", False, True) - meta_builder.module_test_main = Path(self.nfcore_modules, "tests", "modules", 
"bpipe", "test", "main.nf") - with pytest.raises(UserWarning) as excinfo: - meta_builder.check_inputs() - os.chdir(cwd) - assert "Test YAML file already exists!" in str(excinfo.value) diff --git a/tests/modules/lint.py b/tests/modules/lint.py index d31f2c3212..a8a775e6f6 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -1,16 +1,18 @@ -import os from pathlib import Path import pytest +import yaml +from git.repo import Repo import nf_core.modules from nf_core.modules.lint import main_nf +from nf_core.utils import set_wd -from ..utils import GITLAB_URL, set_wd +from ..utils import GITLAB_NFTEST_BRANCH, GITLAB_URL from .patch import BISMARK_ALIGN, CORRECT_SHA, PATCH_BRANCH, REPO_NAME, modify_main_nf -def setup_patch(pipeline_dir, modify_module): +def setup_patch(pipeline_dir: str, modify_module: bool): install_obj = nf_core.modules.ModuleInstall( pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH, sha=CORRECT_SHA ) @@ -46,7 +48,7 @@ def test_modules_lint_empty(self): def test_modules_lint_new_modules(self): """lint a new module""" module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=True, all_modules=True) + module_lint.lint(print_results=False, all_modules=True) assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 @@ -231,3 +233,414 @@ def test_modules_lint_check_process_labels(self): assert len(mocked_ModuleLint.passed) == passed assert len(mocked_ModuleLint.warned) == warned assert len(mocked_ModuleLint.failed) == failed + + +# Test cases for linting the container definitions + +CONTAINER_SINGLE_GOOD = ( + "Single-line container definition should pass", + """ + container "quay.io/nf-core/gatk:4.4.0.0" //Biocontainers is missing a package + """, + 2, # passed + 0, # warned + 0, # failed +) + +CONTAINER_TWO_LINKS_GOOD = ( + "Multi-line container definition should pass", + """ + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0': + 'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }" + """, + 6, + 0, + 0, +) + +CONTAINER_WITH_SPACE_BAD = ( + "Space in container URL should fail", + """ + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ': + 'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }" + """, + 5, + 0, + 1, +) + +CONTAINER_MULTIPLE_DBLQUOTES_BAD = ( + "Incorrect quoting of container string should fail", + """ + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+    'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ':
+    "biocontainers/gatk4:4.4.0.0--py36hdfd78af_0" }"
+    """,
+    4,
+    0,
+    1,
+)
+
+CONTAINER_TEST_CASES = [
+    CONTAINER_SINGLE_GOOD,
+    CONTAINER_TWO_LINKS_GOOD,
+    CONTAINER_WITH_SPACE_BAD,
+    CONTAINER_MULTIPLE_DBLQUOTES_BAD,
+]
+
+
+def test_modules_lint_check_url(self):
+    for test_case in CONTAINER_TEST_CASES:
+        test, process, passed, warned, failed = test_case
+        mocked_ModuleLint = MockModuleLint()
+        for line in process.splitlines():
+            if line.strip():
+                main_nf.check_container_link_line(mocked_ModuleLint, line, registry="quay.io")
+
+        assert (
+            len(mocked_ModuleLint.passed) == passed
+        ), f"{test}: Expected {passed} PASS, got {len(mocked_ModuleLint.passed)}."
+        assert (
+            len(mocked_ModuleLint.warned) == warned
+        ), f"{test}: Expected {warned} WARN, got {len(mocked_ModuleLint.warned)}."
+        assert (
+            len(mocked_ModuleLint.failed) == failed
+        ), f"{test}: Expected {failed} FAIL, got {len(mocked_ModuleLint.failed)}."
+
+
+def test_modules_lint_snapshot_file(self):
+    """Test linting a module with a snapshot file"""
+    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
+    module_lint.lint(print_results=False, module="bpipe/test")
+    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+    assert len(module_lint.passed) > 0
+    assert len(module_lint.warned) >= 0
+
+
+def test_modules_lint_snapshot_file_missing_fail(self):
+    """Test linting a module with a snapshot file missing, which should fail"""
+    Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap").unlink()
+    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
+    module_lint.lint(print_results=False, module="bpipe/test")
+    Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap").touch()
+    assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+    assert len(module_lint.passed) > 0
+    assert len(module_lint.warned) >= 0
+    assert module_lint.failed[0].lint_test == "test_snapshot_exists"
+
+
+def test_modules_lint_snapshot_file_not_needed(self):
+    """Test linting a module which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file"""
+    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "r") as fh:
+        content = fh.read()
+    new_content = content.replace("snapshot(", "snap (")
+    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh:
+        fh.write(new_content)
+    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
+    module_lint.lint(print_results=False, module="bpipe/test")
+    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+    assert len(module_lint.passed) > 0
+    assert len(module_lint.warned) >= 0
+
+
+def test_modules_environment_yml_file_doesnt_exists(self):
+    """Test linting a module with a missing environment.yml file"""
+    Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml").rename(
+        Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml.bak")
+    )
+    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
+    module_lint.lint(print_results=False, module="bpipe/test")
+    Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml.bak").rename(
+        Path(self.nfcore_modules, "modules", "nf-core",
"bpipe", "test", "environment.yml") + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "environment_yml_exists" + + +def test_modules_environment_yml_file_sorted_correctly(self): + """Test linting a module with a correctly sorted environment.yml file""" + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + +def test_modules_environment_yml_file_sorted_incorrectly(self): + """Test linting a module with an incorrectly sorted environment.yml file""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "r") as fh: + yaml_content = yaml.safe_load(fh) + # Add a new dependency to the environment.yml file and reverse the order + yaml_content["dependencies"].append("z") + yaml_content["dependencies"].reverse() + yaml_content = yaml.dump(yaml_content) + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + fh.write(yaml_content) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + # we fix the sorting on the fly, so this should pass + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + +def test_modules_environment_yml_file_not_array(self): + """Test linting a module with an incorrectly formatted environment.yml file""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh: + yaml_content = yaml.safe_load(fh) + yaml_content["dependencies"] = "z" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + fh.write(yaml.dump(yaml_content)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "environment_yml_valid" + + +def test_modules_environment_yml_file_name_mismatch(self): + """Test linting a module with a different name in the environment.yml file""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh: + yaml_content = yaml.safe_load(fh) + yaml_content["name"] = "bpipe-test" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + fh.write(yaml.dump(yaml_content)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + # reset changes + yaml_content["name"] = "bpipe_test" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + fh.write(yaml.dump(yaml_content)) + + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + 
assert module_lint.failed[0].lint_test == "environment_yml_name" + + +def test_modules_meta_yml_incorrect_licence_field(self): + """Test linting a module with an incorrect Licence field in meta.yml""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: + meta_yml = yaml.safe_load(fh) + meta_yml["tools"][0]["bpipe"]["licence"] = "[MIT]" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: + fh.write(yaml.dump(meta_yml)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + + # reset changes + meta_yml["tools"][0]["bpipe"]["licence"] = ["MIT"] + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: + fh.write(yaml.dump(meta_yml)) + + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "meta_yml_valid" + + +def test_modules_meta_yml_input_mismatch(self): + """Test linting a module with an extra entry in input fields in meta.yml compared to module.input""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: + main_nf = fh.read() + main_nf_new = main_nf.replace("path bam", "path bai") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf_new) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf) + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) == 2 + lint_tests = [x.lint_test for x in module_lint.warned] + # check that it is there twice: + assert lint_tests.count("meta_input_meta_only") == 1 + assert lint_tests.count("meta_input_main_only") == 1 + + +def test_modules_meta_yml_output_mismatch(self): + """Test linting a module with an extra entry in output fields in meta.yml compared to module.output""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: + main_nf = fh.read() + main_nf_new = main_nf.replace("emit: bam", "emit: bai") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf_new) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf) + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) == 2 + lint_tests = [x.lint_test for x in module_lint.warned] + # check that it is there twice: + assert lint_tests.count("meta_output_meta_only") == 1 + assert lint_tests.count("meta_output_main_only") == 1 + + +def test_modules_meta_yml_incorrect_name(self): + """Test linting a module with an incorrect name in meta.yml""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: + meta_yml = yaml.safe_load(fh) + meta_yml["name"] = 
"bpipe/test" + # need to make the same change to the environment.yml file + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh: + environment_yml = yaml.safe_load(fh) + environment_yml["name"] = "bpipe/test" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: + fh.write(yaml.dump(meta_yml)) + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + fh.write(yaml.dump(environment_yml)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + + # reset changes + meta_yml["name"] = "bpipe_test" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: + fh.write(yaml.dump(meta_yml)) + environment_yml["name"] = "bpipe_test" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + fh.write(yaml.dump(environment_yml)) + + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "meta_name" + + +def test_modules_missing_test_dir(self): + """Test linting a module with a missing test directory""" + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak") + ) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests") + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_dir_exists" + + +def test_modules_missing_test_main_nf(self): + """Test linting a module with a missing test/main.nf file""" + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.bak") + ) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.bak").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test") + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_main_nf_exists" + + +def test_modules_missing_required_tag(self): + """Test linting a module with a missing required tag""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "r") as fh: + content = fh.read() + new_content = content.replace("modules_nfcore", "foo") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh: + fh.write(new_content) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, 
module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh: + fh.write(content) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_main_tags" + + +def test_modules_missing_tags_yml(self): + """Test linting a module with a missing tags.yml file""" + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml.bak") + ) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml.bak").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml") + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_tags_yml_exists" + + +def test_modules_incorrect_tags_yml_key(self): + """Test linting a module with an incorrect key in tags.yml file""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "r") as fh: + content = fh.read() + new_content = content.replace("bpipe/test:", "bpipe_test:") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh: + fh.write(new_content) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=True, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh: + fh.write(content) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_tags_yml" + + +def test_modules_incorrect_tags_yml_values(self): + """Test linting a module with an incorrect path in tags.yml file""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "r") as fh: + content = fh.read() + new_content = content.replace("modules/nf-core/bpipe/test/**", "foo") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh: + fh.write(new_content) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh: + fh.write(content) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_tags_yml" + + +def test_modules_unused_pytest_files(self): + """Test linting a nf-test module with files still present in `tests/modules/`""" + Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").mkdir(parents=True, exist_ok=True) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + 
Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").rmdir() + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_old_test_dir" + + +def test_nftest_failing_linting(self): + """Test linting a module which includes other modules in nf-test tests. + Linting tests""" + # Clone modules repo with testing modules + tmp_dir = self.nfcore_modules.parent + self.nfcore_modules = Path(tmp_dir, "modules-test") + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_NFTEST_BRANCH) + + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="kallisto/quant") + + assert len(module_lint.failed) == 4, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "environment_yml_valid" + assert module_lint.failed[1].lint_test == "meta_yml_valid" + assert module_lint.failed[2].lint_test == "test_main_tags" + assert "kallisto/index" in module_lint.failed[2].message + assert module_lint.failed[3].lint_test == "test_tags_yml" diff --git a/tests/subworkflows/create.py b/tests/subworkflows/create.py index 60ee6add9a..fc628df34f 100644 --- a/tests/subworkflows/create.py +++ b/tests/subworkflows/create.py @@ -1,8 +1,15 @@ +import filecmp import os +import shutil +from pathlib import Path +from unittest import mock import pytest +import yaml +from git.repo import Repo import nf_core.subworkflows +from tests.utils import GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL def test_subworkflows_create_succeed(self): @@ -33,5 +40,72 @@ def test_subworkflows_create_nfcore_modules(self): subworkflow_create.create() assert os.path.exists(os.path.join(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf")) assert os.path.exists( - os.path.join(self.nfcore_modules, "tests", "subworkflows", "nf-core", "test_subworkflow", "main.nf") + os.path.join(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test") ) + + +@mock.patch("rich.prompt.Confirm.ask") +def test_subworkflows_migrate(self, mock_rich_ask): + """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test""" + pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools") + subworkflow_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "bam_stats_samtools") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + with open(subworkflow_dir / "main.nf", "r") as fh: + old_main_nf = fh.read() + with open(subworkflow_dir / "meta.yml", "r") as fh: + old_meta_yml = fh.read() + + # Create a subworkflow with --migrate-pytest + mock_rich_ask.return_value = True + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True + ) + subworkflow_create.create() + + with open(subworkflow_dir / "main.nf", "r") as fh: + new_main_nf = fh.read() + with open(subworkflow_dir / "meta.yml", "r") as fh: + new_meta_yml = fh.read() + nextflow_config = subworkflow_dir / "tests" / "nextflow.config" + + # Check that old files have been copied to the new module + assert old_main_nf == new_main_nf + assert old_meta_yml == 
new_meta_yml + assert nextflow_config.is_file() + + # Check that pytest folder is deleted + assert not pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() + + +@mock.patch("rich.prompt.Confirm.ask") +def test_subworkflows_migrate_no_delete(self, mock_rich_ask): + """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test. + Test that pytest directory is not deleted.""" + pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = False + module_create = nf_core.subworkflows.SubworkflowCreate( + self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True + ) + module_create.create() + + # Check that pytest folder is not deleted + assert pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() diff --git a/tests/subworkflows/create_test_yml.py b/tests/subworkflows/create_test_yml.py deleted file mode 100644 index 40384b420f..0000000000 --- a/tests/subworkflows/create_test_yml.py +++ /dev/null @@ -1,96 +0,0 @@ -import os -from pathlib import Path -from unittest import mock - -import pytest - -import nf_core.subworkflows - -from ..utils import with_temporary_folder - - -@with_temporary_folder -def test_subworkflows_custom_yml_dumper(self, out_dir): - """Try to create a yml file with the custom yml dumper""" - yml_output_path = Path(out_dir, "test.yml") - meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( - subworkflow="test/tool", - directory=self.pipeline_dir, - test_yml_output_path=yml_output_path, - no_prompts=True, - ) - meta_builder.test_yml_output_path = yml_output_path - meta_builder.tests = [{"testname": "myname"}] - meta_builder.print_test_yml() - assert Path(yml_output_path).is_file() - - -@with_temporary_folder -def test_subworkflows_test_file_dict(self, test_file_dir): - """Create dict of test files and create md5 sums""" - meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( - subworkflow="test/tool", - directory=self.pipeline_dir, - test_yml_output_path="./", - no_prompts=True, - ) - with open(Path(test_file_dir, "test_file.txt"), "w") as fh: - fh.write("this line is just for testing") - test_files = meta_builder.create_test_file_dict(test_file_dir) - assert len(test_files) == 1 - assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" - - -@with_temporary_folder -def test_subworkflows_create_test_yml_get_md5(self, test_file_dir): - """Get md5 sums from a dummy output""" - meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( - subworkflow="test/tool", - directory=self.pipeline_dir, - test_yml_output_path="./", - no_prompts=True, - ) - with open(Path(test_file_dir, "test_file.txt"), "w") as fh: - fh.write("this line is just for testing") - test_files = meta_builder.get_md5_sums( - command="dummy", - results_dir=test_file_dir, - results_dir_repeat=test_file_dir, - ) - assert test_files[0]["md5sum"] == 
"2191e06b28b5ba82378bcc0672d01786" - - -def test_subworkflows_create_test_yml_entry_points(self): - """Test extracting test entry points from a main.nf file""" - subworkflow = "test_subworkflow" - meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( - subworkflow=f"{subworkflow}/test", - directory=self.pipeline_dir, - test_yml_output_path="./", - no_prompts=True, - ) - meta_builder.subworkflow_test_main = Path( - self.nfcore_modules, "tests", "subworkflows", "nf-core", subworkflow, "main.nf" - ) - meta_builder.scrape_workflow_entry_points() - assert meta_builder.entry_points[0] == f"test_{subworkflow}" - - -def test_subworkflows_create_test_yml_check_inputs(self): - """Test the check_inputs() function - raise UserWarning because test.yml exists""" - cwd = os.getcwd() - os.chdir(self.nfcore_modules) - subworkflow = "test_subworkflow" - meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( - subworkflow=f"{subworkflow}", - directory=self.pipeline_dir, - test_yml_output_path="./", - no_prompts=True, - ) - meta_builder.subworkflow_test_main = Path( - self.nfcore_modules, "tests", "subworkflows", "nf-core", subworkflow, "main.nf" - ) - with pytest.raises(UserWarning) as excinfo: - meta_builder.check_inputs() - os.chdir(cwd) - assert "Test YAML file already exists!" in str(excinfo.value) diff --git a/tests/subworkflows/lint.py b/tests/subworkflows/lint.py index d754985265..1380db2260 100644 --- a/tests/subworkflows/lint.py +++ b/tests/subworkflows/lint.py @@ -1,8 +1,11 @@ +import os +from pathlib import Path + import pytest import nf_core.subworkflows -from ..utils import GITLAB_URL +from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL def test_subworkflows_lint(self): @@ -25,7 +28,8 @@ def test_subworkflows_lint_new_subworkflow(self): """lint a new subworkflow""" subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) subworkflow_lint.lint(print_results=True, all_subworkflows=True) - assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.failed) == 0 + assert len(subworkflow_lint.passed) > 0 assert len(subworkflow_lint.warned) >= 0 @@ -39,9 +43,11 @@ def test_subworkflows_lint_no_gitlab(self): def test_subworkflows_lint_gitlab_subworkflows(self): """Lint subworkflows from a different remote""" self.subworkflow_install_gitlab.install("bam_stats_samtools") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint( + dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) subworkflow_lint.lint(print_results=False, all_subworkflows=True) - assert len(subworkflow_lint.failed) == 2 + assert len(subworkflow_lint.failed) == 0 assert len(subworkflow_lint.passed) > 0 assert len(subworkflow_lint.warned) >= 0 @@ -50,8 +56,51 @@ def test_subworkflows_lint_multiple_remotes(self): """Lint subworkflows from a different remote""" self.subworkflow_install_gitlab.install("bam_stats_samtools") self.subworkflow_install.install("fastq_align_bowtie2") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) - subworkflow_lint.lint(print_results=False, all_modules=True) - assert len(subworkflow_lint.failed) == 1 + subworkflow_lint = nf_core.subworkflows.SubworkflowLint( + dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + subworkflow_lint.lint(print_results=False, 
all_subworkflows=True) + assert len(subworkflow_lint.failed) == 0 + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + +def test_subworkflows_lint_snapshot_file(self): + """Test linting a subworkflow with a snapshot file""" + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + +def test_subworkflows_lint_snapshot_file_missing_fail(self): + """Test linting a subworkflow with a snapshot file missing, which should fail""" + Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").unlink() + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").touch() + assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + +def test_subworkflows_lint_snapshot_file_not_needed(self): + """Test linting a subworkflow which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" + with open( + Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test"), "r" + ) as fh: + content = fh.read() + new_content = content.replace("snapshot(", "snap (") + with open( + Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test"), "w" + ) as fh: + fh.write(new_content) + + Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").unlink() + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").touch() + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 assert len(subworkflow_lint.warned) >= 0 diff --git a/tests/subworkflows/subworkflows_test.py b/tests/subworkflows/subworkflows_test.py deleted file mode 100644 index adb0989b33..0000000000 --- a/tests/subworkflows/subworkflows_test.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Test the 'subworkflows test' command which runs module pytests.""" -import os -import shutil -from pathlib import Path - -import pytest - -import nf_core.subworkflows - -from ..utils import set_wd - - -def test_subworkflows_test_check_inputs(self): - """Test the check_inputs() function - raise UserWarning because module doesn't exist""" - with set_wd(self.nfcore_modules): - meta_builder = nf_core.subworkflows.SubworkflowsTest("none", True, "") - with pytest.raises(UserWarning) as excinfo: - meta_builder._check_inputs() - assert "Cannot find directory" in str(excinfo.value) - - -def test_subworkflows_test_no_name_no_prompts(self): - """Test the check_inputs() function - raise UserWarning prompts are deactivated and module name is not provided.""" - with set_wd(self.nfcore_modules): - meta_builder = 
nf_core.subworkflows.SubworkflowsTest(None, True, "")
-        with pytest.raises(UserWarning) as excinfo:
-            meta_builder._check_inputs()
-        assert "Subworkflow name not provided and prompts deactivated." in str(excinfo.value)
-
-
-def test_subworkflows_test_no_installed_subworkflows(self):
-    """Test the check_inputs() function - raise UserWarning because installed modules were not found"""
-    with set_wd(self.nfcore_modules):
-        module_dir = Path(self.nfcore_modules, "subworkflows")
-        shutil.rmtree(module_dir)
-        module_dir.mkdir()
-        meta_builder = nf_core.subworkflows.SubworkflowsTest(None, False, "")
-        meta_builder.repo_type = "modules"
-        with pytest.raises(UserWarning) as excinfo:
-            meta_builder._check_inputs()
-        assert "No installed subworkflows were found" in str(excinfo.value)
diff --git a/tests/test_components.py b/tests/test_components.py
new file mode 100644
index 0000000000..b7f67eb51d
--- /dev/null
+++ b/tests/test_components.py
@@ -0,0 +1,52 @@
+"""Tests covering the components commands
+"""
+
+import os
+import shutil
+import tempfile
+import unittest
+from pathlib import Path
+
+from git.repo import Repo
+
+from .utils import GITLAB_NFTEST_BRANCH, GITLAB_URL
+
+
+class TestComponents(unittest.TestCase):
+    """Class for components tests"""
+
+    def setUp(self):
+        """Clone a testing version of the nf-core/modules repo"""
+        self.tmp_dir = Path(tempfile.mkdtemp())
+        self.nfcore_modules = Path(self.tmp_dir, "modules-test")
+
+        Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_NFTEST_BRANCH)
+
+        # Set $PROFILE environment variable to docker - tests will run with Docker
+        if os.environ.get("PROFILE") is None:
+            os.environ["PROFILE"] = "docker"
+
+    def tearDown(self):
+        """Clean up temporary files and folders"""
+
+        # Clean up temporary files
+        if self.tmp_dir.is_dir():
+            shutil.rmtree(self.tmp_dir)
+
+    ############################################
+    # Test of the individual components commands. #
+    ############################################
+
+    from .components.generate_snapshot import (  # type: ignore[misc]
+        test_generate_snapshot_module,
+        test_generate_snapshot_once,
+        test_generate_snapshot_subworkflow,
+        test_test_not_found,
+        test_unstable_snapshot,
+        test_update_snapshot_module,
+    )
+    from .components.snapshot_test import (  # type: ignore[misc]
+        test_components_test_check_inputs,
+        test_components_test_no_installed_modules,
+        test_components_test_no_name_no_prompts,
+    )
diff --git a/tests/test_download.py b/tests/test_download.py
index ee0744f660..7c9532e977 100644
--- a/tests/test_download.py
+++ b/tests/test_download.py
@@ -16,7 +16,7 @@
 import nf_core.utils
 from nf_core.download import ContainerError, DownloadWorkflow, WorkflowRepo
 from nf_core.synced_repo import SyncedRepo
-from nf_core.utils import NFCORE_CACHE_DIR, NFCORE_DIR, nextflow_cmd
+from nf_core.utils import NFCORE_CACHE_DIR, NFCORE_DIR, run_cmd
 
 from .utils import with_temporary_file, with_temporary_folder
 
@@ -156,25 +156,27 @@ def test_find_container_images_config_basic(self, tmp_path, mock_fetch_wf_config
     @mock.patch("nf_core.utils.fetch_wf_config")
     def test__find_container_images_config_nextflow(self, tmp_path, mock_fetch_wf_config):
         download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path)
-        nfconfig_raw = nextflow_cmd(
-            f"nextflow config -flat {Path(__file__).resolve().parent / 'data/mock_config_containers'}"
-        )
-        config = {}
-        for l in nfconfig_raw.splitlines():
-            ul = l.decode("utf-8")
-            try:
-                k, v = ul.split(" = ", 1)
-                config[k] = v.strip("'\"")
-            except ValueError:
-                pass
-        mock_fetch_wf_config.return_value = config
-        download_obj.find_container_images("workflow")
-        assert len(download_obj.containers) == 4
-        assert "nfcore/methylseq:1.0" in download_obj.containers
-        assert "nfcore/methylseq:1.4" in download_obj.containers
-        assert "nfcore/sarek:dev" in download_obj.containers
-        assert "https://depot.galaxyproject.org/singularity/r-shinyngs:1.7.1--r42hdfd78af_1" in download_obj.containers
-        # does not yet pick up nfcore/sarekvep:dev.${params.genome}, because that is no valid URL or Docker URI.
+        result = run_cmd("nextflow", f"config -flat {Path(__file__).resolve().parent / 'data/mock_config_containers'}")
+        if result is not None:
+            nfconfig_raw, _ = result
+            config = {}
+            for l in nfconfig_raw.splitlines():
+                ul = l.decode("utf-8")
+                try:
+                    k, v = ul.split(" = ", 1)
+                    config[k] = v.strip("'\"")
+                except ValueError:
+                    pass
+            mock_fetch_wf_config.return_value = config
+            download_obj.find_container_images("workflow")
+            assert len(download_obj.containers) == 4
+            assert "nfcore/methylseq:1.0" in download_obj.containers
+            assert "nfcore/methylseq:1.4" in download_obj.containers
+            assert "nfcore/sarek:dev" in download_obj.containers
+            assert (
+                "https://depot.galaxyproject.org/singularity/r-shinyngs:1.7.1--r42hdfd78af_1" in download_obj.containers
+            )
+            # does not yet pick up nfcore/sarekvep:dev.${params.genome}, because that is not a valid URL or Docker URI.
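As an aside, `nfcore/sarekvep:dev.${params.genome}` is skipped because the string still contains an unresolved Nextflow placeholder. A minimal sketch of such a check in Python (the helper name here is hypothetical and is not the actual `find_container_images` implementation):

    import re

    def is_concrete_container(container: str) -> bool:
        # Strings that still contain Nextflow interpolation, e.g.
        # "nfcore/sarekvep:dev.${params.genome}", cannot be pulled as-is.
        return re.search(r"\$\{[^}]*\}", container) is None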
# # Test for 'find_container_images' in modules @@ -262,6 +264,11 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p "hello-world", f"{tmp_dir}/hello-world.sif", None, "docker.io", mock_rich_progress ) + # Test successful pull with absolute URI (use tiny 3.5MB test container from the "Kogia" project: https://github.com/bschiffthaler/kogia) + download_obj.singularity_pull_image( + "docker.io/bschiffthaler/sed", f"{tmp_dir}/sed.sif", None, "docker.io", mock_rich_progress + ) + # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExists is raised before attempting to pull.) with pytest.raises(ContainerError.RegistryNotFound): download_obj.singularity_pull_image( @@ -288,6 +295,16 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p "a-container", f"{tmp_dir}/acontainer.sif", None, "ghcr.io", mock_rich_progress ) + # test Image not found for absolute URI. + with pytest.raises(ContainerError.ImageNotFound): + download_obj.singularity_pull_image( + "docker.io/bschiffthaler/nothingtopullhere", + f"{tmp_dir}/nothingtopullhere.sif", + None, + "docker.io", + mock_rich_progress, + ) + # Traffic from Github Actions to GitHub's Container Registry is unlimited, so no harm should be done here. with pytest.raises(ContainerError.InvalidTag): download_obj.singularity_pull_image( diff --git a/tests/test_launch.py b/tests/test_launch.py index d830311ba3..03c6a8b692 100644 --- a/tests/test_launch.py +++ b/tests/test_launch.py @@ -3,40 +3,35 @@ import json import os -import tempfile -import unittest -from unittest import mock +import shutil +from pathlib import Path +from unittest import TestCase, mock import pytest import nf_core.create import nf_core.launch -from .utils import with_temporary_file, with_temporary_folder +from .utils import create_tmp_pipeline, with_temporary_file, with_temporary_folder -class TestLaunch(unittest.TestCase): +class TestLaunch(TestCase): """Class for launch tests""" def setUp(self): """Create a new PipelineSchema and Launch objects""" - # Set up the schema - root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") - # cannot use a context manager here, since outside setUp the temporary - # file will never exists - self.tmp_dir = tempfile.mkdtemp() + self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() self.nf_params_fn = os.path.join(self.tmp_dir, "nf-params.json") - self.launcher = nf_core.launch.Launch(self.template_dir, params_out=self.nf_params_fn) + self.launcher = nf_core.launch.Launch(self.pipeline_dir, params_out=self.nf_params_fn) def tearDown(self): """Clean up temporary files and folders""" - if os.path.exists(self.nf_params_fn): - os.remove(self.nf_params_fn) + if Path(self.nf_params_fn).exists(): + Path(self.nf_params_fn).unlink() - if os.path.exists(self.tmp_dir): - os.rmdir(self.tmp_dir) + if Path(self.tmp_dir).exists(): + shutil.rmtree(self.tmp_dir) @mock.patch.object(nf_core.launch.Launch, "prompt_web_gui", side_effect=[True]) @mock.patch.object(nf_core.launch.Launch, "launch_web_gui") @@ -304,7 +299,7 @@ def test_build_command_empty(self): self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() self.launcher.build_command() - assert self.launcher.nextflow_cmd == f"nextflow run {self.template_dir}" + assert self.launcher.nextflow_cmd == f"nextflow run {self.pipeline_dir}" def 
test_build_command_nf(self): """Test the functionality to build a nextflow command - core nf customised""" @@ -313,7 +308,7 @@ def test_build_command_nf(self): self.launcher.nxf_flags["-name"] = "Test_Workflow" self.launcher.nxf_flags["-resume"] = True self.launcher.build_command() - assert self.launcher.nextflow_cmd == f'nextflow run {self.template_dir} -name "Test_Workflow" -resume' + assert self.launcher.nextflow_cmd == f'nextflow run {self.pipeline_dir} -name "Test_Workflow" -resume' def test_build_command_params(self): """Test the functionality to build a nextflow command - params supplied""" @@ -323,7 +318,7 @@ def test_build_command_params(self): # Check command assert ( self.launcher.nextflow_cmd - == f'nextflow run {self.template_dir} -params-file "{os.path.relpath(self.nf_params_fn)}"' + == f'nextflow run {self.pipeline_dir} -params-file "{os.path.relpath(self.nf_params_fn)}"' ) # Check saved parameters file with open(self.nf_params_fn, "r") as fh: @@ -340,4 +335,4 @@ def test_build_command_params_cl(self): self.launcher.get_pipeline_schema() self.launcher.schema_obj.input_params.update({"input": "custom_input"}) self.launcher.build_command() - assert self.launcher.nextflow_cmd == f'nextflow run {self.template_dir} --input "custom_input"' + assert self.launcher.nextflow_cmd == f'nextflow run {self.pipeline_dir} --input "custom_input"' diff --git a/tests/test_lint.py b/tests/test_lint.py index e4e93bd1f4..b2e7f3b574 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -30,6 +30,7 @@ def setUp(self): "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir, plain=True ) self.create_obj.init_pipeline() + # Base lint object on this directory self.lint_obj = nf_core.lint.PipelineLint(self.test_pipeline_dir) @@ -178,43 +179,52 @@ def test_sphinx_md_files(self): ####################### # SPECIFIC LINT TESTS # ####################### - from .lint.actions_awsfulltest import ( + from .lint.actions_awsfulltest import ( # type: ignore[misc] test_actions_awsfulltest_fail, test_actions_awsfulltest_pass, test_actions_awsfulltest_warn, ) - from .lint.actions_awstest import ( + from .lint.actions_awstest import ( # type: ignore[misc] test_actions_awstest_fail, test_actions_awstest_pass, ) - from .lint.actions_ci import ( + from .lint.actions_ci import ( # type: ignore[misc] test_actions_ci_fail_wrong_nf, test_actions_ci_fail_wrong_trigger, test_actions_ci_pass, ) - from .lint.actions_schema_validation import ( + from .lint.actions_schema_validation import ( # type: ignore[misc] test_actions_schema_validation_fails_for_additional_property, test_actions_schema_validation_missing_jobs, test_actions_schema_validation_missing_on, ) - from .lint.files_exist import ( + from .lint.files_exist import ( # type: ignore[misc] test_files_exist_depreciated_file, test_files_exist_missing_config, test_files_exist_missing_main, test_files_exist_pass, ) - from .lint.files_unchanged import ( + from .lint.files_unchanged import ( # type: ignore[misc] test_files_unchanged_fail, test_files_unchanged_pass, ) - from .lint.merge_markers import test_merge_markers_found - from .lint.modules_json import test_modules_json_pass - from .lint.nextflow_config import ( + from .lint.merge_markers import test_merge_markers_found # type: ignore[misc] + from .lint.modules_json import test_modules_json_pass # type: ignore[misc] + from .lint.multiqc_config import ( # type: ignore[misc] + test_multiqc_config_exists_ignore, + test_multiqc_config_missing_report_section_order, + 
test_multiqc_config_report_comment_fail, + test_multiqc_config_report_comment_release_fail, + test_multiqc_config_report_comment_release_succeed, + test_multiqc_incorrect_export_plots, + ) + from .lint.nextflow_config import ( # type: ignore[misc] test_nextflow_config_bad_name_fail, test_nextflow_config_dev_in_release_mode_failed, test_nextflow_config_example_pass, + test_nextflow_config_missing_test_profile_failed, ) - from .lint.version_consistency import test_version_consistency + from .lint.version_consistency import test_version_consistency # type: ignore[misc] # TODO nf-core: Assess and strip out if no longer required for DSL2 diff --git a/tests/test_list.py b/tests/test_list.py index 70af3fada5..c1f51e03e0 100644 --- a/tests/test_list.py +++ b/tests/test_list.py @@ -65,7 +65,7 @@ def test_local_workflows_and_fail(self): """Test the local workflow class and try to get local Nextflow workflow information""" loc_wf = nf_core.list.LocalWorkflow("myWF") - with pytest.raises(AssertionError): + with pytest.raises(RuntimeError): loc_wf.get_local_nf_workflow_details() def test_local_workflows_compare_and_fail_silently(self): diff --git a/tests/test_modules.py b/tests/test_modules.py index 047369b7c3..92c8dfda3f 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -3,11 +3,12 @@ import os import shutil -import tempfile import unittest +from pathlib import Path import requests_cache import responses +import yaml import nf_core.create import nf_core.modules @@ -19,6 +20,7 @@ GITLAB_URL, OLD_TRIMGALORE_BRANCH, OLD_TRIMGALORE_SHA, + create_tmp_pipeline, mock_anaconda_api_calls, mock_biocontainers_api_calls, ) @@ -27,15 +29,12 @@ def create_modules_repo_dummy(tmp_dir): """Create a dummy copy of the nf-core/modules repo""" - root_dir = os.path.join(tmp_dir, "modules") - os.makedirs(os.path.join(root_dir, "modules", "nf-core")) - os.makedirs(os.path.join(root_dir, "tests", "modules", "nf-core")) - os.makedirs(os.path.join(root_dir, "tests", "config")) - with open(os.path.join(root_dir, "tests", "config", "pytest_modules.yml"), "w") as fh: - fh.writelines(["test:", "\n - modules/test/**", "\n - tests/modules/test/**"]) - with open(os.path.join(root_dir, ".nf-core.yml"), "w") as fh: + root_dir = Path(tmp_dir, "modules") + Path(root_dir, "modules", "nf-core").mkdir(parents=True) + Path(root_dir, "tests", "modules", "nf-core").mkdir(parents=True) + Path(root_dir, "tests", "config").mkdir(parents=True) + with open(Path(root_dir, ".nf-core.yml"), "w") as fh: fh.writelines(["repository_type: modules", "\n", "org_path: nf-core", "\n"]) - # mock biocontainers and anaconda response with responses.RequestsMock() as rsps: mock_anaconda_api_calls(rsps, "bpipe", "0.9.11--hdfd78af_0") @@ -46,15 +45,34 @@ def create_modules_repo_dummy(tmp_dir): module_create.create() # Remove doi from meta.yml which makes lint fail - meta_yml = os.path.join(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml") - with open(meta_yml, "r") as fh: - lines = fh.readlines() - for line_index in range(len(lines)): - if "doi" in lines[line_index]: - to_pop = line_index - lines.pop(to_pop) - with open(meta_yml, "w") as fh: - fh.writelines(lines) + meta_yml_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml") + + with open(meta_yml_path, "r") as fh: + meta_yml = yaml.safe_load(fh) + del meta_yml["tools"][0]["bpipe"]["doi"] + with open(meta_yml_path, "w") as fh: + yaml.dump(meta_yml, fh) + # Add dummy content to main.nf.test.snap + test_snap_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", 
"tests", "main.nf.test.snap") + test_snap_path.touch() + with open(test_snap_path, "w") as fh: + fh.write('{\n "my test": {}\n}') + + # remove "TODO" statements from main.nf + main_nf_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "main.nf") + with open(main_nf_path, "r") as fh: + main_nf = fh.read() + main_nf = main_nf.replace("TODO", "") + with open(main_nf_path, "w") as fh: + fh.write(main_nf) + + # remove "TODO" statements from main.nf.test + main_nf_test_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test") + with open(main_nf_test_path, "r") as fh: + main_nf_test = fh.read() + main_nf_test = main_nf_test.replace("TODO", "") + with open(main_nf_test_path, "w") as fh: + fh.write(main_nf_test) return root_dir @@ -64,17 +82,10 @@ class TestModules(unittest.TestCase): def setUp(self): """Create a new PipelineSchema and Launch objects""" - self.tmp_dir = tempfile.mkdtemp() self.component_type = "modules" # Set up the schema - root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") - self.pipeline_name = "mypipeline" - self.pipeline_dir = os.path.join(self.tmp_dir, self.pipeline_name) - nf_core.create.PipelineCreate( - self.pipeline_name, "it is mine", "me", no_git=True, outdir=self.pipeline_dir, plain=True - ).init_pipeline() + self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() # Set up install objects self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True) self.mods_install_old = nf_core.modules.ModuleInstall( @@ -135,32 +146,27 @@ def test_modulesrepo_class(self): # Test of the individual modules commands. # ############################################ - from .modules.bump_versions import ( + from .modules.bump_versions import ( # type: ignore[misc] test_modules_bump_versions_all_modules, test_modules_bump_versions_fail, test_modules_bump_versions_fail_unknown_version, test_modules_bump_versions_single_module, ) - from .modules.create import ( + from .modules.create import ( # type: ignore[misc] test_modules_create_fail_exists, test_modules_create_nfcore_modules, test_modules_create_nfcore_modules_subtool, test_modules_create_succeed, + test_modules_migrate, + test_modules_migrate_no_delete, ) - from .modules.create_test_yml import ( - test_modules_create_test_yml_check_inputs, - test_modules_create_test_yml_entry_points, - test_modules_create_test_yml_get_md5, - test_modules_custom_yml_dumper, - test_modules_test_file_dict, - ) - from .modules.info import ( + from .modules.info import ( # type: ignore[misc] test_modules_info_in_modules_repo, test_modules_info_local, test_modules_info_remote, test_modules_info_remote_gitlab, ) - from .modules.install import ( + from .modules.install import ( # type: ignore[misc] test_modules_install_alternate_remote, test_modules_install_different_branch_fail, test_modules_install_different_branch_succeed, @@ -172,24 +178,45 @@ def test_modulesrepo_class(self): test_modules_install_trimgalore, test_modules_install_trimgalore_twice, ) - from .modules.lint import ( + from .modules.lint import ( # type: ignore[misc] + test_modules_environment_yml_file_doesnt_exists, + test_modules_environment_yml_file_name_mismatch, + test_modules_environment_yml_file_not_array, + test_modules_environment_yml_file_sorted_correctly, + test_modules_environment_yml_file_sorted_incorrectly, + test_modules_incorrect_tags_yml_key, + 
test_modules_incorrect_tags_yml_values, test_modules_lint_check_process_labels, + test_modules_lint_check_url, test_modules_lint_empty, test_modules_lint_gitlab_modules, test_modules_lint_multiple_remotes, test_modules_lint_new_modules, test_modules_lint_no_gitlab, test_modules_lint_patched_modules, + test_modules_lint_snapshot_file, + test_modules_lint_snapshot_file_missing_fail, + test_modules_lint_snapshot_file_not_needed, test_modules_lint_trimgalore, + test_modules_meta_yml_incorrect_licence_field, + test_modules_meta_yml_incorrect_name, + test_modules_meta_yml_input_mismatch, + test_modules_meta_yml_output_mismatch, + test_modules_missing_required_tag, + test_modules_missing_tags_yml, + test_modules_missing_test_dir, + test_modules_missing_test_main_nf, + test_modules_unused_pytest_files, + test_nftest_failing_linting, ) - from .modules.list import ( + from .modules.list import ( # type: ignore[misc] test_modules_install_and_list_pipeline, test_modules_install_gitlab_and_list_pipeline, test_modules_list_pipeline, test_modules_list_remote, test_modules_list_remote_gitlab, ) - from .modules.modules_json import ( + from .modules.modules_json import ( # type: ignore[misc] test_get_modules_json, test_mod_json_create, test_mod_json_create_with_patch, @@ -204,12 +231,7 @@ def test_modulesrepo_class(self): test_mod_json_with_empty_modules_value, test_mod_json_with_missing_modules_entry, ) - from .modules.modules_test import ( - test_modules_test_check_inputs, - test_modules_test_no_installed_modules, - test_modules_test_no_name_no_prompts, - ) - from .modules.patch import ( + from .modules.patch import ( # type: ignore[misc] test_create_patch_change, test_create_patch_no_change, test_create_patch_try_apply_failed, @@ -218,12 +240,12 @@ def test_modulesrepo_class(self): test_create_patch_update_success, test_remove_patch, ) - from .modules.remove import ( + from .modules.remove import ( # type: ignore[misc] test_modules_remove_multiqc_from_gitlab, test_modules_remove_trimgalore, test_modules_remove_trimgalore_uninstalled, ) - from .modules.update import ( + from .modules.update import ( # type: ignore[misc] test_install_and_update, test_install_at_hash_and_update, test_install_at_hash_and_update_and_save_diff_to_file, diff --git a/tests/test_schema.py b/tests/test_schema.py index d3b4fda817..105cd9473e 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -47,7 +47,7 @@ def test_load_lint_schema(self): def test_load_lint_schema_nofile(self): """Check that linting raises properly if a non-existant file is given""" - with pytest.raises(AssertionError): + with pytest.raises(RuntimeError): self.schema_obj.get_schema_path("fake_file") def test_load_lint_schema_notjson(self): diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 1c290cb882..19872ee168 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -3,10 +3,8 @@ import os import shutil -import tempfile import unittest - -import responses +from pathlib import Path import nf_core.create import nf_core.modules @@ -17,28 +15,30 @@ GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL, OLD_SUBWORKFLOWS_SHA, + create_tmp_pipeline, ) def create_modules_repo_dummy(tmp_dir): """Create a dummy copy of the nf-core/modules repo""" - root_dir = os.path.join(tmp_dir, "modules") - os.makedirs(os.path.join(root_dir, "modules")) - os.makedirs(os.path.join(root_dir, "subworkflows")) - os.makedirs(os.path.join(root_dir, "subworkflows", "nf-core")) - os.makedirs(os.path.join(root_dir, "tests", "modules")) - 
os.makedirs(os.path.join(root_dir, "tests", "subworkflows")) - os.makedirs(os.path.join(root_dir, "tests", "config")) - with open(os.path.join(root_dir, "tests", "config", "pytest_modules.yml"), "w") as fh: - fh.writelines(["test:", "\n - modules/test/**", "\n - tests/modules/test/**"]) - with open(os.path.join(root_dir, ".nf-core.yml"), "w") as fh: + root_dir = Path(tmp_dir, "modules") + Path(root_dir, "modules").mkdir(parents=True, exist_ok=True) + Path(root_dir, "subworkflows").mkdir(parents=True, exist_ok=True) + Path(root_dir, "subworkflows", "nf-core").mkdir(parents=True, exist_ok=True) + Path(root_dir, "tests", "config").mkdir(parents=True, exist_ok=True) + with open(Path(root_dir, ".nf-core.yml"), "w") as fh: fh.writelines(["repository_type: modules", "\n", "org_path: nf-core", "\n"]) - # TODO Add a mock here subworkflow_create = nf_core.subworkflows.SubworkflowCreate(root_dir, "test_subworkflow", "@author", True) subworkflow_create.create() + # Add dummy content to main.nf.test.snap + test_snap_path = Path(root_dir, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap") + test_snap_path.touch() + with open(test_snap_path, "w") as fh: + fh.write('{\n "my test": {}\n}') + return root_dir @@ -47,18 +47,10 @@ class TestSubworkflows(unittest.TestCase): def setUp(self): """Create a new PipelineStructure and Launch objects""" - self.tmp_dir = tempfile.mkdtemp() self.component_type = "subworkflows" # Set up the pipeline structure - root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") - self.pipeline_name = "mypipeline" - self.pipeline_dir = os.path.join(self.tmp_dir, self.pipeline_name) - nf_core.create.PipelineCreate( - self.pipeline_name, "it is mine", "me", no_git=True, outdir=self.pipeline_dir, plain=True - ).init_pipeline() - + self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() # Set up the nf-core/modules repo dummy self.nfcore_modules = create_modules_repo_dummy(self.tmp_dir) @@ -101,25 +93,20 @@ def tearDown(self): # Test of the individual subworkflow commands. 
# ################################################ - from .subworkflows.create import ( + from .subworkflows.create import ( # type: ignore[misc] test_subworkflows_create_fail_exists, test_subworkflows_create_nfcore_modules, test_subworkflows_create_succeed, + test_subworkflows_migrate, + test_subworkflows_migrate_no_delete, ) - from .subworkflows.create_test_yml import ( - test_subworkflows_create_test_yml_check_inputs, - test_subworkflows_create_test_yml_entry_points, - test_subworkflows_create_test_yml_get_md5, - test_subworkflows_custom_yml_dumper, - test_subworkflows_test_file_dict, - ) - from .subworkflows.info import ( + from .subworkflows.info import ( # type: ignore[misc] test_subworkflows_info_in_modules_repo, test_subworkflows_info_local, test_subworkflows_info_remote, test_subworkflows_info_remote_gitlab, ) - from .subworkflows.install import ( + from .subworkflows.install import ( # type: ignore[misc] test_subworkflow_install_nopipeline, test_subworkflows_install_alternate_remote, test_subworkflows_install_bam_sort_stats_samtools, @@ -132,24 +119,30 @@ def tearDown(self): test_subworkflows_install_tracking_added_already_installed, test_subworkflows_install_tracking_added_super_subworkflow, ) - from .subworkflows.list import ( + from .subworkflows.lint import ( # type: ignore[misc] + test_subworkflows_lint, + test_subworkflows_lint_empty, + test_subworkflows_lint_gitlab_subworkflows, + test_subworkflows_lint_multiple_remotes, + test_subworkflows_lint_new_subworkflow, + test_subworkflows_lint_no_gitlab, + test_subworkflows_lint_snapshot_file, + test_subworkflows_lint_snapshot_file_missing_fail, + test_subworkflows_lint_snapshot_file_not_needed, + ) + from .subworkflows.list import ( # type: ignore[misc] test_subworkflows_install_and_list_subworkflows, test_subworkflows_install_gitlab_and_list_subworkflows, test_subworkflows_list_remote, test_subworkflows_list_remote_gitlab, ) - from .subworkflows.remove import ( + from .subworkflows.remove import ( # type: ignore[misc] test_subworkflows_remove_included_subworkflow, test_subworkflows_remove_one_of_two_subworkflow, test_subworkflows_remove_subworkflow, test_subworkflows_remove_subworkflow_keep_installed_module, ) - from .subworkflows.subworkflows_test import ( - test_subworkflows_test_check_inputs, - test_subworkflows_test_no_installed_subworkflows, - test_subworkflows_test_no_name_no_prompts, - ) - from .subworkflows.update import ( + from .subworkflows.update import ( # type: ignore[misc] test_install_and_update, test_install_at_hash_and_update, test_install_at_hash_and_update_and_save_diff_to_file, diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index c4e3d49ae0..154a31fca6 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -3,7 +3,7 @@ import pytest -from .utils import set_wd, with_temporary_file, with_temporary_folder +from .utils import with_temporary_file, with_temporary_folder def test_with_temporary_file(): @@ -30,20 +30,3 @@ def tmp_folder_exists(tmp_folder): def test_tmp_folder_does_not_exist_after(): tmp_folder = with_temporary_folder(lambda x: x)() assert not Path(tmp_folder).exists() - - -def test_set_wd(): - with tempfile.TemporaryDirectory() as tmpdirname: - with set_wd(tmpdirname): - context_wd = Path().resolve() - assert context_wd == Path(tmpdirname).resolve() - assert context_wd != Path().resolve() - - -def test_set_wd_revert_on_raise(): - wd_before_context = Path().resolve() - with tempfile.TemporaryDirectory() as tmpdirname: - with pytest.raises(Exception): - with 
set_wd(tmpdirname): - raise Exception - assert wd_before_context == Path().resolve() diff --git a/tests/test_utils.py b/tests/test_utils.py index 2ab5b64bfc..90d1886dbd 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -175,13 +175,13 @@ def test_get_repo_releases_branches_nf_core(self): def test_get_repo_releases_branches_not_nf_core(self): wfs = nf_core.list.Workflows() wfs.get_remote_workflows() - pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches("ewels/MultiQC", wfs) + pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches("MultiQC/MultiQC", wfs) for r in wf_releases: if r.get("tag_name") == "v1.10": break else: raise AssertionError("MultiQC release v1.10 not found") - assert "master" in wf_branches.keys() + assert "main" in wf_branches.keys() def test_get_repo_releases_branches_not_exists(self): wfs = nf_core.list.Workflows() @@ -207,3 +207,34 @@ def test_validate_file_md5(): nf_core.utils.validate_file_md5(test_file, different_md5) with pytest.raises(ValueError): nf_core.utils.validate_file_md5(test_file, non_hex_string) + + +def test_nested_setitem(): + d = {"a": {"b": {"c": "value"}}} + nf_core.utils.nested_setitem(d, ["a", "b", "c"], "value new") + assert d["a"]["b"]["c"] == "value new" + assert d == {"a": {"b": {"c": "value new"}}} + + +def test_nested_delitem(): + d = {"a": {"b": {"c": "value"}}} + nf_core.utils.nested_delitem(d, ["a", "b", "c"]) + assert "c" not in d["a"]["b"] + assert d == {"a": {"b": {}}} + + +def test_set_wd(): + with tempfile.TemporaryDirectory() as tmpdirname: + with nf_core.utils.set_wd(tmpdirname): + context_wd = Path().resolve() + assert context_wd == Path(tmpdirname).resolve() + assert context_wd != Path().resolve() + + +def test_set_wd_revert_on_raise(): + wd_before_context = Path().resolve() + with tempfile.TemporaryDirectory() as tmpdirname: + with pytest.raises(Exception): + with nf_core.utils.set_wd(tmpdirname): + raise Exception + assert wd_before_context == Path().resolve() diff --git a/tests/utils.py b/tests/utils.py index d39d172a66..198ac3d583 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -7,9 +7,11 @@ import tempfile from contextlib import contextmanager from pathlib import Path +from typing import Any, Callable, Generator, Tuple import responses +import nf_core.create import nf_core.modules OLD_TRIMGALORE_SHA = "9b7a3bdefeaad5d42324aa7dd50f87bea1b04386" @@ -25,9 +27,10 @@ GITLAB_BRANCH_ORG_PATH_BRANCH = "org-path" GITLAB_BRANCH_TEST_OLD_SHA = "e772abc22c1ff26afdf377845c323172fb3c19ca" GITLAB_BRANCH_TEST_NEW_SHA = "7d73e21f30041297ea44367f2b4fd4e045c0b991" +GITLAB_NFTEST_BRANCH = "nf-test-tests" -def with_temporary_folder(func): +def with_temporary_folder(func: Callable[..., Any]) -> Callable[..., Any]: """ Call the decorated function under the tempfile.TemporaryDirectory context manager. Pass the temporary directory name to the decorated @@ -35,46 +38,28 @@ def with_temporary_folder(func): """ @functools.wraps(func) - def wrapper(*args, **kwargs): + def wrapper(*args: Any, **kwargs: Any) -> Any: with tempfile.TemporaryDirectory() as tmpdirname: return func(*args, tmpdirname, **kwargs) return wrapper -def with_temporary_file(func): +def with_temporary_file(func: Callable[..., Any]) -> Callable[..., Any]: """ Call the decorated function under the tempfile.NamedTemporaryFile context manager. 
Pass the opened file handle to the decorated function
    """
 
     @functools.wraps(func)
-    def wrapper(*args, **kwargs):
+    def wrapper(*args: Any, **kwargs: Any) -> Any:
         with tempfile.NamedTemporaryFile() as tmpfile:
             return func(*args, tmpfile, **kwargs)
 
     return wrapper
 
 
-@contextmanager
-def set_wd(path: Path):
-    """Sets the working directory for this context.
-
-    Arguments
-    ---------
-
-    path : Path
-        Path to the working directory to be used iside this context.
-    """
-    start_wd = Path().absolute()
-    os.chdir(path)
-    try:
-        yield
-    finally:
-        os.chdir(start_wd)
-
-
-def mock_anaconda_api_calls(rsps: responses.RequestsMock, module, version):
+def mock_anaconda_api_calls(rsps: responses.RequestsMock, module: str, version: str) -> None:
     """Mock anaconda api calls for module"""
     anaconda_api_url = f"https://api.anaconda.org/package/bioconda/{module}"
     anaconda_mock = {
@@ -83,12 +68,12 @@ def mock_anaconda_api_calls(rsps: responses.RequestsMock, module, version):
         "doc_url": "http://test",
         "dev_url": "http://test",
         "files": [{"version": version.split("--")[0]}],
-        "license": "",
+        "license": "MIT",
     }
     rsps.get(anaconda_api_url, json=anaconda_mock, status=200)
 
 
-def mock_biocontainers_api_calls(rsps: responses.RequestsMock, module, version):
+def mock_biocontainers_api_calls(rsps: responses.RequestsMock, module: str, version: str) -> None:
     """Mock biocontainers api calls for module"""
     biocontainers_api_url = (
         f"https://api.biocontainers.pro/ga4gh/trs/v2/tools/{module}/versions/{module}-{version.split('--')[0]}"
     )
@@ -108,3 +93,20 @@ def mock_biocontainers_api_calls(rsps: responses.RequestsMock, module, version):
             ],
     }
     rsps.get(biocontainers_api_url, json=biocontainers_mock, status=200)
+
+
+def create_tmp_pipeline() -> Tuple[str, str, str, str]:
+    """Create a new pipeline for testing"""
+
+    tmp_dir = tempfile.mkdtemp()
+    root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+    template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template")
+    pipeline_name = "mypipeline"
+    pipeline_dir = os.path.join(tmp_dir, pipeline_name)
+
+    nf_core.create.PipelineCreate(
+        pipeline_name, "it is mine", "me", no_git=True, outdir=pipeline_dir, plain=True
+    ).init_pipeline()
+
+    # Return the paths so that callers (typically setUp methods) can store them as instance variables
+    return tmp_dir, template_dir, pipeline_name, pipeline_dir
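For reference, a minimal usage sketch of `create_tmp_pipeline`, mirroring the `setUp`/`tearDown` pattern used by the test classes in this changeset (the class name here is hypothetical):

    import shutil
    import unittest

    from tests.utils import create_tmp_pipeline


    class TestExample(unittest.TestCase):
        def setUp(self):
            # Unpack the helper's return values into instance variables
            self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline()

        def tearDown(self):
            # The helper does not clean up after itself, so remove the temp dir here
            shutil.rmtree(self.tmp_dir, ignore_errors=True)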