diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index f2e172926e..6f816f9735 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -38,7 +38,7 @@ The HTML will then be generated in `docs/api/_build/html`. ## Tests -When you create a pull request with changes, [Travis CI](https://travis-ci.org/) will run automatic tests. +When you create a pull request with changes, [Travis CI](https://travis-ci.com/) will run automatic tests. Typically, pull-requests are only fully reviewed when these tests are passing, though of course we can help out before then. There are two types of tests that run: diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 487fdf62e5..9ad9e59a74 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,4 +1,4 @@ -Many thanks to contributing to nf-core/tools! +Many thanks for contributing to nf-core/tools! Please fill in the appropriate checklist below (delete whatever is not relevant). These are the most common things requested on pull requests (PRs). 
diff --git a/.github/markdownlint.yml b/.github/markdownlint.yml index 9e84d0e784..c6b3f58f08 100644 --- a/.github/markdownlint.yml +++ b/.github/markdownlint.yml @@ -1,10 +1,7 @@ # Markdownlint configuration file default: true, line-length: false -no-multiple-blanks: 0 -blanks-around-headers: false -blanks-around-lists: false -header-increment: false no-duplicate-header: siblings_only: true no-bare-urls: false # tools only - the {{ jinja variables }} break URLs and cause this to error +commands-show-output: false # tools only - suppresses error messages for usage of $ in main README diff --git a/.travis.yml b/.travis.yml index 70d4a9547d..5508abc91b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,7 +2,6 @@ sudo: required language: python jdk: openjdk8 python: - - '2.7' - '3.5' - '3.6' - '3.7' @@ -42,7 +41,7 @@ jobs: include: - stage: docs generation if: (branch = "master" OR branch = "dev") AND type = push AND repo = nf-core/tools - script: bash ./bin/push.sh + script: bash ./bin/build_api_docs.sh deploy: provider: pypi @@ -57,4 +56,4 @@ deploy: # Sync pipelines with possible nf-core template changes after_deploy: - - ./bin/sync + - nf-core sync --all diff --git a/CHANGELOG.md b/CHANGELOG.md index cf38320a15..e2e334c0e1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,77 @@ # nf-core/tools: Changelog -## v1.7 +## v1.8 -### PyPI package description +### Continuous integration -* The readme should now be rendered properly on PyPI. +* GitHub Actions CI workflows are now included in the template pipeline + * Please update these files to match the existing tests that you have in `.travis.yml` +* Travis CI tests will be deprecated from the next `tools` release +* Linting will generate a warning if GitHub Actions workflows do not exist and, if applicable, a warning to remove the Travis CI workflow file, i.e. `.travis.yml`.
+ +### Tools helper code + +* Refactored the template synchronisation code to be part of the main nf-core tool +* `nf-core bump-version` now also bumps the version string of the exported conda environment in the Dockerfile +* Updated Blacklist of synced pipelines +* Ignore pre-releases in `nf-core list` +* Updated documentation for `nf-core download` +* Fixed typo in `nf-core launch` final command +* Handle missing pipeline descriptions in `nf-core list` + +### Linting + +* Adjusted linting to allow `patch` branches to be tested +* Warn if GitHub Actions workflows do not exist, warn if `.travis.yml` and circleCI are there +* Lint for `Singularity` file and raise error if found [#458](https://github.com/nf-core/tools/issues/458) +* Added linting of GitHub Actions workflows `linting.yml`, `ci.yml` and `branch.yml` +* Warn if pipeline name contains upper case letters or non alphabetical characters [#85](https://github.com/nf-core/tools/issues/85) +* Make CI tests of lint code pass for releases + +### Template pipeline + +* Fixed incorrect paths in iGenomes config as described in issue [#418](https://github.com/nf-core/tools/issues/418) +* Fixed incorrect usage of non-existent parameter in the template [#446](https://github.com/nf-core/tools/issues/446) +* Add UCSC genomes to `igenomes.config` and add paths to all genome indices +* Change `maxMultiqcEmailFileSize` parameter to `max_multiqc_email_size` +* Export conda environment in Docker file [#349](https://github.com/nf-core/tools/issues/349) +* Change remaining parameters from `camelCase` to `snake_case` [#39](https://github.com/nf-core/hic/issues/39) + * `--singleEnd` to `--single_end` + * `--igenomesIgnore` to `--igenomes_ignore` + * Having the old camelCase versions of these will now throw an error +* Add `autoMounts=true` to default singularity profile +* Add in `markdownlint` checks that were being ignored by default +* Disable ansi logging in the travis CI tests +* Move `params` section from `base.config`
to `nextflow.config` +* Use `env` scope to export `PYTHONNOUSERSITE` in `nextflow.config` to prevent conflicts with host Python environment +* Bump minimum Nextflow version to `19.10.0` - required to properly use `env` scope in `nextflow.config` +* Added support for nf-tower in the travis tests, using public mailbox nf-core@mailinator.com +* Add link to [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and [Semantic Versioning](http://semver.org/spec/v2.0.0.html) to CHANGELOG +* Adjusted `.travis.yml` checks to allow for `patch` branches to be tested +* Add Python 3.7 dependency to the `environment.yml` file +* Remove `awsbatch` profile cf [nf-core/configs#71](https://github.com/nf-core/configs/pull/71) +* Make `scrape_software_versions.py` compatible with Python3 to enable miniconda3 in [base image PR](https://github.com/nf-core/tools/pull/462) +* Add GitHub Actions workflows and respective linting +* Add `NXF_ANSI_LOG` as global environment variable to template GitHub Actions CI workflow +* Fixed global environment variable in GitHub Actions CI workflow +* Add `--awscli` parameter +* Add `README.txt` path for genomes in `igenomes.config` [nf-core/atacseq#75](https://github.com/nf-core/atacseq/issues/75) +* Fix buggy ANSI codes in pipeline summary log messages +* Add a `TODO` line in the new GitHub Actions CI test files + +### Base Docker image + +* Use miniconda3 instead of miniconda for a Python 3k base environment + * If you still need Python 2 for your pipeline, add `conda-forge::python=2.7.4` to the dependencies in your `environment.yml` +* Update conda version to 4.7.12 + +### Other + +* Updated Base Dockerfile to Conda 4.7.10 +* Entirely switched from Travis-Ci.org to Travis-Ci.com for template and tools +* Improved core documentation (`-profile`) + +## v1.7 ### Tools helper code @@ -17,6 +84,7 @@ * When listing pipelines, a nicer message is given for the rare case of a detached `HEAD` ref in a locally pulled pipeline. 
[#297](https://github.com/nf-core/tools/issues/297) * The `download` command can now compress files into a single archive. * `nf-core create` now fetches a logo for the pipeline from the nf-core website +* The readme should now be rendered properly on PyPI. ### Syncing @@ -29,7 +97,7 @@ * If the container slug does not contain the nf-core organisation (for example during development on a fork), linting will raise a warning, and an error with release mode on -### Template +### Template pipeline * Add new code for Travis CI to allow PRs from patch branches too * Fix small typo in central readme of tools for future releases @@ -46,6 +114,7 @@ an AWS S3 bucket as the `--outdir`. * Fix workflow.onComplete() message when finishing pipeline * Update URL for joining the nf-core slack to https://nf-co.re/join/slack +* Add GitHub Action for CI and Linting * [Increased default time limit](https://github.com/nf-core/tools/issues/370) to 4h * Add direct link to the pipeline slack channel in the contribution guidelines * Add contributions and support heading with links to contribution guidelines and link to the pipeline slack channel in the main README @@ -217,7 +286,7 @@ Very large release containing lots of work from the first nf-core hackathon, hel * New pipelines are now created using the command `nf-core create` * The nf-core template and associated linting are now controlled under the same version system * Large number of template updates and associated linting changes - * New simplified cookicutter variable usage + * New simplified cookiecutter variable usage * Refactored documentation - simplified and reduced duplication * Better `manifest` variables instead of `params` for pipeline name and version * New integrated nextflow version checking diff --git a/Dockerfile b/Dockerfile index 7e3babe4bb..f8b138e9ed 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ -FROM continuumio/miniconda:4.6.14 +FROM continuumio/miniconda3:4.7.12 LABEL 
authors="phil.ewels@scilifelab.se,alexander.peltzer@qbic.uni-tuebingen.de" \ description="Docker image containing base requirements for the nfcore pipelines" # Install procps so that Nextflow can poll CPU usage -RUN apt-get update && apt-get install -y procps && apt-get clean -y \ No newline at end of file +RUN apt-get update && apt-get install -y procps && apt-get clean -y diff --git a/README.md b/README.md index 4403afcce3..33ec8dd467 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # ![nf-core/tools](docs/images/nfcore-tools_logo.png) -[![Build Status](https://travis-ci.org/nf-core/tools.svg?branch=master)](https://travis-ci.org/nf-core/tools) +[![Build Status](https://travis-ci.com/nf-core/tools.svg?branch=master)](https://travis-ci.com/nf-core/tools) [![codecov](https://codecov.io/gh/nf-core/tools/branch/master/graph/badge.svg)](https://codecov.io/gh/nf-core/tools) [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg?style=flat-square)](http://bioconda.github.io/recipes/nf-core/README.html) @@ -16,7 +16,7 @@ A python package with helper tools for the nf-core community. * [`nf-core create` - Create a new workflow from the nf-core template](#creating-a-new-workflow) * [`nf-core lint` - Check pipeline code against nf-core guidelines](#linting-a-workflow) * [`nf-core bump-version` - Update nf-core pipeline version number](#bumping-a-pipeline-version-number) - +* [`nf-core sync` - Synchronise pipeline TEMPLATE branches](#sync-a-pipeline-with-the-template) The nf-core tools package is written in Python and can be imported and used within other packages. For documentation of the internal Python functions, please refer to the [Tools Python API docs](https://nf-co.re/tools-docs/). @@ -56,6 +56,7 @@ pip install -e . 
``` ## Listing pipelines + The command `nf-core list` shows all available nf-core pipelines along with their latest version, when that was published and how recently the pipeline code was pulled to your local system (if at all). An example of the output from the command is as follows: @@ -139,6 +140,7 @@ nf-core/mag 8 dev - - Finally, to return machine-readable JSON output, use the `--json` flag. ## Launch a pipeline + Some nextflow pipelines have a considerable number of command line flags that can be used. To help with this, the `nf-core launch` command uses an interactive command-line wizard tool to prompt you for values for running nextflow and the pipeline parameters. @@ -190,7 +192,7 @@ Specify the location of your input FastQ files. [..truncated..] Nextflow command: - nextflow run nf-core/rnaseq -profile "docker" -name "test_run" -r "1.3" --params-file "/Users/ewels/testing/nfx-params.json" + nextflow run nf-core/rnaseq -profile "docker" -name "test_run" -r "1.3" -params-file "/Users/ewels/testing/nfx-params.json" Do you want to run this command now? [y/N]: y @@ -202,16 +204,19 @@ Launching `nf-core/rnaseq` [evil_engelbart] - revision: 37f260d360 [master] [..truncated..] ``` - ## Downloading pipelines for offline use + Sometimes you may need to run an nf-core pipeline on a server or HPC system that has no internet connection. In this case you will need to fetch the pipeline files first, then manually transfer them to your system. To make this process easier and ensure accurate retrieval of correctly versioned code and software containers, we have written a download helper tool. Simply specify the name of the nf-core pipeline and it will be downloaded to your current working directory. -By default, the pipeline will just download the pipeline code. If you specify the flag `--singularity`, it will also download any singularity image files that are required. 
+By default, the pipeline will download the pipeline code and the [institutional nf-core/configs](https://github.com/nf-core/configs) files. +If you specify the flag `--singularity`, it will also download any singularity image files that are required. + +Use `-r`/`--release` to download a specific release of the pipeline. If not specified, the tool will automatically fetch the latest release. ```console -$ nf-core download methylseq --singularity +$ nf-core download methylseq -r 1.4 --singularity ,--./,-. ___ __ __ __ ___ /,-._.--~\ @@ -221,40 +226,87 @@ $ nf-core download methylseq --singularity INFO: Saving methylseq - Pipeline release: 1.0 + Pipeline release: 1.4 Pull singularity containers: Yes - Output directory: nf-core-methylseq-1.0 + Output file: nf-core-methylseq-1.4.tar.gz INFO: Downloading workflow files from GitHub +INFO: Downloading centralised configs from GitHub + INFO: Downloading 1 singularity container -nf-core-methylseq-1.0.simg [762.28MB] [####################################] 780573/780572 + +INFO: Building singularity image from dockerhub: docker://nfcore/methylseq:1.4 +INFO: Converting OCI blobs to SIF format +INFO: Starting build... +Getting image source signatures +.... +INFO: Creating SIF file... +INFO: Build complete: /my-pipelines/nf-core-methylseq-1.4/singularity-images/nf-core-methylseq-1.4.simg + +INFO: Compressing download.. + +INFO: Command to extract files: tar -xzf nf-core-methylseq-1.4.tar.gz + +INFO: MD5 checksum for nf-core-methylseq-1.4.tar.gz: f5c2b035619967bb227230bc3ec986c5 ``` -```console -$ tree -L 2 nf-core-methylseq-1.0/ +The tool automatically compresses all of the resulting files into a `.tar.gz` archive. +You can choose other formats (`.tar.bz2`, `zip`) or to not compress (`none`) with the `-c`/`--compress` flag. +The console output provides the command you need to extract the files.
+ +Once uncompressed, you will see the following file structure for the downloaded pipeline: -nf-core-methylseq-1.0/ +```console +$ tree -L 2 nf-core-methylseq-1.4/ + +nf-core-methylseq-1.4 +├── configs +│   ├── bin +│   ├── conf +│   ├── configtest.nf +│   ├── docs +│   ├── LICENSE +│   ├── nextflow.config +│   ├── nfcore_custom.config +│   └── README.md ├── singularity-images -│   └── nf-core-methylseq-1.0.simg +│   └── nf-core-methylseq-1.4.simg └── workflow - ├── CHANGELOG.md - ├── Dockerfile - ├── LICENCE.md - ├── README.md ├── assets ├── bin + ├── CHANGELOG.md + ├── CODE_OF_CONDUCT.md ├── conf + ├── Dockerfile ├── docs ├── environment.yml + ├── LICENSE ├── main.nf ├── nextflow.config - └── tests + ├── parameters.settings.json + └── README.md -7 directories, 8 files +10 directories, 15 files +``` + +The pipeline files are automatically updated so that the local copy of institutional configs are available when running the pipeline. +So using `-profile ` should work if available within [nf-core/configs](https://github.com/nf-core/configs). + +You can run the pipeline by simply providing the directory path for the `workflow` folder. +Note that if using Singularity, you will also need to provide the path to the Singularity image. +For example: + +```bash +nextflow run /path/to/nf-core-methylseq-1.4/workflow/ \ + -profile singularity \ + -with-singularity /path/to/nf-core-methylseq-1.4/singularity-images/nf-core-methylseq-1.4.simg \ + # .. other normal pipeline parameters from here on.. + --reads '*_R{1,2}.fastq.gz' --genome GRCh38 ``` ## Pipeline software licences + Sometimes it's useful to see the software licences of the tools used in a pipeline. You can use the `licences` subcommand to fetch and print the software licence from each conda / PyPI package used in an nf-core pipeline. ```console @@ -292,6 +344,7 @@ samtools 1.8 MIT ``` ## Creating a new workflow + The `create` subcommand makes a new workflow using the nf-core base template. 
With a given pipeline name, description and author, it makes a starter pipeline which follows nf-core best practices. @@ -335,8 +388,8 @@ Please see the [nf-core documentation](https://nf-co.re/developers/adding_pipeli Note that if the required arguments for `nf-core create` are not given, it will interactively prompt for them. If you prefer, you can supply them as command line arguments. See `nf-core create --help` for more information. - ## Linting a workflow + The `lint` subcommand checks a given pipeline for all nf-core community guidelines. This is the same test that is used on the automated continuous integration tests. @@ -366,7 +419,6 @@ WARNING: Test Warnings: You can find extensive documentation about each of the lint tests in the [lint errors documentation](https://nf-co.re/errors). - ## Bumping a pipeline version number When releasing a new version of a nf-core pipeline, version numbers have to be updated in several different places. The helper command `nf-core bump-version` automates this for you to avoid manual errors (and frustration!). 
@@ -403,32 +455,103 @@ INFO: Updating version in nextflow.config + version = '1.0' INFO: Updating version in nextflow.config - - container = 'nfcore/mypipeline:dev' - + container = 'nfcore/mypipeline:1.0' + - process.container = 'nfcore/mypipeline:dev' + + process.container = 'nfcore/mypipeline:1.0' INFO: Updating version in .travis.yml - - docker tag nfcore/mypipeline:dev nfcore/mypipeline:latest - + docker tag nfcore/mypipeline:dev nfcore/mypipeline:1.0 - -INFO: Updating version in Singularity - - VERSION 1.0dev - + VERSION 1.0 + - - docker tag nfcore/mypipeline:dev nfcore/mypipeline:dev + + - docker tag nfcore/mypipeline:dev nfcore/mypipeline:1.0 INFO: Updating version in environment.yml - name: nf-core-mypipeline-1.0dev + name: nf-core-mypipeline-1.0 INFO: Updating version in Dockerfile - - PATH /opt/conda/envs/nf-core-mypipeline-1.0dev/bin:$PATH - + PATH /opt/conda/envs/nf-core-mypipeline-1.0/bin:\$PATH - -INFO: Updating version in Singularity - - PATH=/opt/conda/envs/nf-core-mypipeline-1.0dev/bin:$PATH - + PATH=/opt/conda/envs/nf-core-mypipeline-1.0/bin:\$PATH + - RUN conda env export --name nf-core-mypipeline-1.0dev > nf-core-mypipeline-1.0dev.yml + - ENV PATH /opt/conda/envs/nf-core-mypipeline-1.0dev/bin:$PATH + + RUN conda env export --name nf-core-mypipeline-1.0 > nf-core-mypipeline-1.0.yml + + ENV PATH /opt/conda/envs/nf-core-mypipeline-1.0/bin:$PATH ``` To change the required version of Nextflow instead of the pipeline version number, use the flag `--nextflow`. +## Sync a pipeline with the template + +Over time, the main nf-core pipeline template is updated. To keep all nf-core pipelines up to date, +we synchronise these updates automatically when new versions of nf-core/tools are released. +This is done by maintaining a special `TEMPLATE` branch, containing a vanilla copy of the nf-core template +with only the variables used when it first ran (name, description etc.). 
This branch is updated and a +pull-request can be made with just the updates from the main template code. + +This command takes a pipeline directory and attempts to run this synchronisation. +Usage is `nf-core sync `, eg: + +```console +$ nf-core sync my_pipeline/ + + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + + +INFO: Pipeline directory: /path/to/my_pipeline + +INFO: Fetching workflow config variables + +INFO: Deleting all files in TEMPLATE branch + +INFO: Making a new template pipeline using pipeline variables + +INFO: Committed changes to TEMPLATE branch + +INFO: Now try to merge the updates in to your pipeline: + cd /path/to/my_pipeline + git merge TEMPLATE +``` + +If your pipeline repository does not already have a `TEMPLATE` branch, you can instruct +the command to try to create one by giving the `--make-template-branch` flag. +If it has to, the sync tool will then create an orphan branch - see the +[nf-core website sync documentation](https://nf-co.re/developers/sync) for details on +how to handle this. + +By default, the tool will collect workflow variables from the current branch in your +pipeline directory. You can supply the `--from-branch` flag to specify a different branch. + +Finally, if you give the `--pull-request` flag, the command will push any changes to the remote +and attempt to create a pull request using the GitHub API. The GitHub username and repository +name will be fetched from the remote url (see `git remote -v | grep origin`), or can be supplied +with `--username` and `--repository`. + +To create the pull request, a personal access token is required for API authentication. +These can be created at [https://github.com/settings/tokens](https://github.com/settings/tokens). +Supply this using the `--auth-token` flag, or setting it as the environment variable `NF_CORE_BOT`: +`export NF_CORE_BOT=my_auth_token`.
+ +Finally, if `--all` is supplied, then the command attempts to pull and synchronise all nf-core workflows. +This is used by the nf-core/tools release automation to synchronise all nf-core pipelines +with the newest version of the template. It requires authentication as either the nf-core-bot account +or as an nf-core administrator. + +```console +$ nf-core sync --all + + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + + +INFO: Syncing nf-core/ampliseq + +[...] + +INFO: Successfully synchronised [n] pipelines +``` + ## Citation If you use `nf-core tools` in your work, please cite the `nf-core` preprint as follows: diff --git a/bin/blacklist.json b/bin/blacklist.json deleted file mode 100644 index 47959b9e41..0000000000 --- a/bin/blacklist.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "pipelines": [ - "epitopeprediction", - "exoseq", - "neutronstar", - "smrnaseq", - "vipr", - "sarek", - "neutronstar", - "proteomicslfq", - "clinvap", - "lncpipe", - "ddamsproteomics", - "scrnaseq", - "guideseq" - ] -} diff --git a/bin/push.sh b/bin/build_api_docs.sh similarity index 100% rename from bin/push.sh rename to bin/build_api_docs.sh diff --git a/bin/sync b/bin/sync deleted file mode 100755 index 6dda0ed626..0000000000 --- a/bin/sync +++ /dev/null @@ -1,129 +0,0 @@ -#!/usr/bin/env python - -import json -import os -import requests -from requests.auth import HTTPBasicAuth -import sys -import syncutils.template -import syncutils.utils - -# Set the default nf-core pipeline template branch -DEF_TEMPLATE_BRANCH = "TEMPLATE" -# The GitHub base url or the nf-core project -GH_BASE_URL = "https://{token}@github.com/nf-core/{pipeline}" -# The JSON file is updated on every push event on the nf-core GitHub project -NF_CORE_PIPELINE_INFO = "http://nf-co.re/pipelines.json" -# The API endpoint for creating pull requests -GITHUB_PR_URL_TEMPL = "https://api.github.com/repos/nf-core/{pipeline}/pulls" -# Current script dir 
-PATH_PARENT_DIR = os.path.dirname(os.path.realpath(__file__)) - -sync_errors = [] -pr_errors = [] - - -def create_pullrequest(pipeline, origin="dev", template="TEMPLATE", token="", user="nf-core"): - """Create a pull request to a base branch (default: dev), - from a head branch (default: TEMPLATE) - - Returns: An instance of class requests.Response - """ - content = {} - content['title'] = "Important pipeline nf-core update! (version {tag})".format(tag=os.environ['TRAVIS_TAG']) - content['body'] = "Some important changes have been made in the nf-core pipelines templates.\n" \ - "Please make sure to merge this in ASAP and make a new minor release of your pipeline.\n\n" \ - "Follow the link [nf-core/tools](https://github.com/nf-core/tools/releases/tag/{})".format(os.environ['TRAVIS_TAG']) - content['head'] = "{}".format(template) - content['base'] = origin - return requests.post(url=GITHUB_PR_URL_TEMPL.format(pipeline=pipeline), - data=json.dumps(content), - auth=HTTPBasicAuth(user, token)) - - -def filter_blacklisted_pipelines_from_list(pipelines, blacklisted_pipelines): - filtered_pipelines = [] - for pipeline in pipelines: - if not pipeline.get('name'): - print("No attribute \'name\' for pipeline found: {}".format(pipeline)) - else: - filtered_pipelines.append(pipeline) if pipeline.get('name') not in blacklisted_pipelines \ - else filtered_pipelines - return filtered_pipelines - - -def fetch_black_listed_pipelines_from_file(file_path): - with open(file_path) as fh: - blacklist = json.load(fh) - return blacklist.get('pipelines') - - -def fetch_nfcore_workflows_from_website(url): - try: - res = requests.get(url) - pipelines = res.json().get('remote_workflows') - except Exception as e: - print("Could not get remote workflows. 
Reason was: {}".format(e)) - pipelines = [] - return pipelines - - -def update_template_branch_for_pipeline(pipeline): - try: - syncutils.template.NfcoreTemplate( - pipeline['name'], - branch=DEF_TEMPLATE_BRANCH, - repo_url=GH_BASE_URL.format(token=os.environ["NF_CORE_BOT"], pipeline=pipeline['name']) - ).sync() - except Exception as e: - sync_errors.append((pipeline['name'], e)) - - -def create_pullrequest_if_update_sucessful(pipeline): - name = pipeline.get('name') - for errored_pipeline, _ in sync_errors: - if name == errored_pipeline: - return - response = create_pullrequest(name, token=os.environ["NF_CORE_BOT"]) - if response.status_code != 201: - pr_errors.append((name, response.status_code, response.content)) - else: - print("Created pull-request for pipeline \'{pipeline}\' successfully." - .format(pipeline=name)) - - -def main(): - assert os.environ['TRAVIS_TAG'] - assert os.environ['NF_CORE_BOT'] - - blacklisted_pipeline_names = fetch_black_listed_pipelines_from_file(PATH_PARENT_DIR + "/blacklist.json") - - pipelines = fetch_nfcore_workflows_from_website(NF_CORE_PIPELINE_INFO) - - if len(sys.argv) > 1: - pipeline_to_sync = sys.argv[1] - filtered_pipelines = [pipeline for pipeline in pipelines if pipeline_to_sync in pipeline.get('name')] - else: - filtered_pipelines = filter_blacklisted_pipelines_from_list(pipelines, blacklisted_pipeline_names) - - for pipeline in filtered_pipelines: - print("Update template branch for pipeline '{pipeline}'... ".format(pipeline=pipeline['name'])) - update_template_branch_for_pipeline(pipeline) - print("Trying to open pull request for pipeline {}...".format(pipeline['name'])) - create_pullrequest_if_update_sucessful(pipeline) - - for pipeline, exception in sync_errors: - print("WARNING!!!! Sync for pipeline {name} failed.".format(name=pipeline)) - print(exception) - - for pipeline, return_code, content in pr_errors: - print("WARNING!!!! Pull-request for pipeline \'{pipeline}\' failed," - " got return code {return_code}." 
- .format(pipeline=pipeline, return_code=return_code)) - print(content) - - sys.exit(0) - - -if __name__ == "__main__": - main() diff --git a/bin/syncutils/__init__.py b/bin/syncutils/__init__.py deleted file mode 100644 index d3f5a12faa..0000000000 --- a/bin/syncutils/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/bin/syncutils/template.py b/bin/syncutils/template.py deleted file mode 100644 index 69fc1bca91..0000000000 --- a/bin/syncutils/template.py +++ /dev/null @@ -1,87 +0,0 @@ -import tempfile -from syncutils import utils -import git -import os -import shutil - -import nf_core.create - -TEMPLATE_BRANCH = "TEMPLATE" - - -class NfcoreTemplate: - """Updates the template content of an nf-core pipeline in - its `TEMPLATE` branch. - - Args: - pipeline: The pipeline name - - branch: The template branch name, default=`TEMPLATE` - - token: GitHub auth token - """ - def __init__(self, pipeline, branch=TEMPLATE_BRANCH, repo_url=""): - """Basic constructor - """ - self.pipeline = pipeline - self.repo_url = repo_url - self.branch = branch - self.tmpdir = tempfile.mkdtemp() - self.templatedir = tempfile.mkdtemp() - self.repo = git.Repo.clone_from(self.repo_url, self.tmpdir) - assert self.repo - - def sync(self): - """Execute the template update. - """ - context = self.context_from_nextflow(nf_project_dir=self.tmpdir) - self.update_child_template(self.templatedir, self.tmpdir, context=context) - self.commit_changes() - self.push_changes() - - def context_from_nextflow(self, nf_project_dir): - """Fetch a Nextflow pipeline's config settings. 
- - Returns: A cookiecutter-readable context (Python dictionary) - """ - # Check if we are on "master" (main pipeline code) - if self.repo.active_branch.name != "dev": - self.repo.git.checkout("origin/dev", b="dev") - - # Fetch the config variables from the Nextflow pipeline - config = utils.fetch_wf_config(wf_path=nf_project_dir) - - # Checkout again to configured template branch - self.repo.git.checkout("origin/{branch}".format(branch=self.branch), - b="{branch}".format(branch=self.branch)) - - return utils.create_context(config) - - def update_child_template(self, templatedir, target_dir, context=None): - """Apply the changes of the cookiecutter template - to the pipelines template branch. - """ - # Clear the pipeline's template branch content - for f in os.listdir(self.tmpdir): - if f == ".git": - continue - try: - shutil.rmtree(os.path.join(target_dir, f)) - except: - os.remove(os.path.join(target_dir, f)) - # Create the new template structure - nf_core.create.PipelineCreate( - name=context.get('pipeline_name'), - description=context.get('pipeline_short_description'), - new_version=context.get('version'), - no_git=True, - force=True, - outdir=target_dir, - author=context.get('author') - ).init_pipeline() - - def commit_changes(self): - """Commits the changes of the new template to the current branch. 
- """ - self.repo.git.add(A=True) - self.repo.index.commit("Update nf-core pipeline template.") - - def push_changes(self): - self.repo.git.push() diff --git a/bin/syncutils/utils.py b/bin/syncutils/utils.py deleted file mode 100644 index 8d3ac94e09..0000000000 --- a/bin/syncutils/utils.py +++ /dev/null @@ -1,64 +0,0 @@ -import erro -import os -import requests -import subprocess - - -def fetch_wf_config(wf_path): - """ - Use nextflow to retrieve the nf configuration variables from a workflow - """ - config = dict() - # Call `nextflow config` and pipe stderr to /dev/null - try: - with open(os.devnull, 'w') as devnull: - nfconfig_raw = subprocess.check_output(['nextflow', 'config', '-flat', wf_path], stderr=devnull) - except OSError as e: - if e.errno == errno.ENOENT: - raise AssertionError("It looks like Nextflow is not installed. It is required for most nf-core functions.") - except subprocess.CalledProcessError as e: - raise AssertionError("`nextflow config` returned non-zero error code: %s,\n %s", e.returncode, e.output) - else: - for l in nfconfig_raw.splitlines(): - ul = l.decode() - k, v = ul.split(' = ', 1) - config[k] = v.replace("\'", "").replace("\"", "") - return config - - -def create_context(config): - """Consumes a flat Nextflow config file and will create - a context dictionary with information for the nf-core template creation. 
- - Returns: A dictionary with: - { - 'pipeline_name': '' - 'pipeline_short_description': '' - 'version': '' - } - """ - context = {} - context["pipeline_name"] = config.get("manifest.name") if config.get("manifest.name") else get_name_from_url(config.get("manifest.homePage")) - context["pipeline_short_description"] = config.get("manifest.description") - context["version"] = config.get("manifest.version") if config.get("manifest.version") else config.get("params.version") - context["author"] = config.get("manifest.author") if config.get("manifest.author") else "No author provided" - return context - - -def get_name_from_url(url): - return url.split("/")[-1] if url else "" - - -def repos_without_template_branch(pipeline_names): - pipelines_without_template = [] - for pipeline in pipeline_names: - api_call = "https://api.github.com/repos/nf-core/{}/branches".format(pipeline) - print("Fetching branch information for nf-core/{}...".format(pipeline)) - res = requests.get(api_call) - branch_list = res.json() - branch_names = [branch["name"] for branch in branch_list] - if "TEMPLATE" not in branch_names: - pipelines_without_template.append(pipeline) - print("WARNING: nf-core/{} had no TEMPLATE branch!".format(pipeline)) - - return pipelines_without_template diff --git a/docs/lint_errors.md b/docs/lint_errors.md index 9d80f54b15..43ff65af04 100644 --- a/docs/lint_errors.md +++ b/docs/lint_errors.md @@ -2,15 +2,19 @@ This page contains detailed descriptions of the tests done by the [nf-core/tools](https://github.com/nf-core/tools) package. Linting errors should show URLs next to any failures that link to the relevant heading below. -## Error #1 - File not found ## {#1} -nf-core pipelines should adhere to a common file structure for consistency. The lint test looks for the following required files: +## Error #1 - File not found / must be removed ## {#1} + +nf-core pipelines should adhere to a common file structure for consistency. 
+ +The lint test looks for the following required files: * `nextflow.config` * The main nextflow config file * `Dockerfile` * A docker build script to generate a docker image with the required software -* `.travis.yml` or `circle.yml` - * A config file for automated continuous testing with either [Travis CI](https://travis-ci.org/) or [Circle CI](https://circleci.com/) +* Continuous integration tests with either [GitHub actions](https://github.com/features/actions) or [Travis CI](https://travis-ci.com/) + * GitHub actions workflows for CI (`.github/workflows/ci.yml`), branch protection (`.github/workflows/branch.yml`) and linting (`.github/workflows/linting.yml`) + * Alternatively, `.travis.yml` continuous integration testing is still allowed but will be deprecated in the near future * `LICENSE`, `LICENSE.md`, `LICENCE.md` or `LICENCE.md` * The MIT licence. Copy from [here](https://raw.githubusercontent.com/nf-core/tools/master/LICENSE). * `README.md` @@ -27,13 +31,18 @@ The following files are suggested but not a hard requirement. If they are missin * `conf/base.config` * A `conf` directory with at least one config called `base.config` +Additionally, the following files must not be present: + +* `Singularity` ## Error #2 - Docker file check failed ## {#2} + Pipelines should have a files called `Dockerfile` in their root directory. The file is used for automated docker image builds. This test checks that the file exists and contains at least the string `FROM` (`Dockerfile`). ## Error #3 - Licence check failed ## {#3} + nf-core pipelines must ship with an open source [MIT licence](https://choosealicense.com/licenses/mit/). This test fails if the following conditions are not met: @@ -46,6 +55,7 @@ This test fails if the following conditions are not met: * `[year]`, `[fullname]`, ``, ``, `` or `` ## Error #4 - Nextflow config check failed ## {#4} + nf-core pipelines are required to be configured with a minimal set of variable names. 
This test fails or throws warnings if required variables are not set. @@ -90,9 +100,9 @@ The following variables throw warnings if missing: * If the pipeline version number contains the string `dev`, the dockerhub tag must be `:dev` * `params.reads` * Input parameter to specify input data (typically FastQ files / pairs) -* `params.singleEnd` - * Specify to work with single-end sequence data instead of default paired-end - * Used with Nextflow: `.fromFilePairs( params.reads, size: params.singleEnd ? 1 : 2 )` +* `params.single_end` + * Specify to work with single-end sequence data instead of paired-end by default + * Nextflow implementation: `.fromFilePairs( params.reads, size: params.single_end ? 1 : 2 )` The following variables are depreciated and fail the test if they are still present: @@ -102,16 +112,67 @@ The following variables are depreciated and fail the test if they are still pres * The old method for specifying the minimum Nextflow version. Replaced by `manifest.nextflowVersion` * `params.container` * The old method for specifying the dockerhub container address. Replaced by `process.container` +* `singleEnd` and `igenomesIgnore` + * Changed to `single_end` and `igenomes_ignore` + * The `snake_case` convention should now be used when defining pipeline parameters ## Error #5 - Continuous Integration configuration ## {#5} -nf-core pipelines must have CI testing with Travis or Circle CI. -This test fails if the following happens: +nf-core pipelines must have CI testing with GitHub actions or Travis. 
+ +This test fails if the following requirements are not met: -* `.travis.yml` does not contain the string `nf-core lint ${TRAVIS_BUILD_DIR}` under `script` -* `.travis.yml` does not contain the string `docker pull <container>:dev` under `before_install` +For GitHub actions CI workflow: + +* `.github/workflows/ci.yml` must be turned on for `push` and `pull_request` +* `.github/workflows/ci.yml` must test with the minimum Nextflow version specified in the pipeline as `manifest.nextflowVersion` under `jobs`, `test`, `strategy`, `matrix`, `nxf_ver`. E.g.: + + ```yaml + jobs: + test: + runs-on: ubuntu-18.04 + strategy: + matrix: + # Nextflow versions: check pipeline minimum and current latest + nxf_ver: ['19.10.0', ''] + ``` + +* `.github/workflows/ci.yml` must pull the container with the command `docker pull <container>:dev && docker tag <container>:dev <container>:<tag>` under `jobs`,`test`,`steps`. E.g. for nfcore/tools container: + + ```yaml + jobs: + test: + runs-on: ubuntu-18.04 + steps: + - name: Pull image + run: | + docker pull nfcore/tools:dev && docker tag nfcore/tools:dev nfcore/tools:1.0.0 + ``` + +* `.github/workflows/branch.yml` must be turned on for pull requests to `master` +* `.github/workflows/branch.yml` must check that PRs are only ok if coming from an nf-core `dev` branch or a fork `patch` branch. E.g. for nf-core/tools: + + ```yaml + jobs: + test: + runs-on: ubuntu-18.04 + steps: + # PRs are only ok if coming from an nf-core `dev` branch or a fork `patch` branch + - name: Check PRs + run: | + { [[ $(git remote get-url origin) == *nf-core/tools ]] && [[ ${GITHUB_HEAD_REF} = "dev" ]]; } || [[ ${GITHUB_HEAD_REF} == "patch" ]] + ``` + +* `.github/workflows/linting.yml` must be turned on for push and pull requests +* `.github/workflows/linting.yml` must perform markdown linting with the command `markdownlint ${GITHUB_WORKSPACE} -c ${GITHUB_WORKSPACE}/.github/markdownlint.yml` under `jobs`, `Markdown`, `steps`.
+* `.github/workflows/linting.yml` must perform nf-core linting with the command `nf-core lint ${GITHUB_WORKSPACE}` under `jobs`, `nf-core`, `steps`. + +For Travis CI: + +* `.travis.yml` must contain the string `nf-core lint ${TRAVIS_BUILD_DIR}` under `script` +* `.travis.yml` must contain the string `docker pull :dev` under `before_install` * Where `` is fetched from `process.container` in the `nextflow.config` file, without the docker tag _(if we have the tag the tests fail when making a release)_ -* `.travis.yml` does not test the Nextflow version specified in the pipeline as `manifest.nextflowVersion` +* `.travis.yml` must test the Nextflow version specified in the pipeline as `manifest.nextflowVersion` * This is expected in the `env` section of the config, eg: ```yaml @@ -122,7 +183,7 @@ This test fails if the following happens: * At least one of these `NXF_VER` variables must match the `manifest.nextflowVersion` version specified in the pipeline config * Other variables can be specified on these lines as long as they are space separated. -* `.travis.yml` checks that pull requests are not opened directly to the `master` branch +* `.travis.yml` must check that pull requests are not opened directly to the `master` branch * The following is expected in the `before_install` section: ```yaml @@ -131,6 +192,7 @@ This test fails if the following happens: ``` ## Error #6 - Repository `README.md` tests ## {#6} + The `README.md` files for a project are very important and must meet some requirements: * Nextflow badge @@ -196,11 +258,14 @@ to create the container. 
Such `Dockerfile`s can usually be very short, eg: ```Dockerfile FROM nfcore/base:1.7 +MAINTAINER Rocky Balboa LABEL authors="your@email.com" \ - description="Container image containing all requirements for nf-core/EXAMPLE pipeline" + description="Docker image containing all requirements for the nf-core mypipeline pipeline" COPY environment.yml / -RUN conda env update -n root -f /environment.yml && conda clean -a +RUN conda env create -f /environment.yml && conda clean -a +RUN conda env export --name nf-core-mypipeline-1.0 > nf-core-mypipeline-1.0.yml +ENV PATH /opt/conda/envs/nf-core-mypipeline-1.0/bin:$PATH ``` To enforce this minimal `Dockerfile` and check for common copy+paste errors, we require @@ -222,3 +287,11 @@ The nf-core workflow template contains a number of comment lines with the follow ``` This lint test runs through all files in the pipeline and searches for these lines. + +## Error #11 - Singularity file found ##{#11} + +As we are relying on [Docker Hub](https://https://hub.docker.com/) instead of Singularity and all containers are automatically pulled from there, repositories should not have a `Singularity` file present. + +## Error #12 - Pipeline name ## {#12} + +In order to ensure consistent naming, pipeline names should contain only lower case, alphabetical characters. Otherwise a warning is displayed. diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index c56c9f43a5..415ab9fb98 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -8,6 +8,8 @@ import re import sys +import click + def bump_pipeline_version(lint_obj, new_version): """Bumps a pipeline version number. 
@@ -53,10 +55,10 @@ def bump_pipeline_version(lint_obj, new_version): nfconfig_newstr = "name: nf-core-{}-{}".format(lint_obj.pipeline_name.lower(), new_version) update_file_version("environment.yml", lint_obj, nfconfig_pattern, nfconfig_newstr) - # Update Dockerfile PATH - nfconfig_pattern = r"PATH\s+/opt/conda/envs/nf-core-{}-{}/bin:\$PATH".format(lint_obj.pipeline_name.lower(), current_version.replace('.',r'\.')) - nfconfig_newstr = "PATH /opt/conda/envs/nf-core-{}-{}/bin:$PATH".format(lint_obj.pipeline_name.lower(), new_version) - update_file_version("Dockerfile", lint_obj, nfconfig_pattern, nfconfig_newstr) + # Update Dockerfile ENV PATH and RUN conda env create + nfconfig_pattern = r"nf-core-{}-{}".format(lint_obj.pipeline_name.lower(), current_version.replace('.',r'\.')) + nfconfig_newstr = "nf-core-{}-{}".format(lint_obj.pipeline_name.lower(), new_version) + update_file_version("Dockerfile", lint_obj, nfconfig_pattern, nfconfig_newstr, allow_multiple=True) def bump_nextflow_version(lint_obj, new_version): @@ -113,14 +115,20 @@ def update_file_version(filename, lint_obj, pattern, newstr, allow_multiple=Fals content = fh.read() # Check that we have exactly one match - matches = re.findall(pattern, content) - if len(matches) == 0: + matches_pattern = re.findall("^.*{}.*$".format(pattern),content,re.MULTILINE) + if len(matches_pattern) == 0: raise SyntaxError("Could not find version number in {}: '{}'".format(filename, pattern)) - if len(matches) > 1 and not allow_multiple: + if len(matches_pattern) > 1 and not allow_multiple: raise SyntaxError("Found more than one version number in {}: '{}'".format(filename, pattern)) # Replace the match - logging.info("Updating version in {}\n - {}\n + {}".format(filename, matches[0], newstr)) new_content = re.sub(pattern, newstr, content) + matches_newstr = re.findall("^.*{}.*$".format(newstr),new_content,re.MULTILINE) + + logging.info("Updating version in {}\n".format(filename) + + click.style(" - {}\n".format("\n - 
".join(matches_pattern).strip()), fg='red') + + click.style(" + {}\n".format("\n + ".join(matches_newstr).strip()), fg='green') + ) + with open(fn, 'w') as fh: fh.write(new_content) diff --git a/nf_core/create.py b/nf_core/create.py index 5b0802eb85..e44a7105f3 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -81,6 +81,7 @@ def run_cookiecutter(self): 'author': self.author, 'name_noslash': self.name_noslash, 'name_docker': self.name_docker, + 'short_name': self.short_name, 'version': self.new_version, 'nf_core_version': nf_core.__version__ }, diff --git a/nf_core/launch.py b/nf_core/launch.py index 37b91b25a1..ff297c7b02 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -370,7 +370,7 @@ def build_command(self): if self.use_params_file: path = self.create_nfx_params_file() if path is not None: - self.nextflow_cmd = '{} {} "{}"'.format(self.nextflow_cmd, "--params-file", path) + self.nextflow_cmd = '{} {} "{}"'.format(self.nextflow_cmd, "-params-file", path) self.write_params_as_full_json() # Call nextflow with a list of command line flags diff --git a/nf_core/lint.py b/nf_core/lint.py index 5b65e5688c..e22dbe13cb 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -112,7 +112,7 @@ class PipelineLint(object): params.help = false params.outdir = './results' params.bam = false - params.singleEnd = false + params.single_end = false params.seqtype = 'dna' params.solver = 'glpk' params.igenomes_base = './iGenomes' @@ -167,11 +167,15 @@ def lint_pipeline(self, release_mode=False): 'check_licence', 'check_docker', 'check_nextflow_config', + 'check_actions_branch_protection', + 'check_actions_ci', + 'check_actions_lint', 'check_ci_config', 'check_readme', 'check_conda_env_yaml', 'check_conda_dockerfile', - 'check_pipeline_todos' + 'check_pipeline_todos', + 'check_pipeline_name' ] if release_mode: self.release_mode = True @@ -194,7 +198,6 @@ def check_files_exist(self): 'nextflow.config', 'Dockerfile', - ['.travis.yml', '.circle.yml'], ['LICENSE', 
'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling 'README.md', 'CHANGELOG.md', @@ -206,7 +209,18 @@ def check_files_exist(self): 'main.nf', 'environment.yml', - 'conf/base.config' + 'conf/base.config', + '.github/workflows/branch.yml', + '.github/workflows/ci.yml', + '.github/workfows/linting.yml' + + Files that *must not* be present:: + + 'Singularity' + + Files that *should not* be present:: + + '.travis.yml' Raises: An AssertionError if neither `nextflow.config` or `main.nf` found. @@ -217,18 +231,31 @@ def check_files_exist(self): files_fail = [ 'nextflow.config', 'Dockerfile', - ['.travis.yml', '.circle.yml'], ['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling 'README.md', 'CHANGELOG.md', - 'docs/README.md', - 'docs/output.md', - 'docs/usage.md', + os.path.join('docs','README.md'), + os.path.join('docs','output.md'), + os.path.join('docs','usage.md'), + ['.travis.yml', os.path.join('.github', 'workflows', 'branch.yml'), os.path.join('.circleci','config.yml')], + ['.travis.yml', os.path.join('.github', 'workflows','ci.yml'), os.path.join('.circleci','config.yml')], + ['.travis.yml', os.path.join('.github', 'workflows', 'linting.yml'), os.path.join('.circleci','config.yml')] + ] files_warn = [ 'main.nf', 'environment.yml', - 'conf/base.config' + os.path.join('conf','base.config'), + os.path.join('.github', 'workflows', 'branch.yml'), + os.path.join('.github', 'workflows','ci.yml'), + os.path.join('.github', 'workflows', 'linting.yml') + ] + files_fail_ifexists = [ + 'Singularity', + ] + files_warn_ifexists = [ + '.travis.yml', + os.path.join('.circleci','config.yml') ] def pf(file_path): @@ -238,7 +265,7 @@ def pf(file_path): if not os.path.isfile(pf('nextflow.config')) and not os.path.isfile(pf('main.nf')): raise AssertionError('Neither nextflow.config or main.nf found! 
Is this a Nextflow pipeline?') - # Files that cause an error + # Files that cause an error if they don't exist for files in files_fail: if not isinstance(files, list): files = [files] @@ -248,7 +275,7 @@ def pf(file_path): else: self.failed.append((1, "File not found: {}".format(files))) - # Files that cause a warning + # Files that cause a warning if they don't exist for files in files_warn: if not isinstance(files, list): files = [files] @@ -258,6 +285,24 @@ def pf(file_path): else: self.warned.append((1, "File not found: {}".format(files))) + # Files that cause an error if they exist + for files in files_fail_ifexists: + if not isinstance(files, list): + files = [files] + if any([os.path.isfile(pf(f)) for f in files]): + self.failed.append((1, "File must be removed: {}".format(files))) + else: + self.passed.append((1, "File not found check: {}".format(files))) + + # Files that cause a warning if they exist + for files in files_warn_ifexists: + if not isinstance(files, list): + files = [files] + if any ([os.path.isfile(pf(f)) for f in files]): + self.warned.append((1, "File should be removed: {}".format(files))) + else: + self.passed.append((1, "File not found check: {}".format(files))) + # Load and parse files for later if 'environment.yml' in self.files: with open(os.path.join(self.path, 'environment.yml'), 'r') as fh: @@ -326,6 +371,7 @@ def check_nextflow_config(self): and print all config variables. 
NB: Does NOT parse contents of main.nf / nextflow script """ + # Fail tests if these are missing config_fail = [ 'manifest.name', @@ -351,13 +397,15 @@ def check_nextflow_config(self): 'dag.file', 'params.reads', 'process.container', - 'params.singleEnd' + 'params.single_end' ] # Old depreciated vars - fail if present config_fail_ifdefined = [ 'params.version', 'params.nf_required_version', - 'params.container' + 'params.container', + 'params.singleEnd', + 'params.igenomesIgnore' ] # Get the nextflow config for this pipeline @@ -442,6 +490,118 @@ def check_nextflow_config(self): else: self.passed.append((4, "Config variable process.container looks correct: '{}'".format(container_name))) + def check_actions_branch_protection(self): + """Checks that the GitHub actions branch protection workflow is valid. + + Makes sure PRs can only come from nf-core dev or 'patch' of a fork. + """ + fn = os.path.join(self.path, '.github', 'workflows', 'branch.yml') + if os.path.isfile(fn): + with open(fn, 'r') as fh: + branchwf = yaml.safe_load(fh) + + # Check that the action is turned on for PRs to master + try: + assert('master' in branchwf[True]['pull_request']['branches']) + except (AssertionError, KeyError): + self.failed.append((5, "GitHub actions branch workflow must check for master branch PRs: '{}'".format(fn))) + else: + self.passed.append((5, "GitHub actions branch workflow checks for master branch PRs: '{}'".format(fn))) + + # Check that PRs are only ok if coming from an nf-core `dev` branch or a fork `patch` branch + pipeline_version = self.config.get('manifest.version', '').strip(' \'"') + PRMasterCheck = "{{ [[ $(git remote get-url origin) == *nf-core/{} ]] && [[ ${{GITHUB_HEAD_REF}} = \"dev\" ]]; }} || [[ ${{GITHUB_HEAD_REF}} == \"patch\" ]]".format(self.pipeline_name.lower()) + steps = branchwf['jobs']['test']['steps'] + try: + steps = branchwf['jobs']['test']['steps'] + assert(any([PRMasterCheck in step['run'] for step in steps])) + except (AssertionError, 
KeyError): + self.failed.append((5, "GitHub actions branch workflow checks for master branch PRs: '{}'".format(fn))) + else: + self.passed.append((5, "GitHub actions branch workflow checks for master branch PRs: '{}'".format(fn))) + + def check_actions_ci(self): + """Checks that the GitHub actions ci workflow is valid + + Makes sure tests run with the required nextflow version. + """ + fn = os.path.join(self.path, '.github', 'workflows', 'ci.yml') + if os.path.isfile(fn): + with open(fn, 'r') as fh: + ciwf = yaml.safe_load(fh) + + # Check that the action is turned on for push and pull requests + try: + assert('push' in ciwf[True]) + assert('pull_request' in ciwf[True]) + except (AssertionError, KeyError, TypeError): + self.failed.append((5, "GitHub actions ci workflow must be triggered on PR and push: '{}'".format(fn))) + else: + self.passed.append((5, "GitHub actions ci workflow is triggered on PR and push: '{}'".format(fn))) + + # Check that we're pulling the right docker image and tagging it properly + if self.config.get('process.container', ''): + docker_notag = re.sub(r':(?:[\.\d]+|dev)$', '', self.config.get('process.container', '').strip('"\'')) + docker_withtag = self.config.get('process.container', '').strip('"\'') + docker_pull_cmd = 'docker pull {}:dev && docker tag {}:dev {}\n'.format(docker_notag, docker_notag, docker_withtag) + try: + steps = ciwf['jobs']['test']['steps'] + assert(any([docker_pull_cmd in step['run'] for step in steps if 'run' in step.keys()])) + except (AssertionError, KeyError, TypeError): + self.failed.append((5, "CI is not pulling and tagging the correct docker image. 
Should be:\n '{}'".format(docker_pull_cmd))) + else: + self.passed.append((5, "CI is pulling and tagging the correct docker image: {}".format(docker_pull_cmd))) + + # Check that we are testing the minimum nextflow version + try: + matrix = ciwf['jobs']['test']['strategy']['matrix']['nxf_ver'] + assert(any([self.minNextflowVersion in matrix])) + except (KeyError, TypeError): + self.failed.append((5, "Continuous integration does not check minimum NF version: '{}'".format(fn))) + except AssertionError: + self.failed.append((5, "Minimum NF version differed from CI and what was set in the pipelines manifest: {}".format(fn))) + else: + self.passed.append((5, "Continuous integration checks minimum NF version: '{}'".format(fn))) + + def check_actions_lint(self): + """Checks that the GitHub actions lint workflow is valid + + Makes sure ``nf-core lint`` and ``markdownlint`` runs. + """ + fn = os.path.join(self.path, '.github', 'workflows', 'linting.yml') + if os.path.isfile(fn): + with open(fn, 'r') as fh: + lintwf = yaml.safe_load(fh) + + # Check that the action is turned on for push and pull requests + try: + assert('push' in lintwf[True]) + assert('pull_request' in lintwf[True]) + except (AssertionError, KeyError, TypeError): + self.failed.append((5, "GitHub actions linting workflow must be triggered on PR and push: '{}'".format(fn))) + else: + self.passed.append((5, "GitHub actions linting workflow is triggered on PR and push: '{}'".format(fn))) + + # Check that the Markdown linting runs + Markdownlint_cmd = 'markdownlint ${GITHUB_WORKSPACE} -c ${GITHUB_WORKSPACE}/.github/markdownlint.yml' + try: + steps = lintwf['jobs']['Markdown']['steps'] + assert(any([Markdownlint_cmd in step['run'] for step in steps if 'run' in step.keys()])) + except (AssertionError, KeyError, TypeError): + self.failed.append((5, "Continuous integration must run Markdown lint Tests: '{}'".format(fn))) + else: + self.passed.append((5, "Continuous integration runs Markdown lint Tests: 
'{}'".format(fn))) + + + # Check that the nf-core linting runs + nfcore_lint_cmd = 'nf-core lint ${GITHUB_WORKSPACE}' + try: + steps = lintwf['jobs']['nf-core']['steps'] + assert(any([ nfcore_lint_cmd in step['run'] for step in steps if 'run' in step.keys()])) + except (AssertionError, KeyError, TypeError): + self.failed.append((5, "Continuous integration must run nf-core lint Tests: '{}'".format(fn))) + else: + self.passed.append((5, "Continuous integration runs nf-core lint Tests: '{}'".format(fn))) def check_ci_config(self): """Checks that the Travis or Circle CI YAML config is valid. @@ -449,13 +609,14 @@ def check_ci_config(self): Makes sure that ``nf-core lint`` runs in travis tests and that tests run with the required nextflow version. """ - for cf in ['.travis.yml', 'circle.yml']: + for cf in ['.travis.yml', os.path.join('.circleci','config.yml')]: fn = os.path.join(self.path, cf) if os.path.isfile(fn): with open(fn, 'r') as fh: ciconf = yaml.safe_load(fh) # Check that we have the master branch protection, but allow patch as well - travisMasterCheck = '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && ([ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ] || [ $TRAVIS_PULL_REQUEST_BRANCH = "patch" ]))' + travisMasterCheck = '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ]) || [ $TRAVIS_PULL_REQUEST_BRANCH = "patch" ]' + try: assert(travisMasterCheck in ciconf.get('before_install', {})) except AssertionError: @@ -759,6 +920,7 @@ def check_conda_dockerfile(self): "FROM nfcore/base:{}".format('dev' if 'dev' in nf_core.__version__ else nf_core.__version__), 'COPY environment.yml /', 'RUN conda env create -f /environment.yml && conda clean -a', + 'RUN conda env export --name {} > {}.yml'.format(self.conda_config['name'], self.conda_config['name']), 'ENV PATH 
/opt/conda/envs/{}/bin:$PATH'.format(self.conda_config['name']) ] @@ -792,6 +954,18 @@ def check_pipeline_todos(self): l = '{}..'.format(l[:50-len(fname)]) self.warned.append((10, "TODO string found in '{}': {}".format(fname,l))) + def check_pipeline_name(self): + """Check whether pipeline name adheres to lower case/no hyphen naming convention""" + + if self.pipeline_name.islower() and self.pipeline_name.isalpha(): + self.passed.append((12, "Name adheres to nf-core convention")) + if not self.pipeline_name.islower(): + self.warned.append((12, "Naming does not adhere to nf-core conventions: Contains uppercase letters")) + if not self.pipeline_name.isalpha(): + self.warned.append((12, "Naming does not adhere to nf-core conventions: Contains non alphabetical characters")) + + + def print_results(self): # Print results rl = "\n Using --release mode linting tests" if self.release_mode else '' diff --git a/nf_core/list.py b/nf_core/list.py index f77e159c53..204147e066 100644 --- a/nf_core/list.py +++ b/nf_core/list.py @@ -145,8 +145,8 @@ def filtered_workflows(self): filtered_workflows = [] for wf in self.remote_workflows: for k in self.keyword_filters: - in_name = k in wf.name - in_desc = k in wf.description + in_name = k in wf.name if wf.name else False + in_desc = k in wf.description if wf.description else False in_topics = any([ k in t for t in wf.topics]) if not in_name and not in_desc and not in_topics: break @@ -238,8 +238,8 @@ def __init__(self, data): self.watchers_count = data.get('watchers_count') self.forks_count = data.get('forks_count') - # Placeholder vars for releases info - self.releases = data.get('releases') + # Placeholder vars for releases info (ignore pre-releases) + self.releases = [ r for r in data.get('releases', []) if r.get('published_at') is not None ] # Placeholder vars for local comparison self.local_wf = None diff --git a/nf_core/pipeline-template/cookiecutter.json b/nf_core/pipeline-template/cookiecutter.json index dfc2f668a0..4d10d74047 
100644 --- a/nf_core/pipeline-template/cookiecutter.json +++ b/nf_core/pipeline-template/cookiecutter.json @@ -4,6 +4,7 @@ "author": "Rocky Balboa", "name_noslash": "{{ cookiecutter.name.replace('/', '-') }}", "name_docker": "{{ cookiecutter.name_docker }}", + "short_name": "{{ cookiecutter.short_name }}", "version": "1.0dev", "nf_core_version": "{{ cookiecutter.nf_core_version }}" } diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md index 937aed8212..3486863845 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md @@ -1,47 +1,57 @@ # {{ cookiecutter.name }}: Contributing Guidelines -Hi there! Many thanks for taking an interest in improving {{ cookiecutter.name }}. +Hi there! +Many thanks for taking an interest in improving {{ cookiecutter.name }}. -We try to manage the required tasks for {{ cookiecutter.name }} using GitHub issues, you probably came to this page when creating one. Please use the pre-filled template to save time. - -However, don't be put off by this template - other more general issues and suggestions are welcome! Contributions to the code are even more welcome ;) - -> If you need help using or modifying {{ cookiecutter.name }} then the best place to ask is on the pipeline channel on [Slack](https://nf-co.re/join/slack/). +We try to manage the required tasks for {{ cookiecutter.name }} using GitHub issues, you probably came to this page when creating one. +Please use the pre-filled template to save time. +However, don't be put off by this template - other more general issues and suggestions are welcome! 
+Contributions to the code are even more welcome ;) +> If you need help using or modifying {{ cookiecutter.name }} then the best place to ask is on the nf-core Slack [#{{ cookiecutter.short_name }}](https://nfcore.slack.com/channels/{{ cookiecutter.short_name }}) channel ([join our Slack here](https://nf-co.re/join/slack)). ## Contribution workflow -If you'd like to write some code for {{ cookiecutter.name }}, the standard workflow -is as follows: -1. Check that there isn't already an issue about your idea in the - [{{ cookiecutter.name }} issues](https://github.com/{{ cookiecutter.name }}/issues) to avoid - duplicating work. +If you'd like to write some code for {{ cookiecutter.name }}, the standard workflow is as follows: + +1. Check that there isn't already an issue about your idea in the [{{ cookiecutter.name }} issues](https://github.com/{{ cookiecutter.name }}/issues) to avoid duplicating work * If there isn't one already, please create one so that others know you're working on this -2. Fork the [{{ cookiecutter.name }} repository](https://github.com/{{ cookiecutter.name }}) to your GitHub account +2. [Fork](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) the [{{ cookiecutter.name }} repository](https://github.com/{{ cookiecutter.name }}) to your GitHub account 3. Make the necessary changes / additions within your forked repository -4. Submit a Pull Request against the `dev` branch and wait for the code to be reviewed and merged. - -If you're not used to this workflow with git, you can start with some [basic docs from GitHub](https://help.github.com/articles/fork-a-repo/) or even their [excellent interactive tutorial](https://try.github.io/). +4. 
Submit a Pull Request against the `dev` branch and wait for the code to be reviewed and merged +If you're not used to this workflow with git, you can start with some [docs from GitHub](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests) or even their [excellent `git` resources](https://try.github.io/). ## Tests -When you create a pull request with changes, [Travis CI](https://travis-ci.org/) will run automatic tests. + +When you create a pull request with changes, [GitHub Actions](https://github.com/features/actions) will run automatic tests. Typically, pull-requests are only fully reviewed when these tests are passing, though of course we can help out before then. There are typically two types of tests that run: ### Lint Tests -The nf-core has a [set of guidelines](https://nf-co.re/developers/guidelines) which all pipelines must adhere to. + +`nf-core` has a [set of guidelines](https://nf-co.re/developers/guidelines) which all pipelines must adhere to. To enforce these and ensure that all pipelines stay in sync, we have developed a helper tool which runs checks on the pipeline code. This is in the [nf-core/tools repository](https://github.com/nf-core/tools) and once installed can be run locally with the `nf-core lint ` command. If any failures or warnings are encountered, please follow the listed URL for more documentation. ### Pipeline Tests -Each nf-core pipeline should be set up with a minimal set of test-data. -Travis CI then runs the pipeline on this data to ensure that it exists successfully. + +Each `nf-core` pipeline should be set up with a minimal set of test-data. +`GitHub Actions` then runs the pipeline on this data to ensure that it exits successfully. If there are any failures then the automated tests fail. -These tests are run both with the latest available version of Nextflow and also the minimum required version that is stated in the pipeline code. 
+These tests are run both with the latest available version of `Nextflow` and also the minimum required version that is stated in the pipeline code. + +## Patch + +:warning: Only in the unlikely and regretful event of a release happening with a bug. + +* On your own fork, make a new branch `patch` based on `upstream/master`. +* Fix the bug, and bump version (X.Y.Z+1). +* A PR should be made on `master` from `patch` to directly address this particular bug. ## Getting help -For further information/help, please consult the [{{ cookiecutter.name }} documentation](https://github.com/{{ cookiecutter.name }}#documentation) and don't hesitate to get in touch on the [{{ cookiecutter.name }} pipeline channel](https://nfcore.slack.com/channels/{{ cookiecutter.name }}) on [Slack](https://nf-co.re/join/slack/). + +For further information/help, please consult the [{{ cookiecutter.name }} documentation](https://nf-co.re/{{ cookiecutter.name }}/docs) and don't hesitate to get in touch on the nf-core Slack [#{{ cookiecutter.short_name }}](https://nfcore.slack.com/channels/{{ cookiecutter.short_name }}) channel ([join our Slack here](https://nf-co.re/join/slack)). diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/ISSUE_TEMPLATE/bug_report.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/ISSUE_TEMPLATE/bug_report.md index e1aa587d97..a732734304 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/ISSUE_TEMPLATE/bug_report.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,31 +1,42 @@ +# {{ cookiecutter.name }} bug report + Hi there! -Thanks for telling us about a problem with the pipeline. Please delete this text and anything that's not relevant from the template below: +Thanks for telling us about a problem with the pipeline.
+Please delete this text and anything that's not relevant from the template below: + +## Describe the bug -#### Describe the bug A clear and concise description of what the bug is. -#### Steps to reproduce +## Steps to reproduce + Steps to reproduce the behaviour: + 1. Command line: `nextflow run ...` 2. See error: _Please provide your error message_ -#### Expected behaviour +## Expected behaviour + A clear and concise description of what you expected to happen. -#### System: - - Hardware: [e.g. HPC, Desktop, Cloud...] - - Executor: [e.g. slurm, local, awsbatch...] - - OS: [e.g. CentOS Linux, macOS, Linux Mint...] - - Version [e.g. 7, 10.13.6, 18.3...] +## System + +- Hardware: +- Executor: +- OS: +- Version + +## Nextflow Installation + +- Version: + +## Container engine -#### Nextflow Installation: - - Version: [e.g. 0.31.0] +- Engine: +- version: +- Image tag: -#### Container engine: - - Engine: [e.g. Conda, Docker or Singularity] - - version: [e.g. 1.0.0] - - Image tag: [e.g. {{ cookiecutter.name_docker }}:1.0.0] +## Additional context -#### Additional context Add any other context about the problem here. diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/ISSUE_TEMPLATE/feature_request.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/ISSUE_TEMPLATE/feature_request.md index 1f025b779c..148df5999c 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/ISSUE_TEMPLATE/feature_request.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,16 +1,24 @@ +# {{ cookiecutter.name }} feature request + Hi there! -Thanks for suggesting a new feature for the pipeline! Please delete this text and anything that's not relevant from the template below: +Thanks for suggesting a new feature for the pipeline! +Please delete this text and anything that's not relevant from the template below: + +## Is your feature request related to a problem? 
Please describe -#### Is your feature request related to a problem? Please describe. A clear and concise description of what the problem is. + Ex. I'm always frustrated when [...] -#### Describe the solution you'd like +## Describe the solution you'd like + A clear and concise description of what you want to happen. -#### Describe alternatives you've considered +## Describe alternatives you've considered + A clear and concise description of any alternative solutions or features you've considered. -#### Additional context +## Additional context + Add any other context about the feature request here. diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/PULL_REQUEST_TEMPLATE.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/PULL_REQUEST_TEMPLATE.md index ef7cae0e01..3143db9604 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/PULL_REQUEST_TEMPLATE.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/PULL_REQUEST_TEMPLATE.md @@ -1,15 +1,19 @@ -Many thanks to contributing to {{ cookiecutter.name }}! +# {{ cookiecutter.name }} pull request -Please fill in the appropriate checklist below (delete whatever is not relevant). These are the most common things requested on pull requests (PRs). +Many thanks for contributing to {{ cookiecutter.name }}! + +Please fill in the appropriate checklist below (delete whatever is not relevant). +These are the most common things requested on pull requests (PRs). ## PR checklist - - [ ] This comment contains a description of changes (with reason) - - [ ] If you've fixed a bug or added code that should be tested, add tests! - - [ ] If necessary, also make a PR on the [{{ cookiecutter.name }} branch on the nf-core/test-datasets repo]( https://github.com/nf-core/test-datasets/pull/new/{{ cookiecutter.name }}) - - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker`). - - [ ] Make sure your code lints (`nf-core lint .`). 
- - [ ] Documentation in `docs` is updated - - [ ] `CHANGELOG.md` is updated - - [ ] `README.md` is updated - -**Learn more about contributing:** https://github.com/{{ cookiecutter.name }}/tree/master/.github/CONTRIBUTING.md + +- [ ] This comment contains a description of changes (with reason) +- [ ] If you've fixed a bug or added code that should be tested, add tests! +- [ ] If necessary, also make a PR on the [{{ cookiecutter.name }} branch on the nf-core/test-datasets repo](https://github.com/nf-core/test-datasets/pull/new/{{ cookiecutter.name }}) +- [ ] Ensure the test suite passes (`nextflow run . -profile test,docker`). +- [ ] Make sure your code lints (`nf-core lint .`). +- [ ] Documentation in `docs` is updated +- [ ] `CHANGELOG.md` is updated +- [ ] `README.md` is updated + +**Learn more about contributing:** [CONTRIBUTING.md](https://github.com/{{ cookiecutter.name }}/tree/master/.github/CONTRIBUTING.md) \ No newline at end of file diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/markdownlint.yml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/markdownlint.yml index e052a635aa..96b12a7039 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/markdownlint.yml +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/markdownlint.yml @@ -1,9 +1,5 @@ # Markdownlint configuration file default: true, line-length: false -no-multiple-blanks: 0 -blanks-around-headers: false -blanks-around-lists: false -header-increment: false no-duplicate-header: siblings_only: true diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/workflows/branch.yml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/workflows/branch.yml new file mode 100644 index 0000000000..1018f6d253 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/workflows/branch.yml @@ -0,0 +1,16 @@ +name: nf-core branch protection +# This workflow is triggered on 
PRs to master branch on the repository +# It fails when someone tries to make a PR against the nf-core `master` branch instead of `dev` +on: + pull_request: + branches: + - master + +jobs: + test: + runs-on: ubuntu-18.04 + steps: + # PRs are only ok if coming from an nf-core `dev` branch or a fork `patch` branch + - name: Check PRs + run: | + { [[ $(git remote get-url origin) == *{{cookiecutter.name}} ]] && [[ ${GITHUB_HEAD_REF} = "dev" ]]; } || [[ ${GITHUB_HEAD_REF} == "patch" ]] diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/workflows/ci.yml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/workflows/ci.yml new file mode 100644 index 0000000000..ebf5cb043f --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/workflows/ci.yml @@ -0,0 +1,29 @@ +name: nf-core CI +# This workflow is triggered on pushes and PRs to the repository. +# It runs the pipeline with the minimal test dataset to check that it completes without any syntax errors +on: [push, pull_request] + +jobs: + test: + env: + NXF_VER: {% raw %}${{ matrix.nxf_ver }}{% endraw %} + NXF_ANSI_LOG: false + runs-on: ubuntu-latest + strategy: + matrix: + # Nextflow versions: check pipeline minimum and current latest + nxf_ver: ['19.10.0', ''] + steps: + - uses: actions/checkout@v2 + - name: Install Nextflow + run: | + wget -qO- get.nextflow.io | bash + sudo mv nextflow /usr/local/bin/ + - name: Pull docker image + run: | + docker pull {{ cookiecutter.name_docker }}:dev && docker tag {{ cookiecutter.name_docker }}:dev {{ cookiecutter.name_docker }}:dev + - name: Run pipeline with test data + run: | + # TODO nf-core: You can customise CI pipeline run tests as required + # (eg. 
adding multiple test runs with different parameters) + nextflow run ${GITHUB_WORKSPACE} -profile test,docker diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/workflows/linting.yml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/workflows/linting.yml new file mode 100644 index 0000000000..7354dc7437 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/workflows/linting.yml @@ -0,0 +1,41 @@ +name: nf-core linting +# This workflow is triggered on pushes and PRs to the repository. +# It runs the `nf-core lint` and markdown lint tests to ensure that the code meets the nf-core guidelines +on: [push, pull_request] + +jobs: + Markdown: + runs-on: ubuntu-18.04 + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-node@v1 + with: + node-version: '10' + - name: Install markdownlint + run: | + npm install -g markdownlint-cli + - name: Run Markdownlint + run: | + markdownlint ${GITHUB_WORKSPACE} -c ${GITHUB_WORKSPACE}/.github/markdownlint.yml + nf-core: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - name: Install Nextflow + run: | + wget -qO- get.nextflow.io | bash + sudo mv nextflow /usr/local/bin/ + - uses: actions/setup-python@v1 + with: + python-version: '3.6' + architecture: 'x64' + - name: Install pip + run: | + sudo apt install python3-pip + pip install --upgrade pip + - name: Install nf-core tools + run: | + pip install nf-core + - name: Run nf-core lint + run: | + nf-core lint ${GITHUB_WORKSPACE} diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.gitignore b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.gitignore index 5b54e3e6c2..0189a44444 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.gitignore +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.gitignore @@ -3,5 +3,5 @@ work/ data/ results/ .DS_Store -tests/test_data +test* *.pyc diff --git 
a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.travis.yml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.travis.yml index 31c5205eb3..73af535dea 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.travis.yml +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.travis.yml @@ -9,7 +9,7 @@ matrix: before_install: # PRs to master are only ok if coming from dev branch - - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && ([ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ] || [ $TRAVIS_PULL_REQUEST_BRANCH = "patch" ]))' + - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ]) || [ $TRAVIS_PULL_REQUEST_BRANCH = "patch" ]' # Pull the docker image first so the test doesn't wait for this - docker pull {{ cookiecutter.name_docker }}:dev # Fake the tag locally so that the pipeline runs properly @@ -30,8 +30,13 @@ install: - sudo apt-get install npm && npm install -g markdownlint-cli env: - - NXF_VER='0.32.0' # Specify a minimum NF version that should be tested and work - - NXF_VER='' # Plus: get the latest NF version and check that it works + # Tower token is to inspect runs on https://tower.nf + # Use public mailbox nf-core@mailinator.com to log in: https://www.mailinator.com/v3/index.jsp?zone=public&query=nf-core + # Specify a minimum NF version that should be tested and work + - NXF_VER='19.10.0' TOWER_ACCESS_TOKEN="1c1f493bc2703472d6f1b9f6fb9e9d117abab7b1" + # Plus: get the latest NF version and check that it works + - NXF_VER='' TOWER_ACCESS_TOKEN="1c1f493bc2703472d6f1b9f6fb9e9d117abab7b1" + script: # Lint the pipeline code @@ -39,4 +44,4 @@ script: # Lint the documentation - markdownlint ${TRAVIS_BUILD_DIR} -c ${TRAVIS_BUILD_DIR}/.github/markdownlint.yml # Run the pipeline with the test profile - - nextflow run ${TRAVIS_BUILD_DIR} -profile 
test,docker + - nextflow run ${TRAVIS_BUILD_DIR} -profile test,docker -ansi-log false -name {{ cookiecutter.short_name }}-${TRAVIS_EVENT_TYPE}-${TRAVIS_PULL_REQUEST}-${TRAVIS_COMMIT:0:6}-test-description diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CHANGELOG.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CHANGELOG.md index 334d08780d..fcbbfe48f9 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CHANGELOG.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CHANGELOG.md @@ -1,4 +1,16 @@ # {{ cookiecutter.name }}: Changelog +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + ## v{{ cookiecutter.version }} - [date] + Initial release of {{ cookiecutter.name }}, created with the [nf-core](http://nf-co.re/) template. + +### `Added` + +### `Fixed` + +### `Dependencies` + +### `Deprecated` diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CODE_OF_CONDUCT.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CODE_OF_CONDUCT.md index 1cda760094..cf930c8acf 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CODE_OF_CONDUCT.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CODE_OF_CONDUCT.md @@ -34,7 +34,7 @@ This Code of Conduct applies both within project spaces and in public spaces whe ## Enforcement -Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team on [Slack](https://nf-co.re/join/slack/). The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team on [Slack](https://nf-co.re/join/slack). The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Dockerfile b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Dockerfile index f57f757dae..69d24cdc31 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Dockerfile +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Dockerfile @@ -1,7 +1,13 @@ FROM nfcore/base:{{ 'dev' if 'dev' in cookiecutter.nf_core_version else cookiecutter.nf_core_version }} LABEL authors="{{ cookiecutter.author }}" \ - description="Docker image containing all requirements for {{ cookiecutter.name }} pipeline" + description="Docker image containing all software requirements for the {{ cookiecutter.name }} pipeline" +# Install the conda environment COPY environment.yml / RUN conda env create -f /environment.yml && conda clean -a + +# Add conda installation dir to PATH (instead of doing 'conda activate') ENV PATH /opt/conda/envs/{{ cookiecutter.name_noslash }}-{{ cookiecutter.version }}/bin:$PATH + +# Dump the details of the installed packages to a file for posterity +RUN conda env export --name {{ cookiecutter.name_noslash }}-{{ cookiecutter.version }} > {{ cookiecutter.name_noslash }}-{{ cookiecutter.version }}.yml diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md 
b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index b219a0c3a5..3fac6d7ea5 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -3,7 +3,9 @@ **{{ cookiecutter.description }}**. [![Build Status](https://travis-ci.com/{{ cookiecutter.name }}.svg?branch=master)](https://travis-ci.com/{{ cookiecutter.name }}) -[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.32.0-brightgreen.svg)](https://www.nextflow.io/) +[![GitHub Actions CI Status](https://github.com/{{ cookiecutter.name }}/workflows/nf-core%20CI/badge.svg)](https://github.com/{{ cookiecutter.name }}/actions) +[![GitHub Actions Linting Status](https://github.com/{{ cookiecutter.name }}/workflows/nf-core%20linting/badge.svg)](https://github.com/{{ cookiecutter.name }}/actions) +[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A519.10.0-brightgreen.svg)](https://www.nextflow.io/) [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](http://bioconda.github.io/) [![Docker](https://img.shields.io/docker/automated/{{ cookiecutter.name_docker }}.svg)](https://hub.docker.com/r/{{ cookiecutter.name_docker }}) @@ -21,14 +23,17 @@ ii. Install one of [`docker`](https://docs.docker.com/engine/installation/), [`s iii. Download the pipeline and test it on a minimal dataset with a single command ```bash -nextflow run {{ cookiecutter.name }} -profile test, +nextflow run {{ cookiecutter.name }} -profile test, ``` +> Please check [nf-core/configs](https://github.com/nf-core/configs#documentation) to see if a custom config file to run nf-core pipelines already exists for your Institute. If so, you can simply use `-profile institute` in your command. This will enable either `docker` or `singularity` and set the appropriate execution settings for your local compute environment. + iv. Start running your own analysis! 
+ ```bash -nextflow run {{ cookiecutter.name }} -profile --reads '*_R{1,2}.fastq.gz' --genome GRCh37 +nextflow run {{ cookiecutter.name }} -profile --reads '*_R{1,2}.fastq.gz' --genome GRCh37 ``` See [usage docs](docs/usage.md) for all of the available options when running the pipeline. @@ -56,7 +61,7 @@ The {{ cookiecutter.name }} pipeline comes with documentation about the pipeline If you would like to contribute to this pipeline, please see the [contributing guidelines](.github/CONTRIBUTING.md). -For further information or help, don't hesitate to get in touch on [Slack](https://nfcore.slack.com/channels/{{ cookiecutter.name }}) (you can join with [this invite](https://nf-co.re/join/slack)). +For further information or help, don't hesitate to get in touch on [Slack](https://nfcore.slack.com/channels/{{ cookiecutter.short_name }}) (you can join with [this invite](https://nf-co.re/join/slack)). ## Citation @@ -64,4 +69,5 @@ For further information or help, don't hesitate to get in touch on [Slack](https You can cite the `nf-core` pre-print as follows: -Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tommaso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). + +> Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tommaso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). 
diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py index c9d2361223..9f5a6a64a3 100755 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py @@ -28,7 +28,7 @@ results[k] = False # Remove software set to false in results -for k in results: +for k in list(results): if not results[k]: del(results[k]) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/awsbatch.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/awsbatch.config deleted file mode 100644 index 14af5866f5..0000000000 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/awsbatch.config +++ /dev/null @@ -1,18 +0,0 @@ -/* - * ------------------------------------------------- - * Nextflow config file for running on AWS batch - * ------------------------------------------------- - * Base config needed for running with -profile awsbatch - */ -params { - config_profile_name = 'AWSBATCH' - config_profile_description = 'AWSBATCH Cloud Profile' - config_profile_contact = 'Alexander Peltzer (@apeltzer)' - config_profile_url = 'https://aws.amazon.com/de/batch/' -} - -aws.region = params.awsregion -process.executor = 'awsbatch' -process.queue = params.awsqueue -executor.awscli = '/home/ec2-user/miniconda/bin/aws' -params.tracedir = './' diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config index 3c5aac4633..2a2322c95d 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config @@ -47,12 +47,5 @@ process { withName:get_software_versions { cache = false } -} - -params { - // Defaults only, 
expecting to be overwritten - max_memory = 128.GB - max_cpus = 16 - max_time = 240.h - igenomes_base = 's3://ngi-igenomes/igenomes/' + } diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/igenomes.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/igenomes.config index 392f250734..2de924228f 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/igenomes.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/igenomes.config @@ -9,184 +9,412 @@ params { // illumina iGenomes reference file paths - // TODO nf-core: Add new reference types and strip out those that are not needed genomes { 'GRCh37' { - bed12 = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/README.txt" + mito_name = "MT" + macs_gsize = "2.7e9" + blacklist = 
"${baseDir}/assets/blacklists/GRCh37-blacklist.bed" + } + 'GRCh38' { + fasta = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Annotation/Genes/genes.bed" + mito_name = "chrM" + macs_gsize = "2.7e9" + blacklist = "${baseDir}/assets/blacklists/hg38-blacklist.bed" } 'GRCm38' { - bed12 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCh37/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCh37/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.bed" + readme = 
"${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/README.txt" + mito_name = "MT" + macs_gsize = "1.87e9" + blacklist = "${baseDir}/assets/blacklists/GRCm38-blacklist.bed" } 'TAIR10' { - bed12 = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/README.txt" + mito_name = "Mt" } 'EB2' { - bed12 = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/STARIndex/" - bowtie2 = 
"${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/README.txt" } 'UMD3.1' { - bed12 = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/BWAIndex/" - + fasta = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.gtf" 
+ bed12 = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/README.txt" + mito_name = "MT" } 'WBcel235' { - bed12 = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.bed" + mito_name = "MtDNA" + macs_gsize = "9e7" } 'CanFam3.1' { - bed12 = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/STARIndex/" - bowtie2 = 
"${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/README.txt" + mito_name = "MT" } 'GRCz10' { - bed12 = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/BismarkIndex/" + gtf = 
"${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.bed" + mito_name = "MT" } 'BDGP6' { - bed12 = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.bed" + mito_name = "M" + macs_gsize = "1.2e8" } 'EquCab2' { - bed12 = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/STARIndex/" - bowtie2 = 
"${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/README.txt" + mito_name = "MT" } 'EB1' { - bed12 = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/STARIndex/" + bismark = 
"${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/README.txt" } 'Galgal4' { - bed12 = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.bed" + mito_name = "MT" } 'Gm01' { - bed12 = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/STARIndex/" - bowtie2 = 
"${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/README.txt" } 'Mmul_1' { - bed12 = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.gtf" + bed12 = 
"${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/README.txt" + mito_name = "MT" } 'IRGSP-1.0' { - bed12 = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.bed" + mito_name = "Mt" } 'CHIMP2.1.4' { - bed12 = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/STARIndex/" - bowtie2 = 
"${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/README.txt" + mito_name = "MT" } 'Rnor_6.0' { - bed12 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/STARIndex/" + bismark = 
"${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.bed" + mito_name = "MT" } 'R64-1-1' { - bed12 = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.bed" + mito_name = "MT" + macs_gsize = "1.2e7" } 'EF2' { - bed12 = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/WholeGenomeFasta/genome.fa" - gtf = 
"${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/README.txt" + mito_name = "MT" + macs_gsize = "1.21e7" } 'Sbi1' { - bed12 = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/BWAIndex/genome.fa" + bowtie2 = 
"${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/README.txt" } 'Sscrofa10.2' { - bed12 = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.bed" - fasta = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/README.txt" + mito_name = "MT" } 'AGPv3' { - bed12 = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.bed" - fasta = 
"${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/WholeGenomeFasta/genome.fa" - gtf = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.gtf" - star = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/STARIndex/" - bowtie2 = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/Bowtie2Index/" - bwa = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/BWAIndex/" + fasta = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.bed" + mito_name = "Mt" + } + 'hg38' { + fasta = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Annotation/Genes/genes.bed" + mito_name = "chrM" + macs_gsize = "2.7e9" + blacklist = "${baseDir}/assets/blacklists/hg38-blacklist.bed" + } + 'hg19' { + fasta = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/BWAIndex/genome.fa" + bowtie2 = 
"${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/README.txt" + mito_name = "chrM" + macs_gsize = "2.7e9" + blacklist = "${baseDir}/assets/blacklists/hg19-blacklist.bed" + } + 'mm10' { + fasta = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/README.txt" + mito_name = "chrM" + macs_gsize = "1.87e9" + blacklist = "${baseDir}/assets/blacklists/mm10-blacklist.bed" + } + 'bosTau8' { + fasta = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Annotation/Genes/genes.gtf" + bed12 = 
"${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Annotation/Genes/genes.bed" + mito_name = "chrM" + } + 'ce10' { + fasta = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/README.txt" + mito_name = "chrM" + macs_gsize = "9e7" + } + 'canFam3' { + fasta = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/README.txt" + mito_name = "chrM" + } + 'danRer10' { + fasta = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/Bowtie2Index/" + star = 
"${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Annotation/Genes/genes.bed" + mito_name = "chrM" + } + 'dm6' { + fasta = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Annotation/Genes/genes.bed" + mito_name = "chrM" + macs_gsize = "1.2e8" + } + 'equCab2' { + fasta = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/README.txt" + mito_name = "chrM" + } + 'galGal4' { + fasta = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/WholeGenomeFasta/genome.fa" + bwa = 
"${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/README.txt" + mito_name = "chrM" + } + 'panTro4' { + fasta = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/README.txt" + mito_name = "chrM" + } + 'rn6' { + fasta = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Annotation/Genes/genes.gtf" + bed12 = 
"${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Annotation/Genes/genes.bed" + mito_name = "chrM" + } + 'sacCer3' { + fasta = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/BismarkIndex/" + readme = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Annotation/README.txt" + mito_name = "chrM" + macs_gsize = "1.2e7" + } + 'susScr3' { + fasta = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/README.txt" + mito_name = "chrM" } } } diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/test.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/test.config index 9ce6312ae8..70b44fa4dd 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/test.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/test.config @@ -4,7 +4,7 @@ * ------------------------------------------------- * Defines bundled input files and everything required * to run a fast and simple 
test. Use as follows: - * nextflow run {{ cookiecutter.name }} -profile test + * nextflow run {{ cookiecutter.name }} -profile test, */ params { @@ -14,11 +14,11 @@ params { max_cpus = 2 max_memory = 6.GB max_time = 48.h - + // Input data // TODO nf-core: Specify the paths to your test data on nf-core/test-datasets // TODO nf-core: Give any required params for the test so that command line flags are not needed - singleEnd = false + single_end = false readPaths = [ ['Testdata', ['https://github.com/nf-core/test-datasets/raw/exoseq/testdata/Testdata_R1.tiny.fastq.gz', 'https://github.com/nf-core/test-datasets/raw/exoseq/testdata/Testdata_R2.tiny.fastq.gz']], ['SRR389222', ['https://github.com/nf-core/test-datasets/raw/methylseq/testdata/SRR389222_sub1.fastq.gz', 'https://github.com/nf-core/test-datasets/raw/methylseq/testdata/SRR389222_sub2.fastq.gz']] diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/output.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/output.md index c9b9b9e4e1..f6bfa82bf7 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/output.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/output.md @@ -5,6 +5,7 @@ This document describes the output produced by the pipeline. Most of the plots a ## Pipeline overview + The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes data using the following steps: @@ -12,6 +13,7 @@ and processes data using the following steps: * [MultiQC](#multiqc) - aggregate report, describing results of the whole pipeline ## FastQC + [FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your reads. It provides information about the quality score distribution across your reads, the per base sequence content (%T/A/G/C). You get information about adapter contamination and other overrepresented sequences. 
For further reading and documentation see the [FastQC help](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/). @@ -25,8 +27,8 @@ For further reading and documentation see the [FastQC help](http://www.bioinform * `zips/sample_fastqc.zip` * zip file containing the FastQC report, tab-delimited data file and plot images - ## MultiQC + [MultiQC](http://multiqc.info) is a visualisation tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in within the report data directory. The pipeline has special steps which allow the software versions used to be reported in the MultiQC output for future traceability. diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md index cade6b7cea..727f8d5660 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md @@ -2,8 +2,6 @@ ## Table of contents - - * [Table of contents](#table-of-contents) * [Introduction](#introduction) * [Running the pipeline](#running-the-pipeline) @@ -12,21 +10,23 @@ * [Main arguments](#main-arguments) * [`-profile`](#-profile) * [`--reads`](#--reads) - * [`--singleEnd`](#--singleend) + * [`--single_end`](#--single_end) * [Reference genomes](#reference-genomes) * [`--genome` (using iGenomes)](#--genome-using-igenomes) * [`--fasta`](#--fasta) - * [`--igenomesIgnore`](#--igenomesignore) + * [`--igenomes_ignore`](#--igenomes_ignore) * [Job resources](#job-resources) * [Automatic resubmission](#automatic-resubmission) * [Custom resource requests](#custom-resource-requests) * [AWS Batch specific parameters](#aws-batch-specific-parameters) * [`--awsqueue`](#--awsqueue) * [`--awsregion`](#--awsregion) + * [`--awscli`](#--awscli) * [Other command line 
parameters](#other-command-line-parameters) * [`--outdir`](#--outdir) * [`--email`](#--email) * [`--email_on_fail`](#--email_on_fail) + * [`--max_multiqc_email_size`](#--max_multiqc_email_size) * [`-name`](#-name) * [`-resume`](#-resume) * [`-c`](#-c) @@ -38,10 +38,9 @@ * [`--plaintext_email`](#--plaintext_email) * [`--monochrome_logs`](#--monochrome_logs) * [`--multiqc_config`](#--multiqc_config) - - ## Introduction + Nextflow handles job submissions on SLURM or other environments, and supervises running the jobs. Thus the Nextflow process must run until the pipeline is finished. We recommend that you put the process running in the background through `screen` / `tmux` or similar tool. Alternatively you can run nextflow within a cluster job submitted your job scheduler. It is recommended to limit the Nextflow Java virtual machines memory. We recommend adding the following line to your environment (typically in `~/.bashrc` or `~./bash_profile`): @@ -53,6 +52,7 @@ NXF_OPTS='-Xms1g -Xmx4g' ## Running the pipeline + The typical command for running the pipeline is as follows: ```bash @@ -71,6 +71,7 @@ results # Finished results (configurable, see below) ``` ### Updating the pipeline + When you run the above command, Nextflow automatically pulls the pipeline code from GitHub and stores it as a cached version. When running the pipeline after this, it will always use the cached version if available - even if the pipeline has been updated since. To make sure that you're running the latest version of the pipeline, make sure that you regularly update the cached version of the pipeline: ```bash @@ -78,22 +79,28 @@ nextflow pull {{ cookiecutter.name }} ``` ### Reproducibility + It's a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. 
If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. First, go to the [{{ cookiecutter.name }} releases page](https://github.com/{{ cookiecutter.name }}/releases) and find the latest version number - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. - ## Main arguments ### `-profile` -Use this parameter to choose a configuration profile. Profiles can give configuration presets for different compute environments. Note that multiple profiles can be loaded, for example: `-profile docker` - the order of arguments is important! -If `-profile` is not specified at all the pipeline will be run locally and expects all software to be installed and available on the `PATH`. +Use this parameter to choose a configuration profile. Profiles can give configuration presets for different compute environments. + +Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Conda) - see below. + +The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to see if your system is available in these configs please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). + +Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important! +They are loaded in sequence, so later profiles can overwrite earlier profiles. 
+ +If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended. -* `awsbatch` - * A generic configuration profile to be used with AWS Batch. * `conda` * A generic configuration profile to be used with [conda](https://conda.io/docs/) * Pulls most software from [Bioconda](https://bioconda.github.io/) @@ -110,6 +117,7 @@ If `-profile` is not specified at all the pipeline will be run locally and expec ### `--reads` + Use this to specify the location of your input FastQ files. For example: ```bash @@ -124,21 +132,22 @@ Please note the following requirements: If left unspecified, a default pattern is used: `data/*{1,2}.fastq.gz` -### `--singleEnd` -By default, the pipeline expects paired-end data. If you have single-end data, you need to specify `--singleEnd` on the command line when you launch the pipeline. A normal glob pattern, enclosed in quotation marks, can then be used for `--reads`. For example: +### `--single_end` + +By default, the pipeline expects paired-end data. If you have single-end data, you need to specify `--single_end` on the command line when you launch the pipeline. A normal glob pattern, enclosed in quotation marks, can then be used for `--reads`. For example: ```bash ---singleEnd --reads '*.fastq' +--single_end --reads '*.fastq' ``` It is not possible to run a mixture of single-end and paired-end files in one run. - ## Reference genomes The pipeline config files come bundled with paths to the illumina iGenomes reference index files. If running with docker or AWS, the configuration is set up to use the [AWS-iGenomes](https://ewels.github.io/AWS-iGenomes/) resource. ### `--genome` (using iGenomes) + There are 31 different species supported in the iGenomes references. To run the pipeline, you must specify which to use with the `--genome` flag. You can find the keys to specify the genomes in the [iGenomes config file](../conf/igenomes.config). 
Common genomes that are supported are: @@ -172,33 +181,48 @@ params { ``` + ### `--fasta` + If you prefer, you can specify the full path to your reference genome when you run the pipeline: ```bash --fasta '[path to Fasta reference]' ``` -### `--igenomesIgnore` +### `--igenomes_ignore` + Do not load `igenomes.config` when running the pipeline. You may choose this option if you observe clashes between custom parameters and those supplied in `igenomes.config`. ## Job resources + ### Automatic resubmission + Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with an error code of `143` (exceeded requested resources) it will automatically resubmit with higher requests (2 x original, then 3 x original). If it still fails after three times then the pipeline is stopped. ### Custom resource requests + Wherever process-specific requirements are set in the pipeline, the default value can be changed by creating a custom config file. See the files hosted at [`nf-core/configs`](https://github.com/nf-core/configs/tree/master/conf) for examples. If you are likely to be running `nf-core` pipelines regularly it may be a good idea to request that your custom config file is uploaded to the `nf-core/configs` git repository. Before you do this please can you test that the config file works with your pipeline of choice using the `-c` parameter (see definition below). You can then create a pull request to the `nf-core/configs` repository with the addition of your config file, associated documentation file (see examples in [`nf-core/configs/docs`](https://github.com/nf-core/configs/tree/master/docs)), and amending [`nfcore_custom.config`](https://github.com/nf-core/configs/blob/master/nfcore_custom.config) to include your custom profile. -If you have any questions or issues please send us a message on [Slack](https://nf-co.re/join/slack/). 
+If you have any questions or issues please send us a message on [Slack](https://nf-co.re/join/slack). ## AWS Batch specific parameters -Running the pipeline on AWS Batch requires a couple of specific parameters to be set according to your AWS Batch configuration. Please use the `-awsbatch` profile and then specify all of the following parameters. + +Running the pipeline on AWS Batch requires a couple of specific parameters to be set according to your AWS Batch configuration. Please use [`-profile awsbatch`](https://github.com/nf-core/configs/blob/master/conf/awsbatch.config) and then specify all of the following parameters. + ### `--awsqueue` + The JobQueue that you intend to use on AWS Batch. + ### `--awsregion` -The AWS region to run your job in. Default is set to `eu-west-1` but can be adjusted to your needs. + +The AWS region in which to run your job. Default is set to `eu-west-1` but can be adjusted to your needs. + +### `--awscli` + +The [AWS CLI](https://www.nextflow.io/docs/latest/awscloud.html#aws-cli-installation) path in your custom AMI. Default: `/home/ec2-user/miniconda/bin/aws`. Please make sure to also set the `-w/--work-dir` and `--outdir` parameters to a S3 storage bucket of your choice - you'll get an error message notifying you if you didn't. @@ -207,15 +231,23 @@ Please make sure to also set the `-w/--work-dir` and `--outdir` parameters to a ### `--outdir` + The output directory where the results will be saved. ### `--email` + Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits. If set in your user config file (`~/.nextflow/config`) then you don't need to specify this on the command line for every run. ### `--email_on_fail` + This works exactly as with `--email`, except emails are only sent if the workflow is not successful. +### `--max_multiqc_email_size` + +Threshold size for MultiQC report to be attached in notification email. 
If file generated by pipeline exceeds the threshold, it will not be attached (Default: 25MB). + ### `-name` + Name for the pipeline run. If not specified, Nextflow will automatically generate a random mnemonic. This is used in the MultiQC report (if not default) and in the summary HTML / e-mail (always). @@ -223,6 +255,7 @@ This is used in the MultiQC report (if not default) and in the summary HTML / e- **NB:** Single hyphen (core Nextflow option) ### `-resume` + Specify this when restarting a pipeline. Nextflow will used cached results from any pipeline steps where the inputs are the same, continuing from where it got to previously. You can also supply a run name to resume a specific run: `-resume [run-name]`. Use the `nextflow log` command to show previous run names. @@ -230,6 +263,7 @@ You can also supply a run name to resume a specific run: `-resume [run-name]`. U **NB:** Single hyphen (core Nextflow option) ### `-c` + Specify the path to a specific config file (this is a core NextFlow command). **NB:** Single hyphen (core Nextflow option) @@ -237,7 +271,8 @@ Specify the path to a specific config file (this is a core NextFlow command). Note - you can use this to override pipeline defaults. ### `--custom_config_version` -Provide git commit id for custom Institutional configs hosted at `nf-core/configs`. This was implemented for reproducibility purposes. Default is set to `master`. + +Provide git commit id for custom Institutional configs hosted at `nf-core/configs`. This was implemented for reproducibility purposes. Default: `master`. ```bash ## Download and use config file with following git commid id @@ -245,6 +280,7 @@ Provide git commit id for custom Institutional configs hosted at `nf-core/config ``` ### `--custom_config_base` + If you're running offline, nextflow will not be able to fetch the institutional config files from the internet. If you don't need them, then this is not a problem. 
If you do need them, you should download the files from the repo and tell nextflow where to find them with the @@ -265,22 +301,28 @@ nextflow run /path/to/pipeline/ --custom_config_base /path/to/my/configs/configs > files + singularity containers + institutional configs in one go for you, to make this process easier. ### `--max_memory` + Use to set a top-limit for the default memory requirement for each process. Should be a string in the format integer-unit. eg. `--max_memory '8.GB'` ### `--max_time` + Use to set a top-limit for the default time requirement for each process. Should be a string in the format integer-unit. eg. `--max_time '2.h'` ### `--max_cpus` + Use to set a top-limit for the default CPU requirement for each process. Should be a string in the format integer-unit. eg. `--max_cpus 1` ### `--plaintext_email` + Set to receive plain-text e-mails instead of HTML formatted. ### `--monochrome_logs` + Set to disable colourful command line output and live life in monochrome. ### `--multiqc_config` + Specify a path to a custom MultiQC configuration file. 
diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml index 537942ae77..df25d32c61 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml @@ -6,6 +6,7 @@ channels: - bioconda - defaults dependencies: + - conda-forge::python=3.7.3 # TODO nf-core: Add required software dependencies here - bioconda::fastqc=0.11.8 - bioconda::multiqc=1.7 diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index 29baacd38d..26eb14f234 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -21,27 +21,28 @@ def helpMessage() { nextflow run {{ cookiecutter.name }} --reads '*_R{1,2}.fastq.gz' -profile docker Mandatory arguments: - --reads Path to input data (must be surrounded with quotes) - -profile Configuration profile to use. Can use multiple (comma separated) - Available: conda, docker, singularity, awsbatch, test and more. + --reads [file] Path to input data (must be surrounded with quotes) + -profile [str] Configuration profile to use. Can use multiple (comma separated) + Available: conda, docker, singularity, test, awsbatch and more Options: - --genome Name of iGenomes reference - --singleEnd Specifies that the input is single end reads + --genome [str] Name of iGenomes reference + --single_end [bool] Specifies that the input is single-end reads - References If not specified in the configuration file or you wish to overwrite any of the references. 
- --fasta Path to Fasta reference + References If not specified in the configuration file or you wish to overwrite any of the references + --fasta [file] Path to fasta reference Other options: - --outdir The output directory where the results will be saved - --email Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits - --email_on_fail Same as --email, except only send mail if the workflow is not successful - --maxMultiqcEmailFileSize Theshold size for MultiQC report to be attached in notification email. If file generated by pipeline exceeds the threshold, it will not be attached (Default: 25MB) - -name Name for the pipeline run. If not specified, Nextflow will automatically generate a random mnemonic. + --outdir [file] The output directory where the results will be saved + --email [email] Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits + --email_on_fail [email] Same as --email, except only send mail if the workflow is not successful + --max_multiqc_email_size [str] Threshold size for MultiQC report to be attached in notification email. If file generated by pipeline exceeds the threshold, it will not be attached (Default: 25MB) + -name [str] Name for the pipeline run. 
If not specified, Nextflow will automatically generate a random mnemonic AWSBatch options: - --awsqueue The AWSBatch JobQueue that needs to be set when running on AWSBatch - --awsregion The AWS Region for your AWS Batch job to run on + --awsqueue [str] The AWSBatch JobQueue that needs to be set when running on AWSBatch + --awsregion [str] The AWS Region for your AWS Batch job to run on + --awscli [str] Path to the AWS CLI tool """.stripIndent() } @@ -75,17 +76,17 @@ if (params.fasta) { ch_fasta = file(params.fasta, checkIfExists: true) } // this has the bonus effect of catching both -name and --name custom_runName = params.name if (!(workflow.runName ==~ /[a-z]+_[a-z]+/)) { - custom_runName = workflow.runName + custom_runName = workflow.runName } -if ( workflow.profile == 'awsbatch') { - // AWSBatch sanity checking - if (!params.awsqueue || !params.awsregion) exit 1, "Specify correct --awsqueue and --awsregion parameters on AWSBatch!" - // Check outdir paths to be S3 buckets if running on AWSBatch - // related: https://github.com/nextflow-io/nextflow/issues/813 - if (!params.outdir.startsWith('s3:')) exit 1, "Outdir not on S3 - specify S3 Bucket to run on AWSBatch!" - // Prevent trace files to be stored on S3 since S3 does not support rolling files. - if (workflow.tracedir.startsWith('s3:')) exit 1, "Specify a local tracedir or run without trace! S3 cannot be used for tracefiles." +if (workflow.profile.contains('awsbatch')) { + // AWSBatch sanity checking + if (!params.awsqueue || !params.awsregion) exit 1, "Specify correct --awsqueue and --awsregion parameters on AWSBatch!" + // Check outdir paths to be S3 buckets if running on AWSBatch + // related: https://github.com/nextflow-io/nextflow/issues/813 + if (!params.outdir.startsWith('s3:')) exit 1, "Outdir not on S3 - specify S3 Bucket to run on AWSBatch!" + // Prevent trace files to be stored on S3 since S3 does not support rolling files. 
+ if (params.tracedir.startsWith('s3:')) exit 1, "Specify a local tracedir or run without trace! S3 cannot be used for tracefiles." } // Stage config files @@ -96,24 +97,24 @@ ch_output_docs = file("$baseDir/docs/output.md", checkIfExists: true) * Create a channel for input read files */ if (params.readPaths) { - if (params.singleEnd) { + if (params.single_end) { Channel .from(params.readPaths) .map { row -> [ row[0], [ file(row[1][0], checkIfExists: true) ] ] } .ifEmpty { exit 1, "params.readPaths was empty - no input files supplied" } - .into { read_files_fastqc; read_files_trimming } + .into { ch_read_files_fastqc; ch_read_files_trimming } } else { Channel .from(params.readPaths) .map { row -> [ row[0], [ file(row[1][0], checkIfExists: true), file(row[1][1], checkIfExists: true) ] ] } .ifEmpty { exit 1, "params.readPaths was empty - no input files supplied" } - .into { read_files_fastqc; read_files_trimming } + .into { ch_read_files_fastqc; ch_read_files_trimming } } } else { Channel - .fromFilePairs( params.reads, size: params.singleEnd ? 1 : 2 ) - .ifEmpty { exit 1, "Cannot find any reads matching: ${params.reads}\nNB: Path needs to be enclosed in quotes!\nIf this is single-end data, please specify --singleEnd on the command line." } - .into { read_files_fastqc; read_files_trimming } + .fromFilePairs(params.reads, size: params.single_end ? 1 : 2) + .ifEmpty { exit 1, "Cannot find any reads matching: ${params.reads}\nNB: Path needs to be enclosed in quotes!\nIf this is single-end data, please specify --single_end on the command line." } + .into { ch_read_files_fastqc; ch_read_files_trimming } } // Header log info @@ -124,7 +125,7 @@ summary['Run Name'] = custom_runName ?: workflow.runName // TODO nf-core: Report custom parameters here summary['Reads'] = params.reads summary['Fasta Ref'] = params.fasta -summary['Data Type'] = params.singleEnd ? 'Single-End' : 'Paired-End' +summary['Data Type'] = params.single_end ? 
'Single-End' : 'Paired-End' summary['Max Resources'] = "$params.max_memory memory, $params.max_cpus cpus, $params.max_time time per job" if (workflow.containerEngine) summary['Container'] = "$workflow.containerEngine - $workflow.container" summary['Output dir'] = params.outdir @@ -132,18 +133,19 @@ summary['Launch dir'] = workflow.launchDir summary['Working dir'] = workflow.workDir summary['Script dir'] = workflow.projectDir summary['User'] = workflow.userName -if (workflow.profile == 'awsbatch') { - summary['AWS Region'] = params.awsregion - summary['AWS Queue'] = params.awsqueue +if (workflow.profile.contains('awsbatch')) { + summary['AWS Region'] = params.awsregion + summary['AWS Queue'] = params.awsqueue + summary['AWS CLI'] = params.awscli } summary['Config Profile'] = workflow.profile if (params.config_profile_description) summary['Config Description'] = params.config_profile_description if (params.config_profile_contact) summary['Config Contact'] = params.config_profile_contact if (params.config_profile_url) summary['Config URL'] = params.config_profile_url if (params.email || params.email_on_fail) { - summary['E-mail Address'] = params.email - summary['E-mail on failure'] = params.email_on_fail - summary['MultiQC maxsize'] = params.maxMultiqcEmailFileSize + summary['E-mail Address'] = params.email + summary['E-mail on failure'] = params.email_on_fail + summary['MultiQC maxsize'] = params.max_multiqc_email_size } log.info summary.collect { k,v -> "${k.padRight(18)}: $v" }.join("\n") log.info "-\033[2m--------------------------------------------------\033[0m-" @@ -174,12 +176,12 @@ ${summary.collect { k,v -> "
$k
${v ?: ' - if (filename.indexOf(".csv") > 0) filename - else null - } + if (filename.indexOf(".csv") > 0) filename + else null + } output: - file 'software_versions_mqc.yaml' into software_versions_yaml + file 'software_versions_mqc.yaml' into ch_software_versions_yaml file "software_versions.csv" script: @@ -200,13 +202,15 @@ process fastqc { tag "$name" label 'process_medium' publishDir "${params.outdir}/fastqc", mode: 'copy', - saveAs: { filename -> filename.indexOf(".zip") > 0 ? "zips/$filename" : "$filename" } + saveAs: { filename -> + filename.indexOf(".zip") > 0 ? "zips/$filename" : "$filename" + } input: - set val(name), file(reads) from read_files_fastqc + set val(name), file(reads) from ch_read_files_fastqc output: - file "*_fastqc.{zip,html}" into fastqc_results + file "*_fastqc.{zip,html}" into ch_fastqc_results script: """ @@ -223,12 +227,12 @@ process multiqc { input: file multiqc_config from ch_multiqc_config // TODO nf-core: Add in log files from your new processes for MultiQC to find! 
- file ('fastqc/*') from fastqc_results.collect().ifEmpty([]) - file ('software_versions/*') from software_versions_yaml.collect() + file ('fastqc/*') from ch_fastqc_results.collect().ifEmpty([]) + file ('software_versions/*') from ch_software_versions_yaml.collect() file workflow_summary from create_workflow_summary(summary) output: - file "*multiqc_report.html" into multiqc_report + file "*multiqc_report.html" into ch_multiqc_report file "*_data" file "multiqc_plots" @@ -267,7 +271,7 @@ workflow.onComplete { // Set up the e-mail variables def subject = "[{{ cookiecutter.name }}] Successful: $workflow.runName" if (!workflow.success) { - subject = "[{{ cookiecutter.name }}] FAILED: $workflow.runName" + subject = "[{{ cookiecutter.name }}] FAILED: $workflow.runName" } def email_fields = [:] email_fields['version'] = workflow.manifest.version @@ -288,17 +292,16 @@ workflow.onComplete { if (workflow.repository) email_fields['summary']['Pipeline repository Git URL'] = workflow.repository if (workflow.commitId) email_fields['summary']['Pipeline repository Git Commit'] = workflow.commitId if (workflow.revision) email_fields['summary']['Pipeline Git branch/tag'] = workflow.revision - if (workflow.container) email_fields['summary']['Docker image'] = workflow.container email_fields['summary']['Nextflow Version'] = workflow.nextflow.version email_fields['summary']['Nextflow Build'] = workflow.nextflow.build email_fields['summary']['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp - // TODO nf-core: If not using MultiQC, strip out this code (including params.maxMultiqcEmailFileSize) + // TODO nf-core: If not using MultiQC, strip out this code (including params.max_multiqc_email_size) // On success try attach the multiqc report def mqc_report = null try { if (workflow.success) { - mqc_report = multiqc_report.getVal() + mqc_report = ch_multiqc_report.getVal() if (mqc_report.getClass() == ArrayList) { log.warn "[{{ cookiecutter.name }}] Found multiple reports from 
process 'multiqc', will use only one" mqc_report = mqc_report[0] @@ -326,7 +329,7 @@ workflow.onComplete { def email_html = html_template.toString() // Render the sendmail template - def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, baseDir: "$baseDir", mqcFile: mqc_report, mqcMaxSize: params.maxMultiqcEmailFileSize.toBytes() ] + def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, baseDir: "$baseDir", mqcFile: mqc_report, mqcMaxSize: params.max_multiqc_email_size.toBytes() ] def sf = new File("$baseDir/assets/sendmail_template.txt") def sendmail_template = engine.createTemplate(sf).make(smail_fields) def sendmail_html = sendmail_template.toString() @@ -334,59 +337,59 @@ workflow.onComplete { // Send the HTML e-mail if (email_address) { try { - if ( params.plaintext_email ){ throw GroovyException('Send plaintext e-mail, not HTML') } - // Try to send HTML e-mail using sendmail - [ 'sendmail', '-t' ].execute() << sendmail_html - log.info "[{{ cookiecutter.name }}] Sent summary e-mail to $email_address (sendmail)" + if (params.plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') } + // Try to send HTML e-mail using sendmail + [ 'sendmail', '-t' ].execute() << sendmail_html + log.info "[{{ cookiecutter.name }}] Sent summary e-mail to $email_address (sendmail)" } catch (all) { - // Catch failures and try with plaintext - [ 'mail', '-s', subject, email_address ].execute() << email_txt - log.info "[{{ cookiecutter.name }}] Sent summary e-mail to $email_address (mail)" + // Catch failures and try with plaintext + [ 'mail', '-s', subject, email_address ].execute() << email_txt + log.info "[{{ cookiecutter.name }}] Sent summary e-mail to $email_address (mail)" } } // Write summary e-mail HTML to a file - def output_d = new File( "${params.outdir}/pipeline_info/" ) + def output_d = new File("${params.outdir}/pipeline_info/") if 
(!output_d.exists()) { - output_d.mkdirs() + output_d.mkdirs() } - def output_hf = new File( output_d, "pipeline_report.html" ) + def output_hf = new File(output_d, "pipeline_report.html") output_hf.withWriter { w -> w << email_html } - def output_tf = new File( output_d, "pipeline_report.txt" ) + def output_tf = new File(output_d, "pipeline_report.txt") output_tf.withWriter { w -> w << email_txt } - c_reset = params.monochrome_logs ? '' : "\033[0m"; - c_purple = params.monochrome_logs ? '' : "\033[0;35m"; c_green = params.monochrome_logs ? '' : "\033[0;32m"; + c_purple = params.monochrome_logs ? '' : "\033[0;35m"; c_red = params.monochrome_logs ? '' : "\033[0;31m"; + c_reset = params.monochrome_logs ? '' : "\033[0m"; if (workflow.stats.ignoredCount > 0 && workflow.success) { - log.info "${c_purple}Warning, pipeline completed, but with errored process(es) ${c_reset}" - log.info "${c_red}Number of ignored errored process(es) : ${workflow.stats.ignoredCount} ${c_reset}" - log.info "${c_green}Number of successfully ran process(es) : ${workflow.stats.succeedCount} ${c_reset}" + log.info "-${c_purple}Warning, pipeline completed, but with errored process(es) ${c_reset}-" + log.info "-${c_red}Number of ignored errored process(es) : ${workflow.stats.ignoredCount} ${c_reset}-" + log.info "-${c_green}Number of successfully ran process(es) : ${workflow.stats.succeedCount} ${c_reset}-" } if (workflow.success) { - log.info "${c_purple}[{{ cookiecutter.name }}]${c_green} Pipeline completed successfully${c_reset}" + log.info "-${c_purple}[{{ cookiecutter.name }}]${c_green} Pipeline completed successfully${c_reset}-" } else { checkHostname() - log.info "${c_purple}[{{ cookiecutter.name }}]${c_red} Pipeline completed with errors${c_reset}" + log.info "-${c_purple}[{{ cookiecutter.name }}]${c_red} Pipeline completed with errors${c_reset}-" } } -def nfcoreHeader(){ +def nfcoreHeader() { // Log colors ANSI codes - c_reset = params.monochrome_logs ? 
'' : "\033[0m"; - c_dim = params.monochrome_logs ? '' : "\033[2m"; c_black = params.monochrome_logs ? '' : "\033[0;30m"; - c_green = params.monochrome_logs ? '' : "\033[0;32m"; - c_yellow = params.monochrome_logs ? '' : "\033[0;33m"; c_blue = params.monochrome_logs ? '' : "\033[0;34m"; - c_purple = params.monochrome_logs ? '' : "\033[0;35m"; c_cyan = params.monochrome_logs ? '' : "\033[0;36m"; + c_dim = params.monochrome_logs ? '' : "\033[2m"; + c_green = params.monochrome_logs ? '' : "\033[0;32m"; + c_purple = params.monochrome_logs ? '' : "\033[0;35m"; + c_reset = params.monochrome_logs ? '' : "\033[0m"; c_white = params.monochrome_logs ? '' : "\033[0;37m"; + c_yellow = params.monochrome_logs ? '' : "\033[0;33m"; return """ -${c_dim}--------------------------------------------------${c_reset}- ${c_green},--.${c_black}/${c_green},-.${c_reset} @@ -399,7 +402,7 @@ def nfcoreHeader(){ """.stripIndent() } -def checkHostname(){ +def checkHostname() { def c_reset = params.monochrome_logs ? '' : "\033[0m" def c_white = params.monochrome_logs ? '' : "\033[0;37m" def c_red = params.monochrome_logs ? 
'' : "\033[1;91m" diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config index 6ce293dab4..7e2d548ded 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config @@ -12,7 +12,7 @@ params { // TODO nf-core: Specify your pipeline's command line flags genome = false reads = "data/*{1,2}.fastq.gz" - singleEnd = false + single_end = false outdir = './results' // Boilerplate options @@ -20,21 +20,25 @@ params { multiqc_config = "$baseDir/assets/multiqc_config.yaml" email = false email_on_fail = false - maxMultiqcEmailFileSize = 25.MB + max_multiqc_email_size = 25.MB plaintext_email = false monochrome_logs = false help = false - igenomes_base = "./iGenomes" + igenomes_base = 's3://ngi-igenomes/igenomes/' tracedir = "${params.outdir}/pipeline_info" - awsqueue = false - awsregion = 'eu-west-1' - igenomesIgnore = false + igenomes_ignore = false custom_config_version = 'master' custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" hostnames = false config_profile_description = false config_profile_contact = false config_profile_url = false + + // Defaults only, expecting to be overwritten + max_memory = 128.GB + max_cpus = 16 + max_time = 240.h + } // Container slug. Stable releases should specify release tag! @@ -52,24 +56,33 @@ try { } profiles { - awsbatch { includeConfig 'conf/awsbatch.config' } conda { process.conda = "$baseDir/environment.yml" } debug { process.beforeScript = 'echo $HOSTNAME' } - docker { docker.enabled = true } - singularity { singularity.enabled = true } + docker { + docker.enabled = true + // Avoid this error: + // WARNING: Your kernel does not support swap limit capabilities or the cgroup is not mounted. Memory limited without swap. 
+ // Testing this in nf-core after discussion here https://github.com/nf-core/tools/pull/351 + // once this is established and works well, nextflow might implement this behavior as new default. + docker.runOptions = '-u \$(id -u):\$(id -g)' + } + singularity { + singularity.enabled = true + singularity.autoMounts = true + } test { includeConfig 'conf/test.config' } } -// Avoid this error: -// WARNING: Your kernel does not support swap limit capabilities or the cgroup is not mounted. Memory limited without swap. -// Testing this in nf-core after discussion here https://github.com/nf-core/tools/pull/351, once this is established and works well, nextflow might implement this behavior as new default. -docker.runOptions = '-u \$(id -u):\$(id -g)' - // Load igenomes.config if required -if (!params.igenomesIgnore) { +if (!params.igenomes_ignore) { includeConfig 'conf/igenomes.config' } +// Export this variable to prevent local Python libraries from conflicting with those in the container +env { + PYTHONNOUSERSITE = 1 +} + // Capture exit codes from upstream processes when piping process.shell = ['/bin/bash', '-euo', 'pipefail'] @@ -96,7 +109,7 @@ manifest { homePage = 'https://github.com/{{ cookiecutter.name }}' description = '{{ cookiecutter.description }}' mainScript = 'main.nf' - nextflowVersion = '>=0.32.0' + nextflowVersion = '>=19.10.0' version = '{{ cookiecutter.version }}' } diff --git a/nf_core/sync.py b/nf_core/sync.py new file mode 100644 index 0000000000..fdef6d15cf --- /dev/null +++ b/nf_core/sync.py @@ -0,0 +1,418 @@ +#!/usr/bin/env python +"""Synchronise a pipeline TEMPLATE branch with the template. 
#!/usr/bin/env python
"""Synchronise a pipeline TEMPLATE branch with the template.
"""

import git
import json
import logging
import nf_core
import os
import re
import requests
import shutil
import sys
import tempfile


class SyncException(Exception):
    """Exception raised when there was an error with TEMPLATE branch synchronisation
    """
    pass


class PullRequestException(Exception):
    """Exception raised when there was an error creating a Pull-Request on GitHub.com
    """
    pass


class PipelineSync(object):
    """Object to hold syncing information and results.

    Args:
        pipeline_dir (str): The path to the Nextflow pipeline root directory
        make_template_branch (bool): Set this to `True` to create a `TEMPLATE` branch if it is not found
        from_branch (str): The branch to use to fetch config vars. If not set, will use current active branch
        make_pr (bool): Set this to `True` to create a GitHub pull-request with the changes
        gh_username (str): GitHub username
        gh_repo (str): GitHub repository name
        gh_auth_token (str): Authorisation token used to make PR with GitHub API

    Attributes:
        pipeline_dir (str): Path to target pipeline directory
        from_branch (str): Repo branch to use when collecting workflow variables. Default: active branch.
        make_template_branch (bool): Whether to try to create TEMPLATE branch if not found
        orphan_branch (bool): Whether an orphan branch was made when creating TEMPLATE
        made_changes (bool): Whether making the new template pipeline introduced any changes
        make_pr (bool): Whether to try to automatically make a PR on GitHub.com
        required_config_vars (list): List of nextflow variables required to make template pipeline
        gh_username (str): GitHub username
        gh_repo (str): GitHub repository name
        gh_auth_token (str): Authorisation token used to make PR with GitHub API
    """

    def __init__(self, pipeline_dir, make_template_branch=False, from_branch=None, make_pr=False,
                 gh_username=None, gh_repo=None, gh_auth_token=None):
        """ Initialise syncing object """

        self.pipeline_dir = os.path.abspath(pipeline_dir)
        self.from_branch = from_branch
        self.make_template_branch = make_template_branch
        self.orphan_branch = False
        self.made_changes = False
        self.make_pr = make_pr
        # Nextflow config values that must exist before we can rebuild the template
        self.required_config_vars = [
            'manifest.name',
            'manifest.description',
            'manifest.version',
            'manifest.author'
        ]

        self.gh_username = gh_username
        self.gh_repo = gh_repo
        self.gh_auth_token = gh_auth_token
        # Fall back to the NF_CORE_BOT environment variable for the API token
        if self.gh_auth_token is None:
            self.gh_auth_token = os.environ.get('NF_CORE_BOT')

    def sync(self):
        """ Find workflow attributes, create a new template pipeline on TEMPLATE
        """

        config_log_msg = "Pipeline directory: {}".format(self.pipeline_dir)
        if self.from_branch:
            config_log_msg += "\n Using branch `{}` to fetch workflow variables".format(self.from_branch)
        if self.make_template_branch:
            config_log_msg += "\n Will attempt to create `TEMPLATE` branch if not found"
        if self.make_pr:
            config_log_msg += "\n Will attempt to automatically create a pull request on GitHub.com"
        logging.info(config_log_msg)

        self.inspect_sync_dir()

        self.get_wf_config()

        self.checkout_template_branch()

        self.make_template_pipeline()

        self.commit_template_changes()

        # Push and make a pull request if we've been asked to
        if self.make_pr:
            try:
                self.push_template_branch()
                self.make_pull_request()
            except PullRequestException as e:
                # Keep going - we want to clean up the target directory still
                logging.error(e)

        self.reset_target_dir()

        if not self.make_pr:
            self.git_merge_help()

    def inspect_sync_dir(self):
        """Takes a look at the target directory for syncing. Checks that it's a git repo
        and makes sure that there are no uncommitted changes.

        Raises:
            SyncException: If the directory is not a git repo or has uncommitted changes.
        """
        # Check that the pipeline_dir is a git repo
        try:
            self.repo = git.Repo(self.pipeline_dir)
        except git.exc.InvalidGitRepositoryError as e:
            raise SyncException("'{}' does not appear to be a git repository".format(self.pipeline_dir))

        # get current branch so we can switch back later
        self.original_branch = self.repo.active_branch.name
        logging.debug("Original pipeline repository branch is '{}'".format(self.original_branch))

        # Check to see if there are uncommitted changes on current branch
        if self.repo.is_dirty(untracked_files=True):
            raise SyncException("Uncommitted changes found in pipeline directory!\nPlease commit these before running nf-core sync")

    def get_wf_config(self):
        """Check out the target branch if requested and fetch the nextflow config.
        Check that we have the required config variables.

        Raises:
            SyncException: If the branch is missing or a required config variable is not set.
        """
        # Try to check out target branch (eg. `origin/dev`)
        try:
            if self.from_branch and self.repo.active_branch.name != self.from_branch:
                logging.info("Checking out workflow branch '{}'".format(self.from_branch))
                self.repo.git.checkout(self.from_branch)
        except git.exc.GitCommandError:
            raise SyncException("Branch `{}` not found!".format(self.from_branch))

        # If not specified, get the name of the active branch
        if not self.from_branch:
            try:
                self.from_branch = self.repo.active_branch.name
            except git.exc.GitCommandError as e:
                # BUG FIX: the format placeholder was missing, so the exception
                # text was silently dropped from the log message
                logging.error("Could not find active repo branch: {}".format(e))

        # Figure out the GitHub username and repo name from the 'origin' remote if we can
        try:
            gh_ssh_username_match = re.search(r'git@github\.com:([^\/]+)/([^\/]+)\.git$', self.repo.remotes.origin.url)
            if gh_ssh_username_match:
                self.gh_username = gh_ssh_username_match.group(1)
                self.gh_repo = gh_ssh_username_match.group(2)
            gh_url_username_match = re.search(r'https://github\.com/([^\/]+)/([^\/]+)\.git$', self.repo.remotes.origin.url)
            if gh_url_username_match:
                self.gh_username = gh_url_username_match.group(1)
                self.gh_repo = gh_url_username_match.group(2)
        except AttributeError as e:
            logging.debug("Could not find repository URL for remote called 'origin'")

        # Fetch workflow variables
        logging.info("Fetching workflow config variables")
        self.wf_config = nf_core.utils.fetch_wf_config(self.pipeline_dir)

        # Check that we have the required variables
        for rvar in self.required_config_vars:
            if rvar not in self.wf_config:
                raise SyncException("Workflow config variable `{}` not found!".format(rvar))

    def checkout_template_branch(self):
        """Try to check out the TEMPLATE branch. If it fails, try origin/TEMPLATE.
        If it still fails and --make-template-branch was given, create it as an orphan branch.

        Raises:
            SyncException: If no TEMPLATE branch can be checked out or created.
        """
        # Try to check out the `TEMPLATE` branch
        try:
            self.repo.git.checkout("origin/TEMPLATE", b="TEMPLATE")
        except git.exc.GitCommandError:

            # Try to check out an existing local branch called TEMPLATE
            try:
                self.repo.git.checkout("TEMPLATE")
            except git.exc.GitCommandError:

                # Failed, if we're not making a new branch just die
                if not self.make_template_branch:
                    raise SyncException(
                        "Could not check out branch 'origin/TEMPLATE'" \
                        "\nUse flag --make-template-branch to attempt to create this branch"
                    )

                # Branch and force is set, fire function to create `TEMPLATE` branch
                else:
                    logging.debug("Could not check out origin/TEMPLATE!")
                    logging.info("Creating orphan TEMPLATE branch")
                    try:
                        self.repo.git.checkout('--orphan', 'TEMPLATE')
                        self.orphan_branch = True
                        if self.make_pr:
                            # An orphan branch shares no history with the base
                            # branch, so a PR cannot be created automatically
                            self.make_pr = False
                            logging.warning("Will not attempt to make a PR - orphan branch must be merged manually first")
                    except git.exc.GitCommandError as e:
                        raise SyncException("Could not create 'TEMPLATE' branch:\n{}".format(e))

    def make_template_pipeline(self):
        """Delete all files and make a fresh template using the workflow variables

        Raises:
            SyncException: If an existing file or directory cannot be removed.
        """

        # Delete everything
        logging.info("Deleting all files in TEMPLATE branch")
        for the_file in os.listdir(self.pipeline_dir):
            # Never delete the git metadata itself
            if the_file == '.git':
                continue
            file_path = os.path.join(self.pipeline_dir, the_file)
            logging.debug("Deleting {}".format(file_path))
            try:
                if os.path.isfile(file_path):
                    os.unlink(file_path)
                elif os.path.isdir(file_path):
                    shutil.rmtree(file_path)
            except Exception as e:
                raise SyncException(e)

        # Make a new pipeline using nf_core.create
        logging.info("Making a new template pipeline using pipeline variables")

        # Suppress log messages from the pipeline creation method
        orig_loglevel = logging.getLogger().getEffectiveLevel()
        if orig_loglevel == logging.INFO:
            logging.getLogger().setLevel(logging.ERROR)

        nf_core.create.PipelineCreate(
            name = self.wf_config['manifest.name'].strip('\"').strip("\'"),
            description = self.wf_config['manifest.description'].strip('\"').strip("\'"),
            new_version = self.wf_config['manifest.version'].strip('\"').strip("\'"),
            no_git = True,
            force = True,
            outdir = self.pipeline_dir,
            author = self.wf_config['manifest.author'].strip('\"').strip("\'"),
        ).init_pipeline()

        # Reset logging
        logging.getLogger().setLevel(orig_loglevel)

    def commit_template_changes(self):
        """If we have any changes with the new template files, make a git commit

        Raises:
            SyncException: If the git commit fails.
        """
        # Commit changes if we have any
        if not self.repo.is_dirty(untracked_files=True):
            logging.info("Template contains no changes - no new commit created")
        else:
            try:
                self.repo.git.add(A=True)
                self.repo.index.commit("Template update for nf-core/tools version {}".format(nf_core.__version__))
                self.made_changes = True
                logging.info("Committed changes to TEMPLATE branch")
            except Exception as e:
                raise SyncException("Could not commit changes to TEMPLATE:\n{}".format(e))

    def push_template_branch(self):
        """If we made any changes, push the TEMPLATE branch to the default remote.
        If this is a newly created branch, set the upstream on the first push.

        Raises:
            PullRequestException: If the push fails.
        """
        if self.made_changes:
            logging.info("Pushing TEMPLATE branch to remote")
            try:
                self.repo.git.push()
            except git.exc.GitCommandError as e:
                if self.make_template_branch:
                    # Newly created branches have no upstream yet
                    try:
                        self.repo.git.push('--set-upstream', 'origin', 'TEMPLATE')
                    except git.exc.GitCommandError as e:
                        raise PullRequestException("Could not push TEMPLATE branch:\n  {}".format(e))
                else:
                    raise PullRequestException("Could not push TEMPLATE branch:\n  {}".format(e))
        else:
            logging.debug("No changes to TEMPLATE - skipping push to remote")

    def make_pull_request(self):
        """Create a pull request to a base branch (default: dev),
        from a head branch (default: TEMPLATE)

        Raises:
            PullRequestException: If credentials are missing or the GitHub API call fails.
        """
        if not self.made_changes:
            logging.debug("No changes to TEMPLATE - skipping PR creation")
            # BUG FIX: previously execution fell through here and tried
            # to open a pull request even though nothing was pushed
            return

        # Check that we know the github username and repo name
        try:
            assert self.gh_username is not None
            assert self.gh_repo is not None
        except AssertionError:
            raise PullRequestException("Could not find GitHub username and repo from git remote 'origin'")

        # If we've been asked to make a PR, check that we have the credentials
        try:
            assert self.gh_auth_token is not None
        except AssertionError:
            logging.info("Make a PR at the following URL:\n https://github.com/{}/{}/compare/{}...TEMPLATE".format(self.gh_username, self.gh_repo, self.original_branch))
            raise PullRequestException("No GitHub authentication token set - cannot make PR")

        logging.info("Submitting a pull request via the GitHub API")
        pr_content = {
            'title': "Important! Template update for nf-core/tools v{}".format(nf_core.__version__),
            'body': "Some important changes have been made in the nf-core/tools pipeline template. " \
                    "Please make sure to merge this pull-request as soon as possible. " \
                    "Once complete, make a new minor release of your pipeline.\n\n" \
                    "For more information, please see the [nf-core/tools v{tag} release page](https://github.com/nf-core/tools/releases/tag/{tag}).".format(tag=nf_core.__version__),
            'head': "TEMPLATE",
            'base': self.from_branch
        }
        r = requests.post(
            url = "https://api.github.com/repos/{}/{}/pulls".format(self.gh_username, self.gh_repo),
            data = json.dumps(pr_content),
            auth = requests.auth.HTTPBasicAuth(self.gh_username, self.gh_auth_token)
        )
        # BUG FIX: GitHub returns "201 Created" for a successful PR creation,
        # so checking for 200 raised an exception on every successful request
        if r.status_code not in (200, 201):
            raise PullRequestException("GitHub API returned code {}: {}".format(r.status_code, r.text))
        # BUG FIX: r.json is a method - call it so we log the payload, not the bound method
        logging.debug(r.json())

    def reset_target_dir(self):
        """Reset the target pipeline directory. Check out the original branch.

        Raises:
            SyncException: If the original branch cannot be checked out again.
        """

        # Reset: Check out original branch again
        logging.debug("Checking out original branch: '{}'".format(self.original_branch))
        try:
            self.repo.git.checkout(self.original_branch)
        except git.exc.GitCommandError as e:
            raise SyncException("Could not reset to original branch `{}`:\n{}".format(self.from_branch, e))

    def git_merge_help(self):
        """Print a command line help message with instructions on how to merge changes
        """
        if self.made_changes:
            git_merge_cmd = 'git merge TEMPLATE'
            manual_sync_link = ''
            if self.orphan_branch:
                # Orphan branches share no history, so git needs this extra flag
                git_merge_cmd += ' --allow-unrelated-histories'
                manual_sync_link = "\n\nFor more information, please see:\nhttps://nf-co.re/developers/sync#merge-template-into-main-branches"
            logging.info(
                "Now try to merge the updates in to your pipeline:\n  cd {}\n  {}{}".format(
                    self.pipeline_dir,
                    git_merge_cmd,
                    manual_sync_link
                )
            )


def sync_all_pipelines(gh_username='nf-core-bot', gh_auth_token=None):
    """Sync all nf-core pipelines

    Clones each remote nf-core workflow into a temporary directory, runs the
    template sync with an automatic pull-request, then removes the clone.

    Args:
        gh_username (str): GitHub username used for cloning and the PR
        gh_auth_token (str): GitHub API token. If None, cloning of private
            repositories and PR creation will not be able to authenticate.
    """

    # Get remote workflows
    wfs = nf_core.list.Workflows()
    wfs.get_remote_workflows()

    successful_syncs = []
    failed_syncs = []

    # Set up a working directory
    tmpdir = tempfile.mkdtemp()

    # Let's do some updating!
    for wf in wfs.remote_workflows:

        logging.info("Syncing {}".format(wf.full_name))

        # Make a local working directory
        wf_local_path = os.path.join(tmpdir, wf.name)
        os.mkdir(wf_local_path)
        logging.debug("Sync working directory: {}".format(wf_local_path))

        # Clone the repo, embedding the token for authenticated pushes
        wf_remote_url = "https://{}@github.com/nf-core/{}".format(gh_auth_token, wf.name)
        repo = git.Repo.clone_from(wf_remote_url, wf_local_path)
        assert repo

        # Suppress log messages from the pipeline creation method
        orig_loglevel = logging.getLogger().getEffectiveLevel()
        if orig_loglevel == logging.INFO:
            logging.getLogger().setLevel(logging.ERROR)

        # Sync the repo
        logging.debug("Running template sync")
        # BUG FIX: pass the API token through - previously PipelineSync could
        # only authenticate if the NF_CORE_BOT environment variable was set
        sync_obj = nf_core.sync.PipelineSync(
            pipeline_dir=wf_local_path,
            from_branch='dev',
            make_pr=True,
            gh_username=gh_username,
            gh_auth_token=gh_auth_token
        )
        try:
            sync_obj.sync()
        except (nf_core.sync.SyncException, nf_core.sync.PullRequestException) as e:
            logging.error("Sync failed for {}:\n{}".format(wf.full_name, e))
            failed_syncs.append(wf.name)
        else:
            logging.debug("Sync successful for {}".format(wf.full_name))
            successful_syncs.append(wf.name)

        # Reset logging
        logging.getLogger().setLevel(orig_loglevel)

        # Clean up
        logging.debug("Removing work directory: {}".format(wf_local_path))
        shutil.rmtree(wf_local_path)

    logging.info("Successfully synchronised {} pipelines".format(len(successful_syncs)))

    if len(failed_syncs) > 0:
        failed_list = '\n - '.join(failed_syncs)
        logging.error("Errors whilst synchronising {} pipelines:\n - {}".format(len(failed_syncs), failed_list))
@nf_core_cli.command('sync', help_priority=8)
@click.argument(
    'pipeline_dir',
    type = click.Path(exists=True),
    nargs = -1,
    metavar = "<pipeline directory>"
)
@click.option(
    '-t', '--make-template-branch',
    is_flag = True,
    default = False,
    help = "Create a TEMPLATE branch if none is found."
)
@click.option(
    '-b', '--from-branch',
    type = str,
    help = 'The git branch to use to fetch workflow vars.'
)
@click.option(
    '-p', '--pull-request',
    is_flag = True,
    default = False,
    help = "Make a GitHub pull-request with the changes."
)
@click.option(
    '-u', '--username',
    type = str,
    help = 'GitHub username for the PR.'
)
@click.option(
    '-r', '--repository',
    type = str,
    help = 'GitHub repository name for the PR.'
)
@click.option(
    '-a', '--auth-token',
    type = str,
    help = 'GitHub API personal access token.'
)
@click.option(
    '--all',
    is_flag = True,
    default = False,
    help = "Sync template for all nf-core pipelines."
)
def sync(pipeline_dir, make_template_branch, from_branch, pull_request, username, repository, auth_token, all):
    """ Sync a pipeline TEMPLATE branch with the nf-core template"""

    # Pull and sync all nf-core pipelines
    if all:
        # BUG FIX: forward the CLI credentials - previously --username and
        # --auth-token were accepted but silently ignored for --all
        nf_core.sync.sync_all_pipelines(
            gh_username = username if username is not None else 'nf-core-bot',
            gh_auth_token = auth_token
        )
    else:
        # Manually check for the required parameter
        if not pipeline_dir or len(pipeline_dir) != 1:
            logging.error("Either use --all or specify one <pipeline directory>")
            sys.exit(1)
        pipeline_dir = pipeline_dir[0]

        # Sync the given pipeline dir
        # BUG FIX: pass the --username / --repository / --auth-token options
        # through - previously they were accepted but never used
        sync_obj = nf_core.sync.PipelineSync(
            pipeline_dir,
            make_template_branch = make_template_branch,
            from_branch = from_branch,
            make_pr = pull_request,
            gh_username = username,
            gh_repo = repository,
            gh_auth_token = auth_token
        )
        try:
            sync_obj.sync()
        except (nf_core.sync.SyncException, nf_core.sync.PullRequestException) as e:
            logging.error(e)
            sys.exit(1)
+name: nf-core CI +# This workflow is triggered on pushes and PRs to the repository. +# It runs the pipeline with the minimal test dataset to check that it completes without any syntax errors +on: + +jobs: + test: + runs-on: ubuntu-18.04 + steps: + - uses: actions/checkout@v1 + - name: Install Nextflow + run: | + - name: Pull container + run: | + - name: Run test + run: | diff --git a/tests/lint_examples/failing_example/.github/workflows/linting.yml b/tests/lint_examples/failing_example/.github/workflows/linting.yml new file mode 100644 index 0000000000..0c774d0fee --- /dev/null +++ b/tests/lint_examples/failing_example/.github/workflows/linting.yml @@ -0,0 +1,40 @@ +name: nf-core linting +# This workflow is triggered on pushes and PRs to the repository. +# It runs the `nf-core lint` and markdown lint tests to ensure that the code meets the nf-core guidelines +on: + +jobs: + Markdown: + runs-on: ubuntu-18.04 + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-node@v1 + with: + node-version: '10' + - name: Install markdownlint + run: | + npm install -g markdownlint-cli + - name: Run Markdownlint + run: | + nf-core: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - name: Install Nextflow + run: | + wget -qO- get.nextflow.io | bash + sudo mv nextflow /usr/local/bin/ + - uses: actions/setup-python@v1 + with: + python-version: '3.6' + architecture: 'x64' + - name: Install pip + run: | + sudo apt install python3-pip + pip install --upgrade pip + - name: Install nf-core tools + run: | + pip install nf-core + - name: Run nf-core lint + run: | + \ No newline at end of file diff --git a/tests/lint_examples/failing_example/Singularity b/tests/lint_examples/failing_example/Singularity new file mode 100644 index 0000000000..02e88c8045 --- /dev/null +++ b/tests/lint_examples/failing_example/Singularity @@ -0,0 +1 @@ +Nothing to be found here \ No newline at end of file diff --git 
a/tests/lint_examples/minimalworkingexample/.github/workflows/branch.yml b/tests/lint_examples/minimalworkingexample/.github/workflows/branch.yml new file mode 100644 index 0000000000..f1e5aef523 --- /dev/null +++ b/tests/lint_examples/minimalworkingexample/.github/workflows/branch.yml @@ -0,0 +1,16 @@ +name: nf-core branch protection +# This workflow is triggered on PRs to master branch on the repository +# It fails when someone tries to make a PR against the nf-core `master` branch instead of `dev` +on: + pull_request: + branches: + - master + +jobs: + test: + runs-on: ubuntu-18.04 + steps: + # PRs are only ok if coming from an nf-core `dev` branch or a fork `patch` branch + - name: Check PRs + run: | + { [[ $(git remote get-url origin) == *nf-core/tools ]] && [[ ${GITHUB_HEAD_REF} = "dev" ]]; } || [[ ${GITHUB_HEAD_REF} == "patch" ]] diff --git a/tests/lint_examples/minimalworkingexample/.github/workflows/ci.yml b/tests/lint_examples/minimalworkingexample/.github/workflows/ci.yml new file mode 100644 index 0000000000..a1b5c3cf27 --- /dev/null +++ b/tests/lint_examples/minimalworkingexample/.github/workflows/ci.yml @@ -0,0 +1,25 @@ +name: nf-core CI +# This workflow is triggered on pushes and PRs to the repository. 
+# It runs the pipeline with the minimal test dataset to check that it completes without any syntax errors +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-18.04 + strategy: + matrix: + # Nextflow versions: check pipeline minimum and current latest + nxf_ver: ['19.10.0', ''] + steps: + - uses: actions/checkout@v1 + - name: Install Nextflow + run: | + {% raw %}export NXF_VER=${{ matrix.nxf_ver }}{% endraw %} + wget -qO- get.nextflow.io | bash + sudo mv nextflow /usr/local/bin/ + - name: Pull container + run: | + docker pull nfcore/tools:dev && docker tag nfcore/tools:dev nfcore/tools:0.4 + - name: Run test + run: | + nextflow run ${GITHUB_WORKSPACE} -profile test,docker diff --git a/tests/lint_examples/minimalworkingexample/.github/workflows/linting.yml b/tests/lint_examples/minimalworkingexample/.github/workflows/linting.yml new file mode 100644 index 0000000000..7354dc7437 --- /dev/null +++ b/tests/lint_examples/minimalworkingexample/.github/workflows/linting.yml @@ -0,0 +1,41 @@ +name: nf-core linting +# This workflow is triggered on pushes and PRs to the repository. 
+# It runs the `nf-core lint` and markdown lint tests to ensure that the code meets the nf-core guidelines +on: [push, pull_request] + +jobs: + Markdown: + runs-on: ubuntu-18.04 + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-node@v1 + with: + node-version: '10' + - name: Install markdownlint + run: | + npm install -g markdownlint-cli + - name: Run Markdownlint + run: | + markdownlint ${GITHUB_WORKSPACE} -c ${GITHUB_WORKSPACE}/.github/markdownlint.yml + nf-core: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - name: Install Nextflow + run: | + wget -qO- get.nextflow.io | bash + sudo mv nextflow /usr/local/bin/ + - uses: actions/setup-python@v1 + with: + python-version: '3.6' + architecture: 'x64' + - name: Install pip + run: | + sudo apt install python3-pip + pip install --upgrade pip + - name: Install nf-core tools + run: | + pip install nf-core + - name: Run nf-core lint + run: | + nf-core lint ${GITHUB_WORKSPACE} diff --git a/tests/lint_examples/minimal_working_example/.travis.yml b/tests/lint_examples/minimalworkingexample/.travis.yml similarity index 85% rename from tests/lint_examples/minimal_working_example/.travis.yml rename to tests/lint_examples/minimalworkingexample/.travis.yml index 85b43c60d8..8c140c7855 100644 --- a/tests/lint_examples/minimal_working_example/.travis.yml +++ b/tests/lint_examples/minimalworkingexample/.travis.yml @@ -11,7 +11,7 @@ matrix: before_install: # PRs to master are only ok if coming from dev branch - - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && ([ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ] || [ $TRAVIS_PULL_REQUEST_BRANCH = "patch" ]))' + - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ]) || [ $TRAVIS_PULL_REQUEST_BRANCH = "patch" ]' # Pull the docker image first so the test doesn't wait for this - 
docker pull nfcore/tools:dev # Fake the tag locally so that the pipeline runs properly @@ -32,9 +32,9 @@ install: env: matrix: - - NXF_VER=0.32.0 + - NXF_VER=19.10.0 - NXF_VER='' script: - "nf-core lint ${TRAVIS_BUILD_DIR}" - - "cd ${TRAVIS_BUILD_DIR}/tests && ./run_test.sh" + - "cd ${TRAVIS_BUILD_DIR}/tests && ./run_test.sh" \ No newline at end of file diff --git a/tests/lint_examples/minimal_working_example/CHANGELOG.md b/tests/lint_examples/minimalworkingexample/CHANGELOG.md similarity index 100% rename from tests/lint_examples/minimal_working_example/CHANGELOG.md rename to tests/lint_examples/minimalworkingexample/CHANGELOG.md diff --git a/tests/lint_examples/minimal_working_example/Dockerfile b/tests/lint_examples/minimalworkingexample/Dockerfile similarity index 77% rename from tests/lint_examples/minimal_working_example/Dockerfile rename to tests/lint_examples/minimalworkingexample/Dockerfile index bcb5d79cd2..0b3f1d2876 100644 --- a/tests/lint_examples/minimal_working_example/Dockerfile +++ b/tests/lint_examples/minimalworkingexample/Dockerfile @@ -1,8 +1,9 @@ -FROM nfcore/base:1.7 +FROM nfcore/base:dev MAINTAINER Phil Ewels LABEL authors="phil.ewels@scilifelab.se" \ description="Docker image containing all requirements for the nf-core tools pipeline" COPY environment.yml / RUN conda env create -f /environment.yml && conda clean -a +RUN conda env export --name nf-core-tools-0.4 > nf-core-tools-0.4.yml ENV PATH /opt/conda/envs/nf-core-tools-0.4/bin:$PATH diff --git a/tests/lint_examples/minimal_working_example/LICENSE b/tests/lint_examples/minimalworkingexample/LICENSE similarity index 100% rename from tests/lint_examples/minimal_working_example/LICENSE rename to tests/lint_examples/minimalworkingexample/LICENSE diff --git a/tests/lint_examples/minimal_working_example/README.md b/tests/lint_examples/minimalworkingexample/README.md similarity index 57% rename from tests/lint_examples/minimal_working_example/README.md rename to 
tests/lint_examples/minimalworkingexample/README.md index 76ff3403d6..838a6faefe 100644 --- a/tests/lint_examples/minimal_working_example/README.md +++ b/tests/lint_examples/minimalworkingexample/README.md @@ -1,5 +1,5 @@ # The pipeline readme file -[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.32.0-brightgreen.svg)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A519.10.0-brightgreen.svg)](https://www.nextflow.io/) [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](http://bioconda.github.io/) diff --git a/tests/lint_examples/minimal_working_example/conf/base.config b/tests/lint_examples/minimalworkingexample/conf/base.config similarity index 100% rename from tests/lint_examples/minimal_working_example/conf/base.config rename to tests/lint_examples/minimalworkingexample/conf/base.config diff --git a/tests/lint_examples/minimal_working_example/docs/README.md b/tests/lint_examples/minimalworkingexample/docs/README.md similarity index 100% rename from tests/lint_examples/minimal_working_example/docs/README.md rename to tests/lint_examples/minimalworkingexample/docs/README.md diff --git a/tests/lint_examples/minimal_working_example/docs/output.md b/tests/lint_examples/minimalworkingexample/docs/output.md similarity index 100% rename from tests/lint_examples/minimal_working_example/docs/output.md rename to tests/lint_examples/minimalworkingexample/docs/output.md diff --git a/tests/lint_examples/minimal_working_example/docs/usage.md b/tests/lint_examples/minimalworkingexample/docs/usage.md similarity index 100% rename from tests/lint_examples/minimal_working_example/docs/usage.md rename to tests/lint_examples/minimalworkingexample/docs/usage.md diff --git a/tests/lint_examples/minimal_working_example/environment.yml b/tests/lint_examples/minimalworkingexample/environment.yml similarity index 100% rename from tests/lint_examples/minimal_working_example/environment.yml 
rename to tests/lint_examples/minimalworkingexample/environment.yml diff --git a/tests/lint_examples/minimal_working_example/main.nf b/tests/lint_examples/minimalworkingexample/main.nf similarity index 100% rename from tests/lint_examples/minimal_working_example/main.nf rename to tests/lint_examples/minimalworkingexample/main.nf diff --git a/tests/lint_examples/minimal_working_example/nextflow.config b/tests/lint_examples/minimalworkingexample/nextflow.config similarity index 92% rename from tests/lint_examples/minimal_working_example/nextflow.config rename to tests/lint_examples/minimalworkingexample/nextflow.config index b0f3a01f56..303d675d2d 100644 --- a/tests/lint_examples/minimal_working_example/nextflow.config +++ b/tests/lint_examples/minimalworkingexample/nextflow.config @@ -2,7 +2,7 @@ params { outdir = './results' reads = "data/*.fastq" - singleEnd = false + single_end = false custom_config_version = 'master' custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" } @@ -37,6 +37,6 @@ manifest { homePage = 'https://github.com/nf-core/tools' description = 'Minimal working example pipeline' mainScript = 'main.nf' - nextflowVersion = '>=0.32.0' + nextflowVersion = '>=19.10.0' version = '0.4' } diff --git a/tests/lint_examples/minimal_working_example/tests/run_test.sh b/tests/lint_examples/minimalworkingexample/tests/run_test.sh similarity index 100% rename from tests/lint_examples/minimal_working_example/tests/run_test.sh rename to tests/lint_examples/minimalworkingexample/tests/run_test.sh diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py index 4c5191f30c..aa81e8520b 100644 --- a/tests/test_bump_version.py +++ b/tests/test_bump_version.py @@ -8,7 +8,7 @@ import nf_core.lint, nf_core.bump_version WD = os.path.dirname(__file__) -PATH_WORKING_EXAMPLE = os.path.join(WD, 'lint_examples/minimal_working_example') +PATH_WORKING_EXAMPLE = os.path.join(WD, 'lint_examples/minimalworkingexample') 
@pytest.mark.datafiles(PATH_WORKING_EXAMPLE) @@ -55,8 +55,8 @@ def test_multiple_patterns_found(datafiles): def test_successfull_nextflow_version_bump(datafiles): lint_obj = nf_core.lint.PipelineLint(str(datafiles)) lint_obj.pipeline_name = 'tools' - lint_obj.config['manifest.nextflowVersion'] = '0.32.0' + lint_obj.config['manifest.nextflowVersion'] = '19.10.0' nf_core.bump_version.bump_nextflow_version(lint_obj, '0.40') lint_obj_new = nf_core.lint.PipelineLint(str(datafiles)) lint_obj_new.check_nextflow_config() - assert lint_obj_new.config['manifest.nextflowVersion'] == "'>=0.40'" \ No newline at end of file + assert lint_obj_new.config['manifest.nextflowVersion'] == "'>=0.40'" diff --git a/tests/test_download.py b/tests/test_download.py index 75b891cef3..18bf26c555 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -16,7 +16,7 @@ import tempfile import unittest -PATH_WORKING_EXAMPLE = os.path.join(os.path.dirname(__file__), 'lint_examples/minimal_working_example') +PATH_WORKING_EXAMPLE = os.path.join(os.path.dirname(__file__), 'lint_examples/minimalworkingexample') class DownloadTest(unittest.TestCase): diff --git a/tests/test_lint.py b/tests/test_lint.py index 273f2232af..85d0e627dc 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -32,16 +32,20 @@ def pf(wd, path): WD = os.path.dirname(__file__) PATH_CRITICAL_EXAMPLE = pf(WD, 'lint_examples/critical_example') PATH_FAILING_EXAMPLE = pf(WD, 'lint_examples/failing_example') -PATH_WORKING_EXAMPLE = pf(WD, 'lint_examples/minimal_working_example') +PATH_WORKING_EXAMPLE = pf(WD, 'lint_examples/minimalworkingexample') PATH_MISSING_LICENSE_EXAMPLE = pf(WD, 'lint_examples/missing_license_example') PATHS_WRONG_LICENSE_EXAMPLE = [pf(WD, 'lint_examples/wrong_license_example'), pf(WD, 'lint_examples/license_incomplete_example')] # The maximum sum of passed tests currently possible -MAX_PASS_CHECKS = 59 +MAX_PASS_CHECKS = 77 # The additional tests passed for releases ADD_PASS_RELEASE = 1 +# The 
minimal working example expects a development release version +if 'dev' not in nf_core.__version__: + nf_core.__version__ = '{}dev'.format(nf_core.__version__) + class TestLint(unittest.TestCase): """Class for lint tests""" @@ -58,7 +62,7 @@ def test_call_lint_pipeline_pass(self): This should not result in any exception for the minimal working example""" lint_obj = nf_core.lint.run_linting(PATH_WORKING_EXAMPLE, False) - expectations = {"failed": 0, "warned": 3, "passed": MAX_PASS_CHECKS} + expectations = {"failed": 0, "warned": 4, "passed": MAX_PASS_CHECKS} self.assess_lint_status(lint_obj, **expectations) @pytest.mark.xfail(raises=AssertionError) @@ -73,7 +77,7 @@ def test_call_lint_pipeline_release(self): """Test the main execution function of PipelineLint when running with --release""" lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) lint_obj.lint_pipeline(release_mode=True) - expectations = {"failed": 0, "warned": 3, "passed": MAX_PASS_CHECKS + ADD_PASS_RELEASE} + expectations = {"failed": 0, "warned": 4, "passed": MAX_PASS_CHECKS + ADD_PASS_RELEASE} self.assess_lint_status(lint_obj, **expectations) def test_failing_dockerfile_example(self): @@ -91,7 +95,7 @@ def test_failing_missingfiles_example(self): """Tests for missing files like Dockerfile or LICENSE""" lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) lint_obj.check_files_exist() - expectations = {"failed": 4, "warned": 1, "passed": len(listfiles(PATH_WORKING_EXAMPLE)) - 5 - 1} + expectations = {"failed": 5, "warned": 2, "passed": 13} self.assess_lint_status(lint_obj, **expectations) def test_mit_licence_example_pass(self): @@ -112,14 +116,14 @@ def test_config_variable_example_pass(self): """Tests that config variable existence test works with good pipeline example""" good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) good_lint_obj.check_nextflow_config() - expectations = {"failed": 0, "warned": 0, "passed": 33} + expectations = {"failed": 0, "warned": 0, "passed": 35} 
self.assess_lint_status(good_lint_obj, **expectations) def test_config_variable_example_with_failed(self): """Tests that config variable existence test fails with bad pipeline example""" bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) bad_lint_obj.check_nextflow_config() - expectations = {"failed": 19, "warned": 8, "passed": 6} + expectations = {"failed": 19, "warned": 8, "passed": 8} self.assess_lint_status(bad_lint_obj, **expectations) @pytest.mark.xfail(raises=AssertionError) @@ -128,10 +132,70 @@ def test_config_variable_error(self): bad_lint_obj = nf_core.lint.PipelineLint('/non/existant/path') bad_lint_obj.check_nextflow_config() + def test_actions_wf_branch_pass(self): + """Tests that linting for GitHub actions workflow for branch protection works for a good example""" + lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) + lint_obj.pipeline_name = 'tools' + lint_obj.check_actions_branch_protection() + expectations = {"failed": 0, "warned": 0, "passed": 2} + self.assess_lint_status(lint_obj, **expectations) + + def test_actions_wf_branch_fail(self): + """Tests that linting for Github actions workflow for branch protection fails for a bad example""" + lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) + lint_obj.pipeline_name = 'tools' + lint_obj.check_actions_branch_protection() + expectations = {"failed": 2, "warned": 0, "passed": 0} + self.assess_lint_status(lint_obj, **expectations) + + def test_actions_wf_ci_pass(self): + """Tests that linting for GitHub actions CI workflow works for a good example""" + lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) + lint_obj.minNextflowVersion = '19.10.0' + lint_obj.pipeline_name = 'tools' + lint_obj.config['process.container'] = "'nfcore/tools:0.4'" + lint_obj.check_actions_ci() + expectations = {"failed": 0, "warned": 0, "passed": 3} + self.assess_lint_status(lint_obj, **expectations) + + def test_actions_wf_ci_fail(self): + """Tests that linting for GitHub actions CI 
workflow fails for a bad example""" + lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) + lint_obj.minNextflowVersion = '19.10.0' + lint_obj.pipeline_name = 'tools' + lint_obj.config['process.container'] = "'nfcore/tools:0.4'" + lint_obj.check_actions_ci() + expectations = {"failed": 3, "warned": 0, "passed": 0} + self.assess_lint_status(lint_obj, **expectations) + + def test_actions_wf_ci_fail_wrong_NF_version(self): + """Tests that linting for GitHub actions CI workflow fails for a bad NXF version""" + lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) + lint_obj.minNextflowVersion = '0.28.0' + lint_obj.pipeline_name = 'tools' + lint_obj.config['process.container'] = "'nfcore/tools:0.4'" + lint_obj.check_actions_ci() + expectations = {"failed": 1, "warned": 0, "passed": 2} + self.assess_lint_status(lint_obj, **expectations) + + def test_actions_wf_lint_pass(self): + """Tests that linting for GitHub actions linting wf works for a good example""" + lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) + lint_obj.check_actions_lint() + expectations = {"failed": 0, "warned": 0, "passed": 3} + self.assess_lint_status(lint_obj, **expectations) + + def test_actions_wf_lint_fail(self): + """Tests that linting for GitHub actions linting wf fails for a bad example""" + lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) + lint_obj.check_actions_lint() + expectations = {"failed": 3, "warned": 0, "passed": 0} + self.assess_lint_status(lint_obj, **expectations) + def test_ci_conf_pass(self): """Tests that the continous integration config checks work with a good example""" lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.minNextflowVersion = '0.32.0' + lint_obj.minNextflowVersion = '19.10.0' lint_obj.check_ci_config() expectations = {"failed": 0, "warned": 0, "passed": 3} self.assess_lint_status(lint_obj, **expectations) @@ -168,7 +232,7 @@ def test_missing_license_example(self): def test_readme_pass(self): """Tests that the 
pipeline README file checks work with a good example""" lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.minNextflowVersion = '0.32.0' + lint_obj.minNextflowVersion = '19.10.0' lint_obj.files = ['environment.yml'] lint_obj.check_readme() expectations = {"failed": 0, "warned": 0, "passed": 2} @@ -316,7 +380,7 @@ def test_conda_dockerfile_fail(self): lint_obj.conda_config['name'] = 'nf-core-tools-0.4' lint_obj.dockerfile = ['fubar'] lint_obj.check_conda_dockerfile() - expectations = {"failed": 4, "warned": 0, "passed": 0} + expectations = {"failed": 5, "warned": 0, "passed": 0} self.assess_lint_status(lint_obj, **expectations) def test_conda_dockerfile_skip(self): @@ -416,3 +480,20 @@ def test_pip_dependency_fails(self): lint_obj.check_conda_env_yaml() expectations = {"failed": 1, "warned": 0, "passed": 2} self.assess_lint_status(lint_obj, **expectations) + + def test_pipeline_name_pass(self): + """Tests pipeline name good pipeline example: lower case, no punctuation""" + #good_lint_obj = nf_core.lint.run_linting(PATH_WORKING_EXAMPLE) + good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) + good_lint_obj.pipeline_name = 'tools' + good_lint_obj.check_pipeline_name() + expectations = {"failed": 0, "warned": 0, "passed": 1} + self.assess_lint_status(good_lint_obj, **expectations) + + def test_pipeline_name_critical(self): + """Tests that warning is returned for pipeline not adhering to naming convention""" + critical_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) + critical_lint_obj.pipeline_name = 'Tools123' + critical_lint_obj.check_pipeline_name() + expectations = {"failed": 0, "warned": 2, "passed": 0} + self.assess_lint_status(critical_lint_obj, **expectations)