From 4a47a94f4f55cafe4999832d27068f4dd2554859 Mon Sep 17 00:00:00 2001 From: sven1103 Date: Tue, 17 Jul 2018 17:03:58 +0200 Subject: [PATCH 01/75] Refactor dependency management PyPi cannot work with the requirements.txt, as it is not published with the nf-core tools distribution. The better way is to define the required modules in the respective section in the setuptools setup() method. --- requirements.txt | 6 ------ setup.py | 12 ++++++++---- 2 files changed, 8 insertions(+), 10 deletions(-) delete mode 100644 requirements.txt diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 027346bfcf..0000000000 --- a/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -click -GitPython -pyyaml -requests -requests_cache -tabulate diff --git a/setup.py b/setup.py index 3b992ce63c..0f3ab0504d 100644 --- a/setup.py +++ b/setup.py @@ -10,9 +10,6 @@ with open('LICENSE') as f: license = f.read() -with open('requirements.txt') as f: - required = f.read().splitlines() - setup( name = 'nf-core', version = version, @@ -25,7 +22,14 @@ url = 'https://github.com/nf-core/tools', license = license, scripts = ['scripts/nf-core'], - install_requires = required, + install_requires = [ + 'click', + 'GitPython', + 'pyyaml', + 'requests', + 'requests_cache', + 'tabulate' + ], packages = find_packages(exclude=('docs')), include_package_data = True, zip_safe = False From a58a4cb96b981b618af8bb492636f6e3431b876a Mon Sep 17 00:00:00 2001 From: Maxime Garcia Date: Mon, 6 Aug 2018 15:17:52 +0200 Subject: [PATCH 02/75] fix CI with new MultiQC version in minimal example --- tests/lint_examples/minimal_working_example/environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/lint_examples/minimal_working_example/environment.yml b/tests/lint_examples/minimal_working_example/environment.yml index 92e417a20e..86a5af6faa 100644 --- a/tests/lint_examples/minimal_working_example/environment.yml +++ 
b/tests/lint_examples/minimal_working_example/environment.yml @@ -9,4 +9,4 @@ dependencies: - conda-forge::openjdk=8.0.144 - fastqc=0.11.7 - pip: - - multiqc=1.5 + - multiqc=1.6 From 09a0cf4f0a151428af81ccb8fbdafef58ed8c00d Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 7 Aug 2018 15:31:45 +0200 Subject: [PATCH 03/75] Moved cookiecutter template into tools repo. Made a basic command which runs cookiecutter on the template files. --- MANIFEST.in | 1 + nf_core/pipeline-template/cookiecutter.json | 16 + .../.gitattributes | 1 + .../{{cookiecutter.pipeline_slug}}/.gitignore | 6 + .../.travis.yml | 38 ++ .../CHANGELOG.md | 3 + .../{{cookiecutter.pipeline_slug}}/Dockerfile | 7 + .../{{cookiecutter.pipeline_slug}}/LICENSE | 21 ++ .../{{cookiecutter.pipeline_slug}}/README.md | 30 ++ .../Singularity | 14 + .../assets/email_template.html | 52 +++ .../assets/email_template.txt | 51 +++ .../assets/sendmail_template.txt | 11 + .../bin/markdown_to_html.r | 51 +++ .../bin/scrape_software_versions.py | 38 ++ .../conf/aws.config | 26 ++ .../conf/base.config | 40 +++ .../conf/igenomes.config | 146 ++++++++ .../conf/multiqc_config.yaml | 7 + .../conf/test.config | 20 ++ .../docs/README.md | 11 + .../docs/configuration/adding_your_own.md | 138 ++++++++ .../docs/configuration/local.md | 42 +++ .../docs/configuration/reference_genomes.md | 46 +++ .../docs/installation.md | 43 +++ .../docs/output.md | 35 ++ .../docs/troubleshooting.md | 28 ++ .../docs/usage.md | 186 ++++++++++ .../environment.yml | 8 + .../{{cookiecutter.pipeline_slug}}/main.nf | 324 ++++++++++++++++++ .../nextflow.config | 108 ++++++ requirements.txt | 1 + scripts/nf-core | 31 ++ 33 files changed, 1580 insertions(+) create mode 100644 nf_core/pipeline-template/cookiecutter.json create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.gitattributes create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.gitignore create mode 100644 
nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CHANGELOG.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/LICENSE create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/README.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.html create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.txt create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/sendmail_template.txt create mode 100755 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/markdown_to_html.r create mode 100755 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/scrape_software_versions.py create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/aws.config create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/igenomes.config create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/multiqc_config.yaml create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/test.config create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/README.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/reference_genomes.md create mode 100644 
nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/output.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config diff --git a/MANIFEST.in b/MANIFEST.in index c1a7121c1b..59d5a1a4e0 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,2 +1,3 @@ include LICENSE include README.md +recursive-include nf_core * diff --git a/nf_core/pipeline-template/cookiecutter.json b/nf_core/pipeline-template/cookiecutter.json new file mode 100644 index 0000000000..0806ef25a2 --- /dev/null +++ b/nf_core/pipeline-template/cookiecutter.json @@ -0,0 +1,16 @@ +{ + "pipeline_name": "nf-core/example", + "pipeline_short_description": "This pipeline takes some data and does something with it.", + "github_repo": "{{ cookiecutter.pipeline_name.replace(' ', '-') }}", + "pipeline_url": "https://github.com/{{ cookiecutter.github_repo }}", + "dockerhub_slug": "{{ cookiecutter.pipeline_name.lower().replace(' ', '-').replace('nf-core', 'nfcore') }}", + "pipeline_slug": "{{ cookiecutter.dockerhub_slug.replace('/', '-') }}", + "author_name": "Your Name", + "author_email": "{{ cookiecutter.author_name.lower().replace(' ', '.') }}@gmail.com", + "author_github_username": "{{ cookiecutter.author_name.lower().replace(' ', '') }}", + "author_homepage": "https://github.com/{{ cookiecutter.author_github_username }}", + "author_affiliation": "Your Institute", + "author_affiliation_homepage": "http://www.{{ cookiecutter.author_affiliation.lower().replace(' ', '') }}.com", + "copyright_holder": "{{ 
cookiecutter.author_name }}", + "version": "0.1.0" +} diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.gitattributes b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.gitattributes new file mode 100644 index 0000000000..7fe55006f8 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.gitattributes @@ -0,0 +1 @@ +*.config linguist-language=nextflow diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.gitignore b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.gitignore new file mode 100644 index 0000000000..46f69e414b --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.gitignore @@ -0,0 +1,6 @@ +.nextflow* +work/ +data/ +results/ +.DS_Store +tests/test_data diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml new file mode 100644 index 0000000000..3fbd165c54 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml @@ -0,0 +1,38 @@ +sudo: required +language: java +jdk: openjdk8 +services: + - docker +python: + - "2.7" +cache: pip +matrix: + fast_finish: true + +before_install: + # Pull the docker image first so the test doesn't wait for this + - docker pull {{ cookiecutter.dockerhub_slug }}:{{ cookiecutter.version }} + +install: + # Install Nextflow + - mkdir /tmp/nextflow + - cd /tmp/nextflow + - wget -qO- get.nextflow.io | bash + - sudo ln -s /tmp/nextflow/nextflow /usr/local/bin/nextflow + # Install nf-core/tools + - git clone https://github.com/nf-core/tools.git /tmp/nf-core-tools + - cd /tmp/nf-core-tools + - pip install --user -e . 
+ # Reset + - mkdir ${TRAVIS_BUILD_DIR}/tests + - cd ${TRAVIS_BUILD_DIR}/tests + +env: + - NXF_VER=0.30.0 + - NXF_VER='' + +script: + # Lint the pipeline code + - nf-core lint ${TRAVIS_BUILD_DIR} + # Run the pipeline with the test profile + - nextflow run ${TRAVIS_BUILD_DIR} -profile test,docker diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CHANGELOG.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CHANGELOG.md new file mode 100644 index 0000000000..6396d5d454 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CHANGELOG.md @@ -0,0 +1,3 @@ + +## {{ cookiecutter.version }} - {% now 'local' %} +Initial release of {{ cookiecutter.pipeline_name }}, created with the NGI-NFcookiecutter template: https://github.com/ewels/NGI-NFcookiecutter. diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile new file mode 100644 index 0000000000..dede96cd01 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile @@ -0,0 +1,7 @@ +FROM nfcore/base +MAINTAINER {{ cookiecutter.author_name }} <{{ cookiecutter.author_email }}> +LABEL authors="{{ cookiecutter.author_email }}" \ + description="Docker image containing all requirements for {{ cookiecutter.pipeline_name }} pipeline" + +COPY environment.yml / +RUN conda env update -n root -f /environment.yml && conda clean -a diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/LICENSE b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/LICENSE new file mode 100644 index 0000000000..7652819f1e --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright {% now 'utc', '%Y' %}, {{ cookiecutter.copyright_holder }} + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in 
the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/README.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/README.md new file mode 100644 index 0000000000..ab39919bfb --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/README.md @@ -0,0 +1,30 @@ +# {{ cookiecutter.pipeline_name }} +{{ cookiecutter.pipeline_short_description }} + +[![Build Status](https://travis-ci.org/{{ cookiecutter.github_repo }}.svg?branch=master)](https://travis-ci.org/{{ cookiecutter.github_repo }}) +[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.30.0-brightgreen.svg)](https://www.nextflow.io/) + +[![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](http://bioconda.github.io/) +[![Docker](https://img.shields.io/docker/automated/{{ cookiecutter.dockerhub_slug }}.svg)](https://hub.docker.com/r/{{ cookiecutter.dockerhub_slug }}) +![Singularity Container available]( +https://img.shields.io/badge/singularity-available-7E4C74.svg) + +### Introduction +{{ cookiecutter.pipeline_name }}: {{ 
cookiecutter.pipeline_short_description }} + +The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool to run tasks across multiple compute infrastructures in a very portable manner. It comes with docker / singularity containers making installation trivial and results highly reproducible. + + +### Documentation +The {{ cookiecutter.pipeline_name }} pipeline comes with documentation about the pipeline, found in the `docs/` directory: + +1. [Installation](docs/installation.md) +2. Pipeline configuration + * [Local installation](docs/configuration/local.md) + * [Adding your own system](docs/configuration/adding_your_own.md) +3. [Running the pipeline](docs/usage.md) +4. [Output and how to interpret the results](docs/output.md) +5. [Troubleshooting](docs/troubleshooting.md) + +### Credits +This pipeline was written by {{ cookiecutter.author_name }} ([{{ cookiecutter.author_github_username }}]({{ cookiecutter.author_homepage }})) at [{{ cookiecutter.author_affiliation }}]({{ cookiecutter.author_affiliation_homepage }}). 
diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity new file mode 100644 index 0000000000..5a60a70ca8 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity @@ -0,0 +1,14 @@ +From:nfcore/base +Bootstrap:docker + +%labels + MAINTAINER {{ cookiecutter.author_name }} <{{ cookiecutter.author_email }}> + DESCRIPTION Singularity image containing all requirements for {{ cookiecutter.pipeline_name }} pipeline + VERSION {{ cookiecutter.version }} + +%files + environment.yml / + +%post + /opt/conda/bin/conda env update -n root -f /environment.yml + /opt/conda/bin/conda clean -a diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.html b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.html new file mode 100644 index 0000000000..04fb453ee5 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.html @@ -0,0 +1,52 @@ + + + + + + + + + {{ cookiecutter.pipeline_name }} Pipeline Report + + +
+ +

{{ cookiecutter.pipeline_name }} v${version}

+

Run Name: $runName

+ +<% if (!success){ + out << """ +
+

{{ cookiecutter.pipeline_name }} execution completed unsuccessfully!

+

The exit status of the task that caused the workflow execution to fail was: $exitStatus.

+

The full error message was:

+
${errorReport}
+
+ """ +} else { + out << """ +
+ {{ cookiecutter.pipeline_name }} execution completed successfully! +
+ """ +} +%> + +

The workflow was completed at $dateComplete (duration: $duration)

+

The command used to launch the workflow was as follows:

+
$commandLine
+ +

Pipeline Configuration:

+ + + <% out << summary.collect{ k,v -> "" }.join("\n") %> + +
$k
$v
+ +

{{ cookiecutter.pipeline_name }}

+

{{ cookiecutter.pipeline_url }}

+ +
+ + + diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.txt b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.txt new file mode 100644 index 0000000000..e3aeb4a41e --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.txt @@ -0,0 +1,51 @@ +======================================== + {{ cookiecutter.pipeline_name }} v${version} +======================================== +Run Name: $runName + +<% if (success){ + out << "## {{ cookiecutter.pipeline_name }} execution completed successfully! ##" +} else { + out << """#################################################### +## {{ cookiecutter.pipeline_name }} execution completed unsuccessfully! ## +#################################################### +The exit status of the task that caused the workflow execution to fail was: $exitStatus. +The full error message was: + +${errorReport} +""" +} %> + + +<% if (!success){ + out << """#################################################### +## {{ cookiecutter.pipeline_name }} execution completed unsuccessfully! ## +#################################################### +The exit status of the task that caused the workflow execution to fail was: $exitStatus. +The full error message was: + +${errorReport} +""" +} else { + out << "## {{ cookiecutter.pipeline_name }} execution completed successfully! 
##" +} +%> + + + + +The workflow was completed at $dateComplete (duration: $duration) + +The command used to launch the workflow was as follows: + + $commandLine + + + +Pipeline Configuration: +----------------------- +<% out << summary.collect{ k,v -> " - $k: $v" }.join("\n") %> + +-- +{{ cookiecutter.pipeline_name }} +{{ cookiecutter.pipeline_url }} diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/sendmail_template.txt b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/sendmail_template.txt new file mode 100644 index 0000000000..fd1cd7396e --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/sendmail_template.txt @@ -0,0 +1,11 @@ +To: $email +Subject: $subject +Mime-Version: 1.0 +Content-Type: multipart/related;boundary="nfmimeboundary" + +--nfmimeboundary +Content-Type: text/html; charset=utf-8 + +$email_html + +--nfmimeboundary-- diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/markdown_to_html.r b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/markdown_to_html.r new file mode 100755 index 0000000000..abe1335070 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/markdown_to_html.r @@ -0,0 +1,51 @@ +#!/usr/bin/env Rscript + +# Command line argument processing +args = commandArgs(trailingOnly=TRUE) +if (length(args) < 2) { + stop("Usage: markdown_to_html.r ", call.=FALSE) +} +markdown_fn <- args[1] +output_fn <- args[2] + +# Load / install packages +if (!require("markdown")) { + install.packages("markdown", dependencies=TRUE, repos='http://cloud.r-project.org/') + library("markdown") +} + +base_css_fn <- getOption("markdown.HTML.stylesheet") +base_css <- readChar(base_css_fn, file.info(base_css_fn)$size) +custom_css <- paste(base_css, " +body { + padding: 3em; + margin-right: 350px; + max-width: 100%; +} +#toc { + position: fixed; + right: 20px; + width: 300px; + padding-top: 20px; + overflow: scroll; + height: calc(100% - 
3em - 20px); +} +#toc_header { + font-size: 1.8em; + font-weight: bold; +} +#toc > ul { + padding-left: 0; + list-style-type: none; +} +#toc > ul ul { padding-left: 20px; } +#toc > ul > li > a { display: none; } +img { max-width: 800px; } +") + +markdownToHTML( + file = markdown_fn, + output = output_fn, + stylesheet = custom_css, + options = c('toc', 'base64_images', 'highlight_code') +) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/scrape_software_versions.py b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/scrape_software_versions.py new file mode 100755 index 0000000000..3a8e5da69b --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/scrape_software_versions.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +from __future__ import print_function +from collections import OrderedDict +import re + +regexes = { + '{{ cookiecutter.pipeline_name }}': ['v_pipeline.txt', r"(\S+)"], + 'Nextflow': ['v_nextflow.txt', r"(\S+)"], + 'FastQC': ['v_fastqc.txt', r"FastQC v(\S+)"], + 'MultiQC': ['v_multiqc.txt', r"multiqc, version (\S+)"], +} +results = OrderedDict() +results['{{ cookiecutter.pipeline_name }}'] = 'N/A' +results['Nextflow'] = 'N/A' +results['FastQC'] = 'N/A' +results['MultiQC'] = 'N/A' + +# Search each file using its regex +for k, v in regexes.items(): + with open(v[0]) as x: + versions = x.read() + match = re.search(v[1], versions) + if match: + results[k] = "v{}".format(match.group(1)) + +# Dump to YAML +print (''' +id: '{{ cookiecutter.pipeline_name.lower().replace(' ', '-') }}-software-versions' +section_name: '{{ cookiecutter.pipeline_name }} Software Versions' +section_href: '{{ cookiecutter.pipeline_url }}' +plot_type: 'html' +description: 'are collected at run time from the software output.' +data: | +
+''') +for k,v in results.items(): + print("
{}
{}
".format(k,v)) +print ("
") diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/aws.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/aws.config new file mode 100644 index 0000000000..83189c3465 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/aws.config @@ -0,0 +1,26 @@ +/* + * ------------------------------------------------- + * Nextflow config file for Amazon Web Services + * ------------------------------------------------- + * Imported under the 'aws' Nextflow profile in nextflow.config + * Defines reference genomes, using iGenome paths from s3 + * Uses docker for software depedencies. + */ + +docker { + enabled = true +} + +process { + executor = 'ignite' +} + +params { + saveReference = true + igenomes_base = 's3://ngi-igenomes/igenomes/' + // Max resources based on a typical AWS m4.2xlarge EC2 instance + // Customise with --max_memory, --max_cpus and --max_time (or overwrite with another config file) + max_memory = 32.GB + max_cpus = 8 + max_time = 240.h +} diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config new file mode 100644 index 0000000000..97b3aa6af4 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config @@ -0,0 +1,40 @@ +/* + * ------------------------------------------------- + * {{ cookiecutter.pipeline_name }} Nextflow base config file + * ------------------------------------------------- + * A 'blank slate' config file, appropriate for general + * use on most high performace compute environments. + * Assumes that all software is installed and available + * on the PATH. Runs in `local` mode - all jobs will be + * run on the logged in environment. 
+ */ + +process { + + container = params.container + + cpus = { check_max( 1 * task.attempt, 'cpus' ) } + memory = { check_max( 8.GB * task.attempt, 'memory' ) } + time = { check_max( 2.h * task.attempt, 'time' ) } + + errorStrategy = { task.exitStatus in [143,137] ? 'retry' : 'finish' } + maxRetries = 1 + maxErrors = '-1' + + // Process-specific resource requirements + $fastqc { + errorStrategy = { task.exitStatus in [143,137] ? 'retry' : 'ignore' } + } + $multiqc { + executor = 'local' + errorStrategy = { task.exitStatus in [143,137] ? 'retry' : 'ignore' } + } +} + +params { + // Defaults only, expecting to be overwritten + max_memory = 128.GB + max_cpus = 16 + max_time = 240.h + igenomes_base = 's3://ngi-igenomes/igenomes/' +} diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/igenomes.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/igenomes.config new file mode 100644 index 0000000000..0815499435 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/igenomes.config @@ -0,0 +1,146 @@ +/* + * ------------------------------------------------- + * Nextflow config file for iGenomes paths + * ------------------------------------------------- + * Defines reference genomes, using iGenome paths + * Can be used by any config that customises the base + * path using $params.igenomes_base / --igenomes_base + */ + +params { + // illumina iGenomes reference file paths + genomes { + 'GRCh37' { + bed12 = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/STARIndex/" + } + 'GRCm38' { + bed12 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.bed" + fasta = 
"${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/STARIndex/" + } + 'TAIR10' { + bed12 = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/STARIndex/" + } + 'EB2' { + bed12 = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/STARIndex/" + } + 'UMD3.1' { + bed12 = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/STARIndex/" + } + 'WBcel235' { + bed12 = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/STARIndex/" + } + 'CanFam3.1' { + bed12 = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.bed" + 
fasta = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/STARIndex/" + } + 'GRCz10' { + bed12 = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/STARIndex/" + } + 'BDGP6' { + bed12 = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/STARIndex/" + } + 'EquCab2' { + bed12 = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/STARIndex/" + } + 'EB1' { + bed12 = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/STARIndex/" + } + 'Galgal4' { + bed12 = 
"${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/STARIndex/" + } + 'Gm01' { + bed12 = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/STARIndex/" + } + 'Mmul_1' { + bed12 = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/STARIndex/" + } + 'IRGSP-1.0' { + bed12 = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/STARIndex/" + } + 'CHIMP2.1.4' { + bed12 = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/STARIndex/" + } + 'Rnor_6.0' { + bed12 = 
"${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/STARIndex/" + } + 'R64-1-1' { + bed12 = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/STARIndex/" + } + 'EF2' { + bed12 = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/STARIndex/" + } + 'Sbi1' { + bed12 = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/STARIndex/" + } + 'Sscrofa10.2' { + bed12 = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.gtf" + star = 
"${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/STARIndex/" + } + 'AGPv3' { + bed12 = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.bed" + fasta = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/WholeGenomeFasta/genome.fa" + gtf = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.gtf" + star = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/STARIndex/" + } + } +} diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/multiqc_config.yaml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/multiqc_config.yaml new file mode 100644 index 0000000000..616b35e4e1 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/multiqc_config.yaml @@ -0,0 +1,7 @@ +report_comment: > + This report has been generated by the {{ cookiecutter.pipeline_name }} + analysis pipeline. For information about how to interpret these results, please see the + documentation. +report_section_order: + {{ cookiecutter.pipeline_name.lower().replace(' ', '-') }}-software-versions: + order: -1000 diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/test.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/test.config new file mode 100644 index 0000000000..f8ebeb6446 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/test.config @@ -0,0 +1,20 @@ +/* + * ------------------------------------------------- + * Nextflow config file for running tests + * ------------------------------------------------- + * Defines bundled input files and everything required + * to run a fast and simple test. 
Use as follows: + * nextflow run nf-core/methylseq -profile test + */ + +params { + max_cpus = 2 + max_memory = 6.GB + max_time = 48.h + // Input data + singleEnd = false + readPaths = [ + ['Testdata', ['https://github.com/nf-core/test-datasets/raw/exoseq/testdata/Testdata_R1.tiny.fastq.gz', 'https://github.com/nf-core/test-datasets/raw/exoseq/testdata/Testdata_R2.tiny.fastq.gz']], + ['SRR389222', ['https://github.com/nf-core/test-datasets/raw/methylseq/testdata/SRR389222_sub1.fastq.gz', 'https://github.com/nf-core/test-datasets/raw/methylseq/testdata/SRR389222_sub2.fastq.gz']] + ] +} diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/README.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/README.md new file mode 100644 index 0000000000..a988f4a838 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/README.md @@ -0,0 +1,11 @@ +# {{ cookiecutter.pipeline_name }} Documentation + +The {{ cookiecutter.pipeline_name }} documentation is split into the following files: + +1. [Installation](installation.md) +2. Pipeline configuration + * [Local installation](configuration/local.md) + * [Adding your own system](configuration/adding_your_own.md) +3. [Running the pipeline](usage.md) +4. [Output and how to interpret the results](output.md) +5. [Troubleshooting](troubleshooting.md) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md new file mode 100644 index 0000000000..d06a986f8b --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md @@ -0,0 +1,138 @@ +# {{ cookiecutter.pipeline_name }}: Configuration for other clusters + +It is entirely possible to run this pipeline on other clusters, though you will need to set up your own config file so that the pipeline knows how to work with your cluster. 
+ +> If you think that there are other people using the pipeline who would benefit from your configuration (eg. other common cluster setups), please let us know. We can add a new configuration and profile which can used by specifying `-profile ` when running the pipeline. + +If you are the only person to be running this pipeline, you can create your config file as `~/.nextflow/config` and it will be applied every time you run Nextflow. Alternatively, save the file anywhere and reference it when running the pipeline with `-c path/to/config` (see the [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html) for more). + +A basic configuration comes with the pipeline, which runs by default (the `standard` config profile - see [`conf/base.config`](../conf/base.config)). This means that you only need to configure the specifics for your system and overwrite any defaults that you want to change. + +## Cluster Environment +By default, pipeline uses the `local` Nextflow executor - in other words, all jobs are run in the login session. If you're using a simple server, this may be fine. If you're using a compute cluster, this is bad as all jobs will run on the head node. + +To specify your cluster environment, add the following line to your config file: + +```nextflow +process { + executor = 'YOUR_SYSTEM_TYPE' +} +``` + +Many different cluster types are supported by Nextflow. For more information, please see the [Nextflow documentation](https://www.nextflow.io/docs/latest/executor.html). + +Note that you may need to specify cluster options, such as a project or queue. To do so, use the `clusterOptions` config option: + +```nextflow +process { + executor = 'SLURM' + clusterOptions = '-A myproject' +} +``` + + +## Software Requirements +To run the pipeline, several software packages are required. How you satisfy these requirements is essentially up to you and depends on your system. If possible, we _highly_ recommend using either Docker or Singularity. 
+ +### Docker +Docker is a great way to run {{ cookiecutter.pipeline_name }}, as it manages all software installations and allows the pipeline to be run in an identical software environment across a range of systems. + +Nextflow has [excellent integration](https://www.nextflow.io/docs/latest/docker.html) with Docker, and beyond installing the two tools, not much else is required. + +First, install docker on your system: [Docker Installation Instructions](https://docs.docker.com/engine/installation/) + +Then, simply run the analysis pipeline: +```bash +nextflow run {{ cookiecutter.github_repo }} -profile docker --reads '' +``` + +Nextflow will recognise `{{ cookiecutter.github_repo }}` and download the pipeline from GitHub. The `-profile docker` configuration lists the [{{ cookiecutter.dockerhub_slug }}](https://hub.docker.com/r/{{ cookiecutter.dockerhub_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. + +The public docker images are tagged with the same version numbers as the code, which you can use to ensure reproducibility. When running the pipeline, specify the pipeline version with `-r`, for example `-r v1.3`. This uses pipeline code and docker image from this tagged version. + +To add docker support to your own config file (instead of using the `docker` profile, which runs locally), add the following: + +```nextflow +docker { + enabled = true +} +process { + container = wf_container +} +``` + +The variable `wf_container` is defined dynamically and automatically specifies the image tag if Nextflow is running with `-r`. + +A test suite for docker comes with the pipeline, and can be run by moving to the [`tests` directory](https://github.com/{{ cookiecutter.github_repo }}/tree/master/tests) and running `./run_test.sh`. This will download a small yeast genome and some data, and attempt to run the pipeline through docker on that small dataset. 
This is automatically run using [Travis](https://travis-ci.org/{{ cookiecutter.github_repo }}/) whenever changes are made to the pipeline. + +### Singularity image +Many HPC environments are not able to run Docker due to security issues. [Singularity](http://singularity.lbl.gov/) is a tool designed to run on such HPC systems which is very similar to Docker. Even better, it can use create images directly from dockerhub. + +To use the singularity image for a single run, use `-with-singularity 'docker://{{ cookiecutter.github_repo }}'`. This will download the docker container from dockerhub and create a singularity image for you dynamically. + +To specify singularity usage in your pipeline config file, add the following: + +```nextflow +singularity { + enabled = true +} +process { + container = "docker://$wf_container" +} +``` + +The variable `wf_container` is defined dynamically and automatically specifies the image tag if Nextflow is running with `-r`. + +If you intend to run the pipeline offline, nextflow will not be able to automatically download the singularity image for you. Instead, you'll have to do this yourself manually first, transfer the image file and then point to that. + +First, pull the image file where you have an internet connection: + +```bash +singularity pull --name {{ cookiecutter.pipeline_slug }}.img docker://{{ cookiecutter.github_repo }} +``` + +Then transfer this file and run the pipeline with this path: + +```bash +nextflow run /path/to/{{ cookiecutter.pipeline_slug }} -with-singularity /path/to/{{ cookiecutter.pipeline_slug }}.img +``` + + +### Manual Installation +As a last resort, you may need to install the required software manually. We recommend using [Bioconda](https://bioconda.github.io/) to do this. The following instructions are an example only and will not be updated with the pipeline. 
+ +#### 1) Install miniconda in your home directory +``` bash +cd +wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh +bash Miniconda3-latest-Linux-x86_64.sh +``` + +#### 2) Add the bioconda conda channel (and others) +```bash +conda config --add channels anaconda +conda config --add channels conda-forge +conda config --add channels defaults +conda config --add channels r +conda config --add channels bioconda +conda config --add channels salilab +``` + +#### 3) Create a conda environment, with all necessary packages: +```bash +conda create --name {{ cookiecutter.pipeline_slug }}_py2.7 python=2.7 +source activate {{ cookiecutter.pipeline_slug }}_py2.7 +conda install --yes \ + fastqc \ + multiqc +``` +_(Feel free to adjust versions as required.)_ + +##### 4) Usage +Once created, the conda environment can be activated before running the pipeline and deactivated afterwards: + +```bash +source activate {{ cookiecutter.pipeline_slug }}_py2.7 +# run pipeline +source deactivate +``` diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md new file mode 100644 index 0000000000..116d8e41f1 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md @@ -0,0 +1,42 @@ +# {{ cookiecutter.pipeline_name }}: Local Configuration + +If running the pipeline in a local environment, we highly recommend using either Docker or Singularity. + +## Docker +Docker is a great way to run {{ cookiecutter.pipeline_name }}, as it manages all software installations and allows the pipeline to be run in an identical software environment across a range of systems. + +Nextflow has [excellent integration](https://www.nextflow.io/docs/latest/docker.html) with Docker, and beyond installing the two tools, not much else is required. 
The {{ cookiecutter.pipeline_name }} profile comes with a configuration profile for docker, making it very easy to use. This also comes with the required presets to use the AWS iGenomes resource, meaning that if using common reference genomes you just specify the reference ID and it will be automatically downloaded from AWS S3. + +First, install docker on your system: [Docker Installation Instructions](https://docs.docker.com/engine/installation/) + +Then, simply run the analysis pipeline: +```bash +nextflow run {{ cookiecutter.github_repo }} -profile docker --reads '' +``` + +Nextflow will recognise `{{ cookiecutter.github_repo }}` and download the pipeline from GitHub. The `-profile docker` configuration lists the [{{ cookiecutter.dockerhub_slug }}](https://hub.docker.com/r/{{ cookiecutter.dockerhub_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. + +For more information about how to work with reference genomes, see [`docs/configuration/reference_genomes.md`](docs/configuration/reference_genomes.md). + +### Pipeline versions +The public docker images are tagged with the same version numbers as the code, which you can use to ensure reproducibility. When running the pipeline, specify the pipeline version with `-r`, for example `-r v1.3`. This uses pipeline code and docker image from this tagged version. + + +## Singularity image +Many HPC environments are not able to run Docker due to security issues. [Singularity](http://singularity.lbl.gov/) is a tool designed to run on such HPC systems which is very similar to Docker. Even better, it can create images directly from dockerhub. + +To use the singularity image for a single run, use `-with-singularity 'docker://{{ cookiecutter.dockerhub_slug }}'`. This will download the docker container from dockerhub and create a singularity image for you dynamically. 
+ +If you intend to run the pipeline offline, nextflow will not be able to automatically download the singularity image for you. Instead, you'll have to do this yourself manually first, transfer the image file and then point to that. + +First, pull the image file where you have an internet connection: + +```bash +singularity pull --name {{ cookiecutter.pipeline_slug }}.img docker://{{ cookiecutter.dockerhub_slug }} +``` + +Then transfer this file and run the pipeline with this path: + +```bash +nextflow run /path/to/{{ cookiecutter.pipeline_name }} -with-singularity /path/to/{{ cookiecutter.pipeline_slug }}.img +``` diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/reference_genomes.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/reference_genomes.md new file mode 100644 index 0000000000..28aa10b5ca --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/reference_genomes.md @@ -0,0 +1,46 @@ +# {{ cookiecutter.pipeline_name }}: Reference Genomes Configuration + +The {{ cookiecutter.pipeline_name }} pipeline needs a reference genome for alignment and annotation. If not already available, start by downloading the relevant reference, for example from [illumina iGenomes](https://support.illumina.com/sequencing/sequencing_software/igenome.html). + +The minimal requirements are a FASTA file. + +## Adding paths to a config file +Specifying long paths every time you run the pipeline is a pain. To make this easier, the pipeline comes configured to understand reference genome keywords which correspond to preconfigured paths, meaning that you can just specify `--genome ID` when running the pipeline. + +Note that this genome key can also be specified in a config file if you always use the same genome. 
+ +To use this system, add paths to your config file using the following template: + +```nextflow +params { + genomes { + 'YOUR-ID' { + fasta = '/genome.fa' + } + 'OTHER-GENOME' { + // [..] + } + } + // Optional - default genome. Ignored if --genome 'OTHER-GENOME' specified on command line + genome = 'YOUR-ID' +} +``` + +You can add as many genomes as you like as long as they have unique IDs. + +## illumina iGenomes +To make the use of reference genomes easier, illumina has developed a centralised resource called [iGenomes](https://support.illumina.com/sequencing/sequencing_software/igenome.html). Multiple reference index types are held together with consistent structure for multiple genomes. + +If possible, we recommend making this resource available on your cluster. We have put a copy of iGenomes up onto AWS S3 hosting and this pipeline is configured to use this for some profiles (`docker`, `aws`). These profiles will automatically pull the required reference files when you run the pipeline. + +To add iGenomes to your config file, add the following line to the end of your config file: + +```nextflow +includeConfig '/path/to/{{ cookiecutter.pipeline_name }}/conf/igenomes.config' +``` + +This works best when you have a `profile` set up in the pipeline - see [`nextflow.config`](../../nextflow.config). + +The hosting fees for AWS iGenomes are currently funded by a grant from Amazon. We hope that this work will be extended past the end of the grant expiry date (mid 2018), but we can't be sure at this point. 
+ +For more information about the AWS iGenomes, see https://ewels.github.io/AWS-iGenomes/ diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md new file mode 100644 index 0000000000..e6ac104b66 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md @@ -0,0 +1,43 @@ +# {{ cookiecutter.pipeline_name }} Installation + +To start using the {{ cookiecutter.pipeline_name }} pipeline, there are three steps described below: + +1. [Install Nextflow](#install-nextflow) +2. [Install the pipeline](#install-the-pipeline) +3. Configure the pipeline + * [Local installation](configuration/local.md) + * [Adding your own system](configuration/adding_your_own.md) + +## 1) Install NextFlow +Nextflow runs on most POSIX systems (Linux, Mac OSX etc). It can be installed by running the following commands: + +```bash +# Make sure that Java v7+ is installed: +java -version + +# Install Nextflow +curl -fsSL get.nextflow.io | bash + +# Add Nextflow binary to your PATH: +mv nextflow ~/bin/ +# OR system-wide installation: +# sudo mv nextflow /usr/local/bin +``` + +**You need NextFlow version >= 0.24 to run this pipeline.** + +See [nextflow.io](https://www.nextflow.io/) and [NGI-NextflowDocs](https://github.com/SciLifeLab/NGI-NextflowDocs) for further instructions on how to install and configure Nextflow. + +## 2) Install the Pipeline +This pipeline itself needs no installation - NextFlow will automatically fetch it from GitHub if `{{ cookiecutter.github_repo }}` is specified as the pipeline name. 
+ +### Offline use + +If you need to run the pipeline on a system with no internet connection, you will need to download the files yourself from GitHub and run them directly: + +```bash +wget https://github.com/{{ cookiecutter.github_repo }}/archive/master.zip +unzip master.zip -d /my-pipelines/ +cd /my_data/ +nextflow run /my-pipelines/{{ cookiecutter.pipeline_slug }}-master +``` diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/output.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/output.md new file mode 100644 index 0000000000..50ca1b35f5 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/output.md @@ -0,0 +1,35 @@ +# {{ cookiecutter.pipeline_name }} +{{ cookiecutter.pipeline_short_description }} + +This document describes the output produced by the pipeline. Most of the plots are taken from the MultiQC report, which summarises results at the end of the pipeline. + +## Pipeline overview +The pipeline is built using [Nextflow](https://www.nextflow.io/) +and processes data using the following steps: + +* [FastQC](#fastqc) - read quality control +* [MultiQC](#multiqc) - aggregate report, describing results of the whole pipeline + +## FastQC +[FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your reads. It provides information about the quality score distribution across your reads, the per base sequence content (%T/A/G/C). You get information about adapter contamination and other overrepresented sequences. + +For further reading and documentation see the [FastQC help](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/). 
+ +**Output directory: `results/fastqc`** + +* `sample_fastqc.html` + * FastQC report, containing quality metrics for your untrimmed raw fastq files +* `zips/sample_fastqc.zip` + * zip file containing the FastQC report, tab-delimited data file and plot images + +## MultiQC +[MultiQC](http://multiqc.info) is a visualisation tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in within the report data directory. + +**Output directory: `results/multiqc`** + +* `Project_multiqc_report.html` + * MultiQC report - a standalone HTML file that can be viewed in your web browser +* `Project_multiqc_data/` + * Directory containing parsed statistics from the different tools used in the pipeline + +For more information about how to use MultiQC reports, see http://multiqc.info diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md new file mode 100644 index 0000000000..83ca907cfb --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md @@ -0,0 +1,28 @@ +# Troubleshooting + +## Input files not found + +If only no file, only one input file , or only read one and not read two is picked up then something is wrong with your input file declaration + +1. The path must be enclosed in quotes (`'` or `"`) +2. The path must have at least one `*` wildcard character. This is even if you are only running one paired end sample. +3. When using the pipeline with paired end data, the path must use `{1,2}` or `{R1,R2}` notation to specify read pairs. +4. 
If you are running Single end data make sure to specify `--singleEnd` + +If the pipeline can't find your files then you will get the following error + +``` +ERROR ~ Cannot find any reads matching: *{1,2}.fastq.gz +``` + +Note that if your sample name is "messy" then you have to be very particular with your glob specification. A file name like `L1-1-D-2h_S1_L002_R1_001.fastq.gz` can be difficult enough for a human to read. Specifying `*{1,2}*.gz` won't give you what you want, whilst `*{R1,R2}*.gz` will. + + +## Data organization +The pipeline can't take a list of multiple input files - it takes a glob expression. If your input files are scattered in different paths then we recommend that you generate a directory with symlinked files. If running in paired end mode please make sure that your files are sensibly named so that they can be properly paired. See the previous point. + +## Extra resources and getting help +If you still have an issue with running the pipeline then feel free to contact us. +Have a look at the [pipeline website]({{ cookiecutter.pipeline_url }}) to find out how. + +If you have problems that are related to Nextflow and not our pipeline then check out the [Nextflow gitter channel](https://gitter.im/nextflow-io/nextflow) or the [google group](https://groups.google.com/forum/#!forum/nextflow). diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md new file mode 100644 index 0000000000..b82fd28331 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md @@ -0,0 +1,186 @@ +# {{ cookiecutter.pipeline_name }} Usage + +## General Nextflow info +Nextflow handles job submissions on SLURM or other environments, and supervises running the jobs. Thus the Nextflow process must run until the pipeline is finished. We recommend that you put the process running in the background through `screen` / `tmux` or similar tool. 
Alternatively you can run nextflow within a cluster job submitted to your job scheduler. + +It is recommended to limit the Nextflow Java virtual machines memory. We recommend adding the following line to your environment (typically in `~/.bashrc` or `~/.bash_profile`): + +```bash +NXF_OPTS='-Xms1g -Xmx4g' +``` + +## Running the pipeline +The typical command for running the pipeline is as follows: +```bash +nextflow run {{ cookiecutter.github_repo }} --reads '*_R{1,2}.fastq.gz' -profile docker +``` + +This will launch the pipeline with the `docker` configuration profile. See below for more information about profiles. + +Note that the pipeline will create the following files in your working directory: + +```bash +work # Directory containing the nextflow working files +results # Finished results (configurable, see below) +.nextflow_log # Log file from Nextflow +# Other nextflow hidden files, eg. history of pipeline runs and old logs. +``` + +### Updating the pipeline +When you run the above command, Nextflow automatically pulls the pipeline code from GitHub and stores it as a cached version. When running the pipeline after this, it will always use the cached version if available - even if the pipeline has been updated since. To make sure that you're running the latest version of the pipeline, make sure that you regularly update the cached version of the pipeline: + +```bash +nextflow pull {{ cookiecutter.github_repo }} +``` + +### Reproducibility +It's a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. + +First, go to the [{{ cookiecutter.pipeline_name }} releases page](https://github.com/{{ cookiecutter.github_repo }}/releases) and find the latest version number - numeric only (eg. `1.3.1`). 
Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. + +This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. + + +## Main Arguments + +### `-profile` +Use this parameter to choose a configuration profile. Each profile is designed for a different compute environment - follow the links below to see instructions for running on that system. Available profiles are: + +* `docker` + * A generic configuration profile to be used with [Docker](http://docker.com/) + * Runs using the `local` executor and pulls software from dockerhub: [`{{ cookiecutter.dockerhub_slug }}`](http://hub.docker.com/r/{{ cookiecutter.dockerhub_slug }}/) +* `aws` + * A starter configuration for running the pipeline on Amazon Web Services. Uses docker and Spark. + * See [`docs/configuration/aws.md`](configuration/aws.md) +* `standard` + * The default profile, used if `-profile` is not specified at all. Runs locally and expects all software to be installed and available on the `PATH`. + * This profile is mainly designed to be used as a starting point for other configurations and is inherited by most of the other profiles. +* `none` + * No configuration at all. Useful if you want to build your own config from scratch and want to avoid loading in the default `base` config profile (not recommended). + +### `--reads` +Use this to specify the location of your input FastQ files. For example: + +```bash +--reads 'path/to/data/sample_*_{1,2}.fastq' +``` + +Please note the following requirements: + +1. The path must be enclosed in quotes +2. The path must have at least one `*` wildcard character +3. When using the pipeline with paired end data, the path must use `{1,2}` notation to specify read pairs. + +If left unspecified, a default pattern is used: `data/*{1,2}.fastq.gz` + +### `--singleEnd` +By default, the pipeline expects paired-end data. 
If you have single-end data, you need to specify `--singleEnd` on the command line when you launch the pipeline. A normal glob pattern, enclosed in quotation marks, can then be used for `--reads`. For example: + +```bash +--singleEnd --reads '*.fastq' +``` + +It is not possible to run a mixture of single-end and paired-end files in one run. + + +## Reference Genomes + +The pipeline config files come bundled with paths to the illumina iGenomes reference index files. If running with docker or AWS, the configuration is set up to use the [AWS-iGenomes](https://ewels.github.io/AWS-iGenomes/) resource. + +### `--genome` (using iGenomes) +There are 31 different species supported in the iGenomes references. To run the pipeline, you must specify which to use with the `--genome` flag. + +You can find the keys to specify the genomes in the [iGenomes config file](../conf/igenomes.config). Common genomes that are supported are: + +* Human + * `--genome GRCh37` +* Mouse + * `--genome GRCm38` +* _Drosophila_ + * `--genome BDGP6` +* _S. cerevisiae_ + * `--genome 'R64-1-1'` + +> There are numerous others - check the config file for more. + +Note that you can use the same configuration setup to save sets of reference files for your own use, even if they are not part of the iGenomes resource. See the [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html) for instructions on where to save such a file. + +The syntax for this reference configuration is as follows: + +```nextflow +params { + genomes { + 'GRCh37' { + fasta = '' // Used if no star index given + } + // Any number of additional genomes, key is used with --genome + } +} +``` + +### `--fasta` +If you prefer, you can specify the full path to your reference genome when you run the pipeline: + +```bash +--fasta '[path to Fasta reference]' +``` + +## Job Resources +### Automatic resubmission +Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. 
For most of the steps in the pipeline, if the job exits with an error code of `143` (exceeded requested resources) it will automatically resubmit with higher requests (2 x original, then 3 x original). If it still fails after three times then the pipeline is stopped. + +### Custom resource requests +Wherever process-specific requirements are set in the pipeline, the default value can be changed by creating a custom config file. See the files in [`conf`](../conf) for examples. + +## Other command line parameters +### `--outdir` +The output directory where the results will be saved. + +### `--email` +Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits. If set in your user config file (`~/.nextflow/config`) then you don't need to speicfy this on the command line for every run. + +### `-name` +Name for the pipeline run. If not specified, Nextflow will automatically generate a random mnemonic. + +This is used in the MultiQC report (if not default) and in the summary HTML / e-mail (always). + +**NB:** Single hyphen (core Nextflow option) + +### `-resume` +Specify this when restarting a pipeline. Nextflow will used cached results from any pipeline steps where the inputs are the same, continuing from where it got to previously. + +You can also supply a run name to resume a specific run: `-resume [run-name]`. Use the `nextflow log` command to show previous run names. + +**NB:** Single hyphen (core Nextflow option) + +### `-c` +Specify the path to a specific config file (this is a core NextFlow command). + +**NB:** Single hyphen (core Nextflow option) + +Note - you can use this to override defaults. For example, you can specify a config file using `-c` that contains the following: + +```nextflow +process.$multiqc.module = [] +``` + +### `--max_memory` +Use to set a top-limit for the default memory requirement for each process. +Should be a string in the format integer-unit. eg. 
`--max_memory '8.GB'`` + +### `--max_time` +Use to set a top-limit for the default time requirement for each process. +Should be a string in the format integer-unit. eg. `--max_time '2.h'` + +### `--max_cpus` +Use to set a top-limit for the default CPU requirement for each process. +Should be a string in the format integer-unit. eg. `--max_cpus 1` + +### `--plaintext_email` +Set to receive plain-text e-mails instead of HTML formatted. + +### `--sampleLevel` +Used to turn of the edgeR MDS and heatmap. Set automatically when running on fewer than 3 samples. + +### `--multiqc_config` +If you would like to supply a custom config file to MultiQC, you can specify a path with `--multiqc_config`. This is used instead of the config file specific to the pipeline. diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml new file mode 100644 index 0000000000..ba29c1d2d0 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml @@ -0,0 +1,8 @@ +name: {{ cookiecutter.pipeline_slug }}-{{ cookiecutter.version }} +channels: + - bioconda + - conda-forge + - defaults +dependencies: + - fastqc=0.11.7 + - multiqc=1.5 diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf new file mode 100644 index 0000000000..6d6f628a69 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf @@ -0,0 +1,324 @@ +#!/usr/bin/env nextflow +/* +======================================================================================== + {{ cookiecutter.pipeline_name }} +======================================================================================== + {{ cookiecutter.pipeline_name }} Analysis Pipeline. Started {% now 'local' %}. 
+ #### Homepage / Documentation + {{ cookiecutter.pipeline_url }} + #### Authors + {{ cookiecutter.author_name }} {{ cookiecutter.author_github_username }} <{{ cookiecutter.author_email }}> - {{ cookiecutter.author_homepage }}> +---------------------------------------------------------------------------------------- +*/ + + +def helpMessage() { + log.info""" + ========================================= + {{ cookiecutter.pipeline_name }} v${params.version} + ========================================= + Usage: + + The typical command for running the pipeline is as follows: + + nextflow run {{ cookiecutter.github_repo }} --reads '*_R{1,2}.fastq.gz' -profile docker + + Mandatory arguments: + --reads Path to input data (must be surrounded with quotes) + --genome Name of iGenomes reference + -profile Hardware config to use. docker / aws + + Options: + --singleEnd Specifies that the input is single end reads + + References If not specified in the configuration file or you wish to overwrite any of the references. + --fasta Path to Fasta reference + + Other options: + --outdir The output directory where the results will be saved + --email Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits + -name Name for the pipeline run. If not specified, Nextflow will automatically generate a random mnemonic. + """.stripIndent() +} + +/* + * SET UP CONFIGURATION VARIABLES + */ + +// Show help emssage +if (params.help){ + helpMessage() + exit 0 +} + +// Configurable variables +params.name = false +params.fasta = params.genome ? 
params.genomes[ params.genome ].fasta ?: false : false +params.multiqc_config = "$baseDir/conf/multiqc_config.yaml" +params.email = false +params.plaintext_email = false + +multiqc_config = file(params.multiqc_config) +output_docs = file("$baseDir/docs/output.md") + +// Validate inputs +if ( params.fasta ){ + fasta = file(params.fasta) + if( !fasta.exists() ) exit 1, "Fasta file not found: ${params.fasta}" +} +// +// NOTE - THIS IS NOT USED IN THIS PIPELINE, EXAMPLE ONLY +// If you want to use the above in a process, define the following: +// input: +// file fasta from fasta +// + + +// Has the run name been specified by the user? +// this has the bonus effect of catching both -name and --name +custom_runName = params.name +if( !(workflow.runName ==~ /[a-z]+_[a-z]+/) ){ + custom_runName = workflow.runName +} + +/* + * Create a channel for input read files + */ + if(params.readPaths){ + if(params.singleEnd){ + Channel + .from(params.readPaths) + .map { row -> [ row[0], [file(row[1][0])]] } + .ifEmpty { exit 1, "params.readPaths was empty - no input files supplied" } + .into { read_files_fastqc; read_files_trimming } + } else { + Channel + .from(params.readPaths) + .map { row -> [ row[0], [file(row[1][0]), file(row[1][1])]] } + .ifEmpty { exit 1, "params.readPaths was empty - no input files supplied" } + .into { read_files_fastqc; read_files_trimming } + } + } else { + Channel + .fromFilePairs( params.reads, size: params.singleEnd ? 1 : 2 ) + .ifEmpty { exit 1, "Cannot find any reads matching: ${params.reads}\nNB: Path needs to be enclosed in quotes!\nIf this is single-end data, please specify --singleEnd on the command line." } + .into { read_files_fastqc; read_files_trimming } + } + + +// Header log info +log.info """======================================================= + ,--./,-. 
+ ___ __ __ __ ___ /,-._.--~\' + |\\ | |__ __ / ` / \\ |__) |__ } { + | \\| | \\__, \\__/ | \\ |___ \\`-._,-`-, + `._,._,\' + +{{ cookiecutter.pipeline_name }} v${params.version}" +=======================================================""" +def summary = [:] +summary['Pipeline Name'] = '{{ cookiecutter.pipeline_name }}' +summary['Pipeline Version'] = params.version +summary['Run Name'] = custom_runName ?: workflow.runName +summary['Reads'] = params.reads +summary['Fasta Ref'] = params.fasta +summary['Data Type'] = params.singleEnd ? 'Single-End' : 'Paired-End' +summary['Max Memory'] = params.max_memory +summary['Max CPUs'] = params.max_cpus +summary['Max Time'] = params.max_time +summary['Output dir'] = params.outdir +summary['Working dir'] = workflow.workDir +summary['Container Engine'] = workflow.containerEngine +if(workflow.containerEngine) summary['Container'] = workflow.container +summary['Current home'] = "$HOME" +summary['Current user'] = "$USER" +summary['Current path'] = "$PWD" +summary['Working dir'] = workflow.workDir +summary['Output dir'] = params.outdir +summary['Script dir'] = workflow.projectDir +summary['Config Profile'] = workflow.profile +if(params.email) summary['E-mail Address'] = params.email +log.info summary.collect { k,v -> "${k.padRight(15)}: $v" }.join("\n") +log.info "=========================================" + + +// Check that Nextflow version is up to date enough +// try / throw / catch works for NF versions < 0.25 when this was implemented +try { + if( ! nextflow.version.matches(">= $params.nf_required_version") ){ + throw GroovyException('Nextflow version too old') + } +} catch (all) { + log.error "====================================================\n" + + " Nextflow version $params.nf_required_version required! 
You are running v$workflow.nextflow.version.\n" + + " Pipeline execution will continue, but things may break.\n" + + " Please run `nextflow self-update` to update Nextflow.\n" + + "============================================================" +} + + +/* + * Parse software version numbers + */ +process get_software_versions { + + output: + file 'software_versions_mqc.yaml' into software_versions_yaml + + script: + """ + echo $params.version > v_pipeline.txt + echo $workflow.nextflow.version > v_nextflow.txt + fastqc --version > v_fastqc.txt + multiqc --version > v_multiqc.txt + scrape_software_versions.py > software_versions_mqc.yaml + """ +} + + + +/* + * STEP 1 - FastQC + */ +process fastqc { + tag "$name" + publishDir "${params.outdir}/fastqc", mode: 'copy', + saveAs: {filename -> filename.indexOf(".zip") > 0 ? "zips/$filename" : "$filename"} + + input: + set val(name), file(reads) from read_files_fastqc + + output: + file "*_fastqc.{zip,html}" into fastqc_results + + script: + """ + fastqc -q $reads + """ +} + + + +/* + * STEP 2 - MultiQC + */ +process multiqc { + publishDir "${params.outdir}/MultiQC", mode: 'copy' + + input: + file multiqc_config + file ('fastqc/*') from fastqc_results.collect() + file ('software_versions/*') from software_versions_yaml + + output: + file "*multiqc_report.html" into multiqc_report + file "*_data" + + script: + rtitle = custom_runName ? "--title \"$custom_runName\"" : '' + rfilename = custom_runName ? "--filename " + custom_runName.replaceAll('\\W','_').replaceAll('_+','_') + "_multiqc_report" : '' + """ + multiqc -f $rtitle $rfilename --config $multiqc_config . 
+ """ +} + + + +/* + * STEP 3 - Output Description HTML + */ +process output_documentation { + tag "$prefix" + publishDir "${params.outdir}/Documentation", mode: 'copy' + + input: + file output_docs + + output: + file "results_description.html" + + script: + """ + markdown_to_html.r $output_docs results_description.html + """ +} + + + +/* + * Completion e-mail notification + */ +workflow.onComplete { + + // Set up the e-mail variables + def subject = "[{{ cookiecutter.pipeline_name }}] Successful: $workflow.runName" + if(!workflow.success){ + subject = "[{{ cookiecutter.pipeline_name }}] FAILED: $workflow.runName" + } + def email_fields = [:] + email_fields['version'] = params.version + email_fields['runName'] = custom_runName ?: workflow.runName + email_fields['success'] = workflow.success + email_fields['dateComplete'] = workflow.complete + email_fields['duration'] = workflow.duration + email_fields['exitStatus'] = workflow.exitStatus + email_fields['errorMessage'] = (workflow.errorMessage ?: 'None') + email_fields['errorReport'] = (workflow.errorReport ?: 'None') + email_fields['commandLine'] = workflow.commandLine + email_fields['projectDir'] = workflow.projectDir + email_fields['summary'] = summary + email_fields['summary']['Date Started'] = workflow.start + email_fields['summary']['Date Completed'] = workflow.complete + email_fields['summary']['Pipeline script file path'] = workflow.scriptFile + email_fields['summary']['Pipeline script hash ID'] = workflow.scriptId + if(workflow.repository) email_fields['summary']['Pipeline repository Git URL'] = workflow.repository + if(workflow.commitId) email_fields['summary']['Pipeline repository Git Commit'] = workflow.commitId + if(workflow.revision) email_fields['summary']['Pipeline Git branch/tag'] = workflow.revision + email_fields['summary']['Nextflow Version'] = workflow.nextflow.version + email_fields['summary']['Nextflow Build'] = workflow.nextflow.build + email_fields['summary']['Nextflow Compile Timestamp'] = 
workflow.nextflow.timestamp + + // Render the TXT template + def engine = new groovy.text.GStringTemplateEngine() + def tf = new File("$baseDir/assets/email_template.txt") + def txt_template = engine.createTemplate(tf).make(email_fields) + def email_txt = txt_template.toString() + + // Render the HTML template + def hf = new File("$baseDir/assets/email_template.html") + def html_template = engine.createTemplate(hf).make(email_fields) + def email_html = html_template.toString() + + // Render the sendmail template + def smail_fields = [ email: params.email, subject: subject, email_txt: email_txt, email_html: email_html, baseDir: "$baseDir" ] + def sf = new File("$baseDir/assets/sendmail_template.txt") + def sendmail_template = engine.createTemplate(sf).make(smail_fields) + def sendmail_html = sendmail_template.toString() + + // Send the HTML e-mail + if (params.email) { + try { + if( params.plaintext_email ){ throw GroovyException('Send plaintext e-mail, not HTML') } + // Try to send HTML e-mail using sendmail + [ 'sendmail', '-t' ].execute() << sendmail_html + log.info "[{{ cookiecutter.pipeline_name }}] Sent summary e-mail to $params.email (sendmail)" + } catch (all) { + // Catch failures and try with plaintext + [ 'mail', '-s', subject, params.email ].execute() << email_txt + log.info "[{{ cookiecutter.pipeline_name }}] Sent summary e-mail to $params.email (mail)" + } + } + + // Write summary e-mail HTML to a file + def output_d = new File( "${params.outdir}/Documentation/" ) + if( !output_d.exists() ) { + output_d.mkdirs() + } + def output_hf = new File( output_d, "pipeline_report.html" ) + output_hf.withWriter { w -> w << email_html } + def output_tf = new File( output_d, "pipeline_report.txt" ) + output_tf.withWriter { w -> w << email_txt } + + log.info "[{{ cookiecutter.pipeline_name }}] Pipeline Complete" + +} diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config 
b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config new file mode 100644 index 0000000000..84cffc0eb0 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config @@ -0,0 +1,108 @@ +/* + * ------------------------------------------------- + * {{ cookiecutter.pipeline_name }} Nextflow config file + * ------------------------------------------------- + * Default config options for all environments. + * Cluster-specific config options should be saved + * in the conf folder and imported under a profile + * name here. + */ + +// Global default params, used in configs +params { + + version = "{{ cookiecutter.version }}" // Pipeline version + container = '{{ cookiecutter.dockerhub_slug }}:{{ cookiecutter.version }}' // Container slug. Stable releases should specify release tag! + nf_required_version = '0.30.0' // Minimum version of nextflow required + + help = false + reads = "data/*{1,2}.fastq.gz" + singleEnd = false + outdir = './results' + version = version + nf_required_version = nf_required_version + igenomes_base = "./iGenomes" + clusterOptions = false +} + +profiles { + + standard { + includeConfig 'conf/base.config' + } + conda { process.conda = "$baseDir/environment.yml" } + docker { docker.enabled = true } + singularity { singularity.enabled = true } + aws { + includeConfig 'conf/base.config' + includeConfig 'conf/aws.config' + includeConfig 'conf/igenomes.config' + } + test { + includeConfig 'conf/base.config' + includeConfig 'conf/test.config' + } + none { + // Don't load any config (for use with custom home configs) + } + +} + +// Capture exit codes from upstream processes when piping +process.shell = ['/bin/bash', '-euo', 'pipefail'] + +timeline { + enabled = true + file = "${params.outdir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_timeline.html" +} +report { + enabled = true + file = "${params.outdir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_report.html" +} 
+trace { + enabled = true + file = "${params.outdir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_trace.txt" +} +dag { + enabled = true + file = "${params.outdir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_dag.svg" +} + +manifest { + homePage = '{{ cookiecutter.pipeline_url }}' + description = '{{ cookiecutter.pipeline_short_description }}' + mainScript = 'main.nf' +} + +// Function to ensure that resource requirements don't go beyond +// a maximum limit +def check_max(obj, type) { + if(type == 'memory'){ + try { + if(obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) + return params.max_memory as nextflow.util.MemoryUnit + else + return obj + } catch (all) { + println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj" + return obj + } + } else if(type == 'time'){ + try { + if(obj.compareTo(params.max_time as nextflow.util.Duration) == 1) + return params.max_time as nextflow.util.Duration + else + return obj + } catch (all) { + println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj" + return obj + } + } else if(type == 'cpus'){ + try { + return Math.min( obj, params.max_cpus as int ) + } catch (all) { + println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! 
Using default value: $obj" + return obj + } + } +} diff --git a/requirements.txt b/requirements.txt index 027346bfcf..498b0ec380 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ +cookiecutter click GitPython pyyaml diff --git a/scripts/nf-core b/scripts/nf-core index 973a7e2d68..70181ba2e5 100755 --- a/scripts/nf-core +++ b/scripts/nf-core @@ -3,6 +3,7 @@ from __future__ import print_function +from cookiecutter.main import cookiecutter import click import sys import os @@ -106,6 +107,36 @@ def release(pipeline_dir, new_version): # Bump the version number in relevant files nf_core.release.make_release(lint_obj, new_version) +@nf_core_cli.command() +@click.option( + '--no-input', + is_flag = True, + default = False, + help = "Do not prompt for parameters and only use default values" +) +@click.option( + '-f', '--force', + is_flag = True, + default = False, + help = "Overwrite the contents of the output directory if it already exists" +) +@click.option( + '-o', '--outdir', + type = str, + help = "Where to output the generated project dir into" +) +@click.option( + '-c', '--config-file', + type = click.Path(exists=True), + help = "User configuration file" +) +def init(no_input, force, outdir, config_file): + """ Create a new pipeline using the nf-core template """ + # nf_core.cookiecutter(no_input, overwrite_if_exists, outdir, config_file) + ccdir = os.path.join(os.path.dirname(os.path.realpath(nf_core.__file__)), 'pipeline-template/') + cookiecutter(ccdir) + + if __name__ == '__main__': print(""" ,--./,-. From b7553fdb7f499b4fc7d3dc872f965c8be3334949 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 7 Aug 2018 16:43:07 +0200 Subject: [PATCH 04/75] Simplified template variables. Removed large number of cookiecutter variables, made init command use flags only. 
--- nf_core/pipeline-template/cookiecutter.json | 14 +----- .../.travis.yml | 2 +- .../CHANGELOG.md | 2 +- .../{{cookiecutter.pipeline_slug}}/Dockerfile | 4 +- .../{{cookiecutter.pipeline_slug}}/LICENSE | 2 - .../{{cookiecutter.pipeline_slug}}/README.md | 7 +-- .../Singularity | 1 - .../assets/email_template.html | 2 +- .../assets/email_template.txt | 2 +- .../bin/scrape_software_versions.py | 2 +- .../conf/multiqc_config.yaml | 4 +- .../docs/configuration/adding_your_own.md | 10 ++--- .../docs/configuration/local.md | 8 ++-- .../docs/installation.md | 4 +- .../docs/troubleshooting.md | 2 +- .../docs/usage.md | 8 ++-- .../{{cookiecutter.pipeline_slug}}/main.nf | 8 ++-- .../nextflow.config | 4 +- scripts/nf-core | 45 ++++++++++--------- 19 files changed, 58 insertions(+), 73 deletions(-) diff --git a/nf_core/pipeline-template/cookiecutter.json b/nf_core/pipeline-template/cookiecutter.json index 0806ef25a2..6680606540 100644 --- a/nf_core/pipeline-template/cookiecutter.json +++ b/nf_core/pipeline-template/cookiecutter.json @@ -1,16 +1,6 @@ { - "pipeline_name": "nf-core/example", + "pipeline_name": "example", "pipeline_short_description": "This pipeline takes some data and does something with it.", - "github_repo": "{{ cookiecutter.pipeline_name.replace(' ', '-') }}", - "pipeline_url": "https://github.com/{{ cookiecutter.github_repo }}", - "dockerhub_slug": "{{ cookiecutter.pipeline_name.lower().replace(' ', '-').replace('nf-core', 'nfcore') }}", - "pipeline_slug": "{{ cookiecutter.dockerhub_slug.replace('/', '-') }}", - "author_name": "Your Name", - "author_email": "{{ cookiecutter.author_name.lower().replace(' ', '.') }}@gmail.com", - "author_github_username": "{{ cookiecutter.author_name.lower().replace(' ', '') }}", - "author_homepage": "https://github.com/{{ cookiecutter.author_github_username }}", - "author_affiliation": "Your Institute", - "author_affiliation_homepage": "http://www.{{ cookiecutter.author_affiliation.lower().replace(' ', '') }}.com", - 
"copyright_holder": "{{ cookiecutter.author_name }}", + "pipeline_slug": "{{ cookiecutter.pipeline_name.lower().replace(' ', '-').replace('/', '-') }}", "version": "0.1.0" } diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml index 3fbd165c54..d88853357d 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml @@ -11,7 +11,7 @@ matrix: before_install: # Pull the docker image first so the test doesn't wait for this - - docker pull {{ cookiecutter.dockerhub_slug }}:{{ cookiecutter.version }} + - docker pull {{ cookiecutter.pipeline_slug }}:{{ cookiecutter.version }} install: # Install Nextflow diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CHANGELOG.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CHANGELOG.md index 6396d5d454..04c1fc9830 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CHANGELOG.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CHANGELOG.md @@ -1,3 +1,3 @@ -## {{ cookiecutter.version }} - {% now 'local' %} +## {{ cookiecutter.version }} - Initial release of {{ cookiecutter.pipeline_name }}, created with the NGI-NFcookiecutter template: https://github.com/ewels/NGI-NFcookiecutter. 
diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile index dede96cd01..05e941b29d 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile @@ -1,7 +1,5 @@ FROM nfcore/base -MAINTAINER {{ cookiecutter.author_name }} <{{ cookiecutter.author_email }}> -LABEL authors="{{ cookiecutter.author_email }}" \ - description="Docker image containing all requirements for {{ cookiecutter.pipeline_name }} pipeline" +LABEL description="Docker image containing all requirements for {{ cookiecutter.pipeline_name }} pipeline" COPY environment.yml / RUN conda env update -n root -f /environment.yml && conda clean -a diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/LICENSE b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/LICENSE index 7652819f1e..9cf106272a 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/LICENSE +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/LICENSE @@ -1,7 +1,5 @@ MIT License -Copyright {% now 'utc', '%Y' %}, {{ cookiecutter.copyright_holder }} - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/README.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/README.md index ab39919bfb..5b676b8af6 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/README.md @@ -1,11 +1,11 @@ # {{ cookiecutter.pipeline_name }} {{ cookiecutter.pipeline_short_description }} -[![Build Status](https://travis-ci.org/{{ cookiecutter.github_repo }}.svg?branch=master)](https://travis-ci.org/{{ 
cookiecutter.github_repo }}) +[![Build Status](https://travis-ci.org/nf-core/{{ cookiecutter.pipeline_name }}.svg?branch=master)](https://travis-ci.org/nf-core/{{ cookiecutter.pipeline_name }}) [![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.30.0-brightgreen.svg)](https://www.nextflow.io/) [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](http://bioconda.github.io/) -[![Docker](https://img.shields.io/docker/automated/{{ cookiecutter.dockerhub_slug }}.svg)](https://hub.docker.com/r/{{ cookiecutter.dockerhub_slug }}) +[![Docker](https://img.shields.io/docker/automated/{{ cookiecutter.pipeline_slug }}.svg)](https://hub.docker.com/r/{{ cookiecutter.pipeline_slug }}) ![Singularity Container available]( https://img.shields.io/badge/singularity-available-7E4C74.svg) @@ -25,6 +25,3 @@ The {{ cookiecutter.pipeline_name }} pipeline comes with documentation about the 3. [Running the pipeline](docs/usage.md) 4. [Output and how to interpret the results](docs/output.md) 5. [Troubleshooting](docs/troubleshooting.md) - -### Credits -This pipeline was written by {{ cookiecutter.author_name }} ([{{ cookiecutter.author_github_username }}]({{ cookiecutter.author_homepage }})) at [{{ cookiecutter.author_affiliation }}]({{ cookiecutter.author_affiliation_homepage }}). 
diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity index 5a60a70ca8..15ab3945ea 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity @@ -2,7 +2,6 @@ From:nfcore/base Bootstrap:docker %labels - MAINTAINER {{ cookiecutter.author_name }} <{{ cookiecutter.author_email }}> DESCRIPTION Singularity image containing all requirements for {{ cookiecutter.pipeline_name }} pipeline VERSION {{ cookiecutter.version }} diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.html b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.html index 04fb453ee5..775aad0b4b 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.html +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.html @@ -44,7 +44,7 @@

Pipeline Configuration:

{{ cookiecutter.pipeline_name }}

-

{{ cookiecutter.pipeline_url }}

+

https://github.com/nf-core/{{ cookiecutter.pipeline_name }}

diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.txt b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.txt index e3aeb4a41e..19b196f997 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.txt +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.txt @@ -48,4 +48,4 @@ Pipeline Configuration: -- {{ cookiecutter.pipeline_name }} -{{ cookiecutter.pipeline_url }} +https://github.com/nf-core/{{ cookiecutter.pipeline_name }} diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/scrape_software_versions.py b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/scrape_software_versions.py index 3a8e5da69b..7ceaf3becf 100755 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/scrape_software_versions.py +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/scrape_software_versions.py @@ -27,7 +27,7 @@ print (''' id: '{{ cookiecutter.pipeline_name.lower().replace(' ', '-') }}-software-versions' section_name: '{{ cookiecutter.pipeline_name }} Software Versions' -section_href: '{{ cookiecutter.pipeline_url }}' +section_href: 'https://github.com/nf-core/{{ cookiecutter.pipeline_name }}' plot_type: 'html' description: 'are collected at run time from the software output.' data: | diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/multiqc_config.yaml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/multiqc_config.yaml index 616b35e4e1..f4c2ce9005 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/multiqc_config.yaml +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/multiqc_config.yaml @@ -1,7 +1,7 @@ report_comment: > - This report has been generated by the {{ cookiecutter.pipeline_name }} + This report has been generated by the {{ cookiecutter.pipeline_name }} analysis pipeline. 
For information about how to interpret these results, please see the - documentation. + documentation. report_section_order: {{ cookiecutter.pipeline_name.lower().replace(' ', '-') }}-software-versions: order: -1000 diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md index d06a986f8b..4f831d48f8 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md @@ -43,10 +43,10 @@ First, install docker on your system: [Docker Installation Instructions](https:/ Then, simply run the analysis pipeline: ```bash -nextflow run {{ cookiecutter.github_repo }} -profile docker --reads '' +nextflow run nf-core/{{ cookiecutter.pipeline_name }} -profile docker --reads '' ``` -Nextflow will recognise `{{ cookiecutter.github_repo }}` and download the pipeline from GitHub. The `-profile docker` configuration lists the [{{ cookiecutter.dockerhub_slug }}](https://hub.docker.com/r/{{ cookiecutter.dockerhub_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. +Nextflow will recognise `nf-core/{{ cookiecutter.pipeline_name }}` and download the pipeline from GitHub. The `-profile docker` configuration lists the [{{ cookiecutter.pipeline_slug }}](https://hub.docker.com/r/{{ cookiecutter.pipeline_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. The public docker images are tagged with the same version numbers as the code, which you can use to ensure reproducibility. When running the pipeline, specify the pipeline version with `-r`, for example `-r v1.3`. This uses pipeline code and docker image from this tagged version. 
@@ -63,12 +63,12 @@ process { The variable `wf_container` is defined dynamically and automatically specifies the image tag if Nextflow is running with `-r`. -A test suite for docker comes with the pipeline, and can be run by moving to the [`tests` directory](https://github.com/{{ cookiecutter.github_repo }}/tree/master/tests) and running `./run_test.sh`. This will download a small yeast genome and some data, and attempt to run the pipeline through docker on that small dataset. This is automatically run using [Travis](https://travis-ci.org/{{ cookiecutter.github_repo }}/) whenever changes are made to the pipeline. +A test suite for docker comes with the pipeline, and can be run by moving to the [`tests` directory](https://github.com/nf-core/{{ cookiecutter.pipeline_name }}/tree/master/tests) and running `./run_test.sh`. This will download a small yeast genome and some data, and attempt to run the pipeline through docker on that small dataset. This is automatically run using [Travis](https://travis-ci.org/nf-core/{{ cookiecutter.pipeline_name }}/) whenever changes are made to the pipeline. ### Singularity image Many HPC environments are not able to run Docker due to security issues. [Singularity](http://singularity.lbl.gov/) is a tool designed to run on such HPC systems which is very similar to Docker. Even better, it can use create images directly from dockerhub. -To use the singularity image for a single run, use `-with-singularity 'docker://{{ cookiecutter.github_repo }}'`. This will download the docker container from dockerhub and create a singularity image for you dynamically. +To use the singularity image for a single run, use `-with-singularity 'docker://nf-core/{{ cookiecutter.pipeline_name }}'`. This will download the docker container from dockerhub and create a singularity image for you dynamically. 
To specify singularity usage in your pipeline config file, add the following: @@ -88,7 +88,7 @@ If you intend to run the pipeline offline, nextflow will not be able to automati First, pull the image file where you have an internet connection: ```bash -singularity pull --name {{ cookiecutter.pipeline_slug }}.img docker://{{ cookiecutter.github_repo }} +singularity pull --name {{ cookiecutter.pipeline_slug }}.img docker://nf-core/{{ cookiecutter.pipeline_name }} ``` Then transfer this file and run the pipeline with this path: diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md index 116d8e41f1..7ea5364c96 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md @@ -11,10 +11,10 @@ First, install docker on your system: [Docker Installation Instructions](https:/ Then, simply run the analysis pipeline: ```bash -nextflow run {{ cookiecutter.github_repo }} -profile docker --reads '' +nextflow run nf-core/{{ cookiecutter.pipeline_name }} -profile docker --reads '' ``` -Nextflow will recognise `{{ cookiecutter.github_repo }}` and download the pipeline from GitHub. The `-profile docker` configuration lists the [{{ cookiecutter.dockerhub_slug }}](https://hub.docker.com/r/{{ cookiecutter.dockerhub_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. +Nextflow will recognise `nf-core/{{ cookiecutter.pipeline_name }}` and download the pipeline from GitHub. The `-profile docker` configuration lists the [{{ cookiecutter.pipeline_slug }}](https://hub.docker.com/r/{{ cookiecutter.pipeline_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. 
For more information about how to work with reference genomes, see [`docs/configuration/reference_genomes.md`](docs/configuration/reference_genomes.md). @@ -25,14 +25,14 @@ The public docker images are tagged with the same version numbers as the code, w ## Singularity image Many HPC environments are not able to run Docker due to security issues. [Singularity](http://singularity.lbl.gov/) is a tool designed to run on such HPC systems which is very similar to Docker. Even better, it can use create images directly from dockerhub. -To use the singularity image for a single run, use `-with-singularity 'docker://{{ cookiecutter.dockerhub_slug }}'`. This will download the docker container from dockerhub and create a singularity image for you dynamically. +To use the singularity image for a single run, use `-with-singularity 'docker://{{ cookiecutter.pipeline_slug }}'`. This will download the docker container from dockerhub and create a singularity image for you dynamically. If you intend to run the pipeline offline, nextflow will not be able to automatically download the singularity image for you. Instead, you'll have to do this yourself manually first, transfer the image file and then point to that. 
First, pull the image file where you have an internet connection: ```bash -singularity pull --name {{ cookiecutter.pipeline_slug }}.img docker://{{ cookiecutter.dockerhub_slug }} +singularity pull --name {{ cookiecutter.pipeline_slug }}.img docker://{{ cookiecutter.pipeline_slug }} ``` Then transfer this file and run the pipeline with this path: diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md index e6ac104b66..838a67d5ea 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md @@ -29,14 +29,14 @@ mv nextflow ~/bin/ See [nextflow.io](https://www.nextflow.io/) and [NGI-NextflowDocs](https://github.com/SciLifeLab/NGI-NextflowDocs) for further instructions on how to install and configure Nextflow. ## 2) Install the Pipeline -This pipeline itself needs no installation - NextFlow will automatically fetch it from GitHub if `{{ cookiecutter.github_repo }}` is specified as the pipeline name. +This pipeline itself needs no installation - NextFlow will automatically fetch it from GitHub if `nf-core/{{ cookiecutter.pipeline_name }}` is specified as the pipeline name. 
### Offline use If you need to run the pipeline on a system with no internet connection, you will need to download the files yourself from GitHub and run them directly: ```bash -wget https://github.com/{{ cookiecutter.github_repo }}/archive/master.zip +wget https://github.com/nf-core/{{ cookiecutter.pipeline_name }}/archive/master.zip unzip master.zip -d /my-pipelines/ cd /my_data/ nextflow run /my-pipelines/{{ cookiecutter.pipeline_slug }}-master diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md index 83ca907cfb..305cedece0 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md @@ -23,6 +23,6 @@ The pipeline can't take a list of multiple input files - it takes a glob express ## Extra resources and getting help If you still have an issue with running the pipeline then feel free to contact us. -Have look at the [pipeline website]({{ cookiecutter.pipeline_url }}) to find out how. +Have look at the [pipeline website](https://github.com/nf-core/{{ cookiecutter.pipeline_name }}) to find out how. If you have problems that are related to Nextflow and not our pipeline then check out the [Nextflow gitter channel](https://gitter.im/nextflow-io/nextflow) or the [google group](https://groups.google.com/forum/#!forum/nextflow). 
diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md index b82fd28331..62bf0fe2e6 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md @@ -12,7 +12,7 @@ NXF_OPTS='-Xms1g -Xmx4g' ## Running the pipeline The typical command for running the pipeline is as follows: ```bash -nextflow run {{ cookiecutter.github_repo }} --reads '*_R{1,2}.fastq.gz' -profile docker +nextflow run nf-core/{{ cookiecutter.pipeline_name }} --reads '*_R{1,2}.fastq.gz' -profile docker ``` This will launch the pipeline with the `docker` configuration profile. See below for more information about profiles. @@ -30,13 +30,13 @@ results # Finished results (configurable, see below) When you run the above command, Nextflow automatically pulls the pipeline code from GitHub and stores it as a cached version. When running the pipeline after this, it will always use the cached version if available - even if the pipeline has been updated since. To make sure that you're running the latest version of the pipeline, make sure that you regularly update the cached version of the pipeline: ```bash -nextflow pull {{ cookiecutter.github_repo }} +nextflow pull nf-core/{{ cookiecutter.pipeline_name }} ``` ### Reproducibility It's a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. -First, go to the [{{ cookiecutter.pipeline_name }} releases page](https://github.com/{{ cookiecutter.github_repo }}/releases) and find the latest version number - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. 
`-r 1.3.1`. +First, go to the [{{ cookiecutter.pipeline_name }} releases page](https://github.com/nf-core/{{ cookiecutter.pipeline_name }}/releases) and find the latest version number - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. @@ -48,7 +48,7 @@ Use this parameter to choose a configuration profile. Each profile is designed f * `docker` * A generic configuration profile to be used with [Docker](http://docker.com/) - * Runs using the `local` executor and pulls software from dockerhub: [`{{ cookiecutter.dockerhub_slug }}`](http://hub.docker.com/r/{{ cookiecutter.dockerhub_slug }}/) + * Runs using the `local` executor and pulls software from dockerhub: [`{{ cookiecutter.pipeline_slug }}`](http://hub.docker.com/r/{{ cookiecutter.pipeline_slug }}/) * `aws` * A starter configuration for running the pipeline on Amazon Web Services. Uses docker and Spark. * See [`docs/configuration/aws.md`](configuration/aws.md) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf index 6d6f628a69..3d63af4310 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf @@ -3,11 +3,9 @@ ======================================================================================== {{ cookiecutter.pipeline_name }} ======================================================================================== - {{ cookiecutter.pipeline_name }} Analysis Pipeline. Started {% now 'local' %}. + {{ cookiecutter.pipeline_name }} Analysis Pipeline. 
#### Homepage / Documentation - {{ cookiecutter.pipeline_url }} - #### Authors - {{ cookiecutter.author_name }} {{ cookiecutter.author_github_username }} <{{ cookiecutter.author_email }}> - {{ cookiecutter.author_homepage }}> + https://github.com/nf-core/{{ cookiecutter.pipeline_name }} ---------------------------------------------------------------------------------------- */ @@ -21,7 +19,7 @@ def helpMessage() { The typical command for running the pipeline is as follows: - nextflow run {{ cookiecutter.github_repo }} --reads '*_R{1,2}.fastq.gz' -profile docker + nextflow run nf-core/{{ cookiecutter.pipeline_name }} --reads '*_R{1,2}.fastq.gz' -profile docker Mandatory arguments: --reads Path to input data (must be surrounded with quotes) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config index 84cffc0eb0..b378fd165b 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config @@ -12,7 +12,7 @@ params { version = "{{ cookiecutter.version }}" // Pipeline version - container = '{{ cookiecutter.dockerhub_slug }}:{{ cookiecutter.version }}' // Container slug. Stable releases should specify release tag! + container = '{{ cookiecutter.pipeline_slug }}:{{ cookiecutter.version }}' // Container slug. Stable releases should specify release tag! 
nf_required_version = '0.30.0' // Minimum version of nextflow required help = false @@ -69,7 +69,7 @@ dag { } manifest { - homePage = '{{ cookiecutter.pipeline_url }}' + homePage = 'https://github.com/nf-core/{{ cookiecutter.pipeline_name }}' description = '{{ cookiecutter.pipeline_short_description }}' mainScript = 'main.nf' } diff --git a/scripts/nf-core b/scripts/nf-core index 70181ba2e5..6f95d456bf 100755 --- a/scripts/nf-core +++ b/scripts/nf-core @@ -3,7 +3,7 @@ from __future__ import print_function -from cookiecutter.main import cookiecutter +import cookiecutter.main, cookiecutter.exceptions import click import sys import os @@ -109,32 +109,37 @@ def release(pipeline_dir, new_version): @nf_core_cli.command() @click.option( - '--no-input', - is_flag = True, - default = False, - help = "Do not prompt for parameters and only use default values" -) -@click.option( - '-f', '--force', - is_flag = True, - default = False, - help = "Overwrite the contents of the output directory if it already exists" + '-n', '--name', + required = True, + type = str, + help = 'The name of your new pipeline' ) @click.option( - '-o', '--outdir', + '-d', '--description', + required = True, type = str, - help = "Where to output the generated project dir into" + help = 'A short description of your pipeline' ) @click.option( - '-c', '--config-file', - type = click.Path(exists=True), - help = "User configuration file" + '-f', '--force', + is_flag = True, + default = False, + help = "Overwrite output directory if it already exists" ) -def init(no_input, force, outdir, config_file): +def init(name, description, force): """ Create a new pipeline using the nf-core template """ - # nf_core.cookiecutter(no_input, overwrite_if_exists, outdir, config_file) - ccdir = os.path.join(os.path.dirname(os.path.realpath(nf_core.__file__)), 'pipeline-template/') - cookiecutter(ccdir) + logging.info("Initialising new nf-core pipeline: {}".format(name)) + template = 
os.path.join(os.path.dirname(os.path.realpath(nf_core.__file__)), 'pipeline-template/') + try: + cookiecutter.main.cookiecutter ( + template, + extra_context={'pipeline_name':name, 'pipeline_short_description':description}, + no_input=True, + overwrite_if_exists=force + ) + except (cookiecutter.exceptions.OutputDirExistsException) as e: + logging.error(e) + logging.info("Use -f / --force to overwrite existing files") if __name__ == '__main__': From e531afdda00506d7541b197d1bc9629150990f1f Mon Sep 17 00:00:00 2001 From: sven1103 Date: Wed, 8 Aug 2018 11:49:43 +0200 Subject: [PATCH 05/75] Provide helper script for template changes This script should be executed, once a new release is done for nf-core tools. It will traverse all pipelines and make changes according to the cookiecutter template and commit them into each pipelines branch 'TEMPLATE'. --- bin/broadcast_prs.py | 114 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 114 insertions(+) create mode 100755 bin/broadcast_prs.py diff --git a/bin/broadcast_prs.py b/bin/broadcast_prs.py new file mode 100755 index 0000000000..c5ba79f7a0 --- /dev/null +++ b/bin/broadcast_prs.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python + +import os +import sys +import subprocess +from cookiecutter.main import cookiecutter +import requests +from requests.auth import HTTPBasicAuth +import json + +GH_BASE_URL = "https://github.com/nf-core" +NF_CORE_TEMPLATE = "https://github.com/nf-core/cookiecutter.git" +# The JSON file is updated on every push event on the nf-core GitHub +# project +NF_CORE_PIPELINE_INFO = "http://nf-co.re/pipelines.json" +GITHUB_PR_URL_TEMPL = "https://api.github.com/repos/nf-core/{pipeline}/pulls" + + +class UpdateTemplate: + """Updates the template content of an nf-core pipeline in + its `TEMPLATE` branch. 
+ + Args: - pipeline: The pipeline name + - context: a parsed dictionary of a cookiecutter.json file + - branch: The template branch name, default=`TEMPLATE` + """ + + def __init__(self, pipeline, context, branch='TEMPLATE'): + """Basic constructor + """ + self.pipeline = pipeline + self.repo_url = "{base_url}/{pipeline}".format( + base_url=GH_BASE_URL, + pipeline=pipeline) + self.context = context + self.branch = branch + + + def run(self): + """Execute the template update. + """ + self._clone_repo() + self._apply_changes() + self._commit_changes() + + def _clone_repo(self): + """Clone the repo and switch to the configured branch. + """ + subprocess.run(["git", "clone", self.repo_url, "-b", self.branch, self.pipeline]) + + def _apply_changes(self): + """Apply the changes of the cookiecutter template + to the pipelines template branch. + """ + cookiecutter(NF_CORE_TEMPLATE, + no_input=True, + extra_context=None, + overwrite_if_exists=True, + output_dir=self.pipeline) + + def _commit_changes(self): + """Commits the changes of the new template to the current branch. + """ + subprocess.run(["git", "add", "-A", "."], cwd=self.pipeline) + subprocess.run(["git", "commit", "-m", "Update nf-core template"], cwd=self.pipeline) + + +def create_pullrequest(pipeline, origin="master", template="TEMPLATE", token="", user="nf-core"): + """Create a pull request to a base branch (default: master), + from a head branch (default: TEMPLATE) + + Returns: An instance of class requests.Response + """ + content = {} + content['title'] = "Important pipeline nf-core update!" + content['body'] = "Some important changes have been made in the nf-core pipelines templates.\n" + + "Please make sure to merge this in ASAP and make a new minor release of your pipeline." 
+ content['head'] = "{}:{}".format(pipeline, template) + content['base'] = master + return requests.post(url=GITHUB_PR_URL_TEMPL.format(pipeline=pipeline), + data=json.dumps(content) + auth=HTTPBasicAuth(user, token)) + +def get_context(pipeline): + """Get the template context for a given pipeline. + + Returns: A context dictionary + """ + pass + +def main(): + res = requests.get(NF_CORE_PIPELINE_INFO) + pipelines = json.loads(res.content).get('remote_workflows') + if not pipelines: + print("Pipeline information was empty!") + for pipeline in pipelines: + # Get context from pipeline and load it into a dictionary + # context = load_context(pipeline) + print(pipeline['name']) # Just for testing, can be safely deleted + ut.UpdateTemplate(pipeline['name'], context) + + for pipeline in pipelines: + print("Trying to open pull request for pipeline {}...".format(pipeline['name'])) + response = create_pullrequest(pipeline['name']) + if response.status_code != 201: + print("Pull-request for pipeline \'{pipeline}\' failed," + " got return code {return_code}." 
+ .format(pipeline=pipeline, return_code=response.status_code)) + else: + print("Created pull-request for pipeline \'{pipeline}\' successfully.".format(pipeline=pipeline)) + +if __name__ == "__main__": + main() + From 9b1f5f33ec219bb0aaac67367b0b2be03c7b1d81 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 8 Aug 2018 11:53:05 +0200 Subject: [PATCH 06/75] Added git init to cookiecutter * Added new command line option to specify version for new pipeline * Split functionality out into its own file * Wrote feature to initialise a git repository and make a commit in new pipeline --- nf_core/init.py | 49 +++++++++++++++++++++ nf_core/pipeline-template/cookiecutter.json | 2 +- scripts/nf-core | 30 ++++++------- 3 files changed, 65 insertions(+), 16 deletions(-) create mode 100644 nf_core/init.py diff --git a/nf_core/init.py b/nf_core/init.py new file mode 100644 index 0000000000..4b584b1dbe --- /dev/null +++ b/nf_core/init.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python +""" Release code for the nf-core python package. + +Bumps the version number in all appropriate files for +a nf-core pipeline +""" + +import cookiecutter.main, cookiecutter.exceptions +import git +import logging +import os +import re + +import nf_core + +def init_pipeline(name, description, new_version='1.0dev', no_git=False, force=False): + """Function to init a new pipeline. 
Called by the main cli""" + + # Make the new pipeline + run_cookiecutter(name, description, new_version, force) + + # Init the git repository and make the first commit + if not no_git: + git_init_pipeline(name) + +def run_cookiecutter(name, description, new_version='1.0dev', force=False): + """Run cookiecutter to create a new pipeline""" + + logging.info("Creating new nf-core pipeline: {}".format(name)) + template = os.path.join(os.path.dirname(os.path.realpath(nf_core.__file__)), 'pipeline-template/') + try: + cookiecutter.main.cookiecutter ( + template, + extra_context={'pipeline_name':name, 'pipeline_short_description':description, 'version':new_version}, + no_input=True, + overwrite_if_exists=force + ) + except (cookiecutter.exceptions.OutputDirExistsException) as e: + logging.error(e) + logging.info("Use -f / --force to overwrite existing files") + +def git_init_pipeline(name): + """Initialise the new pipeline as a git repo and make first commit""" + logging.info("Initialising pipeline git repository") + pipeline_dir = os.path.join(os.getcwd(), name) + repo = git.Repo.init(pipeline_dir) + repo.git.add(A=True) + repo.index.commit("initial commit") + logging.info("Done. 
Remember to add a remote and push to GitHub!") diff --git a/nf_core/pipeline-template/cookiecutter.json b/nf_core/pipeline-template/cookiecutter.json index 6680606540..197784e282 100644 --- a/nf_core/pipeline-template/cookiecutter.json +++ b/nf_core/pipeline-template/cookiecutter.json @@ -2,5 +2,5 @@ "pipeline_name": "example", "pipeline_short_description": "This pipeline takes some data and does something with it.", "pipeline_slug": "{{ cookiecutter.pipeline_name.lower().replace(' ', '-').replace('/', '-') }}", - "version": "0.1.0" + "version": "1.0dev" } diff --git a/scripts/nf-core b/scripts/nf-core index 6f95d456bf..ef16ba09f5 100755 --- a/scripts/nf-core +++ b/scripts/nf-core @@ -3,13 +3,12 @@ from __future__ import print_function -import cookiecutter.main, cookiecutter.exceptions import click import sys import os import nf_core -import nf_core.lint, nf_core.list, nf_core.download, nf_core.release +import nf_core.lint, nf_core.list, nf_core.download, nf_core.release, nf_core.init import logging @@ -120,26 +119,27 @@ def release(pipeline_dir, new_version): type = str, help = 'A short description of your pipeline' ) +@click.option( + '--new-version', + type = str, + default = '1.0dev', + help = 'The initial version number to use' +) +@click.option( + '--no-git', + is_flag = True, + default = False, + help = "Do not initialise pipeline as new git repository" +) @click.option( '-f', '--force', is_flag = True, default = False, help = "Overwrite output directory if it already exists" ) -def init(name, description, force): +def init(name, description, new_version, no_git, force): """ Create a new pipeline using the nf-core template """ - logging.info("Initialising new nf-core pipeline: {}".format(name)) - template = os.path.join(os.path.dirname(os.path.realpath(nf_core.__file__)), 'pipeline-template/') - try: - cookiecutter.main.cookiecutter ( - template, - extra_context={'pipeline_name':name, 'pipeline_short_description':description}, - no_input=True, - 
overwrite_if_exists=force - ) - except (cookiecutter.exceptions.OutputDirExistsException) as e: - logging.error(e) - logging.info("Use -f / --force to overwrite existing files") + nf_core.init.init_pipeline(name, description, new_version, no_git, force) if __name__ == '__main__': From a993d855fe5e2cd10c7cde6a33d9f47c56f060d7 Mon Sep 17 00:00:00 2001 From: sven1103 Date: Wed, 8 Aug 2018 11:57:47 +0200 Subject: [PATCH 07/75] Perform some small refactoring --- bin/broadcast_prs.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/bin/broadcast_prs.py b/bin/broadcast_prs.py index c5ba79f7a0..752f497c75 100755 --- a/bin/broadcast_prs.py +++ b/bin/broadcast_prs.py @@ -1,18 +1,20 @@ #!/usr/bin/env python -import os -import sys -import subprocess from cookiecutter.main import cookiecutter +import json +import os import requests from requests.auth import HTTPBasicAuth -import json +import sys +import subprocess +# The GitHub base url or the nf-core project GH_BASE_URL = "https://github.com/nf-core" +# The current cookiecutter template url for nf-core pipelines NF_CORE_TEMPLATE = "https://github.com/nf-core/cookiecutter.git" -# The JSON file is updated on every push event on the nf-core GitHub -# project +# The JSON file is updated on every push event on the nf-core GitHub project NF_CORE_PIPELINE_INFO = "http://nf-co.re/pipelines.json" +# The API endpoint for creating pull requests GITHUB_PR_URL_TEMPL = "https://api.github.com/repos/nf-core/{pipeline}/pulls" @@ -34,7 +36,6 @@ def __init__(self, pipeline, context, branch='TEMPLATE'): pipeline=pipeline) self.context = context self.branch = branch - def run(self): """Execute the template update. 
From 9b4ef3ec79d25e299bb1af5b9c0475d677484cb5 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 8 Aug 2018 12:17:46 +0200 Subject: [PATCH 08/75] Don't use reserved name init Instead call the command nf-core create --- nf_core/{init.py => create.py} | 0 scripts/nf-core | 6 +++--- 2 files changed, 3 insertions(+), 3 deletions(-) rename nf_core/{init.py => create.py} (100%) diff --git a/nf_core/init.py b/nf_core/create.py similarity index 100% rename from nf_core/init.py rename to nf_core/create.py diff --git a/scripts/nf-core b/scripts/nf-core index ef16ba09f5..cab2878263 100755 --- a/scripts/nf-core +++ b/scripts/nf-core @@ -8,7 +8,7 @@ import sys import os import nf_core -import nf_core.lint, nf_core.list, nf_core.download, nf_core.release, nf_core.init +import nf_core.lint, nf_core.list, nf_core.download, nf_core.release, nf_core.create import logging @@ -137,9 +137,9 @@ def release(pipeline_dir, new_version): default = False, help = "Overwrite output directory if it already exists" ) -def init(name, description, new_version, no_git, force): +def create(name, description, new_version, no_git, force): """ Create a new pipeline using the nf-core template """ - nf_core.init.init_pipeline(name, description, new_version, no_git, force) + nf_core.create.init_pipeline(name, description, new_version, no_git, force) if __name__ == '__main__': From 98209b7e07960cc987f859f30d9c0f6192524c4f Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 8 Aug 2018 12:22:12 +0200 Subject: [PATCH 09/75] Remove redundant config linesW --- .../{{cookiecutter.pipeline_slug}}/nextflow.config | 2 -- 1 file changed, 2 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config index b378fd165b..1ba01cb124 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config @@ -19,8 
+19,6 @@ params { reads = "data/*{1,2}.fastq.gz" singleEnd = false outdir = './results' - version = version - nf_required_version = nf_required_version igenomes_base = "./iGenomes" clusterOptions = false } From 913bb5cd3a9350d1fa10e985b2c1ea4faa4eae46 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 8 Aug 2018 12:26:42 +0200 Subject: [PATCH 10/75] Changelog update --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6552db137c..8dbdf8fca8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,8 @@ ## v1.1dev * Improved Python readme parsing for PyPI * Update linting and release tools to support new style of Docker & Singularity conda installations +* Merged the cookiecutter template into this tools package +* Added new subcommand to initialise a new pipeline with a local git repo and an initial commit ## [v1.0.1](https://github.com/nf-core/tools/releases/tag/1.0.1) - 2018-07-18 From 2859f7fe15e674a01fb18c61222c50e889414046 Mon Sep 17 00:00:00 2001 From: Maxime Garcia Date: Wed, 8 Aug 2018 13:05:27 +0200 Subject: [PATCH 11/75] add templates for community requirements cf #87 --- .../.github/CONTRIBUTING.md | 25 ++++++++++ .../.github/bug_report.md | 29 ++++++++++++ .../.github/feature_request.md | 29 ++++++++++++ .../.github/pull_request.md | 8 ++++ .../CODE_OF_CONDUCT.md | 46 +++++++++++++++++++ 5 files changed, 137 insertions(+) create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/bug_report.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CODE_OF_CONDUCT.md diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md 
b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md new file mode 100644 index 0000000000..9fa1dc6abb --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md @@ -0,0 +1,25 @@ +# {{ cookiecutter.pipeline_slug }} Contributing Guidelines + +Hi there! Many thanks for taking an interest in improving {{ cookiecutter.pipeline_slug }}. + +We try to manage the required tasks for {{ cookiecutter.pipeline_slug }} using GitHub issues, you probably came to this page when creating one. Please use the prefilled template to save time. + +However, don't be put off by this template - other more general issues and suggestions are welcome! Contributions to the code are even more welcome ;) + +> If you need help using {{ cookiecutter.pipeline_slug }} then the best place to go is the Gitter chatroom where you can ask us questions directly: https://gitter.im/nf-core/Lobby + +## Contribution workflow +If you'd like to write some code for {{ cookiecutter.pipeline_slug }}, the standard workflow +is as follows: + +1. Check that there isn't already an issue about your idea in the + [{{ cookiecutter.pipeline_slug }} issues](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}/issues) to avoid + duplicating work. + * Feel free to add a new issue here for the same reason. +2. Fork the {{ cookiecutter.pipeline_slug }} repository to your GitHub account +3. Make the necessary changes / additions within your forked repository +4. Submit a Pull Request against the master branch and wait for the code to be reviewed and merged. + +If you're not used to this workflow with git, you can start with some [basic docs from GitHub](https://help.github.com/articles/fork-a-repo/) or even their [excellent interactive tutorial](https://try.github.io/). 
+ +For further information/help, please consult the [{{ cookiecutter.pipeline_slug }} documentation](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}README.md#documentation) and don't hesitate to get in touch on [Gitter](https://gitter.im/nf-core/Lobby) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/bug_report.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/bug_report.md new file mode 100644 index 0000000000..d0405d120a --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/bug_report.md @@ -0,0 +1,29 @@ +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Command line '...' +2. See error **Please provide your error message** + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**System (please complete the following information):** + - Hardware: [e.g. HPC, Desktop, Cloud...] + - Executor: [e.g. slurm, local, awsbatch...] + - OS: [e.g. CentOS Linux, macOS, Linux Mint...] + - Version [e.g. 7, 10.13.6, 18.3...] + +**Nextflow (please complete the following information):** + - Version: [e.g. 0.31.0] + +**Container engine (please complete the following information):** + - Engine: [e.g. Conda, Docker or Singularity] + - version: [e.g. 1.0.0] + +**Container (please complete the following information):** + - tag: [e.g. 1.0.0] + +**Additional context** +Add any other context about the problem here. diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md new file mode 100644 index 0000000000..d0405d120a --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md @@ -0,0 +1,29 @@ +**Describe the bug** +A clear and concise description of what the bug is. 
+ +**To Reproduce** +Steps to reproduce the behavior: +1. Command line '...' +2. See error **Please provide your error message** + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**System (please complete the following information):** + - Hardware: [e.g. HPC, Desktop, Cloud...] + - Executor: [e.g. slurm, local, awsbatch...] + - OS: [e.g. CentOS Linux, macOS, Linux Mint...] + - Version [e.g. 7, 10.13.6, 18.3...] + +**Nextflow (please complete the following information):** + - Version: [e.g. 0.31.0] + +**Container engine (please complete the following information):** + - Engine: [e.g. Conda, Docker or Singularity] + - version: [e.g. 1.0.0] + +**Container (please complete the following information):** + - tag: [e.g. 1.0.0] + +**Additional context** +Add any other context about the problem here. diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md new file mode 100644 index 0000000000..12a9c8eb7e --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md @@ -0,0 +1,8 @@ +**Before submitting a pull request,** please make sure the following is done: + +1. Fork [the repository](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}) and create your branch from `master`. +2. If you've fixed a bug or added code that should be tested, add tests! +3. Ensure the test suite passes (`nextflow run . -profile test,docker`). +4. Make sure your code lints (`nf-core lint`). 
+ +**Learn more about contributing:** https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}/.github/CONTRIBUTING.md diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CODE_OF_CONDUCT.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..21096193ad --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CODE_OF_CONDUCT.md @@ -0,0 +1,46 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team on the [Gitter channel](https://gitter.im/nf-core/Lobby). The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 
+ +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ From 8c773bc6bed33e2819ae0d5e90a4aebaec15fc5f Mon Sep 17 00:00:00 2001 From: Maxime Garcia Date: Wed, 8 Aug 2018 13:22:08 +0200 Subject: [PATCH 12/75] add nf-core to cookiecutter.pipeline_slug + some typos --- .../.github/CONTRIBUTING.md | 8 ++--- .../.github/feature_request.md | 32 ++++--------------- .../.github/pull_request.md | 2 +- 3 files changed, 12 insertions(+), 30 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md index 9fa1dc6abb..d88ada68a9 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md @@ -1,4 +1,4 @@ -# {{ cookiecutter.pipeline_slug }} Contributing Guidelines +# nf-core/{{ cookiecutter.pipeline_slug }} Contributing Guidelines Hi there! Many thanks for taking an interest in improving {{ cookiecutter.pipeline_slug }}. @@ -13,13 +13,13 @@ If you'd like to write some code for {{ cookiecutter.pipeline_slug }}, the stand is as follows: 1. Check that there isn't already an issue about your idea in the - [{{ cookiecutter.pipeline_slug }} issues](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}/issues) to avoid + [nf-core/{{ cookiecutter.pipeline_slug }} issues](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}/issues) to avoid duplicating work. * Feel free to add a new issue here for the same reason. -2. Fork the {{ cookiecutter.pipeline_slug }} repository to your GitHub account +2. 
Fork the [nf-core/{{ cookiecutter.pipeline_slug }} repository](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}) to your GitHub account 3. Make the necessary changes / additions within your forked repository 4. Submit a Pull Request against the master branch and wait for the code to be reviewed and merged. If you're not used to this workflow with git, you can start with some [basic docs from GitHub](https://help.github.com/articles/fork-a-repo/) or even their [excellent interactive tutorial](https://try.github.io/). -For further information/help, please consult the [{{ cookiecutter.pipeline_slug }} documentation](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}README.md#documentation) and don't hesitate to get in touch on [Gitter](https://gitter.im/nf-core/Lobby) +For further information/help, please consult the [nf-core/{{ cookiecutter.pipeline_slug }} documentation](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}/README.md#documentation) and don't hesitate to get in touch on [Gitter](https://gitter.im/nf-core/Lobby) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md index d0405d120a..de51af1ec8 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md @@ -1,29 +1,11 @@ -**Describe the bug** -A clear and concise description of what the bug is. +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] -**To Reproduce** -Steps to reproduce the behavior: -1. Command line '...' -2. See error **Please provide your error message** +**Describe the solution you'd like** +A clear and concise description of what you want to happen. 
-**Expected behavior** -A clear and concise description of what you expected to happen. - -**System (please complete the following information):** - - Hardware: [e.g. HPC, Desktop, Cloud...] - - Executor: [e.g. slurm, local, awsbatch...] - - OS: [e.g. CentOS Linux, macOS, Linux Mint...] - - Version [e.g. 7, 10.13.6, 18.3...] - -**Nextflow (please complete the following information):** - - Version: [e.g. 0.31.0] - -**Container engine (please complete the following information):** - - Engine: [e.g. Conda, Docker or Singularity] - - version: [e.g. 1.0.0] - -**Container (please complete the following information):** - - tag: [e.g. 1.0.0] +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. **Additional context** -Add any other context about the problem here. +Add any other context about the feature request here. diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md index 12a9c8eb7e..c961a1e71b 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md @@ -1,6 +1,6 @@ **Before submitting a pull request,** please make sure the following is done: -1. Fork [the repository](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}) and create your branch from `master`. +1. Fork the [nf-core/{{ cookiecutter.pipeline_slug }} repository](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}) and create your branch from `master`. 2. If you've fixed a bug or added code that should be tested, add tests! 3. Ensure the test suite passes (`nextflow run . -profile test,docker`). 4. Make sure your code lints (`nf-core lint`). 
From fee2facb1646c2fbd7363f5219375545e7f0e36a Mon Sep 17 00:00:00 2001 From: Maxime Garcia Date: Wed, 8 Aug 2018 13:25:41 +0200 Subject: [PATCH 13/75] forgot some nf-core ... --- .../.github/CONTRIBUTING.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md index d88ada68a9..461168d373 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md @@ -1,15 +1,15 @@ # nf-core/{{ cookiecutter.pipeline_slug }} Contributing Guidelines -Hi there! Many thanks for taking an interest in improving {{ cookiecutter.pipeline_slug }}. +Hi there! Many thanks for taking an interest in improving nf-core/{{ cookiecutter.pipeline_slug }}. -We try to manage the required tasks for {{ cookiecutter.pipeline_slug }} using GitHub issues, you probably came to this page when creating one. Please use the prefilled template to save time. +We try to manage the required tasks for nf-core/{{ cookiecutter.pipeline_slug }} using GitHub issues, you probably came to this page when creating one. Please use the prefilled template to save time. However, don't be put off by this template - other more general issues and suggestions are welcome! 
Contributions to the code are even more welcome ;) -> If you need help using {{ cookiecutter.pipeline_slug }} then the best place to go is the Gitter chatroom where you can ask us questions directly: https://gitter.im/nf-core/Lobby +> If you need help using nf-core/{{ cookiecutter.pipeline_slug }} then the best place to go is the Gitter chatroom where you can ask us questions directly: https://gitter.im/nf-core/Lobby ## Contribution workflow -If you'd like to write some code for {{ cookiecutter.pipeline_slug }}, the standard workflow +If you'd like to write some code for nf-core/{{ cookiecutter.pipeline_slug }}, the standard workflow is as follows: 1. Check that there isn't already an issue about your idea in the From 4f12d3163ed026dd42680d1c9821a740ea23d9b5 Mon Sep 17 00:00:00 2001 From: Maxime Garcia Date: Wed, 8 Aug 2018 13:37:18 +0200 Subject: [PATCH 14/75] improve PR template --- .../.github/pull_request.md | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md index c961a1e71b..32a719638d 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md @@ -1,8 +1,14 @@ -**Before submitting a pull request,** please make sure the following is done: +Many thanks for contributing to nf-core/{{ cookiecutter.pipeline_slug }}! -1. Fork the [nf-core/{{ cookiecutter.pipeline_slug }} repository](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}) and create your branch from `master`. -2. If you've fixed a bug or added code that should be tested, add tests! -3. Ensure the test suite passes (`nextflow run . -profile test,docker`). -4. Make sure your code lints (`nf-core lint`). +Please fill in the appropriate checklist below (delete whatever is not relevant). 
These are the most common things requested on pull requests (PRs). + +## PR checklist + - [ ] This comment contains a description of changes (with reason) + - [ ] If you've fixed a bug or added code that should be tested, add tests! + - [ ] If necessary, also make a PR on the [{{ cookiecutter.pipeline_slug }} branch on the nf-core/test-datasets repo]( https://github.com/nf-core/test-datasets/pull/new/{{ cookiecutter.pipeline_slug }}) + - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker`). + - [ ] Make sure your code lints (`nf-core lint .`). + - [ ] `CHANGELOG.md` is updated + - [ ] `README.md` is updated **Learn more about contributing:** https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}/.github/CONTRIBUTING.md From fee2facb1646c2fbd7363f5219375545e7f0e36a Mon Sep 17 00:00:00 2001 From: jemten Date: Wed, 8 Aug 2018 14:39:55 +0200 Subject: [PATCH 15/75] add withName syntax, fixes #100 --- .../{{cookiecutter.pipeline_slug}}/conf/base.config | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config index 97b3aa6af4..abef183e60 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config @@ -22,10 +22,10 @@ process { maxErrors = '-1' // Process-specific resource requirements - $fastqc { + withName: fastqc { errorStrategy = { task.exitStatus in [143,137] ? 'retry' : 'ignore' } } - $multiqc { + withName: multiqc { executor = 'local' errorStrategy = { task.exitStatus in [143,137] ? 
'retry' : 'ignore' } } From 188b5cb9a4422c5f2239afaa6653d631c09cb408 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 8 Aug 2018 15:01:51 +0200 Subject: [PATCH 16/75] Use new manifest.nextflowVersion instead of params.minNextflowVersion --- docs/lint_errors.md | 11 ++++--- nf_core/lint.py | 32 ++++++++++++------- .../environment.yml | 2 +- .../{{cookiecutter.pipeline_slug}}/main.nf | 16 ---------- .../nextflow.config | 2 +- .../failing_example/nextflow.config | 1 + .../minimal_working_example/environment.yml | 2 +- .../minimal_working_example/nextflow.config | 3 +- tests/test_lint.py | 18 +++++------ 9 files changed, 40 insertions(+), 47 deletions(-) diff --git a/docs/lint_errors.md b/docs/lint_errors.md index 9b6984f2a3..3de1aabd31 100644 --- a/docs/lint_errors.md +++ b/docs/lint_errors.md @@ -59,11 +59,12 @@ The following variables fail the test if missing: * `params.version` * The version of this pipeline. This should correspond to a [GitHub release](https://help.github.com/articles/creating-releases/). -* `params.nf_required_version` - * The minimum version of Nextflow required to run the pipeline. - * This should correspond to the `NXF_VER` version tested by Travis. * `params.outdir` * A directory in which all pipeline results should be saved +* `manifest.nextflowVersion` + * The minimum version of Nextflow required to run the pipeline. + * Should `>=` a version number, eg. `>=0.31.0` + * This should correspond to the `NXF_VER` version tested by Travis. 
* `manifest.description` * A description of the pipeline * `manifest.homePage` @@ -99,14 +100,14 @@ This test fails if the following happens: * `.travis.yml` does not contain the string `nf-core lint ${TRAVIS_BUILD_DIR}` under `script` * `.travis.yml` does not contain the string `docker pull ` under `before_install` * Where `` is fetched from `params.container` in the `nextflow.config` file -* `.travis.yml` does not test the Nextflow version specified in the pipeline as `nf_required_version` +* `.travis.yml` does not test the Nextflow version specified in the pipeline as `manifest.nextflowVersion` * This is expected in the `env` section of the config, eg: ```yaml env: - NXF_VER=0.27.0 - NXF_VER='' ``` - * At least one of these `NXF_VER` variables must match the `params.nf_required_version` version specified in the pipeline config + * At least one of these `NXF_VER` variables must match the `manifest.nextflowVersion` version specified in the pipeline config * Other variables can be specified on these lines as long as they are space separated. 
## Error #6 - Repository `README.md` tests ## {#6} diff --git a/nf_core/lint.py b/nf_core/lint.py index a074bfcbbb..9f096f6edd 100644 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -72,6 +72,7 @@ def __init__(self, pipeline_dir): self.files = [] self.config = {} self.pipeline_name = None + self.minNextflowVersion = None self.dockerfile = [] self.singularityfile = [] self.conda_config = {} @@ -273,7 +274,7 @@ def check_nextflow_config(self): # NB: Should all be files, not directories config_fail = [ 'params.version', - 'params.nf_required_version', + 'manifest.nextflowVersion', 'manifest.description', 'manifest.homePage', 'timeline.enabled', @@ -333,11 +334,20 @@ def check_nextflow_config(self): else: self.failed.append((4, "Config variable 'dag.file' did not end with .svg")) + # Check that the minimum nextflowVersion is set properly + if 'manifest.nextflowVersion' in self.config: + if self.config['manifest.nextflowVersion'].strip('"\'').startswith('>='): + self.passed.append((4, "Config variable 'manifest.nextflowVersion' started with >=")) + # Save self.minNextflowVersion for convenience + self.minNextflowVersion = re.sub(r'[^0-9\.]', '', self.config.get('manifest.nextflowVersion', '')) + else: + self.failed.append((4, "Config variable 'manifest.nextflowVersion' did not start with '>=' : '{}'".format(self.config['manifest.nextflowVersion']).strip('"\''))) + def check_ci_config(self): """ Check that the Travis or Circle CI YAML config is valid Makes sure that `nf-core lint` runs in travis tests - Checks that tests run with the stated nf_required_version + Checks that tests run with the required nextflow version """ for cf in ['.travis.yml', 'circle.yml']: @@ -362,8 +372,8 @@ def check_ci_config(self): else: self.passed.append((5, "CI is pulling the correct docker image: {}".format(docker_pull_cmd))) - # Check that we're testing the nf_required_version - nf_required_version_tested = False + # Check that we're testing the minimum nextflow version + 
minNextflowVersion_tested = False env = ciconf.get('env', []) if type(env) is dict: env = env.get('matrix', []) @@ -373,11 +383,10 @@ def check_ci_config(self): k,v = s.split('=') if k == 'NXF_VER': ci_ver = v.strip('\'"') - cv = self.config.get('params.nf_required_version', '').strip('\'"') - if ci_ver == cv: - nf_required_version_tested = True + if ci_ver == self.minNextflowVersion: + minNextflowVersion_tested = True self.passed.append((5, "Continuous integration checks minimum NF version: '{}'".format(fn))) - if not nf_required_version_tested: + if not minNextflowVersion_tested: self.failed.append((5, "Continuous integration does not check minimum NF version: '{}'".format(fn))) @@ -395,13 +404,12 @@ def check_readme(self): match = re.search(nf_badge_re, content) if match: nf_badge_version = match.group(1).strip('\'"') - nf_config_version = self.config.get('params.nf_required_version').strip('\'"') try: - assert nf_badge_version == nf_config_version + assert nf_badge_version == self.minNextflowVersion except (AssertionError, KeyError): - self.failed.append((6, "README Nextflow minimum version badge does not match config. Badge: '{}', Config: '{}'".format(nf_badge_version, nf_config_version))) + self.failed.append((6, "README Nextflow minimum version badge does not match config. Badge: '{}', Config: '{}'".format(nf_badge_version, self.minNextflowVersion))) else: - self.passed.append((6, "README Nextflow minimum version badge matched config. Badge: '{}', Config: '{}'".format(nf_badge_version, nf_config_version))) + self.passed.append((6, "README Nextflow minimum version badge matched config. 
Badge: '{}', Config: '{}'".format(nf_badge_version, self.minNextflowVersion))) else: self.warned.append((6, "README did not have a Nextflow minimum version badge.")) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml index ba29c1d2d0..bf2b38fbda 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml @@ -1,4 +1,4 @@ -name: {{ cookiecutter.pipeline_slug }}-{{ cookiecutter.version }} +name: nfcore-{{ cookiecutter.pipeline_slug }}-{{ cookiecutter.version }} channels: - bioconda - conda-forge diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf index 3d63af4310..da107c0908 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf @@ -139,22 +139,6 @@ if(params.email) summary['E-mail Address'] = params.email log.info summary.collect { k,v -> "${k.padRight(15)}: $v" }.join("\n") log.info "=========================================" - -// Check that Nextflow version is up to date enough -// try / throw / catch works for NF versions < 0.25 when this was implemented -try { - if( ! nextflow.version.matches(">= $params.nf_required_version") ){ - throw GroovyException('Nextflow version too old') - } -} catch (all) { - log.error "====================================================\n" + - " Nextflow version $params.nf_required_version required! 
You are running v$workflow.nextflow.version.\n" + - " Pipeline execution will continue, but things may break.\n" + - " Please run `nextflow self-update` to update Nextflow.\n" + - "============================================================" -} - - /* * Parse software version numbers */ diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config index 1ba01cb124..be44d0933f 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config @@ -13,7 +13,6 @@ params { version = "{{ cookiecutter.version }}" // Pipeline version container = '{{ cookiecutter.pipeline_slug }}:{{ cookiecutter.version }}' // Container slug. Stable releases should specify release tag! - nf_required_version = '0.30.0' // Minimum version of nextflow required help = false reads = "data/*{1,2}.fastq.gz" @@ -70,6 +69,7 @@ manifest { homePage = 'https://github.com/nf-core/{{ cookiecutter.pipeline_name }}' description = '{{ cookiecutter.pipeline_short_description }}' mainScript = 'main.nf' + nextflowVersion = '>=0.30.0' } // Function to ensure that resource requirements don't go beyond diff --git a/tests/lint_examples/failing_example/nextflow.config b/tests/lint_examples/failing_example/nextflow.config index 3c89db677e..658527344d 100644 --- a/tests/lint_examples/failing_example/nextflow.config +++ b/tests/lint_examples/failing_example/nextflow.config @@ -1,3 +1,4 @@ manifest.homePage = 'http://nf-co.re/pipelines' +manifest.nextflowVersion = '0.30.1' dag.file = "dag.html" diff --git a/tests/lint_examples/minimal_working_example/environment.yml b/tests/lint_examples/minimal_working_example/environment.yml index 86a5af6faa..16cd7fe0e2 100644 --- a/tests/lint_examples/minimal_working_example/environment.yml +++ b/tests/lint_examples/minimal_working_example/environment.yml @@ -9,4 +9,4 @@ dependencies: - 
conda-forge::openjdk=8.0.144 - fastqc=0.11.7 - pip: - - multiqc=1.6 + - multiqc=1.4 diff --git a/tests/lint_examples/minimal_working_example/nextflow.config b/tests/lint_examples/minimal_working_example/nextflow.config index a121d9448b..86fe062184 100644 --- a/tests/lint_examples/minimal_working_example/nextflow.config +++ b/tests/lint_examples/minimal_working_example/nextflow.config @@ -2,8 +2,6 @@ params { // Pipeline version version = "0.4" - // Minimum version of nextflow required - nf_required_version = '0.27.0' // Container slug. Tag for releases container = 'nfcore/tools:0.4' outdir = './results' @@ -39,4 +37,5 @@ manifest { homePage = 'https://github.com/nf-core/tools' description = 'Minimal working example pipeline' mainScript = 'main.nf' + nextflowVersion = '>=0.27.0' } diff --git a/tests/test_lint.py b/tests/test_lint.py index 2de9e5e708..5c1baf3d7a 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -58,7 +58,7 @@ def test_call_lint_pipeline_pass(self): This should not result in any exception for the minimal working example""" lint_obj = nf_core.lint.run_linting(PATH_WORKING_EXAMPLE, False) - expectations = {"failed": 0, "warned": 0, "passed": MAX_PASS_CHECKS} + expectations = {"failed": 0, "warned": 1, "passed": MAX_PASS_CHECKS} self.assess_lint_status(lint_obj, **expectations) @pytest.mark.xfail(raises=AssertionError) @@ -73,7 +73,7 @@ def test_call_lint_pipeline_release(self): """Test the main execution function of PipelineLint when running with --release""" lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) lint_obj.lint_pipeline(release=True) - expectations = {"failed": 0, "warned": 0, "passed": MAX_PASS_CHECKS + ADD_PASS_RELEASE} + expectations = {"failed": 0, "warned": 1, "passed": MAX_PASS_CHECKS + ADD_PASS_RELEASE} self.assess_lint_status(lint_obj, **expectations) def test_failing_dockerfile_example(self): @@ -112,14 +112,14 @@ def test_config_variable_example_pass(self): """Tests that config variable existence test works with 
good pipeline example""" good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) good_lint_obj.check_nextflow_config() - expectations = {"failed": 0, "warned": 0, "passed": 27} + expectations = {"failed": 0, "warned": 0, "passed": 28} self.assess_lint_status(good_lint_obj, **expectations) def test_config_variable_example_with_failed(self): """Tests that config variable existence test fails with bad pipeline example""" bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) bad_lint_obj.check_nextflow_config() - expectations = {"failed": 17, "warned": 8, "passed": 2} + expectations = {"failed": 17, "warned": 8, "passed": 3} self.assess_lint_status(bad_lint_obj, **expectations) @pytest.mark.xfail(raises=AssertionError) @@ -131,7 +131,7 @@ def test_config_variable_error(self): def test_ci_conf_pass(self): """Tests that the continous integration config checks work with a good example""" lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.config['params.nf_required_version'] = '0.27.0' + lint_obj.minNextflowVersion = '0.27.0' lint_obj.check_ci_config() expectations = {"failed": 0, "warned": 0, "passed": 2} self.assess_lint_status(lint_obj, **expectations) @@ -139,7 +139,7 @@ def test_ci_conf_pass(self): def test_ci_conf_fail_wrong_nf_version(self): """Tests that the CI check fails with the wrong NXF version""" lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.config['params.nf_required_version'] = '0.28.0' + lint_obj.minNextflowVersion = '0.28.0' lint_obj.check_ci_config() expectations = {"failed": 1, "warned": 0, "passed": 1} self.assess_lint_status(lint_obj, **expectations) @@ -168,7 +168,7 @@ def test_missing_license_example(self): def test_readme_pass(self): """Tests that the pipeline README file checks work with a good example""" lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.config['params.nf_required_version'] = '0.27.0' + lint_obj.minNextflowVersion = '0.27.0' lint_obj.files = 
['environment.yml'] lint_obj.check_readme() expectations = {"failed": 0, "warned": 0, "passed": 2} @@ -177,7 +177,7 @@ def test_readme_pass(self): def test_readme_warn(self): """Tests that the pipeline README file checks fail """ lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.config['params.nf_required_version'] = '0.28.0' + lint_obj.minNextflowVersion = '0.28.0' lint_obj.check_readme() expectations = {"failed": 1, "warned": 0, "passed": 0} self.assess_lint_status(lint_obj, **expectations) @@ -264,7 +264,7 @@ def test_conda_env_pass(self): lint_obj.pipeline_name = 'tools' lint_obj.config['params.version'] = '0.4' lint_obj.check_conda_env_yaml() - expectations = {"failed": 0, "warned": 0, "passed": 7} + expectations = {"failed": 0, "warned": 1, "passed": 6} self.assess_lint_status(lint_obj, **expectations) def test_conda_env_fail(self): From 430b678b6b7081d305f19a382fa1511464d0c14a Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 8 Aug 2018 15:50:59 +0200 Subject: [PATCH 17/75] Updated with recent cookiecutter updates --- .../.travis.yml | 4 +-- .../conf/aws.config | 26 ------------------- .../conf/awsbatch.config | 13 ++++++++++ .../conf/base.config | 1 - .../docs/installation.md | 6 +++++ .../docs/usage.md | 14 +++++++--- .../{{cookiecutter.pipeline_slug}}/main.nf | 13 +++++++++- .../nextflow.config | 25 ++++++++++++------ 8 files changed, 61 insertions(+), 41 deletions(-) delete mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/aws.config create mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/awsbatch.config diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml index d88853357d..6a5cf34233 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml @@ -28,8 +28,8 @@ install: - cd 
${TRAVIS_BUILD_DIR}/tests env: - - NXF_VER=0.30.0 - - NXF_VER='' + - NXF_VER=0.30.0 # Specify a minimum NF version that should be tested and work + - NXF_VER='' # Plus: get the latest NF version and check, that it works script: # Lint the pipeline code diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/aws.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/aws.config deleted file mode 100644 index 83189c3465..0000000000 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/aws.config +++ /dev/null @@ -1,26 +0,0 @@ -/* - * ------------------------------------------------- - * Nextflow config file for Amazon Web Services - * ------------------------------------------------- - * Imported under the 'aws' Nextflow profile in nextflow.config - * Defines reference genomes, using iGenome paths from s3 - * Uses docker for software depedencies. - */ - -docker { - enabled = true -} - -process { - executor = 'ignite' -} - -params { - saveReference = true - igenomes_base = 's3://ngi-igenomes/igenomes/' - // Max resources based on a typical AWS m4.2xlarge EC2 instance - // Customise with --max_memory, --max_cpus and --max_time (or overwrite with another config file) - max_memory = 32.GB - max_cpus = 8 - max_time = 240.h -} diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/awsbatch.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/awsbatch.config new file mode 100644 index 0000000000..79078c7bd0 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/awsbatch.config @@ -0,0 +1,13 @@ +/* + * ------------------------------------------------- + * Nextflow config file for AWS Batch + * ------------------------------------------------- + * Imported under the 'awsbatch' Nextflow profile in nextflow.config + * Uses docker for software depedencies automagically, so not specified here. 
+ */ + +aws.region = params.awsregion +process.executor = 'awsbatch' +process.queue = params.awsqueue +executor.awscli = '/home/ec2-user/miniconda/bin/aws' +params.tracedir = './' diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config index 97b3aa6af4..46957c1ee6 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config @@ -26,7 +26,6 @@ process { errorStrategy = { task.exitStatus in [143,137] ? 'retry' : 'ignore' } } $multiqc { - executor = 'local' errorStrategy = { task.exitStatus in [143,137] ? 'retry' : 'ignore' } } } diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md index 838a67d5ea..d11c018813 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md @@ -41,3 +41,9 @@ unzip master.zip -d /my-pipelines/ cd /my_data/ nextflow run /my-pipelines/{{ cookiecutter.pipeline_slug }}-master ``` + +To stop nextflow from looking for updates online, you can tell it to run in offline mode by specifying the following environment variable in your ~/.bashrc file: + +```bash +export NXF_OFFLINE='TRUE' +``` diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md index 62bf0fe2e6..21bac49f03 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md @@ -49,9 +49,8 @@ Use this parameter to choose a configuration profile. 
Each profile is designed f * `docker` * A generic configuration profile to be used with [Docker](http://docker.com/) * Runs using the `local` executor and pulls software from dockerhub: [`{{ cookiecutter.pipeline_slug }}`](http://hub.docker.com/r/{{ cookiecutter.pipeline_slug }}/) -* `aws` - * A starter configuration for running the pipeline on Amazon Web Services. Uses docker and Spark. - * See [`docs/configuration/aws.md`](configuration/aws.md) +* `awsbatch` + * A generic configuration profile to be used with AWS Batch. * `standard` * The default profile, used if `-profile` is not specified at all. Runs locally and expects all software to be installed and available on the `PATH`. * This profile is mainly designed to be used as a starting point for other configurations and is inherited by most of the other profiles. @@ -132,6 +131,15 @@ Each step in the pipeline has a default set of requirements for number of CPUs, ### Custom resource requests Wherever process-specific requirements are set in the pipeline, the default value can be changed by creating a custom config file. See the files in [`conf`](../conf) for examples. +## AWS Batch specific parameters +Running the pipeline on AWS Batch requires a couple of specific parameters to be set according to your AWS Batch configuration. Please use the `-awsbatch` profile and then specify all of the following parameters. +### `--awsqueue` +The JobQueue that you intend to use on AWS Batch. +### `--awsregion` +The AWS region to run your job in. Default is set to `eu-west-1` but can be adjusted to your needs. + +Please make sure to also set the `-w/--work-dir` and `--outdir` parameters to a S3 storage bucket of your choice - you'll get an error message notifying you if you didn't. + ## Other command line parameters ### `--outdir` The output directory where the results will be saved. 
diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf index 3d63af4310..8ad54ce29c 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf @@ -24,7 +24,8 @@ def helpMessage() { Mandatory arguments: --reads Path to input data (must be surrounded with quotes) --genome Name of iGenomes reference - -profile Hardware config to use. docker / aws + -profile Configuration profile to use. Can use multiple (comma separated) + Available: standard, conda, docker, singularity, awsbatch, test Options: --singleEnd Specifies that the input is single end reads @@ -79,6 +80,12 @@ if( !(workflow.runName ==~ /[a-z]+_[a-z]+/) ){ custom_runName = workflow.runName } +// Check workDir/outdir paths to be S3 buckets if running on AWSBatch +// related: https://github.com/nextflow-io/nextflow/issues/813 +if( workflow.profile == 'awsbatch') { + if(!workflow.workDir.startsWith('s3:') || !params.outdir.startsWith('s3:')) exit 1, "Workdir or Outdir not on S3 - specify S3 Buckets for each to run on AWSBatch!" 
+} + /* * Create a channel for input read files */ @@ -135,6 +142,10 @@ summary['Working dir'] = workflow.workDir summary['Output dir'] = params.outdir summary['Script dir'] = workflow.projectDir summary['Config Profile'] = workflow.profile +if(workflow.profile == 'awsbatch'){ + summary['AWS Region'] = params.awsregion + summary['AWS Queue'] = params.awsqueue +} if(params.email) summary['E-mail Address'] = params.email log.info summary.collect { k,v -> "${k.padRight(15)}: $v" }.join("\n") log.info "=========================================" diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config index 1ba01cb124..acdb89b99e 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config @@ -20,7 +20,10 @@ params { singleEnd = false outdir = './results' igenomes_base = "./iGenomes" + tracedir = "${params.outdir}/pipeline_info" clusterOptions = false + awsqueue = false + awsregion = 'eu-west-1' } profiles { @@ -29,11 +32,17 @@ profiles { includeConfig 'conf/base.config' } conda { process.conda = "$baseDir/environment.yml" } - docker { docker.enabled = true } - singularity { singularity.enabled = true } - aws { + docker { + docker.enabled = true + process.container = 'nfcore/{{ cookiecutter.pipeline_slug }}:{{ cookiecutter.version }}' + } + singularity { + enabled = true + process.container = 'shub://nf-core/{{ cookiecutter.pipeline_slug }}:{{ cookiecutter.version }}' + } + awsbatch { includeConfig 'conf/base.config' - includeConfig 'conf/aws.config' + includeConfig 'conf/awsbatch.config' includeConfig 'conf/igenomes.config' } test { @@ -51,19 +60,19 @@ process.shell = ['/bin/bash', '-euo', 'pipefail'] timeline { enabled = true - file = "${params.outdir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_timeline.html" + file = 
"${params.tracedir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_timeline.html" } report { enabled = true - file = "${params.outdir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_report.html" + file = "${params.tracedir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_report.html" } trace { enabled = true - file = "${params.outdir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_trace.txt" + file = "${params.tracedir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_trace.txt" } dag { enabled = true - file = "${params.outdir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_dag.svg" + file = "${params.tracedir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_dag.svg" } manifest { From 5f5b71762948b42c72893875f8fc290e68cf62f1 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 8 Aug 2018 16:01:04 +0200 Subject: [PATCH 18/75] Travis: Make new pipeline and run lint tests --- .travis.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 067bc2316b..0a8b1b5df8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,7 +17,10 @@ install: - pip install . - pip install codecov pytest pytest-datafiles pytest-cov mock -script: python -m pytest --cov=nf_core . +script: + - python -m pytest --cov=nf_core . 
+ - nf-core create -n testpipeline -d "This pipeline is for testing" + - nf-core lint testpipeline after_success: - codecov From 6b9fff99135c7da3e0632cb0602ec83838b7c03a Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 8 Aug 2018 16:25:57 +0200 Subject: [PATCH 19/75] Readme: Add docs about nf-core create --- README.md | 48 ++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 40 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 99a08c8ba5..3a3dfe8612 100644 --- a/README.md +++ b/README.md @@ -110,6 +110,46 @@ nf-core-methylseq-1.0/ 7 directories, 8 files ``` +## Creating a new workflow +The `create` subcommand makes a new workflow using the nf-core base template. With a given pipeline name and description, it gives you a starter pipeline which follows nf-core best practices. + +After creating the files, the command initialises the folder as a git repository and makes an initial commit. This first "vanilla" commit which is identical to the output from the templating tool is important, as it allows us to keep your pipeline in sync with the base template in the future. + +``` +$ nf-core create -n nextbigthing -d "This pipeline analyses data from the next big 'omics technique" + + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + + +INFO: Creating new nf-core pipeline: nextbigthing + +INFO: Initialising pipeline git repository + +INFO: Done. Remember to add a remote and push to GitHub! +``` + +Once you have run the command, create a new empty repository on GitHub under your username (not the `nf-core` organisation, yet). +On your computer, add this repository as a git remote and push to it: + +```bash +git remote add origin https://github.com/ewels/nf-core-nextbigthing.git +git push --set-upstream origin master +``` + +You can then continue to edit, commit and push normally as you build your pipeline. 
+When you're ready, create a new repository under the `nf-core` organisation (or ask someone to do this for you on the gitter channel) and make a pull-request. + +Final tasks (needs more documentation): +* Set up travis CI on fork and nf-core repository +* Create a dockerhub repository +* Create a singularity hub repository +* Add a description and keywords to the github repositories +* Protect the `master` branch on the nf-core repository + ## Linting a workflow The `lint` subcommand checks a given pipeline for all nf-core community guidelines. This is the same test that is used on the automated continuous integration tests. @@ -186,12 +226,4 @@ INFO: Updating version in Singularity INFO: Updating version in environment.yml - name: nfcore-methylseq-1.3dev + name: nfcore-methylseq-1.3 - -INFO: Updating version in Dockerfile - - ENV PATH /opt/conda/envs/nfcore-methylseq-1.3dev/bin:$PATH - + ENV PATH /opt/conda/envs/nfcore-methylseq-1.3/bin:$PATH - -INFO: Updating version in Singularity - - PATH=/opt/conda/envs/nfcore-methylseq-1.3dev/bin:$PATH - + PATH=/opt/conda/envs/nfcore-methylseq-1.3/bin:$PATH ``` From d4972469f69623240602c508f926ecc7ef5630e0 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 8 Aug 2018 22:40:02 +0200 Subject: [PATCH 20/75] Applied changes from @apeltzer in nf-core/cookiecutter#57 (#109) --- .../{{cookiecutter.pipeline_slug}}/main.nf | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf index 8ad54ce29c..eeaf025fb0 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf @@ -37,6 +37,10 @@ def helpMessage() { --outdir The output directory where the results will be saved --email Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits -name Name for 
the pipeline run. If not specified, Nextflow will automatically generate a random mnemonic. + + AWSBatch options: + --awsqueue The AWSBatch JobQueue that needs to be set when running on AWSBatch + --awsregion The AWS Region for your AWS Batch job to run on """.stripIndent() } @@ -65,6 +69,11 @@ if ( params.fasta ){ fasta = file(params.fasta) if( !fasta.exists() ) exit 1, "Fasta file not found: ${params.fasta}" } +// AWSBatch sanity checking +if(workflow.profile == 'awsbatch'){ + if (!params.awsqueue || !params.awsregion) exit 1, "Specify correct --awsqueue and --awsregion parameters on AWSBatch!" + if (!workflow.workDir.startsWith('s3') || !params.outdir.startsWith('s3')) exit 1, "Specify S3 URLs for workDir and outdir parameters on AWSBatch!" +} // // NOTE - THIS IS NOT USED IN THIS PIPELINE, EXAMPLE ONLY // If you want to use the above in a process, define the following: From 89dd63b0e89d2f7bbc886ce1831738fc8c9698e5 Mon Sep 17 00:00:00 2001 From: Maxime Garcia Date: Thu, 9 Aug 2018 09:30:22 +0200 Subject: [PATCH 21/75] more line-breaks and update docs --- .../{{cookiecutter.pipeline_slug}}/.github/feature_request.md | 3 ++- .../{{cookiecutter.pipeline_slug}}/.github/pull_request.md | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md index de51af1ec8..aa28ead641 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md @@ -1,5 +1,6 @@ **Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] +A clear and concise description of what the problem is. +Ex. I'm always frustrated when [...] 
**Describe the solution you'd like** A clear and concise description of what you want to happen. diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md index 32a719638d..cbc424cc0e 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md @@ -8,6 +8,7 @@ Please fill in the appropriate checklist below (delete whatever is not relevant) - [ ] If necessary, also make a PR on the [{{ cookiecutter.pipeline_slug }} branch on the nf-core/test-datasets repo]( https://github.com/nf-core/test-datasets/pull/new{{ cookiecutter.pipeline_slug }}) - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker`). - [ ] Make sure your code lints (`nf-core lint .`). + - [ ] Documentation in `docs` is updated - [ ] `CHANGELOG.md` is updated - [ ] `README.md` is updated From db5d5e03b287d53df337e5dc4befb894f921ae48 Mon Sep 17 00:00:00 2001 From: Senthilkumar Panneerselvam Date: Thu, 9 Aug 2018 09:48:54 +0200 Subject: [PATCH 22/75] Add check to lint for deprecated syntax (#104) * Resolves #78 * Add test for lint --- nf_core/lint.py | 5 +++++ tests/lint_examples/failing_example/nextflow.config | 7 +++++++ tests/test_lint.py | 2 +- 3 files changed, 13 insertions(+), 1 deletion(-) mode change 100644 => 100755 nf_core/lint.py diff --git a/nf_core/lint.py b/nf_core/lint.py old mode 100644 new mode 100755 index a074bfcbbb..1f8bd69bbd --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -309,6 +309,11 @@ def check_nextflow_config(self): self.passed.append((4, "Config variable found: {}".format(cf))) else: self.warned.append((4, "Config variable not found: {}".format(cf))) + + # Check and warn if the process configuration is done with deprecated syntax + process_with_deprecated_syntax = list(set([re.search('^(process\.\$.*?)\.+.*$', ck).group(1) for 
ck in self.config.keys() if re.match(r'^(process\.\$.*?)\.+.*$', ck)])) + for pd in process_with_deprecated_syntax: + self.warned.append((4, "Process configuration is done with deprecated_syntax: {}".format(pd))) # Check the variables that should be set to 'true' for k in ['timeline.enabled', 'report.enabled', 'trace.enabled', 'dag.enabled']: diff --git a/tests/lint_examples/failing_example/nextflow.config b/tests/lint_examples/failing_example/nextflow.config index 3c89db677e..58a0ab7038 100644 --- a/tests/lint_examples/failing_example/nextflow.config +++ b/tests/lint_examples/failing_example/nextflow.config @@ -1,3 +1,10 @@ manifest.homePage = 'http://nf-co.re/pipelines' dag.file = "dag.html" + +process { + $deprecatedSyntax { + cpu = 1 + } +} + diff --git a/tests/test_lint.py b/tests/test_lint.py index 2de9e5e708..b3e1ecb334 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -119,7 +119,7 @@ def test_config_variable_example_with_failed(self): """Tests that config variable existence test fails with bad pipeline example""" bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) bad_lint_obj.check_nextflow_config() - expectations = {"failed": 17, "warned": 8, "passed": 2} + expectations = {"failed": 17, "warned": 9, "passed": 2} self.assess_lint_status(bad_lint_obj, **expectations) @pytest.mark.xfail(raises=AssertionError) From 91b94d806cc75c181f871a6a09e78813951c9182 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hadrien=20Gourl=C3=A9?= Date: Thu, 9 Aug 2018 10:49:39 +0200 Subject: [PATCH 23/75] lint fails more gracefully in travis dont have before_install --- nf_core/lint.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nf_core/lint.py b/nf_core/lint.py index a074bfcbbb..8d77e9e397 100644 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -359,6 +359,8 @@ def check_ci_config(self): assert(docker_pull_cmd in ciconf.get('before_install')) except AssertionError: self.failed.append((5, "CI is not pulling the correct docker image: 
{}".format(docker_pull_cmd))) + except TypeError: + self.failed.append((5, "CI does not contain a before_install step that pulls the docker image")) else: self.passed.append((5, "CI is pulling the correct docker image: {}".format(docker_pull_cmd))) From 686f2426283e4a875ec9b108c40f0844b94bb15c Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 9 Aug 2018 12:09:40 +0200 Subject: [PATCH 24/75] Added manifest.name and manifest.pipelineVersion --- docs/lint_errors.md | 6 ++-- nf_core/lint.py | 19 ++++++++--- .../{{cookiecutter.pipeline_slug}}/main.nf | 10 +++--- .../nextflow.config | 5 +-- nf_core/release.py | 8 ++--- .../failing_example/nextflow.config | 1 + .../minimal_working_example/nextflow.config | 4 +-- tests/test_lint.py | 32 +++++++++---------- tests/test_release.py | 10 +++--- 9 files changed, 54 insertions(+), 41 deletions(-) diff --git a/docs/lint_errors.md b/docs/lint_errors.md index 9b6984f2a3..6469b22593 100644 --- a/docs/lint_errors.md +++ b/docs/lint_errors.md @@ -57,15 +57,17 @@ names. This test fails or throws warnings if required variables are not set. The following variables fail the test if missing: -* `params.version` - * The version of this pipeline. This should correspond to a [GitHub release](https://help.github.com/articles/creating-releases/). * `params.nf_required_version` * The minimum version of Nextflow required to run the pipeline. * This should correspond to the `NXF_VER` version tested by Travis. * `params.outdir` * A directory in which all pipeline results should be saved +* `manifest.name` + * The pipeline name. Should begin with `nf-core/` * `manifest.description` * A description of the pipeline +* `manifest.version` + * The version of this pipeline. This should correspond to a [GitHub release](https://help.github.com/articles/creating-releases/). * `manifest.homePage` * The homepage for the pipeline. 
Should be the nf-core GitHub repository URL, so beginning with `https://github.com/nf-core/` diff --git a/nf_core/lint.py b/nf_core/lint.py index a074bfcbbb..a0f46687bb 100644 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -272,9 +272,10 @@ def check_nextflow_config(self): # NB: Should all be files, not directories config_fail = [ - 'params.version', 'params.nf_required_version', + 'manifest.name', 'manifest.description', + 'manifest.pipelineVersion', 'manifest.homePage', 'timeline.enabled', 'trace.enabled', @@ -317,6 +318,15 @@ def check_nextflow_config(self): else: self.failed.append((4, "Config variable '{}' did not have correct value: {}".format(k, self.config.get(k)))) + # Check that the pipeline name starts with nf-core + try: + assert self.config['manifest.name'].strip('\'"')[0:8] == 'nf-core/' + except (AssertionError, IndexError): + self.failed.append((4, "Config variable 'manifest.name' did not begin with nf-core/:\n {}".format(self.config['manifest.name'].strip('\'"')))) + else: + self.passed.append((4, "Config variable 'manifest.name' began with 'nf-core/'")) + self.pipeline_name = self.config['manifest.name'].strip("'").replace('nf-core/', '') + # Check that the homePage is set to the GitHub URL try: assert self.config['manifest.homePage'].strip('\'"')[0:27] == 'https://github.com/nf-core/' @@ -324,7 +334,6 @@ def check_nextflow_config(self): self.failed.append((4, "Config variable 'manifest.homePage' did not begin with https://github.com/nf-core/:\n {}".format(self.config['manifest.homePage'].strip('\'"')))) else: self.passed.append((4, "Config variable 'manifest.homePage' began with 'https://github.com/nf-core/'")) - self.pipeline_name = self.config['manifest.homePage'][28:].rstrip("'") # Check that the DAG filename ends in `.svg` if 'dag.file' in self.config: @@ -425,7 +434,7 @@ def check_version_consistency(self): versions = {} # Get the version definitions # Get version from nextflow.config - versions['params.version'] = 
self.config['params.version'].strip(' \'"') + versions['manifest.pipelineVersion'] = self.config['manifest.pipelineVersion'].strip(' \'"') # Get version from the docker slug if self.config.get('params.container') and \ @@ -472,7 +481,7 @@ def check_conda_env_yaml(self): return # Check that the environment name matches the pipeline name - pipeline_version = self.config['params.version'].strip(' \'"') + pipeline_version = self.config['manifest.pipelineVersion'].strip(' \'"') expected_env_name = 'nfcore-{}-{}'.format(self.pipeline_name.lower(), pipeline_version) if self.conda_config['name'] != expected_env_name: self.failed.append((8, "Conda environment name is incorrect ({}, should be {})".format(self.conda_config['name'], expected_env_name))) @@ -578,7 +587,7 @@ def check_conda_singularityfile(self): expected_strings = [ 'From:nfcore/base', 'Bootstrap:docker', - 'VERSION {}'.format(self.config['params.version'].strip(' \'"')), + 'VERSION {}'.format(self.config['manifest.pipelineVersion'].strip(' \'"')), 'environment.yml /', '/opt/conda/bin/conda env update -n root -f /environment.yml', '/opt/conda/bin/conda clean -a', diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf index 3d63af4310..0e39a174bf 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf @@ -13,7 +13,7 @@ def helpMessage() { log.info""" ========================================= - {{ cookiecutter.pipeline_name }} v${params.version} + {{ cookiecutter.pipeline_name }} v${manifest.pipelineVersion} ========================================= Usage: @@ -112,11 +112,11 @@ log.info """======================================================= | \\| | \\__, \\__/ | \\ |___ \\`-._,-`-, `._,._,\' -{{ cookiecutter.pipeline_name }} v${params.version}" +{{ cookiecutter.pipeline_name }} v${manifest.pipelineVersion}" 
=======================================================""" def summary = [:] summary['Pipeline Name'] = '{{ cookiecutter.pipeline_name }}' -summary['Pipeline Version'] = params.version +summary['Pipeline Version'] = manifest.pipelineVersion summary['Run Name'] = custom_runName ?: workflow.runName summary['Reads'] = params.reads summary['Fasta Ref'] = params.fasta @@ -165,7 +165,7 @@ process get_software_versions { script: """ - echo $params.version > v_pipeline.txt + echo $manifest.pipelineVersion > v_pipeline.txt echo $workflow.nextflow.version > v_nextflow.txt fastqc --version > v_fastqc.txt multiqc --version > v_multiqc.txt @@ -254,7 +254,7 @@ workflow.onComplete { subject = "[{{ cookiecutter.pipeline_name }}] FAILED: $workflow.runName" } def email_fields = [:] - email_fields['version'] = params.version + email_fields['version'] = manifest.pipelineVersion email_fields['runName'] = custom_runName ?: workflow.runName email_fields['success'] = workflow.success email_fields['dateComplete'] = workflow.complete diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config index 1ba01cb124..2657ec7df6 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config @@ -11,7 +11,6 @@ // Global default params, used in configs params { - version = "{{ cookiecutter.version }}" // Pipeline version container = '{{ cookiecutter.pipeline_slug }}:{{ cookiecutter.version }}' // Container slug. Stable releases should specify release tag! 
nf_required_version = '0.30.0' // Minimum version of nextflow required @@ -67,8 +66,10 @@ dag { } manifest { - homePage = 'https://github.com/nf-core/{{ cookiecutter.pipeline_name }}' + name = 'nf-core/{{ cookiecutter.pipeline_name }}' description = '{{ cookiecutter.pipeline_short_description }}' + homePage = 'https://github.com/nf-core/{{ cookiecutter.pipeline_name }}' + pipelineVersion = '{{ cookiecutter.version }}' mainScript = 'main.nf' } diff --git a/nf_core/release.py b/nf_core/release.py index f8c6469415..7c9749434f 100644 --- a/nf_core/release.py +++ b/nf_core/release.py @@ -13,15 +13,15 @@ def make_release(lint_obj, new_version): """ Function to make the release. Called by the main script """ # Collect the old and new version numbers - current_version = lint_obj.config['params.version'].strip(' \'"') + current_version = lint_obj.config['manifest.pipelineVersion'].strip(' \'"') if new_version.startswith('v'): logging.warn("Stripping leading 'v' from new version number") new_version = new_version[1:] logging.info("Changing version number:\n Current version number is '{}'\n New version number will be '{}'".format(current_version, new_version)) # Update nextflow.config - nfconfig_pattern = r"version\s*=\s*[\'\"]?{}[\'\"]?".format(current_version.replace('.','\.')) - nfconfig_newstr = "version = '{}'".format(new_version) + nfconfig_pattern = r"pipelineVersion\s*=\s*[\'\"]?{}[\'\"]?".format(current_version.replace('.','\.')) + nfconfig_newstr = "pipelineVersion = '{}'".format(new_version) update_file_version("nextflow.config", lint_obj, nfconfig_pattern, nfconfig_newstr) # Update container tag @@ -46,7 +46,7 @@ def make_release(lint_obj, new_version): update_file_version("environment.yml", lint_obj, nfconfig_pattern, nfconfig_newstr) def update_file_version(filename, lint_obj, pattern, newstr): - """ Update params.version in the nextflow config file """ + """ Update manifest.pipelineVersion in the nextflow config file """ # Load the file fn = 
os.path.join(lint_obj.path, filename) diff --git a/tests/lint_examples/failing_example/nextflow.config b/tests/lint_examples/failing_example/nextflow.config index 3c89db677e..785e76c313 100644 --- a/tests/lint_examples/failing_example/nextflow.config +++ b/tests/lint_examples/failing_example/nextflow.config @@ -1,3 +1,4 @@ manifest.homePage = 'http://nf-co.re/pipelines' +manifest.name = 'pipelines' dag.file = "dag.html" diff --git a/tests/lint_examples/minimal_working_example/nextflow.config b/tests/lint_examples/minimal_working_example/nextflow.config index a121d9448b..dbc768c7f2 100644 --- a/tests/lint_examples/minimal_working_example/nextflow.config +++ b/tests/lint_examples/minimal_working_example/nextflow.config @@ -1,7 +1,5 @@ params { - // Pipeline version - version = "0.4" // Minimum version of nextflow required nf_required_version = '0.27.0' // Container slug. Tag for releases @@ -36,7 +34,9 @@ dag { } manifest { + name = 'nf-core/tools' homePage = 'https://github.com/nf-core/tools' description = 'Minimal working example pipeline' mainScript = 'main.nf' + pipelineVersion = '0.4' } diff --git a/tests/test_lint.py b/tests/test_lint.py index 2de9e5e708..155d675e1c 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -38,7 +38,7 @@ def pf(wd, path): pf(WD, 'lint_examples/license_incomplete_example')] # The maximum sum of passed tests currently possible -MAX_PASS_CHECKS = 57 +MAX_PASS_CHECKS = 59 # The additional tests passed for releases ADD_PASS_RELEASE = 1 @@ -112,14 +112,14 @@ def test_config_variable_example_pass(self): """Tests that config variable existence test works with good pipeline example""" good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) good_lint_obj.check_nextflow_config() - expectations = {"failed": 0, "warned": 0, "passed": 27} + expectations = {"failed": 0, "warned": 0, "passed": 29} self.assess_lint_status(good_lint_obj, **expectations) def test_config_variable_example_with_failed(self): """Tests that config variable 
existence test fails with bad pipeline example""" bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) bad_lint_obj.check_nextflow_config() - expectations = {"failed": 17, "warned": 8, "passed": 2} + expectations = {"failed": 18, "warned": 8, "passed": 3} self.assess_lint_status(bad_lint_obj, **expectations) @pytest.mark.xfail(raises=AssertionError) @@ -200,7 +200,7 @@ def test_dockerfile_pass(self): def test_version_consistency_pass(self): """Tests the workflow version and container version sucessfully""" lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.config["params.version"] = "0.4" + lint_obj.config["manifest.pipelineVersion"] = "0.4" lint_obj.config["params.container"] = "nfcore/tools:0.4" lint_obj.check_version_consistency() expectations = {"failed": 0, "warned": 0, "passed": 1} @@ -212,7 +212,7 @@ def test_version_consistency_with_env_fail(self): os.environ["TRAVIS_TAG"] = "0.5" os.environ["TRAVIS_REPO_SLUG"] = "nf-core/testpipeline" lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.config["params.version"] = "0.4" + lint_obj.config["manifest.pipelineVersion"] = "0.4" lint_obj.config["params.container"] = "nfcore/tools:0.4" lint_obj.config["process.container"] = "nfcore/tools:0.4" lint_obj.check_version_consistency() @@ -225,7 +225,7 @@ def test_version_consistency_with_numeric_fail(self): os.environ["TRAVIS_TAG"] = "0.5dev" os.environ["TRAVIS_REPO_SLUG"] = "nf-core/testpipeline" lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.config["params.version"] = "0.4" + lint_obj.config["manifest.pipelineVersion"] = "0.4" lint_obj.config["params.container"] = "nfcore/tools:0.4" lint_obj.check_version_consistency() expectations = {"failed": 1, "warned": 0, "passed": 0} @@ -237,7 +237,7 @@ def test_version_consistency_with_no_docker_version_fail(self): os.environ["TRAVIS_TAG"] = "0.4" os.environ["TRAVIS_REPO_SLUG"] = "nf-core/testpipeline" lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) 
- lint_obj.config["params.version"] = "0.4" + lint_obj.config["manifest.pipelineVersion"] = "0.4" lint_obj.config["params.container"] = "nfcore/tools" lint_obj.check_version_consistency() expectations = {"failed": 1, "warned": 0, "passed": 0} @@ -249,7 +249,7 @@ def test_version_consistency_with_env_pass(self): os.environ["TRAVIS_TAG"] = "0.4" os.environ["TRAVIS_REPO_SLUG"] = "nf-core/testpipeline" lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.config["params.version"] = "0.4" + lint_obj.config["manifest.pipelineVersion"] = "0.4" lint_obj.config["params.container"] = "nfcore/tools:0.4" lint_obj.check_version_consistency() expectations = {"failed": 0, "warned": 0, "passed": 1} @@ -262,7 +262,7 @@ def test_conda_env_pass(self): with open(os.path.join(PATH_WORKING_EXAMPLE, 'environment.yml'), 'r') as fh: lint_obj.conda_config = yaml.load(fh) lint_obj.pipeline_name = 'tools' - lint_obj.config['params.version'] = '0.4' + lint_obj.config['manifest.pipelineVersion'] = '0.4' lint_obj.check_conda_env_yaml() expectations = {"failed": 0, "warned": 0, "passed": 7} self.assess_lint_status(lint_obj, **expectations) @@ -275,7 +275,7 @@ def test_conda_env_fail(self): lint_obj.conda_config = yaml.load(fh) lint_obj.conda_config['dependencies'] = ['fastqc', 'multiqc=0.9', 'notapackaage=0.4'] lint_obj.pipeline_name = 'not_tools' - lint_obj.config['params.version'] = '0.23' + lint_obj.config['manifest.pipelineVersion'] = '0.23' lint_obj.check_conda_env_yaml() expectations = {"failed": 3, "warned": 1, "passed": 2} self.assess_lint_status(lint_obj, **expectations) @@ -291,7 +291,7 @@ def test_conda_env_timeout(self, mock_get): with open(os.path.join(PATH_WORKING_EXAMPLE, 'environment.yml'), 'r') as fh: lint_obj.conda_config = yaml.load(fh) lint_obj.pipeline_name = 'tools' - lint_obj.config['params.version'] = '0.4' + lint_obj.config['manifest.pipelineVersion'] = '0.4' lint_obj.check_conda_env_yaml() expectations = {"failed": 2, "warned": 5, "passed": 4} 
self.assess_lint_status(lint_obj, **expectations) @@ -336,7 +336,7 @@ def test_pip_no_version_fail(self): lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) lint_obj.files = ['environment.yml'] lint_obj.pipeline_name = 'tools' - lint_obj.config['params.version'] = '0.4' + lint_obj.config['manifest.pipelineVersion'] = '0.4' lint_obj.conda_config = {'name': 'nfcore-tools-0.4', 'dependencies': [{'pip': ['multiqc']}]} lint_obj.check_conda_env_yaml() expectations = {"failed": 1, "warned": 0, "passed": 1} @@ -347,7 +347,7 @@ def test_pip_package_not_latest_warn(self): lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) lint_obj.files = ['environment.yml'] lint_obj.pipeline_name = 'tools' - lint_obj.config['params.version'] = '0.4' + lint_obj.config['manifest.pipelineVersion'] = '0.4' lint_obj.conda_config = {'name': 'nfcore-tools-0.4', 'dependencies': [{'pip': ['multiqc=1.4']}]} lint_obj.check_conda_env_yaml() expectations = {"failed": 0, "warned": 1, "passed": 2} @@ -363,7 +363,7 @@ def test_pypi_timeout_warn(self, mock_get): lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) lint_obj.files = ['environment.yml'] lint_obj.pipeline_name = 'tools' - lint_obj.config['params.version'] = '0.4' + lint_obj.config['manifest.pipelineVersion'] = '0.4' lint_obj.conda_config = {'name': 'nfcore-tools-0.4', 'dependencies': [{'pip': ['multiqc=1.5']}]} lint_obj.check_conda_env_yaml() expectations = {"failed": 0, "warned": 1, "passed": 2} @@ -379,7 +379,7 @@ def test_pypi_connection_error_warn(self, mock_get): lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) lint_obj.files = ['environment.yml'] lint_obj.pipeline_name = 'tools' - lint_obj.config['params.version'] = '0.4' + lint_obj.config['manifest.pipelineVersion'] = '0.4' lint_obj.conda_config = {'name': 'nfcore-tools-0.4', 'dependencies': [{'pip': ['multiqc=1.5']}]} lint_obj.check_conda_env_yaml() expectations = {"failed": 0, "warned": 1, "passed": 2} @@ -390,7 +390,7 @@ def 
test_pip_dependency_fail(self): lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) lint_obj.files = ['environment.yml'] lint_obj.pipeline_name = 'tools' - lint_obj.config['params.version'] = '0.4' + lint_obj.config['manifest.pipelineVersion'] = '0.4' lint_obj.conda_config = {'name': 'nfcore-tools-0.4', 'dependencies': [{'pip': ['notpresent=1.5']}]} lint_obj.check_conda_env_yaml() expectations = {"failed": 1, "warned": 0, "passed": 2} diff --git a/tests/test_release.py b/tests/test_release.py index 87178e8ba8..87f19b8ccd 100644 --- a/tests/test_release.py +++ b/tests/test_release.py @@ -16,7 +16,7 @@ def test_working_release(datafiles): """ Test that making a release with the working example files works """ lint_obj = nf_core.lint.PipelineLint(str(datafiles)) lint_obj.pipeline_name = 'tools' - lint_obj.config['params.version'] = '0.4' + lint_obj.config['manifest.pipelineVersion'] = '0.4' lint_obj.files = ['nextflow.config', 'Dockerfile', 'environment.yml'] nf_core.release.make_release(lint_obj, '1.1') @@ -25,7 +25,7 @@ def test_dev_release(datafiles): """ Test that making a release works with a dev name and a leading v """ lint_obj = nf_core.lint.PipelineLint(str(datafiles)) lint_obj.pipeline_name = 'tools' - lint_obj.config['params.version'] = '0.4' + lint_obj.config['manifest.pipelineVersion'] = '0.4' lint_obj.files = ['nextflow.config', 'Dockerfile', 'environment.yml'] nf_core.release.make_release(lint_obj, 'v1.2dev') @@ -35,7 +35,7 @@ def test_pattern_not_found(datafiles): """ Test that making a release raises and error if a pattern isn't found """ lint_obj = nf_core.lint.PipelineLint(str(datafiles)) lint_obj.pipeline_name = 'tools' - lint_obj.config['params.version'] = '0.5' + lint_obj.config['manifest.pipelineVersion'] = '0.5' lint_obj.files = ['nextflow.config', 'Dockerfile', 'environment.yml'] nf_core.release.make_release(lint_obj, '1.2dev') @@ -45,8 +45,8 @@ def test_multiple_patterns_found(datafiles): """ Test that making a release raises if a 
version number is found twice """ lint_obj = nf_core.lint.PipelineLint(str(datafiles)) with open(os.path.join(str(datafiles), 'nextflow.config'), "a") as nfcfg: - nfcfg.write("params.version = '0.4'") + nfcfg.write("manifest.pipelineVersion = '0.4'") lint_obj.pipeline_name = 'tools' - lint_obj.config['params.version'] = '0.4' + lint_obj.config['manifest.pipelineVersion'] = '0.4' lint_obj.files = ['nextflow.config', 'Dockerfile', 'environment.yml'] nf_core.release.make_release(lint_obj, '1.2dev') From ca3d0f1cfa5037ac7002650e11f376311aa8fe98 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 9 Aug 2018 12:19:43 +0200 Subject: [PATCH 25/75] Added a few more line breaks --- .../{{cookiecutter.pipeline_slug}}/.github/feature_request.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md index aa28ead641..3616d75c9e 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md @@ -1,12 +1,16 @@ **Is your feature request related to a problem? Please describe.** + A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] **Describe the solution you'd like** + A clear and concise description of what you want to happen. **Describe alternatives you've considered** + A clear and concise description of any alternative solutions or features you've considered. **Additional context** + Add any other context about the feature request here. 
From 0a5fe49baf32958155829d3af1e38e72e114f657 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 9 Aug 2018 12:31:53 +0200 Subject: [PATCH 26/75] Added nfcore- prefix to conda env name in template --- .../{{cookiecutter.pipeline_slug}}/environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml index ba29c1d2d0..bf2b38fbda 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml @@ -1,4 +1,4 @@ -name: {{ cookiecutter.pipeline_slug }}-{{ cookiecutter.version }} +name: nfcore-{{ cookiecutter.pipeline_slug }}-{{ cookiecutter.version }} channels: - bioconda - conda-forge From dce3f12d9e87b698b2ab27e55b051aad7f3d15a4 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 9 Aug 2018 12:52:29 +0200 Subject: [PATCH 27/75] Update release, add nf-core prefix to template. 
* Added the travis config docker pull to the release update command * Added missing nf-core prefixes to a bunch of locations in the template * Updated the template docker & singularity profiles to use params.container --- .../{{cookiecutter.pipeline_slug}}/.travis.yml | 2 +- .../{{cookiecutter.pipeline_slug}}/environment.yml | 2 +- .../{{cookiecutter.pipeline_slug}}/nextflow.config | 6 +++--- nf_core/release.py | 5 +++++ 4 files changed, 10 insertions(+), 5 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml index 6a5cf34233..360ed257c4 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml @@ -11,7 +11,7 @@ matrix: before_install: # Pull the docker image first so the test doesn't wait for this - - docker pull {{ cookiecutter.pipeline_slug }}:{{ cookiecutter.version }} + - docker pull nfcore/{{ cookiecutter.pipeline_slug }}:latest install: # Install Nextflow diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml index ba29c1d2d0..bf2b38fbda 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml @@ -1,4 +1,4 @@ -name: {{ cookiecutter.pipeline_slug }}-{{ cookiecutter.version }} +name: nfcore-{{ cookiecutter.pipeline_slug }}-{{ cookiecutter.version }} channels: - bioconda - conda-forge diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config index acdb89b99e..978a8d9279 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config @@ -12,7 
+12,7 @@ params { version = "{{ cookiecutter.version }}" // Pipeline version - container = '{{ cookiecutter.pipeline_slug }}:{{ cookiecutter.version }}' // Container slug. Stable releases should specify release tag! + container = 'nfcore/{{ cookiecutter.pipeline_slug }}:latest' // Container slug. Stable releases should specify release tag! nf_required_version = '0.30.0' // Minimum version of nextflow required help = false @@ -34,11 +34,11 @@ profiles { conda { process.conda = "$baseDir/environment.yml" } docker { docker.enabled = true - process.container = 'nfcore/{{ cookiecutter.pipeline_slug }}:{{ cookiecutter.version }}' + process.container = params.container } singularity { enabled = true - process.container = 'shub://nf-core/{{ cookiecutter.pipeline_slug }}:{{ cookiecutter.version }}' + process.container = {"shub://${params.container.replace('nfcore', 'nf-core')}"} } awsbatch { includeConfig 'conf/base.config' diff --git a/nf_core/release.py b/nf_core/release.py index f8c6469415..abc1759ab6 100644 --- a/nf_core/release.py +++ b/nf_core/release.py @@ -34,6 +34,11 @@ def make_release(lint_obj, new_version): nfconfig_newstr = "container = 'nfcore/{}:{}'".format(lint_obj.pipeline_name.lower(), docker_tag) update_file_version("nextflow.config", lint_obj, nfconfig_pattern, nfconfig_newstr) + # Update travis image pull + nfconfig_pattern = r"docker pull nfcore/{}:(?:{}|latest)".format(lint_obj.pipeline_name.lower(), current_version.replace('.','\.')) + nfconfig_newstr = "docker pull nfcore/{}:{}".format(lint_obj.pipeline_name.lower(), docker_tag) + update_file_version(".travis.yml", lint_obj, nfconfig_pattern, nfconfig_newstr) + # Update Singularity version name nfconfig_pattern = r"VERSION {}".format(current_version.replace('.','\.')) nfconfig_newstr = "VERSION {}".format(new_version) From d9095330a6d8314e2f0d3383cc846a402ada3d16 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 9 Aug 2018 13:37:46 +0200 Subject: [PATCH 28/75] Make lint error command more user 
friendly --- nf_core/lint.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/lint.py b/nf_core/lint.py index 1f8bd69bbd..9999c97255 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -309,7 +309,7 @@ def check_nextflow_config(self): self.passed.append((4, "Config variable found: {}".format(cf))) else: self.warned.append((4, "Config variable not found: {}".format(cf))) - + # Check and warn if the process configuration is done with deprecated syntax process_with_deprecated_syntax = list(set([re.search('^(process\.\$.*?)\.+.*$', ck).group(1) for ck in self.config.keys() if re.match(r'^(process\.\$.*?)\.+.*$', ck)])) for pd in process_with_deprecated_syntax: @@ -363,7 +363,7 @@ def check_ci_config(self): try: assert(docker_pull_cmd in ciconf.get('before_install')) except AssertionError: - self.failed.append((5, "CI is not pulling the correct docker image: {}".format(docker_pull_cmd))) + self.failed.append((5, "CI is not pulling the correct docker image. Should be:\n '{}'".format(docker_pull_cmd))) else: self.passed.append((5, "CI is pulling the correct docker image: {}".format(docker_pull_cmd))) From 2691610eba9682a07d6bca187a831608a6859733 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 9 Aug 2018 13:52:47 +0200 Subject: [PATCH 29/75] Travis: Don't pull a docker image tag --- nf_core/lint.py | 1 + .../{{cookiecutter.pipeline_slug}}/.travis.yml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/lint.py b/nf_core/lint.py index 1f8bd69bbd..1527da4277 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -360,6 +360,7 @@ def check_ci_config(self): # Check that we're pulling the right docker image if self.config.get('params.container'): docker_pull_cmd = 'docker pull {}'.format(self.config['params.container'].strip('"\'')) + docker_pull_cmd = re.sub(r':(?:[\.\d]+|latest)$', '', docker_pull_cmd) try: assert(docker_pull_cmd in ciconf.get('before_install')) except AssertionError: diff --git 
a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml index 6a5cf34233..cc0f7e0121 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml @@ -11,7 +11,7 @@ matrix: before_install: # Pull the docker image first so the test doesn't wait for this - - docker pull {{ cookiecutter.pipeline_slug }}:{{ cookiecutter.version }} + - docker pull {{ cookiecutter.pipeline_slug }} install: # Install Nextflow From ab9e653c2cc29c029ca0a260980c6df5f2eebac7 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 9 Aug 2018 13:54:44 +0200 Subject: [PATCH 30/75] Docs and lint test example --- docs/lint_errors.md | 2 +- tests/lint_examples/minimal_working_example/.travis.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/lint_errors.md b/docs/lint_errors.md index 9b6984f2a3..30be7a1667 100644 --- a/docs/lint_errors.md +++ b/docs/lint_errors.md @@ -98,7 +98,7 @@ This test fails if the following happens: * `.travis.yml` does not contain the string `nf-core lint ${TRAVIS_BUILD_DIR}` under `script` * `.travis.yml` does not contain the string `docker pull ` under `before_install` - * Where `` is fetched from `params.container` in the `nextflow.config` file + * Where `` is fetched from `params.container` in the `nextflow.config` file, without the docker tag _(if we have the tag the tests fail when making a release)_ * `.travis.yml` does not test the Nextflow version specified in the pipeline as `nf_required_version` * This is expected in the `env` section of the config, eg: ```yaml diff --git a/tests/lint_examples/minimal_working_example/.travis.yml b/tests/lint_examples/minimal_working_example/.travis.yml index 949f8b0aae..9fc7d635c3 100644 --- a/tests/lint_examples/minimal_working_example/.travis.yml +++ b/tests/lint_examples/minimal_working_example/.travis.yml @@ -11,7 +11,7 @@ 
matrix: before_install: # Pull the docker image first so the test doesn't wait for this - - docker pull nfcore/tools:0.4 + - docker pull nfcore/tools install: # Install Nextflow From d495cc071a1cca1e5a160480169d277b43289721 Mon Sep 17 00:00:00 2001 From: sven1103 Date: Thu, 9 Aug 2018 13:59:57 +0200 Subject: [PATCH 31/75] Refactor a lot of stuff --- bin/broadcast_prs.py | 67 ++++++++++++++++++++++++++------------------ bin/utils.py | 40 ++++++++++++++++++++++++++ 2 files changed, 79 insertions(+), 28 deletions(-) create mode 100644 bin/utils.py diff --git a/bin/broadcast_prs.py b/bin/broadcast_prs.py index 752f497c75..6f77861674 100755 --- a/bin/broadcast_prs.py +++ b/bin/broadcast_prs.py @@ -1,17 +1,22 @@ #!/usr/bin/env python from cookiecutter.main import cookiecutter +import git import json import os import requests from requests.auth import HTTPBasicAuth import sys import subprocess +import tempfile +import utils + +#run_cookiecutter(self.pipeline_name, self.pipeline_description, self.pipeline_version) # The GitHub base url or the nf-core project GH_BASE_URL = "https://github.com/nf-core" # The current cookiecutter template url for nf-core pipelines -NF_CORE_TEMPLATE = "https://github.com/nf-core/cookiecutter.git" +NF_CORE_TEMPLATE = "" # The JSON file is updated on every push event on the nf-core GitHub project NF_CORE_PIPELINE_INFO = "http://nf-co.re/pipelines.json" # The API endpoint for creating pull requests @@ -27,43 +32,57 @@ class UpdateTemplate: - branch: The template branch name, default=`TEMPLATE` """ - def __init__(self, pipeline, context, branch='TEMPLATE'): + def __init__(self, pipeline, branch='TEMPLATE'): """Basic constructor """ self.pipeline = pipeline self.repo_url = "{base_url}/{pipeline}".format( base_url=GH_BASE_URL, pipeline=pipeline) - self.context = context self.branch = branch + self.tmpdir = tempfile.mkdtemp() + self.repo = None def run(self): """Execute the template update. 
""" - self._clone_repo() - self._apply_changes() + config = self._clone_repo() + assert self.repo + context = utils.create_context(config) + self._apply_changes(context) self._commit_changes() + self._push_changes() + def _clone_repo(self): """Clone the repo and switch to the configured branch. """ - subprocess.run(["git", "clone", self.repo_url, "-b", self.branch, self.pipeline]) + self.repo = git.Repo.clone_from(self.repo_url, self.tmpdir) + config = utils.fetch_wf_config(wf_path=self.tmpdir) + print(self.branch) + self.repo.git.checkout("origin/{branch}".format(branch=self.branch), + b="{branch}".format(branch=self.branch)) + return config - def _apply_changes(self): + def _apply_changes(self, context): """Apply the changes of the cookiecutter template to the pipelines template branch. """ cookiecutter(NF_CORE_TEMPLATE, no_input=True, - extra_context=None, + extra_context=context, overwrite_if_exists=True, - output_dir=self.pipeline) + output_dir=self.tmpdir) def _commit_changes(self): """Commits the changes of the new template to the current branch. """ - subprocess.run(["git", "add", "-A", "."], cwd=self.pipeline) - subprocess.run(["git", "commit", "-m", "Update nf-core template"], cwd=self.pipeline) + self.repo.git.add(A=True) + self.repo.index.commit("Update nf-core pipeline template.") + + def _push_changes(self): + print("Push it...") + #self.repo.git.push() # we need a different way to push otherwise it will ask for credentials def create_pullrequest(pipeline, origin="master", template="TEMPLATE", token="", user="nf-core"): @@ -74,39 +93,31 @@ def create_pullrequest(pipeline, origin="master", template="TEMPLATE", token="", """ content = {} content['title'] = "Important pipeline nf-core update!" 
- content['body'] = "Some important changes have been made in the nf-core pipelines templates.\n" + + content['body'] = "Some important changes have been made in the nf-core pipelines templates.\n" \ "Please make sure to merge this in ASAP and make a new minor release of your pipeline." - content['head'] = "{}:{}".format(pipeline, template) - content['base'] = master + content['head'] = "{}".format(template) + content['base'] = origin return requests.post(url=GITHUB_PR_URL_TEMPL.format(pipeline=pipeline), - data=json.dumps(content) + data=json.dumps(content), auth=HTTPBasicAuth(user, token)) -def get_context(pipeline): - """Get the template context for a given pipeline. - - Returns: A context dictionary - """ - pass - def main(): res = requests.get(NF_CORE_PIPELINE_INFO) pipelines = json.loads(res.content).get('remote_workflows') if not pipelines: print("Pipeline information was empty!") + pipelines = [{"name":"hlatyping"}] # just for testing for pipeline in pipelines: - # Get context from pipeline and load it into a dictionary - # context = load_context(pipeline) - print(pipeline['name']) # Just for testing, can be safely deleted - ut.UpdateTemplate(pipeline['name'], context) + UpdateTemplate(pipeline['name']).run() for pipeline in pipelines: print("Trying to open pull request for pipeline {}...".format(pipeline['name'])) - response = create_pullrequest(pipeline['name']) + response = create_pullrequest(pipeline['name'], token="117962f70c156268d02a8b8f42be04bf7676141e") if response.status_code != 201: print("Pull-request for pipeline \'{pipeline}\' failed," " got return code {return_code}." 
- .format(pipeline=pipeline, return_code=response.status_code)) + .format(pipeline=pipeline["name"], return_code=response.status_code)) + print(response.content) else: print("Created pull-request for pipeline \'{pipeline}\' successfully.".format(pipeline=pipeline)) diff --git a/bin/utils.py b/bin/utils.py new file mode 100644 index 0000000000..f5580f97d8 --- /dev/null +++ b/bin/utils.py @@ -0,0 +1,40 @@ +import os +import subprocess + +def fetch_wf_config(wf_path): + """ + Use nextflow to retrieve the nf configuration variables from a workflow + """ + config = dict() + # Call `nextflow config` and pipe stderr to /dev/null + try: + with open(os.devnull, 'w') as devnull: + nfconfig_raw = subprocess.check_output(['nextflow', 'config', '-flat', wf_path], stderr=devnull) + except subprocess.CalledProcessError as e: + raise AssertionError("`nextflow config` returned non-zero error code: %s,\n %s", e.returncode, e.output) + else: + for l in nfconfig_raw.splitlines(): + ul = l.decode() + k, v = ul.split(' = ', 1) + config[k] = v + return config + +def create_context(config): + """Consumes a flat Nextflow config file and will create + a context dictionary with information for the nf-core template creation. 
+ + Returns: A dictionary with: + { + 'pipeline_name': '' + 'pipeline_short_description': '' + 'new_version': '' + } + """ + context = {} + context["name"] = config.get("manifest.name") if config.get("manifest.name") else get_name_from_url(config.get("manifest.homePage")) + context["description"] = config.get("manifest.description") + context["version"] = config.get("manifest.version") if config.get("manifest.version") else config.get("params.version") + return context + +def get_name_from_url(url): + return url.split("/")[-1] if url else "" From f78a175c5a7a9a227b422b79a80b36788e4cc0d2 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Thu, 9 Aug 2018 14:01:05 +0200 Subject: [PATCH 32/75] Remove last warning for template update --- .../{{cookiecutter.pipeline_slug}}/environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml index bf2b38fbda..f853553f7d 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml @@ -5,4 +5,4 @@ channels: - defaults dependencies: - fastqc=0.11.7 - - multiqc=1.5 + - multiqc=1.6 From 3c20fb0358bc5ff358d9e5aef314abefe9cc8e87 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Thu, 9 Aug 2018 14:01:40 +0200 Subject: [PATCH 33/75] Get rid of last pipeline linting warning by using newest MQC --- .../{{cookiecutter.pipeline_slug}}/environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml index bf2b38fbda..f853553f7d 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml @@ -5,4 +5,4 @@ 
channels: - defaults dependencies: - fastqc=0.11.7 - - multiqc=1.5 + - multiqc=1.6 From 575fa2d8aadf877562d9a51e11be02b47b88d016 Mon Sep 17 00:00:00 2001 From: sven1103 Date: Thu, 9 Aug 2018 14:42:26 +0200 Subject: [PATCH 34/75] Overwrite template content from new version --- bin/broadcast_prs.py | 28 ++++++++++++++++++++++------ bin/utils.py | 6 +++--- 2 files changed, 25 insertions(+), 9 deletions(-) diff --git a/bin/broadcast_prs.py b/bin/broadcast_prs.py index 6f77861674..6beeb1d157 100755 --- a/bin/broadcast_prs.py +++ b/bin/broadcast_prs.py @@ -6,17 +6,19 @@ import os import requests from requests.auth import HTTPBasicAuth +import shutil import sys import subprocess import tempfile import utils -#run_cookiecutter(self.pipeline_name, self.pipeline_description, self.pipeline_version) - # The GitHub base url or the nf-core project GH_BASE_URL = "https://github.com/nf-core" # The current cookiecutter template url for nf-core pipelines -NF_CORE_TEMPLATE = "" +NF_CORE_TEMPLATE = os.path.join( + os.path.dirname( + os.path.dirname(os.path.realpath(__file__)) + ), "nf_core/pipeline-template") # The JSON file is updated on every push event on the nf-core GitHub project NF_CORE_PIPELINE_INFO = "http://nf-co.re/pipelines.json" # The API endpoint for creating pull requests @@ -41,6 +43,7 @@ def __init__(self, pipeline, branch='TEMPLATE'): pipeline=pipeline) self.branch = branch self.tmpdir = tempfile.mkdtemp() + self.templatedir = tempfile.mkdtemp() self.repo = None def run(self): @@ -53,7 +56,6 @@ def run(self): self._commit_changes() self._push_changes() - def _clone_repo(self): """Clone the repo and switch to the configured branch. 
""" @@ -72,8 +74,22 @@ def _apply_changes(self, context): no_input=True, extra_context=context, overwrite_if_exists=True, - output_dir=self.tmpdir) - + output_dir=self.templatedir) + # Clear the template branch content + for f in os.listdir(self.tmpdir): + if f == ".git": continue + try: + shutil.rmtree(os.path.join(self.tmpdir, f)) + except: + os.remove(os.path.join(self.tmpdir, f)) + # Move the new template content into the template branch + template_path = os.path.join(self.templatedir, self.pipeline) + for f in os.listdir(template_path): + shutil.move( + os.path.join(template_path, f), # src + os.path.join(self.tmpdir, f), # dest + ) + def _commit_changes(self): """Commits the changes of the new template to the current branch. """ diff --git a/bin/utils.py b/bin/utils.py index f5580f97d8..0a536b6c78 100644 --- a/bin/utils.py +++ b/bin/utils.py @@ -16,7 +16,7 @@ def fetch_wf_config(wf_path): for l in nfconfig_raw.splitlines(): ul = l.decode() k, v = ul.split(' = ', 1) - config[k] = v + config[k] = v.replace("\'", "").replace("\"", "") return config def create_context(config): @@ -31,8 +31,8 @@ def create_context(config): } """ context = {} - context["name"] = config.get("manifest.name") if config.get("manifest.name") else get_name_from_url(config.get("manifest.homePage")) - context["description"] = config.get("manifest.description") + context["pipeline_name"] = config.get("manifest.name") if config.get("manifest.name") else get_name_from_url(config.get("manifest.homePage")) + context["pipeline_short_description"] = config.get("manifest.description") context["version"] = config.get("manifest.version") if config.get("manifest.version") else config.get("params.version") return context From d55023d302eac8617957833f34e13b3c4a8fa4a2 Mon Sep 17 00:00:00 2001 From: sven1103 Date: Thu, 9 Aug 2018 14:51:05 +0200 Subject: [PATCH 35/75] Assert for travis tag and add comments --- bin/broadcast_prs.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/bin/broadcast_prs.py 
b/bin/broadcast_prs.py index 6beeb1d157..055ea70cfd 100755 --- a/bin/broadcast_prs.py +++ b/bin/broadcast_prs.py @@ -118,14 +118,19 @@ def create_pullrequest(pipeline, origin="master", template="TEMPLATE", token="", auth=HTTPBasicAuth(user, token)) def main(): + # Check that the commit event is a GitHub tag event + assert os.environ('TRAVIS_TAG') + # Get nf-core pipelines info res = requests.get(NF_CORE_PIPELINE_INFO) pipelines = json.loads(res.content).get('remote_workflows') if not pipelines: print("Pipeline information was empty!") pipelines = [{"name":"hlatyping"}] # just for testing + # Update the template branch of each pipeline repo for pipeline in pipelines: UpdateTemplate(pipeline['name']).run() + # Create a pull request from each template branch to the origin branch for pipeline in pipelines: print("Trying to open pull request for pipeline {}...".format(pipeline['name'])) response = create_pullrequest(pipeline['name'], token="117962f70c156268d02a8b8f42be04bf7676141e") From 9dceee9e149d3bba70b643d4a34deeac65c2de94 Mon Sep 17 00:00:00 2001 From: jemten Date: Thu, 9 Aug 2018 15:00:50 +0200 Subject: [PATCH 36/75] Add workflow summary to MultiQC --- .../{{cookiecutter.pipeline_slug}}/main.nf | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf index eeaf025fb0..d02a727bc3 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf @@ -160,6 +160,25 @@ log.info summary.collect { k,v -> "${k.padRight(15)}: $v" }.join("\n") log.info "=========================================" +def create_workflow_summary(summary) { + + def yaml_file = workDir.resolve('workflow_summary_mqc.yaml') + yaml_file.text = """ + id: '{{ cookiecutter.pipeline_name }}-summary' + description: " - this information is collected when the pipeline is started." 
+ section_name: 'nfcore/{{ cookiecutter.pipeline_name }} Workflow Summary' + section_href: 'https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}' + plot_type: 'html' + data: | +
+${summary.collect { k,v -> "
$k
${v ?: 'N/A'}
" }.join("\n")} +
+ """.stripIndent() + + return yaml_file +} + + // Check that Nextflow version is up to date enough // try / throw / catch works for NF versions < 0.25 when this was implemented try { @@ -227,6 +246,7 @@ process multiqc { file multiqc_config file ('fastqc/*') from fastqc_results.collect() file ('software_versions/*') from software_versions_yaml + file workflow_summary from create_workflow_summary(summary) output: file "*multiqc_report.html" into multiqc_report From ac8690f07ef44f96b031a7b958dd8d85883aad21 Mon Sep 17 00:00:00 2001 From: sven1103 Date: Thu, 9 Aug 2018 15:04:46 +0200 Subject: [PATCH 37/75] Provide Travis Tag in PR --- bin/broadcast_prs.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/bin/broadcast_prs.py b/bin/broadcast_prs.py index 055ea70cfd..18f24802d3 100755 --- a/bin/broadcast_prs.py +++ b/bin/broadcast_prs.py @@ -108,9 +108,10 @@ def create_pullrequest(pipeline, origin="master", template="TEMPLATE", token="", Returns: An instance of class requests.Response """ content = {} - content['title'] = "Important pipeline nf-core update!" + content['title'] = "Important pipeline nf-core update! (version {tag})".format(tag=os.environ['TRAVIS_TAG']) content['body'] = "Some important changes have been made in the nf-core pipelines templates.\n" \ - "Please make sure to merge this in ASAP and make a new minor release of your pipeline." 
+ "Please make sure to merge this in ASAP and make a new minor release of your pipeline.\n\n" \ + "Follow the link [nf-core/tools](https://github.com/nf-core/tools/releases/tag/{}".format(os.environ['TRAVIS_TAG']) content['head'] = "{}".format(template) content['base'] = origin return requests.post(url=GITHUB_PR_URL_TEMPL.format(pipeline=pipeline), @@ -119,7 +120,7 @@ def create_pullrequest(pipeline, origin="master", template="TEMPLATE", token="", def main(): # Check that the commit event is a GitHub tag event - assert os.environ('TRAVIS_TAG') + assert os.environ['TRAVIS_TAG'] # Get nf-core pipelines info res = requests.get(NF_CORE_PIPELINE_INFO) pipelines = json.loads(res.content).get('remote_workflows') @@ -133,7 +134,7 @@ def main(): # Create a pull request from each template branch to the origin branch for pipeline in pipelines: print("Trying to open pull request for pipeline {}...".format(pipeline['name'])) - response = create_pullrequest(pipeline['name'], token="117962f70c156268d02a8b8f42be04bf7676141e") + response = create_pullrequest(pipeline['name'], token="1cd1cb0721c246346f97e6af38a8332c747a2f79") if response.status_code != 201: print("Pull-request for pipeline \'{pipeline}\' failed," " got return code {return_code}." From 38c7e41da373f6358e2a572b52c5faa34d7319fe Mon Sep 17 00:00:00 2001 From: sven1103 Date: Thu, 9 Aug 2018 15:08:50 +0200 Subject: [PATCH 38/75] Fix broken markdown syntax --- bin/broadcast_prs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bin/broadcast_prs.py b/bin/broadcast_prs.py index 18f24802d3..5c88fd34f3 100755 --- a/bin/broadcast_prs.py +++ b/bin/broadcast_prs.py @@ -111,7 +111,7 @@ def create_pullrequest(pipeline, origin="master", template="TEMPLATE", token="", content['title'] = "Important pipeline nf-core update! 
(version {tag})".format(tag=os.environ['TRAVIS_TAG']) content['body'] = "Some important changes have been made in the nf-core pipelines templates.\n" \ "Please make sure to merge this in ASAP and make a new minor release of your pipeline.\n\n" \ - "Follow the link [nf-core/tools](https://github.com/nf-core/tools/releases/tag/{}".format(os.environ['TRAVIS_TAG']) + "Follow the link [nf-core/tools](https://github.com/nf-core/tools/releases/tag/{})".format(os.environ['TRAVIS_TAG']) content['head'] = "{}".format(template) content['base'] = origin return requests.post(url=GITHUB_PR_URL_TEMPL.format(pipeline=pipeline), @@ -134,7 +134,7 @@ def main(): # Create a pull request from each template branch to the origin branch for pipeline in pipelines: print("Trying to open pull request for pipeline {}...".format(pipeline['name'])) - response = create_pullrequest(pipeline['name'], token="1cd1cb0721c246346f97e6af38a8332c747a2f79") + response = create_pullrequest(pipeline['name'], token="") if response.status_code != 201: print("Pull-request for pipeline \'{pipeline}\' failed," " got return code {return_code}." 
From ce0d6e1edf6c9083970a256c83cab2ba1cf0e84d Mon Sep 17 00:00:00 2001 From: jemten Date: Thu, 9 Aug 2018 15:00:50 +0200 Subject: [PATCH 39/75] Add workflow summary to MultiQC --- .../{{cookiecutter.pipeline_slug}}/main.nf | 36 +++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf index b243ce35ac..d02a727bc3 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf @@ -159,6 +159,41 @@ if(params.email) summary['E-mail Address'] = params.email log.info summary.collect { k,v -> "${k.padRight(15)}: $v" }.join("\n") log.info "=========================================" + +def create_workflow_summary(summary) { + + def yaml_file = workDir.resolve('workflow_summary_mqc.yaml') + yaml_file.text = """ + id: '{{ cookiecutter.pipeline_name }}-summary' + description: " - this information is collected when the pipeline is started." + section_name: 'nfcore/{{ cookiecutter.pipeline_name }} Workflow Summary' + section_href: 'https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}' + plot_type: 'html' + data: | +
+        <dl class="dl-horizontal">
+${summary.collect { k,v -> "            <dt>$k</dt><dd>${v ?: '<span style=\"color:#999999;\">N/A</a>'}</dd>" }.join("\n")}
+        </dl>
+ """.stripIndent() + + return yaml_file +} + + +// Check that Nextflow version is up to date enough +// try / throw / catch works for NF versions < 0.25 when this was implemented +try { + if( ! nextflow.version.matches(">= $params.nf_required_version") ){ + throw GroovyException('Nextflow version too old') + } +} catch (all) { + log.error "====================================================\n" + + " Nextflow version $params.nf_required_version required! You are running v$workflow.nextflow.version.\n" + + " Pipeline execution will continue, but things may break.\n" + + " Please run `nextflow self-update` to update Nextflow.\n" + + "============================================================" +} + + /* * Parse software version numbers */ @@ -211,6 +246,7 @@ process multiqc { file multiqc_config file ('fastqc/*') from fastqc_results.collect() file ('software_versions/*') from software_versions_yaml + file workflow_summary from create_workflow_summary(summary) output: file "*multiqc_report.html" into multiqc_report From ff8e8a2c782cbde27496f93611e891748d367477 Mon Sep 17 00:00:00 2001 From: sven1103 Date: Thu, 9 Aug 2018 15:28:26 +0200 Subject: [PATCH 40/75] Use bot token for authenticated services --- bin/broadcast_prs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bin/broadcast_prs.py b/bin/broadcast_prs.py index 5c88fd34f3..417430855b 100755 --- a/bin/broadcast_prs.py +++ b/bin/broadcast_prs.py @@ -121,6 +121,7 @@ def create_pullrequest(pipeline, origin="master", template="TEMPLATE", token="", def main(): # Check that the commit event is a GitHub tag event assert os.environ['TRAVIS_TAG'] + assert os.environ['NF_CORE_BOT'] # Get nf-core pipelines info res = requests.get(NF_CORE_PIPELINE_INFO) pipelines = json.loads(res.content).get('remote_workflows') @@ -134,7 +135,7 @@ def main(): # Create a pull request from each template branch to the origin branch for pipeline in pipelines: print("Trying to open pull request for pipeline 
{}...".format(pipeline['name'])) - response = create_pullrequest(pipeline['name'], token="") + response = create_pullrequest(pipeline['name'], token=os.environ["NF_CORE_BOT"]) if response.status_code != 201: print("Pull-request for pipeline \'{pipeline}\' failed," " got return code {return_code}." From 5dc24e8c6dd0b82de480dff710614ebdf210a26b Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Thu, 9 Aug 2018 15:47:06 +0200 Subject: [PATCH 41/75] Probably a line feed error --- .../{{cookiecutter.pipeline_slug}}/docs/usage.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md index 21bac49f03..cd96293181 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md @@ -190,5 +190,5 @@ Set to receive plain-text e-mails instead of HTML formatted. ### `--sampleLevel` Used to turn off the edgeR MDS and heatmap. Set automatically when running on fewer than 3 samples. -### `--multiqc_config` +### `--multiqc_config` If you would like to supply a custom config file to MultiQC, you can specify a path with `--multiqc_config`. This is used instead of the config file specific to the pipeline.
From a141b3d73057949775ce56f17dd70ef5d4ca9beb Mon Sep 17 00:00:00 2001 From: sven1103 Date: Thu, 9 Aug 2018 16:22:17 +0200 Subject: [PATCH 42/75] Tidy up --- bin/broadcast_prs.py | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/bin/broadcast_prs.py b/bin/broadcast_prs.py index 417430855b..dad63592ba 100755 --- a/bin/broadcast_prs.py +++ b/bin/broadcast_prs.py @@ -13,7 +13,7 @@ import utils # The GitHub base url or the nf-core project -GH_BASE_URL = "https://github.com/nf-core" +GH_BASE_URL = "https://{token}@github.com/nf-core" # The current cookiecutter template url for nf-core pipelines NF_CORE_TEMPLATE = os.path.join( os.path.dirname( @@ -30,16 +30,16 @@ class UpdateTemplate: its `TEMPLATE` branch. Args: - pipeline: The pipeline name - - context: a parsed dictionary of a cookiecutter.json file - branch: The template branch name, default=`TEMPLATE` + - token: GitHub auth token """ - def __init__(self, pipeline, branch='TEMPLATE'): + def __init__(self, pipeline, branch='TEMPLATE', token=""): """Basic constructor """ self.pipeline = pipeline self.repo_url = "{base_url}/{pipeline}".format( - base_url=GH_BASE_URL, + base_url=GH_BASE_URL.format(token=token), pipeline=pipeline) self.branch = branch self.tmpdir = tempfile.mkdtemp() @@ -61,7 +61,6 @@ def _clone_repo(self): """ self.repo = git.Repo.clone_from(self.repo_url, self.tmpdir) config = utils.fetch_wf_config(wf_path=self.tmpdir) - print(self.branch) self.repo.git.checkout("origin/{branch}".format(branch=self.branch), b="{branch}".format(branch=self.branch)) return config @@ -97,12 +96,11 @@ def _commit_changes(self): self.repo.index.commit("Update nf-core pipeline template.") def _push_changes(self): - print("Push it...") - #self.repo.git.push() # we need a different way to push otherwise it will ask for credentials + self.repo.git.push() -def create_pullrequest(pipeline, origin="master", template="TEMPLATE", token="", user="nf-core"): - """Create a pull request to a 
base branch (default: master), +def create_pullrequest(pipeline, origin="dev", template="TEMPLATE", token="", user="nf-core"): + """Create a pull request to a base branch (default: dev), from a head branch (default: TEMPLATE) Returns: An instance of class requests.Response @@ -127,10 +125,14 @@ def main(): pipelines = json.loads(res.content).get('remote_workflows') if not pipelines: print("Pipeline information was empty!") + + # TODO: Remove this line, once we go for production pipelines = [{"name":"hlatyping"}] # just for testing + # Update the template branch of each pipeline repo for pipeline in pipelines: - UpdateTemplate(pipeline['name']).run() + print("Update template branch for pipeline '{pipeline}'... ".format(pipeline=pipeline['name'])) + UpdateTemplate(pipeline['name'], token=os.environ['NF_CORE_BOT']).run() # Create a pull request from each template branch to the origin branch for pipeline in pipelines: @@ -142,7 +144,8 @@ def main(): .format(pipeline=pipeline["name"], return_code=response.status_code)) print(response.content) else: - print("Created pull-request for pipeline \'{pipeline}\' successfully.".format(pipeline=pipeline)) + print("Created pull-request for pipeline \'{pipeline}\' successfully." 
+ .format(pipeline=pipeline["name"])) if __name__ == "__main__": main() From 3a235bb051291b07de090090488a8a37c75656ae Mon Sep 17 00:00:00 2001 From: jemten Date: Thu, 9 Aug 2018 16:24:31 +0200 Subject: [PATCH 43/75] cookiecutter.pipeline_name -> cookiecutter.pipeline_slug --- .../pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf index d02a727bc3..9fcf2d3330 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf @@ -166,7 +166,7 @@ def create_workflow_summary(summary) { yaml_file.text = """ id: '{{ cookiecutter.pipeline_name }}-summary' description: " - this information is collected when the pipeline is started." - section_name: 'nfcore/{{ cookiecutter.pipeline_name }} Workflow Summary' + section_name: 'nfcore/{{ cookiecutter.pipeline_slug }} Workflow Summary' section_href: 'https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}' plot_type: 'html' data: | From 0618869504657b36e9e48e37c7228dfa8d845219 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 9 Aug 2018 16:37:56 +0200 Subject: [PATCH 44/75] Upate linting and tests for new travis docker tag hack --- nf_core/lint.py | 13 +++++++++++-- .../{{cookiecutter.pipeline_slug}}/.travis.yml | 2 +- nf_core/release.py | 6 +++--- .../minimal_working_example/.travis.yml | 2 ++ tests/test_lint.py | 2 +- 5 files changed, 18 insertions(+), 7 deletions(-) diff --git a/nf_core/lint.py b/nf_core/lint.py index 1f217246c1..b7a42d1354 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -369,8 +369,8 @@ def check_ci_config(self): self.passed.append((5, "Continuous integration runs nf-core lint Tests: '{}'".format(fn))) # Check that we're pulling the right docker image if self.config.get('params.container'): - docker_pull_cmd = 
'docker pull {}'.format(self.config['params.container'].strip('"\'')) - docker_pull_cmd = re.sub(r':(?:[\.\d]+|latest)$', '', docker_pull_cmd) + docker_notag = re.sub(r':(?:[\.\d]+|latest)$', '', self.config['params.container'].strip('"\'')) + docker_pull_cmd = 'docker pull {}'.format(docker_notag) try: assert(docker_pull_cmd in ciconf.get('before_install')) except AssertionError: @@ -378,6 +378,15 @@ def check_ci_config(self): else: self.passed.append((5, "CI is pulling the correct docker image: {}".format(docker_pull_cmd))) + # Check that we tag the docker image properly + docker_tag_cmd = 'docker tag {} {}'.format(docker_notag, self.config['params.container'].strip('"\'')) + try: + assert(docker_tag_cmd in ciconf.get('before_install')) + except AssertionError: + self.failed.append((5, "CI is not tagging docker image correctly. Should be:\n '{}'".format(docker_tag_cmd))) + else: + self.passed.append((5, "CI is tagging docker image correctly: {}".format(docker_tag_cmd))) + # Check that we're testing the minimum nextflow version minNextflowVersion_tested = False env = ciconf.get('env', []) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml index 9b1e8ffbd3..3454bb8528 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml @@ -12,7 +12,7 @@ matrix: before_install: # Pull the docker image first so the test doesn't wait for this - docker pull nfcore/{{ cookiecutter.pipeline_slug }} - # Fake the tag locally so that the pipeline runs properly + # Fake the tag locally so that the pipeline runs properly - docker tag nfcore/{{ cookiecutter.pipeline_slug }} nfcore/{{ cookiecutter.pipeline_slug }}:{{ cookiecutter.version }} install: diff --git a/nf_core/release.py b/nf_core/release.py index abc1759ab6..d3730f27e5 100644 --- a/nf_core/release.py +++ b/nf_core/release.py @@ 
-34,9 +34,9 @@ def make_release(lint_obj, new_version): nfconfig_newstr = "container = 'nfcore/{}:{}'".format(lint_obj.pipeline_name.lower(), docker_tag) update_file_version("nextflow.config", lint_obj, nfconfig_pattern, nfconfig_newstr) - # Update travis image pull - nfconfig_pattern = r"docker pull nfcore/{}:(?:{}|latest)".format(lint_obj.pipeline_name.lower(), current_version.replace('.','\.')) - nfconfig_newstr = "docker pull nfcore/{}:{}".format(lint_obj.pipeline_name.lower(), docker_tag) + # Update travis image tag + nfconfig_pattern = r"docker tag nfcore/{name} nfcore/{name}:(?:{tag}|latest)".format(name=lint_obj.pipeline_name.lower(), tag=current_version.replace('.','\.')) + nfconfig_newstr = "docker tag nfcore/{name} nfcore/{name}:{tag}".format(name=lint_obj.pipeline_name.lower(), tag=docker_tag) update_file_version(".travis.yml", lint_obj, nfconfig_pattern, nfconfig_newstr) # Update Singularity version name diff --git a/tests/lint_examples/minimal_working_example/.travis.yml b/tests/lint_examples/minimal_working_example/.travis.yml index 9fc7d635c3..47f3a1c8c0 100644 --- a/tests/lint_examples/minimal_working_example/.travis.yml +++ b/tests/lint_examples/minimal_working_example/.travis.yml @@ -12,6 +12,8 @@ matrix: before_install: # Pull the docker image first so the test doesn't wait for this - docker pull nfcore/tools + # Fake the tag locally so that the pipeline runs properly + - docker tag nfcore/tools nfcore/tools:0.4 install: # Install Nextflow diff --git a/tests/test_lint.py b/tests/test_lint.py index e1af57d210..5c98255241 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -38,7 +38,7 @@ def pf(wd, path): pf(WD, 'lint_examples/license_incomplete_example')] # The maximum sum of passed tests currently possible -MAX_PASS_CHECKS = 57 +MAX_PASS_CHECKS = 58 # The additional tests passed for releases ADD_PASS_RELEASE = 1 From ecb93b68ee21feaf735b000fb766460fdcfcdf62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hadrien=20Gourl=C3=A9?= Date: Thu, 9 Aug 
2018 16:42:25 +0200 Subject: [PATCH 45/75] reverse conda env handling (fix for #114) --- nf_core/lint.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/nf_core/lint.py b/nf_core/lint.py index 8d77e9e397..ddf9cdca96 100644 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -557,7 +557,8 @@ def check_conda_dockerfile(self): expected_strings = [ 'FROM nfcore/base', 'COPY environment.yml /', - 'RUN conda env update -n root -f /environment.yml && conda clean -a' + 'RUN conda env create -f /environment.yml && conda clean -a', + 'ENV PATH /opt/conda/envs/{}/bin:$PATH'.format(self.conda_config['name']) ] difference = set(expected_strings) - set(self.dockerfile) @@ -581,8 +582,10 @@ def check_conda_singularityfile(self): 'From:nfcore/base', 'Bootstrap:docker', 'VERSION {}'.format(self.config['params.version'].strip(' \'"')), + 'PATH=/opt/conda/envs/{}/bin:$PATH'.format(self.conda_config['name']), + 'export PATH', 'environment.yml /', - '/opt/conda/bin/conda env update -n root -f /environment.yml', + '/opt/conda/bin/conda env create -f /environment.yml', '/opt/conda/bin/conda clean -a', ] From 943e09e94e954dc7d78be1bec2dc151188d92a79 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 9 Aug 2018 16:42:28 +0200 Subject: [PATCH 46/75] Updated template to tag docker as latest in travis --- .../{{cookiecutter.pipeline_slug}}/.travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml index 3454bb8528..1155c4091e 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml @@ -13,7 +13,7 @@ before_install: # Pull the docker image first so the test doesn't wait for this - docker pull nfcore/{{ cookiecutter.pipeline_slug }} # Fake the tag locally so that the pipeline runs properly - - docker tag nfcore/{{ 
cookiecutter.pipeline_slug }} nfcore/{{ cookiecutter.pipeline_slug }}:{{ cookiecutter.version }} + - docker tag nfcore/{{ cookiecutter.pipeline_slug }} nfcore/{{ cookiecutter.pipeline_slug }}:latest install: # Install Nextflow From 13e4878c622d8269bf3b4c7574607f0b04c53392 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 9 Aug 2018 16:43:25 +0200 Subject: [PATCH 47/75] Deleted now removed nextflow version check warning --- .../{{cookiecutter.pipeline_slug}}/main.nf | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf index 9fcf2d3330..3daecccd68 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf @@ -179,21 +179,6 @@ ${summary.collect { k,v -> "
$k
${v ?: '= $params.nf_required_version") ){ - throw GroovyException('Nextflow version too old') - } -} catch (all) { - log.error "====================================================\n" + - " Nextflow version $params.nf_required_version required! You are running v$workflow.nextflow.version.\n" + - " Pipeline execution will continue, but things may break.\n" + - " Please run `nextflow self-update` to update Nextflow.\n" + - "============================================================" -} - - /* * Parse software version numbers */ From f6f401138413b2c54b12760a6d81a46e6ad950e4 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 9 Aug 2018 16:46:10 +0200 Subject: [PATCH 48/75] Minor tweaks --- .../pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf index 3daecccd68..c118427a27 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf @@ -164,9 +164,9 @@ def create_workflow_summary(summary) { def yaml_file = workDir.resolve('workflow_summary_mqc.yaml') yaml_file.text = """ - id: '{{ cookiecutter.pipeline_name }}-summary' + id: 'nf-core-{{ cookiecutter.pipeline_slug }}-summary' description: " - this information is collected when the pipeline is started." 
- section_name: 'nfcore/{{ cookiecutter.pipeline_slug }} Workflow Summary' + section_name: 'nf-core/{{ cookiecutter.pipeline_name }} Workflow Summary' section_href: 'https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}' plot_type: 'html' data: | From 6936fb35a5e4c6c575721ef0bcbcbe70f0620b9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hadrien=20Gourl=C3=A9?= Date: Fri, 10 Aug 2018 08:42:23 +0200 Subject: [PATCH 49/75] update test files --- tests/lint_examples/minimal_working_example/Dockerfile | 3 ++- tests/lint_examples/minimal_working_example/Singularity | 6 +++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/lint_examples/minimal_working_example/Dockerfile b/tests/lint_examples/minimal_working_example/Dockerfile index 1ed80a41d7..f2ebb853e0 100644 --- a/tests/lint_examples/minimal_working_example/Dockerfile +++ b/tests/lint_examples/minimal_working_example/Dockerfile @@ -4,4 +4,5 @@ LABEL authors="phil.ewels@scilifelab.se" \ description="Docker image containing all requirements for the nf-core tools pipeline" COPY environment.yml / -RUN conda env update -n root -f /environment.yml && conda clean -a +RUN conda env create -f /environment.yml && conda clean -a +ENV PATH /opt/conda/envs/nfcore-tools-0.4/bin:$PATH diff --git a/tests/lint_examples/minimal_working_example/Singularity b/tests/lint_examples/minimal_working_example/Singularity index 5935e8edf0..c9d1026f26 100644 --- a/tests/lint_examples/minimal_working_example/Singularity +++ b/tests/lint_examples/minimal_working_example/Singularity @@ -6,9 +6,13 @@ Bootstrap:docker DESCRIPTION Container image containing all requirements for the nf-core/tools pipeline VERSION 0.4 +%environment + PATH=/opt/conda/envs/nfcore-tools-0.4/bin:$PATH + export PATH + %files environment.yml / %post - /opt/conda/bin/conda env update -n root -f /environment.yml + /opt/conda/bin/conda env create -f /environment.yml /opt/conda/bin/conda clean -a From 9b4329741b6204d1bd92a17b05f832dce128a86a Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Hadrien=20Gourl=C3=A9?= Date: Fri, 10 Aug 2018 09:20:55 +0200 Subject: [PATCH 50/75] more tests are expected to fail (due to ENV line) --- tests/test_lint.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_lint.py b/tests/test_lint.py index 2de9e5e708..06bdca1995 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -321,7 +321,7 @@ def test_conda_dockerfile_fail(self): lint_obj.conda_config['name'] = 'nfcore-tools-0.4' lint_obj.dockerfile = ['fubar'] lint_obj.check_conda_dockerfile() - expectations = {"failed": 3, "warned": 0, "passed": 0} + expectations = {"failed": 4, "warned": 0, "passed": 0} self.assess_lint_status(lint_obj, **expectations) def test_conda_dockerfile_skip(self): From b8eb0512a6ebd1f8c0e207ccc331b53ee1952ec3 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 10 Aug 2018 13:35:02 +0200 Subject: [PATCH 51/75] Fix number of expected passing tests --- tests/test_lint.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_lint.py b/tests/test_lint.py index e915fca078..ceadbb39c2 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -38,7 +38,7 @@ def pf(wd, path): pf(WD, 'lint_examples/license_incomplete_example')] # The maximum sum of passed tests currently possible -MAX_PASS_CHECKS = 59 +MAX_PASS_CHECKS = 60 # The additional tests passed for releases ADD_PASS_RELEASE = 1 @@ -112,14 +112,14 @@ def test_config_variable_example_pass(self): """Tests that config variable existence test works with good pipeline example""" good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) good_lint_obj.check_nextflow_config() - expectations = {"failed": 0, "warned": 0, "passed": 29} + expectations = {"failed": 0, "warned": 0, "passed": 30} self.assess_lint_status(good_lint_obj, **expectations) def test_config_variable_example_with_failed(self): """Tests that config variable existence test fails with bad pipeline example""" bad_lint_obj = 
nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) bad_lint_obj.check_nextflow_config() - expectations = {"failed": 18, "warned": 9, "passed": 3} + expectations = {"failed": 18, "warned": 9, "passed": 4} self.assess_lint_status(bad_lint_obj, **expectations) @pytest.mark.xfail(raises=AssertionError) From 98720368784f840df895216af9dabecf39b354ec Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 10 Aug 2018 13:42:48 +0200 Subject: [PATCH 52/75] Update template files to use reverted conda ENV hack --- .../{{cookiecutter.pipeline_slug}}/Dockerfile | 3 ++- .../{{cookiecutter.pipeline_slug}}/Singularity | 6 +++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile index 05e941b29d..0d589213e3 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile @@ -2,4 +2,5 @@ FROM nfcore/base LABEL description="Docker image containing all requirements for {{ cookiecutter.pipeline_name }} pipeline" COPY environment.yml / -RUN conda env update -n root -f /environment.yml && conda clean -a +RUN conda env create -f /environment.yml && conda clean -a +ENV PATH /opt/conda/envs/nfcore-{{ cookiecutter.pipeline_slug }}-{{ cookiecutter.version }}/bin:$PATH diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity index 15ab3945ea..116ed37d0a 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity @@ -5,9 +5,13 @@ Bootstrap:docker DESCRIPTION Singularity image containing all requirements for {{ cookiecutter.pipeline_name }} pipeline VERSION {{ cookiecutter.version }} +%environment + PATH=/opt/conda/envs/nfcore-{{ cookiecutter.pipeline_slug }}-{{ 
cookiecutter.version }}/bin:$PATH + export PATH + %files environment.yml / %post - /opt/conda/bin/conda env update -n root -f /environment.yml + /opt/conda/bin/conda env create -f /environment.yml /opt/conda/bin/conda clean -a From 1d4e5b5fea44d7d21ca9cca34c3c5af4d9d05340 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 10 Aug 2018 14:36:21 +0200 Subject: [PATCH 53/75] Use .startswith() as it's nicer --- nf_core/lint.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/lint.py b/nf_core/lint.py index ca3dc7be55..97b78c4825 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -325,7 +325,7 @@ def check_nextflow_config(self): # Check that the pipeline name starts with nf-core try: - assert self.config['manifest.name'].strip('\'"')[0:8] == 'nf-core/' + assert self.config['manifest.name'].strip('\'"').startswith('nf-core/') except (AssertionError, IndexError): self.failed.append((4, "Config variable 'manifest.name' did not begin with nf-core/:\n {}".format(self.config['manifest.name'].strip('\'"')))) else: @@ -334,7 +334,7 @@ def check_nextflow_config(self): # Check that the homePage is set to the GitHub URL try: - assert self.config['manifest.homePage'].strip('\'"')[0:27] == 'https://github.com/nf-core/' + assert self.config['manifest.homePage'].startswith('https://github.com/nf-core/') except (AssertionError, IndexError): self.failed.append((4, "Config variable 'manifest.homePage' did not begin with https://github.com/nf-core/:\n {}".format(self.config['manifest.homePage'].strip('\'"')))) else: From 1a49f42b7aacbe2b1114f8c1865c3769483c42b7 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 10 Aug 2018 14:49:25 +0200 Subject: [PATCH 54/75] Added back the quote mark stripping --- nf_core/lint.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/lint.py b/nf_core/lint.py index 97b78c4825..c4588450d9 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -334,7 +334,7 @@ def check_nextflow_config(self): # Check 
that the homePage is set to the GitHub URL try: - assert self.config['manifest.homePage'].startswith('https://github.com/nf-core/') + assert self.config['manifest.homePage'].strip('\'"').startswith('https://github.com/nf-core/') except (AssertionError, IndexError): self.failed.append((4, "Config variable 'manifest.homePage' did not begin with https://github.com/nf-core/:\n {}".format(self.config['manifest.homePage'].strip('\'"')))) else: From cebe343274adccc633ae5de700baa9788f9a44ed Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sun, 12 Aug 2018 10:08:49 +0200 Subject: [PATCH 55/75] Phil's attempt at failing master merges --- .travis.yml | 3 +++ .../{{cookiecutter.pipeline_slug}}/.travis.yml | 2 ++ 2 files changed, 5 insertions(+) diff --git a/.travis.yml b/.travis.yml index 0a8b1b5df8..d4e9fc0400 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,6 +6,9 @@ python: - '3.4' - '3.5' - '3.6' +before_install: + # PRs made to 'master' branch should always originate from another repo or the 'dev' branch + - [ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ]) install: # Install Nextflow - mkdir /tmp/nextflow diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml index 1155c4091e..0391c99627 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml @@ -10,6 +10,8 @@ matrix: fast_finish: true before_install: + # PRs made to 'master' branch should always originate from another repo or the 'dev' branch + - [ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ]) # Pull the docker image first so the test doesn't wait for this - docker pull nfcore/{{
cookiecutter.pipeline_slug }} # Fake the tag locally so that the pipeline runs properly From 6efb2d32634ce057ed91c9094e13394d98d80728 Mon Sep 17 00:00:00 2001 From: Sofia Haglund Date: Mon, 13 Aug 2018 17:23:35 +0200 Subject: [PATCH 56/75] Updated the documentation for cookiecutter (#123) * tried to improve the doc for cookiecutter by using the doc for methylseq as a template * fixed some typos * tried to fix some links not working * further link fixing * further fixing * minor fixes * fixing merging issues * tried to fix vis error * tried to fix error * deleted remains from merging conflict ONCE MORE * minor fixes * Update README.md Removed new whitespace * Update README.md * Update adding_your_own.md * Update local.md --- .../docs/configuration/adding_your_own.md | 21 +--- .../docs/configuration/local.md | 10 +- .../docs/configuration/reference_genomes.md | 4 +- .../docs/installation.md | 107 +++++++++++++++--- .../docs/output.md | 9 +- .../docs/troubleshooting.md | 2 +- .../docs/usage.md | 46 +++++++- 7 files changed, 156 insertions(+), 43 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md index 4f831d48f8..c8ca8d81bd 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md @@ -43,10 +43,10 @@ First, install docker on your system: [Docker Installation Instructions](https:/ Then, simply run the analysis pipeline: ```bash -nextflow run nf-core/{{ cookiecutter.pipeline_name }} -profile docker --reads '' +nextflow run nf-core/{{ cookiecutter.pipeline_slug }} -profile docker --reads '' ``` -Nextflow will recognise `nf-core/{{ cookiecutter.pipeline_name }}` and download the pipeline from GitHub. 
The `-profile docker` configuration lists the [{{ cookiecutter.pipeline_slug }}](https://hub.docker.com/r/{{ cookiecutter.pipeline_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. +Nextflow will recognise `nf-core/{{ cookiecutter.pipeline_slug }}` and download the pipeline from GitHub. The `-profile docker` configuration lists the [{{ cookiecutter.pipeline_slug }}](https://hub.docker.com/r/nfcore/{{ cookiecutter.pipeline_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. The public docker images are tagged with the same version numbers as the code, which you can use to ensure reproducibility. When running the pipeline, specify the pipeline version with `-r`, for example `-r v1.3`. This uses pipeline code and docker image from this tagged version. @@ -56,19 +56,13 @@ To add docker support to your own config file (instead of using the `docker` pro docker { enabled = true } -process { - container = wf_container -} ``` -The variable `wf_container` is defined dynamically and automatically specifies the image tag if Nextflow is running with `-r`. - -A test suite for docker comes with the pipeline, and can be run by moving to the [`tests` directory](https://github.com/nf-core/{{ cookiecutter.pipeline_name }}/tree/master/tests) and running `./run_test.sh`. This will download a small yeast genome and some data, and attempt to run the pipeline through docker on that small dataset. This is automatically run using [Travis](https://travis-ci.org/nf-core/{{ cookiecutter.pipeline_name }}/) whenever changes are made to the pipeline. ### Singularity image Many HPC environments are not able to run Docker due to security issues. [Singularity](http://singularity.lbl.gov/) is a tool designed to run on such HPC systems which is very similar to Docker. Even better, it can use create images directly from dockerhub. 
-To use the singularity image for a single run, use `-with-singularity 'docker://nf-core/{{ cookiecutter.pipeline_name }}'`. This will download the docker container from dockerhub and create a singularity image for you dynamically. +To use the singularity image for a single run, use `-profile standard,singularity`. This will download the singularity container from singularity hub dynamically. To specify singularity usage in your pipeline config file, add the following: @@ -76,25 +70,20 @@ To specify singularity usage in your pipeline config file, add the following: singularity { enabled = true } -process { - container = "docker://$wf_container" -} ``` -The variable `wf_container` is defined dynamically and automatically specifies the image tag if Nextflow is running with `-r`. - If you intend to run the pipeline offline, nextflow will not be able to automatically download the singularity image for you. Instead, you'll have to do this yourself manually first, transfer the image file and then point to that. 
First, pull the image file where you have an internet connection: ```bash -singularity pull --name {{ cookiecutter.pipeline_slug }}.img docker://nf-core/{{ cookiecutter.pipeline_name }} +singularity pull --name nf-core-{{ cookiecutter.pipeline_slug }}.simg shub://nf-core/{{ cookiecutter.pipeline_slug }} ``` Then transfer this file and run the pipeline with this path: ```bash -nextflow run /path/to/{{ cookiecutter.pipeline_slug }} -with-singularity /path/to/{{ cookiecutter.pipeline_slug }}.img +nextflow run /path/to/{{ cookiecutter.pipeline_slug }} -with-singularity /path/to/nf-core-{{ cookiecutter.pipeline_slug }}.simg ``` diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md index 7ea5364c96..c52ce66a24 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md @@ -11,10 +11,10 @@ First, install docker on your system: [Docker Installation Instructions](https:/ Then, simply run the analysis pipeline: ```bash -nextflow run nf-core/{{ cookiecutter.pipeline_name }} -profile docker --reads '' +nextflow run nf-core/{{ cookiecutter.pipeline_slug }} -profile docker --reads '' ``` -Nextflow will recognise `nf-core/{{ cookiecutter.pipeline_name }}` and download the pipeline from GitHub. The `-profile docker` configuration lists the [{{ cookiecutter.pipeline_slug }}](https://hub.docker.com/r/{{ cookiecutter.pipeline_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. +Nextflow will recognise `nf-core/{{ cookiecutter.pipeline_slug }}` and download the pipeline from GitHub. 
The `-profile docker` configuration lists the [nfcore/{{ cookiecutter.pipeline_slug }}](https://hub.docker.com/r/nfcore/{{ cookiecutter.dockerhub_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. For more information about how to work with reference genomes, see [`docs/configuration/reference_genomes.md`](docs/configuration/reference_genomes.md). @@ -25,18 +25,18 @@ The public docker images are tagged with the same version numbers as the code, w ## Singularity image Many HPC environments are not able to run Docker due to security issues. [Singularity](http://singularity.lbl.gov/) is a tool designed to run on such HPC systems which is very similar to Docker. Even better, it can use create images directly from dockerhub. -To use the singularity image for a single run, use `-with-singularity 'docker://{{ cookiecutter.pipeline_slug }}'`. This will download the docker container from dockerhub and create a singularity image for you dynamically. +To use the singularity image for a single run, use `-profile singularity`. This will download the docker container from singularity hub dynamically. If you intend to run the pipeline offline, nextflow will not be able to automatically download the singularity image for you. Instead, you'll have to do this yourself manually first, transfer the image file and then point to that. 
First, pull the image file where you have an internet connection: ```bash -singularity pull --name {{ cookiecutter.pipeline_slug }}.img docker://{{ cookiecutter.pipeline_slug }} +singularity pull --name nf-core-{{ cookiecutter.pipeline_slug }}.simg shub://nf-core/{{ cookiecutter.pipeline_slug }} ``` Then transfer this file and run the pipeline with this path: ```bash -nextflow run /path/to/{{ cookiecutter.pipeline_name }} -with-singularity /path/to/{{ cookiecutter.pipeline_slug }}.img +nextflow run /path/to/nf-core-{{ cookiecutter.pipeline_slug }} -with-singularity nf-core-{{ cookiecutter.pipeline_slug }}.simg ``` diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/reference_genomes.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/reference_genomes.md index 28aa10b5ca..5874acdd51 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/reference_genomes.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/reference_genomes.md @@ -5,7 +5,7 @@ The {{ cookiecutter.pipeline_name }} pipeline needs a reference genome for align The minimal requirements are a FASTA file. ## Adding paths to a config file -Specifying long paths every time you run the pipeline is a pain. To make this easier, the pipeline comes configured to understand reference genome keywords which correspond to preconfigured paths, meaning that you can just specify `--genome ID` when running the pipeline. +Specifying long paths every time you run the pipeline is a pain. To make this easier, the pipeline comes configured to understand reference genome keywords which correspond to preconfigured paths, meaning that you can just specify `--genome ID` when running the pipeline. Note that this genome key can also be specified in a config file if you always use the same genome. 
@@ -43,4 +43,4 @@ This works best when you have a `profile` set up in the pipeline - see [`nextflo The hosting fees for AWS iGenomes are currently funded by a grant from Amazon. We hope that this work will be extended past the end of the grant expiry date (mid 2018), but we can't be sure at this point. -For more information about the AWS iGenomes, see https://ewels.github.io/AWS-iGenomes/ +For more information about the AWS iGenomes, see https://ewels.github.io/AWS-iGenomes/ \ No newline at end of file diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md index d11c018813..834dbabd1c 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md @@ -1,18 +1,25 @@ -# {{ cookiecutter.pipeline_name }} Installation +# nf-core/{{ cookiecutter.pipeline_slug }} installation -To start using the {{ cookiecutter.pipeline_name }} pipeline, there are three steps described below: +To start using the nf-core/{{ cookiecutter.pipeline_slug }} pipeline, follow the steps below: -1. [Install Nextflow](#install-nextflow) -2. [Install the pipeline](#install-the-pipeline) -3. Configure the pipeline - * [Local installation](configuration/local.md) - * [Adding your own system](configuration/adding_your_own.md) +1. [Install Nextflow](#1-install-nextflow) +2. [Install the pipeline](#2-install-the-pipeline) + * [Automatic](#21-automatic) + * [Offline](#22-offline) + * [Development](#23-development) +3. [Pipeline configuration](#3-pipeline-configuration) + * [Software deps: Docker and Singularity](#31-software-deps-docker-and-singularity) + * [Software deps: Bioconda](#32-software-deps-bioconda) + * [Configuration profiles](#33-configuration-profiles) +4. [Reference genomes](#4-reference-genomes) +5. 
[Appendices](#appendices) + * [Running on UPPMAX](#running-on-uppmax) ## 1) Install NextFlow Nextflow runs on most POSIX systems (Linux, Mac OSX etc). It can be installed by running the following commands: ```bash -# Make sure that Java v7+ is installed: +# Make sure that Java v8+ is installed: java -version # Install Nextflow @@ -24,16 +31,15 @@ mv nextflow ~/bin/ # sudo mv nextflow /usr/local/bin ``` -**You need NextFlow version >= 0.24 to run this pipeline.** +See [nextflow.io](https://www.nextflow.io/) for further instructions on how to install and configure Nextflow. -See [nextflow.io](https://www.nextflow.io/) and [NGI-NextflowDocs](https://github.com/SciLifeLab/NGI-NextflowDocs) for further instructions on how to install and configure Nextflow. +## 2) Install the pipeline -## 2) Install the Pipeline +#### 2.1) Automatic This pipeline itself needs no installation - NextFlow will automatically fetch it from GitHub if `nf-core/{{ cookiecutter.pipeline_name }}` is specified as the pipeline name. -### Offline use - -If you need to run the pipeline on a system with no internet connection, you will need to download the files yourself from GitHub and run them directly: +#### 2.2) Offline +The above method requires an internet connection so that Nextflow can download the pipeline files. If you're running on a system that has no internet connection, you'll need to download and transfer the pipeline files manually: ```bash wget https://github.com/nf-core/{{ cookiecutter.pipeline_name }}/archive/master.zip @@ -47,3 +53,76 @@ To stop nextflow from looking for updates online, you can tell it to run in offl ```bash export NXF_OFFLINE='TRUE' ``` + +#### 2.3) Development + +If you would like to make changes to the pipeline, it's best to make a fork on GitHub and then clone the files. Once cloned you can run the pipeline directly as above. + + +## 3) Pipeline configuration +By default, the pipeline runs with the `standard` configuration profile. 
This uses a number of sensible defaults for process requirements and is suitable for running on a simple (if powerful!) basic server. You can see this configuration in [`conf/base.config`](../conf/base.config). + +Be warned of two important points about this default configuration: + +1. The default profile uses the `local` executor + * All jobs are run in the login session. If you're using a simple server, this may be fine. If you're using a compute cluster, this is bad as all jobs will run on the head node. + * See the [nextflow docs](https://www.nextflow.io/docs/latest/executor.html) for information about running with other hardware backends. Most job scheduler systems are natively supported. +2. Nextflow will expect all software to be installed and available on the `PATH` + +#### 3.1) Software deps: Docker and Singularity +Running the pipeline with the option `-profile singularity` or `-with-docker` tells Nextflow to enable either [Singularity](http://singularity.lbl.gov/) or Docker for this run. An image containing all of the software requirements will be automatically fetched and used (https://hub.docker.com/r/nf-core/{{ cookiecutter.pipeline_slug }}). + +If running offline with Singularity, you'll need to download and transfer the Singularity image first: + +```bash +singularity pull --name nfcore-{{ cookiecutter.pipeline_slug }}-[VERSION].simg shub://nfcore/{{ cookiecutter.pipeline_slug }}:[VERSION] +``` + +Once transferred, use `-profile singularity` but specify the path to the image file: + +```bash +nextflow run /path/to/nf-core-{{ cookiecutter.pipeline_slug }} -profile singularity /path/to/{{ cookiecutter.pipeline_slug }}-[VERSION].simg +``` + +#### 3.2) Software deps: bioconda + +If you're unable to use either Docker or Singularity but you have conda installed, you can use the bioconda environment that comes with the pipeline. 
Running this command will create a new conda environment with all of the required software installed: + +```bash +conda env create -f environment.yml +conda clean -a # Recommended, not essential +source activate nfcore-{{ cookiecutter.pipeline_slug }}-1.3 # Name depends on version +``` + +The [`environment.yml`](../environment.yml) file is packaged with the pipeline. Note that you may need to download this file from the [GitHub project page](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}) if nextflow is automatically fetching the pipeline files. Ensure that the bioconda environment file version matches the pipeline version that you run. + + +#### 3.3) Configuration profiles + +Nextflow can be configured to run on a wide range of different computational infrastructures. In addition to the above pipeline-specific parameters it is likely that you will need to define system-specific options. For more information, please see the [Nextflow documentation](https://www.nextflow.io/docs/latest/). + +Whilst most parameters can be specified on the command line, it is usually sensible to create a configuration file for your environment. + +If you are the only person to be running this pipeline, you can create your config file as `~/.nextflow/config` and it will be applied every time you run Nextflow. Alternatively, save the file anywhere and reference it when running the pipeline with `-c path/to/config`. + +If you think that there are other people using the pipeline who would benefit from your configuration (eg. other common cluster setups), please let us know. We can add a new configuration and profile which can used by specifying `-profile ` when running the pipeline. + +The pipeline comes with several such config profiles - see the installation appendices and usage documentation for more information. + + +## 4) Reference Genomes +The nf-core/{{ cookiecutter.pipeline_slug }} pipeline needs a reference genome for read alignment. 
Support for many common genomes is built in if running on UPPMAX or AWS, by using [AWS-iGenomes](https://ewels.github.io/AWS-iGenomes/). + + +## Appendices + +#### Running on UPPMAX +To run the pipeline on the [Swedish UPPMAX](https://www.uppmax.uu.se/) clusters (`rackham`, `irma`, `bianca` etc), use the command line flag `-profile uppmax`. This tells Nextflow to submit jobs using the SLURM job executor with Singularity for software dependencies. + +Note that you will need to specify your UPPMAX project ID when running a pipeline. To do this, use the command line flag `--project `. The pipeline will exit with an error message if you try to run it pipeline with the default UPPMAX config profile without a project. + +**Optional Extra:** To avoid having to specify your project every time you run Nextflow, you can add it to your personal Nextflow config file instead. Add this line to `~/.nextflow/config`: + +```nextflow +params.project = 'project_ID' // eg. b2017123 +``` \ No newline at end of file diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/output.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/output.md index 50ca1b35f5..ec26d0191a 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/output.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/output.md @@ -1,8 +1,8 @@ -# {{ cookiecutter.pipeline_name }} -{{ cookiecutter.pipeline_short_description }} +# nf-core/{{ cookiecutter.pipeline_slug }} Output This document describes the output produced by the pipeline. Most of the plots are taken from the MultiQC report, which summarises results at the end of the pipeline. 
+ ## Pipeline overview The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes data using the following steps: @@ -15,6 +15,8 @@ and processes data using the following steps: For further reading and documentation see the [FastQC help](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/). +> **NB:** The FastQC plots displayed in the MultiQC report shows _untrimmed_ reads. They may contain adapter sequence and potentially regions with low quality. To see how your reads look after trimming, look at the FastQC reports in the `trim_galore` directory. + **Output directory: `results/fastqc`** * `sample_fastqc.html` @@ -22,9 +24,12 @@ For further reading and documentation see the [FastQC help](http://www.bioinform * `zips/sample_fastqc.zip` * zip file containing the FastQC report, tab-delimited data file and plot images + ## MultiQC [MultiQC](http://multiqc.info) is a visualisation tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in within the report data directory. +The pipeline has special steps which allow the software versions used to be reported in the MultiQC output for future traceability. + **Output directory: `results/multiqc`** * `Project_multiqc_report.html` diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md index 305cedece0..5fbed04eb6 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md @@ -23,6 +23,6 @@ The pipeline can't take a list of multiple input files - it takes a glob express ## Extra resources and getting help If you still have an issue with running the pipeline then feel free to contact us. 
-Have look at the [pipeline website](https://github.com/nf-core/{{ cookiecutter.pipeline_name }}) to find out how. +Have a look at the [pipeline website](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}) to find out how. If you have problems that are related to Nextflow and not our pipeline then check out the [Nextflow gitter channel](https://gitter.im/nextflow-io/nextflow) or the [google group](https://groups.google.com/forum/#!forum/nextflow). diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md index cd96293181..8072c4d257 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md @@ -1,4 +1,42 @@ -# {{ cookiecutter.pipeline_name }} Usage +# {{ cookiecutter.pipeline_slug }} Usage + +## Table of contents + +* [Introduction](#general-nextflow-info) +* [Running the pipeline](#running-the-pipeline) +* [Updating the pipeline](#updating-the-pipeline) +* [Reproducibility](#reproducibility) +* [Main arguments](#main-arguments) + * [`-profile`](#-profile-single-dash) + * [`docker`](#docker) + * [`awsbatch`](#awsbatch) + * [`standard`](#standard) + * [`none`](#none) + * [`--reads`](#--reads) + * [`--singleEnd`](#--singleend) +* [Reference Genomes](#reference-genomes) + * [`--genome`](#--genome) + * [`--fasta`](#--fasta) +* [Job Resources](#job-resources) +* [Automatic resubmission](#automatic-resubmission) +* [Custom resource requests](#custom-resource-requests) +* [AWS batch specific parameters](#aws-batch-specific-parameters) + * [`-awsbatch`](#-awsbatch) + * [`--awsqueue`](#--awsqueue) + * [`--awsregion`](#--awsregion) +* [Other command line parameters](#other-command-line-parameters) + * [`--outdir`](#--outdir) + * [`--email`](#--email) + * [`-name`](#-name-single-dash) + * [`-resume`](#-resume-single-dash) + * [`-c`](#-c-single-dash) + * 
[`--max_memory`](#--max_memory) + * [`--max_time`](#--max_time) + * [`--max_cpus`](#--max_cpus) + * [`--plaintext_emails`](#--plaintext_emails) + * [`--sampleLevel`](#--sampleLevel) + * [`--multiqc_config`](#--multiqc_config) + ## General Nextflow info Nextflow handles job submissions on SLURM or other environments, and supervises running the jobs. Thus the Nextflow process must run until the pipeline is finished. We recommend that you put the process running in the background through `screen` / `tmux` or similar tool. Alternatively you can run nextflow within a cluster job submitted your job scheduler. @@ -49,6 +87,7 @@ Use this parameter to choose a configuration profile. Each profile is designed f * `docker` * A generic configuration profile to be used with [Docker](http://docker.com/) * Runs using the `local` executor and pulls software from dockerhub: [`{{ cookiecutter.pipeline_slug }}`](http://hub.docker.com/r/{{ cookiecutter.pipeline_slug }}/) + * Runs using the `local` executor and pulls software from dockerhub: [`{{ cookiecutter.dockerhub_slug }}`] * `awsbatch` * A generic configuration profile to be used with AWS Batch. * `standard` @@ -141,6 +180,7 @@ The AWS region to run your job in. Default is set to `eu-west-1` but can be adju Please make sure to also set the `-w/--work-dir` and `--outdir` parameters to a S3 storage bucket of your choice - you'll get an error message notifying you if you didn't. ## Other command line parameters + ### `--outdir` The output directory where the results will be saved. @@ -190,5 +230,5 @@ Set to receive plain-text e-mails instead of HTML formatted. ### `--sampleLevel` Used to turn of the edgeR MDS and heatmap. Set automatically when running on fewer than 3 samples. -### `--multiqc_config` -If you would like to supply a custom config file to MultiQC, you can specify a path with `--multiqc_config`. This is used instead of the config file specific to the pipeline. 
+### `--multiqc_config` + From 6baab6b6360223eae383c24cff5e26cc3d4d13fa Mon Sep 17 00:00:00 2001 From: sven1103 Date: Mon, 13 Aug 2018 18:35:37 +0200 Subject: [PATCH 57/75] Refactor sync tools --- bin/{broadcast_prs.py => sync} | 88 ++++------------------------------ bin/syncutils/__init.py__ | 0 bin/syncutils/template.py | 86 +++++++++++++++++++++++++++++++++ bin/{ => syncutils}/utils.py | 0 4 files changed, 96 insertions(+), 78 deletions(-) rename bin/{broadcast_prs.py => sync} (52%) create mode 100644 bin/syncutils/__init.py__ create mode 100644 bin/syncutils/template.py rename bin/{ => syncutils}/utils.py (100%) diff --git a/bin/broadcast_prs.py b/bin/sync similarity index 52% rename from bin/broadcast_prs.py rename to bin/sync index dad63592ba..0cec5e5b36 100755 --- a/bin/broadcast_prs.py +++ b/bin/sync @@ -10,10 +10,12 @@ import sys import subprocess import tempfile -import utils +import syncutils.template +# Set the default nf-core pipeline template branch +DEF_TEMPLATE_BRANCH = "TEMPLATE" # The GitHub base url or the nf-core project -GH_BASE_URL = "https://{token}@github.com/nf-core" +GH_BASE_URL = "https://{token}@github.com/nf-core/{pipeline}" # The current cookiecutter template url for nf-core pipelines NF_CORE_TEMPLATE = os.path.join( os.path.dirname( @@ -25,80 +27,6 @@ GITHUB_PR_URL_TEMPL = "https://api.github.com/repos/nf-core/{pipeline}/pulls" -class UpdateTemplate: - """Updates the template content of an nf-core pipeline in - its `TEMPLATE` branch. 
- - Args: - pipeline: The pipeline name - - branch: The template branch name, default=`TEMPLATE` - - token: GitHub auth token - """ - - def __init__(self, pipeline, branch='TEMPLATE', token=""): - """Basic constructor - """ - self.pipeline = pipeline - self.repo_url = "{base_url}/{pipeline}".format( - base_url=GH_BASE_URL.format(token=token), - pipeline=pipeline) - self.branch = branch - self.tmpdir = tempfile.mkdtemp() - self.templatedir = tempfile.mkdtemp() - self.repo = None - - def run(self): - """Execute the template update. - """ - config = self._clone_repo() - assert self.repo - context = utils.create_context(config) - self._apply_changes(context) - self._commit_changes() - self._push_changes() - - def _clone_repo(self): - """Clone the repo and switch to the configured branch. - """ - self.repo = git.Repo.clone_from(self.repo_url, self.tmpdir) - config = utils.fetch_wf_config(wf_path=self.tmpdir) - self.repo.git.checkout("origin/{branch}".format(branch=self.branch), - b="{branch}".format(branch=self.branch)) - return config - - def _apply_changes(self, context): - """Apply the changes of the cookiecutter template - to the pipelines template branch. - """ - cookiecutter(NF_CORE_TEMPLATE, - no_input=True, - extra_context=context, - overwrite_if_exists=True, - output_dir=self.templatedir) - # Clear the template branch content - for f in os.listdir(self.tmpdir): - if f == ".git": continue - try: - shutil.rmtree(os.path.join(self.tmpdir, f)) - except: - os.remove(os.path.join(self.tmpdir, f)) - # Move the new template content into the template branch - template_path = os.path.join(self.templatedir, self.pipeline) - for f in os.listdir(template_path): - shutil.move( - os.path.join(template_path, f), # src - os.path.join(self.tmpdir, f), # dest - ) - - def _commit_changes(self): - """Commits the changes of the new template to the current branch. 
- """ - self.repo.git.add(A=True) - self.repo.index.commit("Update nf-core pipeline template.") - - def _push_changes(self): - self.repo.git.push() - - def create_pullrequest(pipeline, origin="dev", template="TEMPLATE", token="", user="nf-core"): """Create a pull request to a base branch (default: dev), from a head branch (default: TEMPLATE) @@ -132,12 +60,16 @@ def main(): # Update the template branch of each pipeline repo for pipeline in pipelines: print("Update template branch for pipeline '{pipeline}'... ".format(pipeline=pipeline['name'])) - UpdateTemplate(pipeline['name'], token=os.environ['NF_CORE_BOT']).run() + syncutils.template.NfcoreTemplate( + pipeline['name'], + branch=DEF_TEMPLATE_BRANCH, + repo_url=GH_BASE_URL.format(token=os.environ["NF_CORE_BOT"], pipeline=pipeline['name']) + ).sync(template_url=NF_CORE_TEMPLATE) # Create a pull request from each template branch to the origin branch for pipeline in pipelines: print("Trying to open pull request for pipeline {}...".format(pipeline['name'])) - response = create_pullrequest(pipeline['name'], token=os.environ["NF_CORE_BOT"]) + response = create_pullrequest(pipeline['name'], os.environ["NF_CORE_BOT"]) if response.status_code != 201: print("Pull-request for pipeline \'{pipeline}\' failed," " got return code {return_code}." diff --git a/bin/syncutils/__init.py__ b/bin/syncutils/__init.py__ new file mode 100644 index 0000000000..e69de29bb2 diff --git a/bin/syncutils/template.py b/bin/syncutils/template.py new file mode 100644 index 0000000000..1de7253d5d --- /dev/null +++ b/bin/syncutils/template.py @@ -0,0 +1,86 @@ +import tempfile +import utils +import git +import os +import shutil +from cookiecutter.main import cookiecutter + +class NfcoreTemplate: + """Updates the template content of an nf-core pipeline in + its `TEMPLATE` branch. 
+ + Args: - pipeline: The pipeline name + - branch: The template branch name, default=`TEMPLATE` + - token: GitHub auth token + """ + def __init__(self, pipeline, branch='master', repo_url=""): + """Basic constructor + """ + self.pipeline = pipeline + self.repo_url = repo_url + self.branch = branch + self.tmpdir = tempfile.mkdtemp() + self.templatedir = tempfile.mkdtemp() + self.repo = git.Repo.clone_from(self.repo_url, self.tmpdir) + assert self.repo + + def sync(self, template_url): + """Execute the template update. + """ + context = self.context_from_nextflow(nf_project_dir=self.tmpdir) + self.update_child_template(template_url, self.templatedir, self.tmpdir, context=context) + self.commit_changes() + self.push_changes() + + def context_from_nextflow(self, nf_project_dir): + """Fetch a Nextflow pipeline's config settings. + + Returns: A cookiecutter-readable context (Python dictionary) + """ + # Check if we are on "master" (main pipeline code) + if self.repo.active_branch is not "master": + self.repo.git.checkout("origin/master", b="master") + + # Fetch the config variables from the Nextflow pipeline + config = utils.fetch_wf_config(wf_path=nf_project_dir) + + # Checkout again to configured template branch + self.repo.git.checkout("origin/{branch}".format(branch=self.branch), + b="{branch}".format(branch=self.branch)) + + return utils.create_context(config) + + + def update_child_template(self, template_url, templatedir, target_dir, context=None): + """Apply the changes of the cookiecutter template + to the pipelines template branch. 
+ """ + cookiecutter(template_url, + no_input=True, + extra_context=context, + overwrite_if_exists=True, + output_dir=templatedir) + # Clear the pipeline's template branch content + for f in os.listdir(self.tmpdir): + if f == ".git": continue + try: + shutil.rmtree(os.path.join(target_dir, f)) + except: + os.remove(os.path.join(target_dir, f)) + # Move the new template content into the template branch + template_path = os.path.join(self.templatedir, self.pipeline) + for f in os.listdir(template_path): + shutil.move( + os.path.join(template_path, f), # src + os.path.join(self.tmpdir, f), # dest + ) + + def commit_changes(self): + """Commits the changes of the new template to the current branch. + """ + self.repo.git.add(A=True) + self.repo.index.commit("Update nf-core pipeline template.") + + def push_changes(self): + self.repo.git.push() + \ No newline at end of file diff --git a/bin/utils.py b/bin/syncutils/utils.py similarity index 100% rename from bin/utils.py rename to bin/syncutils/utils.py From da27216b3e10d48706e1a65342c586848a04dcfc Mon Sep 17 00:00:00 2001 From: sven1103 Date: Mon, 13 Aug 2018 18:46:08 +0200 Subject: [PATCH 58/75] Introduce better expection handling --- bin/sync | 37 ++++++++++++++++++++++++++++--------- 1 file changed, 28 insertions(+), 9 deletions(-) diff --git a/bin/sync b/bin/sync index 0cec5e5b36..f07d7416a0 100755 --- a/bin/sync +++ b/bin/sync @@ -48,6 +48,11 @@ def main(): # Check that the commit event is a GitHub tag event assert os.environ['TRAVIS_TAG'] assert os.environ['NF_CORE_BOT'] + + # Catch exceptions in lists, and list them at the end + sync_errors = [] + pr_errors = [] + # Get nf-core pipelines info res = requests.get(NF_CORE_PIPELINE_INFO) pipelines = json.loads(res.content).get('remote_workflows') @@ -60,24 +65,38 @@ def main(): # Update the template branch of each pipeline repo for pipeline in pipelines: print("Update template branch for pipeline '{pipeline}'... 
".format(pipeline=pipeline['name'])) - syncutils.template.NfcoreTemplate( - pipeline['name'], - branch=DEF_TEMPLATE_BRANCH, - repo_url=GH_BASE_URL.format(token=os.environ["NF_CORE_BOT"], pipeline=pipeline['name']) - ).sync(template_url=NF_CORE_TEMPLATE) + try: + syncutils.template.NfcoreTemplate( + pipeline['name'], + branch=DEF_TEMPLATE_BRANCH, + repo_url=GH_BASE_URL.format(token=os.environ["NF_CORE_BOT"], pipeline=pipeline['name']) + ).sync(template_url=NF_CORE_TEMPLATE) + except Exception as e: + sync_errors.append((pipeline['name'], e)) # Create a pull request from each template branch to the origin branch for pipeline in pipelines: print("Trying to open pull request for pipeline {}...".format(pipeline['name'])) response = create_pullrequest(pipeline['name'], os.environ["NF_CORE_BOT"]) if response.status_code != 201: - print("Pull-request for pipeline \'{pipeline}\' failed," - " got return code {return_code}." - .format(pipeline=pipeline["name"], return_code=response.status_code)) - print(response.content) + pr_errors.append((pipeline['name'], response.status_code, response.content)) else: print("Created pull-request for pipeline \'{pipeline}\' successfully." .format(pipeline=pipeline["name"])) + + for pipeline, exception in sync_errors: + print("Sync for pipeline {name} failed.".format(pipeline)) + print(exception) + + for pipeline, return_code, content in pr_errors: + print("Pull-request for pipeline \'{pipeline}\' failed," + " got return code {return_code}." 
+ .format(pipeline=pipeline["name"], return_code=return_code)) + print(content) + + if pr_errors or sync_errors: sys.exit(1) + + sys.exit(0) if __name__ == "__main__": main() From 997e718a5e04daf2f1154dc28592776184a67eab Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 14 Aug 2018 07:14:18 +0200 Subject: [PATCH 59/75] Fixed config bug in singularity profile --- .../{{cookiecutter.pipeline_slug}}/nextflow.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config index 0f6c4001f3..998ac2c533 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config @@ -35,7 +35,7 @@ profiles { process.container = params.container } singularity { - enabled = true + singularity.enabled = true process.container = {"shub://${params.container.replace('nfcore', 'nf-core')}"} } awsbatch { From 940dc08f4832049c52c69f4b31f57fc20864d7ed Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Tue, 14 Aug 2018 07:49:41 +0200 Subject: [PATCH 60/75] Docs redundancy (#124) * Minor adjustments for Singularity setup / URI for image * Singularity hub uses nf-core instead of nfcore * Add reference to our test data repository * Add comment to use test-datasets readme to find out how to add own data * Singularity Hub pulling without docker prefix * Just use the default, conda-forge and bioconda channels in conda * Add singularity shub URLs * Update adding_your_own.md --- .../docs/configuration/adding_your_own.md | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md index c8ca8d81bd..3b66dc4267 100644 --- 
a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md @@ -58,18 +58,18 @@ docker { } ``` +A test profile comes with the pipeline and is used by the Travis continuous integration (CI) service to test the pipeline for potential errors. Typically, this downloads a small test dataset from [test data repository](https://github.com/nf-core/test-datasets/) and runs the pipeline automatically using docker using [Travis](https://travis-ci.org/nf-core/{{ cookiecutter.pipeline_name }}/) whenever changes are made to the pipeline. Further information on how to add your own test data for a new pipeline can be found at the link mentioned above. ### Singularity image Many HPC environments are not able to run Docker due to security issues. [Singularity](http://singularity.lbl.gov/) is a tool designed to run on such HPC systems which is very similar to Docker. Even better, it can use create images directly from dockerhub. - To use the singularity image for a single run, use `-profile standard,singularity`. This will download the singularity container from singularity hub dynamically. + To specify singularity usage in your pipeline config file, add the following: ```nextflow -singularity { - enabled = true -} +singularity.enabled = true +process.container = {"shub://${params.container.replace('nfcore', 'nf-core')}"} ``` If you intend to run the pipeline offline, nextflow will not be able to automatically download the singularity image for you. Instead, you'll have to do this yourself manually first, transfer the image file and then point to that. 
@@ -99,12 +99,9 @@ bash Miniconda3-latest-Linux-x86_64.sh #### 2) Add the bioconda conda channel (and others) ```bash -conda config --add channels anaconda +conda config --add channels default conda config --add channels conda-forge -conda config --add channels defaults -conda config --add channels r conda config --add channels bioconda -conda config --add channels salilab ``` #### 3) Create a conda environment, with all necessary packages: From 9886201371396aa1ac17c7e46cced5352bf8374e Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 14 Aug 2018 07:55:49 +0200 Subject: [PATCH 61/75] Quote the new .travis.yaml tests The unquoted travis tests were breaking the YAML, meaning that Travis wasn't running. --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index d4e9fc0400..2185bb3795 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,7 +8,7 @@ python: - '3.6' before_install: # PRs made to 'master' branch should always orginate from another repo or the 'dev' branch - - [ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ]) + - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ])' install: # Install Nextflow - mkdir /tmp/nextflow From e36ba2614398d3a759402c7184bb53743f21d721 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 14 Aug 2018 07:57:52 +0200 Subject: [PATCH 62/75] Update repo template travis config --- .../{{cookiecutter.pipeline_slug}}/.travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml index 0391c99627..ed1264d80b 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml +++ 
b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml @@ -11,7 +11,7 @@ matrix: before_install: # PRs made to 'master' branch should always orginate from another repo or the 'dev' branch - - [ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ]) + - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ])' # Pull the docker image first so the test doesn't wait for this - docker pull nfcore/{{ cookiecutter.pipeline_slug }} # Fake the tag locally so that the pipeline runs properly From 4aabcb066057d9f0b80d0640ad24712812040e31 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 14 Aug 2018 08:02:41 +0200 Subject: [PATCH 63/75] Fix docs templating errors --- .../{{cookiecutter.pipeline_slug}}/docs/configuration/local.md | 2 +- .../{{cookiecutter.pipeline_slug}}/docs/usage.md | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md index c52ce66a24..441adbb9ae 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md @@ -14,7 +14,7 @@ Then, simply run the analysis pipeline: nextflow run nf-core/{{ cookiecutter.pipeline_slug }} -profile docker --reads '' ``` -Nextflow will recognise `nf-core/{{ cookiecutter.pipeline_slug }}` and download the pipeline from GitHub. The `-profile docker` configuration lists the [nfcore/{{ cookiecutter.pipeline_slug }}](https://hub.docker.com/r/nfcore/{{ cookiecutter.dockerhub_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. 
+Nextflow will recognise `nf-core/{{ cookiecutter.pipeline_slug }}` and download the pipeline from GitHub. The `-profile docker` configuration lists the [nfcore/{{ cookiecutter.pipeline_slug }}](https://hub.docker.com/r/nfcore/{{ cookiecutter.pipeline_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. For more information about how to work with reference genomes, see [`docs/configuration/reference_genomes.md`](docs/configuration/reference_genomes.md). diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md index 8072c4d257..68314bf48a 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md +++ b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md @@ -87,7 +87,6 @@ Use this parameter to choose a configuration profile. Each profile is designed f * `docker` * A generic configuration profile to be used with [Docker](http://docker.com/) * Runs using the `local` executor and pulls software from dockerhub: [`{{ cookiecutter.pipeline_slug }}`](http://hub.docker.com/r/{{ cookiecutter.pipeline_slug }}/) - * Runs using the `local` executor and pulls software from dockerhub: [`{{ cookiecutter.dockerhub_slug }}`] * `awsbatch` * A generic configuration profile to be used with AWS Batch. * `standard` @@ -231,4 +230,3 @@ Set to receive plain-text e-mails instead of HTML formatted. Used to turn of the edgeR MDS and heatmap. Set automatically when running on fewer than 3 samples. 
### `--multiqc_config` - From 1d757947147d98a6097a3cda2428bf67b1c92955 Mon Sep 17 00:00:00 2001 From: sven1103 Date: Tue, 14 Aug 2018 12:39:12 +0200 Subject: [PATCH 64/75] Use cookiecutter function from nf_core --- bin/syncutils/template.py | 20 +++++++++++++++----- bin/syncutils/utils.py | 2 +- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/bin/syncutils/template.py b/bin/syncutils/template.py index 1de7253d5d..dc2b4b12cc 100644 --- a/bin/syncutils/template.py +++ b/bin/syncutils/template.py @@ -3,8 +3,17 @@ import git import os import shutil +import sys from cookiecutter.main import cookiecutter +# Enable access to the nf_core package +rootPath = os.path.abspath("../..") +if rootPath not in sys.path: + sys.path.insert(0, rootPath) +# Import the create script file +import nf_core.create + + class NfcoreTemplate: """Updates the template content of an nf-core pipeline in its `TEMPLATE` branch. @@ -55,11 +64,12 @@ def update_child_template(self, template_url, templatedir, target_dir, context=N """Apply the changes of the cookiecutter template to the pipelines template branch. 
""" - cookiecutter(template_url, - no_input=True, - extra_context=context, - overwrite_if_exists=True, - output_dir=templatedir) + nf_core.create.run_cookiecutter( + name=context.get('pipeline_name'), + description=context.get('pipeline_short_description'), + new_version=context.get('version') + ) + # Clear the pipeline's template branch content for f in os.listdir(self.tmpdir): if f == ".git": continue diff --git a/bin/syncutils/utils.py b/bin/syncutils/utils.py index 0a536b6c78..0eed13dc1b 100644 --- a/bin/syncutils/utils.py +++ b/bin/syncutils/utils.py @@ -27,7 +27,7 @@ def create_context(config): { 'pipeline_name': '' 'pipeline_short_description': '' - 'new_version': '' + 'version': '' } """ context = {} From e40c0b51bc199148db7f4bd5d7c73c7671659f48 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 14 Aug 2018 13:18:59 +0200 Subject: [PATCH 65/75] nf-core create: Refactor, add --outdir --- nf_core/create.py | 83 ++++++++++++++++++++++++++++++++--------------- scripts/nf-core | 10 ++++-- 2 files changed, 65 insertions(+), 28 deletions(-) diff --git a/nf_core/create.py b/nf_core/create.py index 4b584b1dbe..1ff65f4bee 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -10,40 +10,71 @@ import logging import os import re +import shutil +import sys +import tempfile import nf_core -def init_pipeline(name, description, new_version='1.0dev', no_git=False, force=False): - """Function to init a new pipeline. 
Called by the main cli""" +class PipelineCreate(object): + """ Object to create a new pipeline """ - # Make the new pipeline - run_cookiecutter(name, description, new_version, force) + def __init__(self, name, description, new_version='1.0dev', no_git=False, force=False, outdir=None): + """ Init the object and define variables """ + self.name = name + self.description = description + self.new_version = new_version + self.no_git = no_git + self.force = force + self.outdir = outdir + if not self.outdir: + self.outdir = os.path.join(os.getcwd(), self.name) - # Init the git repository and make the first commit - if not no_git: - git_init_pipeline(name) + def init_pipeline(self): + """Function to init a new pipeline. Called by the main cli""" -def run_cookiecutter(name, description, new_version='1.0dev', force=False): - """Run cookiecutter to create a new pipeline""" + # Make the new pipeline + self.run_cookiecutter() - logging.info("Creating new nf-core pipeline: {}".format(name)) - template = os.path.join(os.path.dirname(os.path.realpath(nf_core.__file__)), 'pipeline-template/') - try: + # Init the git repository and make the first commit + if not self.no_git: + self.git_init_pipeline() + + def run_cookiecutter(self): + """Run cookiecutter to create a new pipeline""" + + logging.info("Creating new nf-core pipeline: {}".format(self.name)) + + # Check if the output directory exists + if os.path.exists(self.outdir): + logging.error("Output directory '{}' exists!".format(self.outdir)) + logging.info("Use -f / --force to overwrite existing files") + sys.exit(1) + + # Build the template in a temporary directory + tmpdir = tempfile.mkdtemp() + template = os.path.join(os.path.dirname(os.path.realpath(nf_core.__file__)), 'pipeline-template/') cookiecutter.main.cookiecutter ( template, - extra_context={'pipeline_name':name, 'pipeline_short_description':description, 'version':new_version}, + extra_context={'pipeline_name':self.name, 'pipeline_short_description':self.description, 
'version':self.new_version}, no_input=True, - overwrite_if_exists=force + overwrite_if_exists=self.force, + output_dir=tmpdir ) - except (cookiecutter.exceptions.OutputDirExistsException) as e: - logging.error(e) - logging.info("Use -f / --force to overwrite existing files") - -def git_init_pipeline(name): - """Initialise the new pipeline as a git repo and make first commit""" - logging.info("Initialising pipeline git repository") - pipeline_dir = os.path.join(os.getcwd(), name) - repo = git.Repo.init(pipeline_dir) - repo.git.add(A=True) - repo.index.commit("initial commit") - logging.info("Done. Remember to add a remote and push to GitHub!") + + # Move the template to the output directory + os.makedirs(self.outdir) + for f in os.listdir(os.path.join(tmpdir, self.name)): + shutil.move(os.path.join(tmpdir, self.name, f), self.outdir) + + # Delete the temporary directory + shutil.rmtree(tmpdir) + + + def git_init_pipeline(self): + """Initialise the new pipeline as a git repo and make first commit""" + logging.info("Initialising pipeline git repository") + repo = git.Repo.init(self.outdir) + repo.git.add(A=True) + repo.index.commit("initial template build from nf-core/tools, version {}".format(nf_core.__version__)) + logging.info("Done. 
Remember to add a remote and push to GitHub:\n cd {}\n git remote add origin git@github.com:USERNAME/REPO_NAME.git\n git push".format(self.outdir)) diff --git a/scripts/nf-core b/scripts/nf-core index cab2878263..7ce861a571 100755 --- a/scripts/nf-core +++ b/scripts/nf-core @@ -137,9 +137,15 @@ def release(pipeline_dir, new_version): default = False, help = "Overwrite output directory if it already exists" ) -def create(name, description, new_version, no_git, force): +@click.option( + '-o', '--outdir', + type = str, + help = "Output directory for new pipeline (default: pipeline name)" +) +def create(name, description, new_version, no_git, force, outdir): """ Create a new pipeline using the nf-core template """ - nf_core.create.init_pipeline(name, description, new_version, no_git, force) + create_obj = nf_core.create.PipelineCreate(name, description, new_version, no_git, force, outdir) + create_obj.init_pipeline() if __name__ == '__main__': From 115e9ef29f88edee1deee24ec54c075b15707890 Mon Sep 17 00:00:00 2001 From: sven1103 Date: Tue, 14 Aug 2018 14:14:26 +0200 Subject: [PATCH 66/75] Update pipeline creation code --- bin/syncutils/template.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/bin/syncutils/template.py b/bin/syncutils/template.py index dc2b4b12cc..70d5849353 100644 --- a/bin/syncutils/template.py +++ b/bin/syncutils/template.py @@ -64,10 +64,13 @@ def update_child_template(self, template_url, templatedir, target_dir, context=N """Apply the changes of the cookiecutter template to the pipelines template branch. 
""" - nf_core.create.run_cookiecutter( + nf_core.create.PipelineCreate( name=context.get('pipeline_name'), description=context.get('pipeline_short_description'), - new_version=context.get('version') + new_version=context.get('version'), + no_git=True, + force=True, + outdir=templatedir ) # Clear the pipeline's template branch content From ad4d6056577cb10392119fa96cd8762c0b443c34 Mon Sep 17 00:00:00 2001 From: sven1103 Date: Tue, 14 Aug 2018 14:36:18 +0200 Subject: [PATCH 67/75] Refactor sync --- bin/sync | 7 +------ bin/syncutils/template.py | 12 ++++-------- 2 files changed, 5 insertions(+), 14 deletions(-) diff --git a/bin/sync b/bin/sync index f07d7416a0..b7d527d14e 100755 --- a/bin/sync +++ b/bin/sync @@ -16,11 +16,6 @@ import syncutils.template DEF_TEMPLATE_BRANCH = "TEMPLATE" # The GitHub base url or the nf-core project GH_BASE_URL = "https://{token}@github.com/nf-core/{pipeline}" -# The current cookiecutter template url for nf-core pipelines -NF_CORE_TEMPLATE = os.path.join( - os.path.dirname( - os.path.dirname(os.path.realpath(__file__)) - ), "nf_core/pipeline-template") # The JSON file is updated on every push event on the nf-core GitHub project NF_CORE_PIPELINE_INFO = "http://nf-co.re/pipelines.json" # The API endpoint for creating pull requests @@ -70,7 +65,7 @@ def main(): pipeline['name'], branch=DEF_TEMPLATE_BRANCH, repo_url=GH_BASE_URL.format(token=os.environ["NF_CORE_BOT"], pipeline=pipeline['name']) - ).sync(template_url=NF_CORE_TEMPLATE) + ).sync() except Exception as e: sync_errors.append((pipeline['name'], e)) diff --git a/bin/syncutils/template.py b/bin/syncutils/template.py index 70d5849353..22e399df94 100644 --- a/bin/syncutils/template.py +++ b/bin/syncutils/template.py @@ -6,11 +6,6 @@ import sys from cookiecutter.main import cookiecutter -# Enable access to the nf_core package -rootPath = os.path.abspath("../..") -if rootPath not in sys.path: - sys.path.insert(0, rootPath) -# Import the create script file import nf_core.create @@ -33,11 
+28,11 @@ def __init__(self, pipeline, branch='master', repo_url=""): self.repo = git.Repo.clone_from(self.repo_url, self.tmpdir) assert self.repo - def sync(self, template_url): + def sync(self): """Execute the template update. """ context = self.context_from_nextflow(nf_project_dir=self.tmpdir) - self.update_child_template(template_url, self.templatedir, self.tmpdir, context=context) + self.update_child_template(self.templatedir, self.tmpdir, context=context) self.commit_changes() self.push_changes() @@ -60,7 +55,7 @@ def context_from_nextflow(self, nf_project_dir): return utils.create_context(config) - def update_child_template(self, template_url, templatedir, target_dir, context=None): + def update_child_template(self, templatedir, target_dir, context=None): """Apply the changes of the cookiecutter template to the pipelines template branch. """ @@ -80,6 +75,7 @@ def update_child_template(self, template_url, templatedir, target_dir, context=N shutil.rmtree(os.path.join(target_dir, f)) except: os.remove(os.path.join(target_dir, f)) + # Move the new template content into the template branch template_path = os.path.join(self.templatedir, self.pipeline) for f in os.listdir(template_path): From 428b479df943a53137734154a8c60bf54a45286c Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 14 Aug 2018 15:41:57 +0200 Subject: [PATCH 68/75] Make --force work for nf-core create --- nf_core/create.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/nf_core/create.py b/nf_core/create.py index 1ff65f4bee..d85fad2f5b 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -47,9 +47,14 @@ def run_cookiecutter(self): # Check if the output directory exists if os.path.exists(self.outdir): - logging.error("Output directory '{}' exists!".format(self.outdir)) - logging.info("Use -f / --force to overwrite existing files") - sys.exit(1) + if self.force: + logging.warn("Output directory '{}' exists - continuing as --force specified".format(self.outdir)) + 
else: + logging.error("Output directory '{}' exists!".format(self.outdir)) + logging.info("Use -f / --force to overwrite existing files") + sys.exit(1) + else: + os.makedirs(self.outdir) # Build the template in a temporary directory tmpdir = tempfile.mkdtemp() @@ -63,7 +68,6 @@ def run_cookiecutter(self): ) # Move the template to the output directory - os.makedirs(self.outdir) for f in os.listdir(os.path.join(tmpdir, self.name)): shutil.move(os.path.join(tmpdir, self.name, f), self.outdir) From bd27ee56b7192ca3cb5b0ce5bf4ef3775bd9119c Mon Sep 17 00:00:00 2001 From: sven1103 Date: Tue, 14 Aug 2018 15:46:16 +0200 Subject: [PATCH 69/75] Let the create command move the template --- bin/syncutils/template.py | 25 +++++++++---------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/bin/syncutils/template.py b/bin/syncutils/template.py index 22e399df94..fcce6cf0f0 100644 --- a/bin/syncutils/template.py +++ b/bin/syncutils/template.py @@ -59,15 +59,6 @@ def update_child_template(self, templatedir, target_dir, context=None): """Apply the changes of the cookiecutter template to the pipelines template branch. 
""" - nf_core.create.PipelineCreate( - name=context.get('pipeline_name'), - description=context.get('pipeline_short_description'), - new_version=context.get('version'), - no_git=True, - force=True, - outdir=templatedir - ) - # Clear the pipeline's template branch content for f in os.listdir(self.tmpdir): if f == ".git": continue @@ -76,13 +67,15 @@ def update_child_template(self, templatedir, target_dir, context=None): except: os.remove(os.path.join(target_dir, f)) - # Move the new template content into the template branch - template_path = os.path.join(self.templatedir, self.pipeline) - for f in os.listdir(template_path): - shutil.move( - os.path.join(template_path, f), # src - os.path.join(self.tmpdir, f), # dest - ) + # Create the new template structure + nf_core.create.PipelineCreate( + name=context.get('pipeline_name'), + description=context.get('pipeline_short_description'), + new_version=context.get('version'), + no_git=True, + force=True, + outdir=templatedir + ) def commit_changes(self): """Commits the changes of the new template to the current branch. 
From c2d6d4a81f15428a2cfdcce3d0140c3df98c8f99 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 14 Aug 2018 17:13:47 +0200 Subject: [PATCH 70/75] Refactored cookiecutter variables, overhauled docs --- nf_core/create.py | 26 ++-- nf_core/pipeline-template/cookiecutter.json | 7 +- .../.gitattributes | 0 .../.github/CONTRIBUTING.md | 25 ++++ .../.github/bug_report.md | 0 .../.github/feature_request.md | 0 .../.github/pull_request.md | 6 +- .../.gitignore | 0 .../.travis.yml | 4 +- .../CHANGELOG.md | 3 + .../CODE_OF_CONDUCT.md | 0 .../Dockerfile | 4 +- .../LICENSE | 0 .../README.md | 12 +- .../Singularity | 4 +- .../assets/email_template.html | 14 +- .../assets/email_template.txt | 14 +- .../assets/sendmail_template.txt | 0 .../bin/markdown_to_html.r | 0 .../bin/scrape_software_versions.py | 10 +- .../conf/awsbatch.config | 0 .../conf/base.config | 2 +- .../conf/igenomes.config | 0 .../conf/multiqc_config.yaml | 7 + .../conf/test.config | 0 .../docs/README.md | 11 ++ .../docs/configuration/adding_your_own.md | 86 ++++++++++++ .../docs/configuration/reference_genomes.md | 49 +++++++ .../docs/installation.md | 63 ++++----- .../docs/output.md | 2 +- .../docs/troubleshooting.md | 4 +- .../docs/usage.md | 31 +++-- .../environment.yml | 2 +- .../main.nf | 30 ++--- .../nextflow.config | 18 +-- .../.github/CONTRIBUTING.md | 25 ---- .../CHANGELOG.md | 3 - .../conf/multiqc_config.yaml | 7 - .../docs/README.md | 11 -- .../docs/configuration/adding_your_own.md | 124 ------------------ .../docs/configuration/local.md | 42 ------ .../docs/configuration/reference_genomes.md | 46 ------- 42 files changed, 309 insertions(+), 383 deletions(-) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/.gitattributes (100%) create mode 100644 nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/.github/bug_report.md (100%) 
rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/.github/feature_request.md (100%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/.github/pull_request.md (58%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/.gitignore (100%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/.travis.yml (88%) create mode 100644 nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CHANGELOG.md rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/CODE_OF_CONDUCT.md (100%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/Dockerfile (53%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/LICENSE (100%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/README.md (66%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/Singularity (67%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/assets/email_template.html (78%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/assets/email_template.txt (67%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/assets/sendmail_template.txt (100%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/bin/markdown_to_html.r (100%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/bin/scrape_software_versions.py (71%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/conf/awsbatch.config (100%) rename 
nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/conf/base.config (94%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/conf/igenomes.config (100%) create mode 100644 nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/multiqc_config.yaml rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/conf/test.config (100%) create mode 100644 nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/README.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/configuration/adding_your_own.md create mode 100644 nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/configuration/reference_genomes.md rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/docs/installation.md (52%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/docs/output.md (97%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/docs/troubleshooting.md (92%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/docs/usage.md (86%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/environment.yml (57%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/main.nf (91%) rename nf_core/pipeline-template/{{{cookiecutter.pipeline_slug}} => {{cookiecutter.name_noslash}}}/nextflow.config (76%) delete mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md delete mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CHANGELOG.md delete mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/multiqc_config.yaml delete mode 100644 
nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/README.md delete mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md delete mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md delete mode 100644 nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/reference_genomes.md diff --git a/nf_core/create.py b/nf_core/create.py index d85fad2f5b..c0cbc98761 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -21,14 +21,18 @@ class PipelineCreate(object): def __init__(self, name, description, new_version='1.0dev', no_git=False, force=False, outdir=None): """ Init the object and define variables """ - self.name = name + self.name = 'nf-core/{}'.format( + name.lower().replace(r'/\s+/', '-').replace('nf-core/', '').replace('/', '-') + ) + self.name_noslash = self.name.replace('/', '-') + self.name_docker = self.name.replace('nf-core', 'nfcore') self.description = description self.new_version = new_version self.no_git = no_git self.force = force self.outdir = outdir if not self.outdir: - self.outdir = os.path.join(os.getcwd(), self.name) + self.outdir = os.path.join(os.getcwd(), self.name_noslash) def init_pipeline(self): """Function to init a new pipeline. 
Called by the main cli""" @@ -61,15 +65,21 @@ def run_cookiecutter(self): template = os.path.join(os.path.dirname(os.path.realpath(nf_core.__file__)), 'pipeline-template/') cookiecutter.main.cookiecutter ( template, - extra_context={'pipeline_name':self.name, 'pipeline_short_description':self.description, 'version':self.new_version}, - no_input=True, - overwrite_if_exists=self.force, - output_dir=tmpdir + extra_context = { + 'name':self.name, + 'description':self.description, + 'name_noslash':self.name_noslash, + 'name_docker':self.name_docker, + 'version':self.new_version + }, + no_input = True, + overwrite_if_exists = self.force, + output_dir = tmpdir ) # Move the template to the output directory - for f in os.listdir(os.path.join(tmpdir, self.name)): - shutil.move(os.path.join(tmpdir, self.name, f), self.outdir) + for f in os.listdir(os.path.join(tmpdir, self.name_noslash)): + shutil.move(os.path.join(tmpdir, self.name_noslash, f), self.outdir) # Delete the temporary directory shutil.rmtree(tmpdir) diff --git a/nf_core/pipeline-template/cookiecutter.json b/nf_core/pipeline-template/cookiecutter.json index 197784e282..eece1fe294 100644 --- a/nf_core/pipeline-template/cookiecutter.json +++ b/nf_core/pipeline-template/cookiecutter.json @@ -1,6 +1,7 @@ { - "pipeline_name": "example", - "pipeline_short_description": "This pipeline takes some data and does something with it.", - "pipeline_slug": "{{ cookiecutter.pipeline_name.lower().replace(' ', '-').replace('/', '-') }}", + "name": "example", + "description": "This pipeline takes some data and does something with it.", + "name_noslash": "{{ cookiecutter.name.replace('/', '-') }}", + "name_docker": "{{ cookiecutter.name_docker }}", "version": "1.0dev" } diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.gitattributes b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.gitattributes similarity index 100% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.gitattributes 
rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.gitattributes diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md new file mode 100644 index 0000000000..b74ffb41b3 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md @@ -0,0 +1,25 @@ +# {{ cookiecutter.name }} Contributing Guidelines + +Hi there! Many thanks for taking an interest in improving {{ cookiecutter.name }}. + +We try to manage the required tasks for {{ cookiecutter.name }} using GitHub issues, you probably came to this page when creating one. Please use the prefilled template to save time. + +However, don't be put off by this template - other more general issues and suggestions are welcome! Contributions to the code are even more welcome ;) + +> If you need help using {{ cookiecutter.name }} then the best place to go is the Gitter chatroom where you can ask us questions directly: https://gitter.im/nf-core/Lobby + +## Contribution workflow +If you'd like to write some code for {{ cookiecutter.name }}, the standard workflow +is as follows: + +1. Check that there isn't already an issue about your idea in the + [{{ cookiecutter.name }} issues](https://github.com/{{ cookiecutter.name }}/issues) to avoid + duplicating work. + * Feel free to add a new issue here for the same reason. +2. Fork the [{{ cookiecutter.name }} repository](https://github.com/{{ cookiecutter.name }}) to your GitHub account +3. Make the necessary changes / additions within your forked repository +4. Submit a Pull Request against the master branch and wait for the code to be reviewed and merged. + +If you're not used to this workflow with git, you can start with some [basic docs from GitHub](https://help.github.com/articles/fork-a-repo/) or even their [excellent interactive tutorial](https://try.github.io/). 
+ +For further information/help, please consult the [{{ cookiecutter.name }} documentation](https://github.com/{{ cookiecutter.name }}#documentation) and don't hesitate to get in touch on [Gitter](https://gitter.im/nf-core/Lobby) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/bug_report.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/bug_report.md similarity index 100% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/bug_report.md rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/bug_report.md diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/feature_request.md similarity index 100% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/feature_request.md rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/feature_request.md diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/pull_request.md similarity index 58% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/pull_request.md index cbc424cc0e..8807e5f2d4 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/pull_request.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/pull_request.md @@ -1,15 +1,15 @@ -Many thanks to contributing to nf-core/{{ cookiecutter.pipeline_slug }}! +Many thanks to contributing to {{ cookiecutter.name }}! Please fill in the appropriate checklist below (delete whatever is not relevant). These are the most common things requested on pull requests (PRs). 
## PR checklist - [ ] This comment contains a description of changes (with reason) - [ ] If you've fixed a bug or added code that should be tested, add tests! - - [ ] If necessary, also make a PR on the [{{ cookiecutter.pipeline_slug }} branch on the nf-core/test-datasets repo]( https://github.com/nf-core/test-datasets/pull/new{{ cookiecutter.pipeline_slug }}) + - [ ] If necessary, also make a PR on the [{{ cookiecutter.name }} branch on the nf-core/test-datasets repo]( https://github.com/nf-core/test-datasets/pull/new/{{ cookiecutter.name }}) - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker`). - [ ] Make sure your code lints (`nf-core lint .`). - [ ] Documentation in `docs` is updated - [ ] `CHANGELOG.md` is updated - [ ] `README.md` is updated -**Learn more about contributing:** https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}/.github/CONTRIBUTING.md +**Learn more about contributing:** https://github.com/{{ cookiecutter.name }}/tree/master/.github/CONTRIBUTING.md diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.gitignore b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.gitignore similarity index 100% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.gitignore rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.gitignore diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.travis.yml similarity index 88% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.travis.yml index ed1264d80b..6bea557ee1 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.travis.yml +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.travis.yml @@ -13,9 +13,9 @@ before_install: # PRs made to 'master' branch should always orginate from another repo or the 'dev' branch - '[ 
$TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ])' # Pull the docker image first so the test doesn't wait for this - - docker pull nfcore/{{ cookiecutter.pipeline_slug }} + - docker pull {{ cookiecutter.name_docker }} # Fake the tag locally so that the pipeline runs properly - - docker tag nfcore/{{ cookiecutter.pipeline_slug }} nfcore/{{ cookiecutter.pipeline_slug }}:latest + - docker tag {{ cookiecutter.name_docker }} {{ cookiecutter.name_docker }}:latest install: # Install Nextflow diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CHANGELOG.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CHANGELOG.md new file mode 100644 index 0000000000..1f720053a7 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CHANGELOG.md @@ -0,0 +1,3 @@ + +## {{ cookiecutter.version }} - +Initial release of {{ cookiecutter.name }}, created with the nf-core template: https://github.com/nf-core/tools. 
diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CODE_OF_CONDUCT.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CODE_OF_CONDUCT.md similarity index 100% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CODE_OF_CONDUCT.md rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CODE_OF_CONDUCT.md diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Dockerfile similarity index 53% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Dockerfile index 0d589213e3..12c351bb83 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Dockerfile +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Dockerfile @@ -1,6 +1,6 @@ FROM nfcore/base -LABEL description="Docker image containing all requirements for {{ cookiecutter.pipeline_name }} pipeline" +LABEL description="Docker image containing all requirements for {{ cookiecutter.name }} pipeline" COPY environment.yml / RUN conda env create -f /environment.yml && conda clean -a -ENV PATH /opt/conda/envs/nfcore-{{ cookiecutter.pipeline_slug }}-{{ cookiecutter.version }}/bin:$PATH +ENV PATH /opt/conda/envs/{{ cookiecutter.name_noslash }}-{{ cookiecutter.version }}/bin:$PATH diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/LICENSE b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/LICENSE similarity index 100% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/LICENSE rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/LICENSE diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md similarity index 66% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/README.md rename to 
nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index 5b676b8af6..d242566721 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -1,22 +1,20 @@ -# {{ cookiecutter.pipeline_name }} -{{ cookiecutter.pipeline_short_description }} +# {{ cookiecutter.name }} +**{{ cookiecutter.description }}** -[![Build Status](https://travis-ci.org/nf-core/{{ cookiecutter.pipeline_name }}.svg?branch=master)](https://travis-ci.org/nf-core/{{ cookiecutter.pipeline_name }}) +[![Build Status](https://travis-ci.org/{{ cookiecutter.name }}.svg?branch=master)](https://travis-ci.org/{{ cookiecutter.name }}) [![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.30.0-brightgreen.svg)](https://www.nextflow.io/) [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](http://bioconda.github.io/) -[![Docker](https://img.shields.io/docker/automated/{{ cookiecutter.pipeline_slug }}.svg)](https://hub.docker.com/r/{{ cookiecutter.pipeline_slug }}) +[![Docker](https://img.shields.io/docker/automated/{{ cookiecutter.name_docker }}.svg)](https://hub.docker.com/r/{{ cookiecutter.name_docker }}) ![Singularity Container available]( https://img.shields.io/badge/singularity-available-7E4C74.svg) ### Introduction -{{ cookiecutter.pipeline_name }}: {{ cookiecutter.pipeline_short_description }} - The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool to run tasks across multiple compute infrastructures in a very portable manner. It comes with docker / singularity containers making installation trivial and results highly reproducible. ### Documentation -The {{ cookiecutter.pipeline_name }} pipeline comes with documentation about the pipeline, found in the `docs/` directory: +The {{ cookiecutter.name }} pipeline comes with documentation about the pipeline, found in the `docs/` directory: 1. 
[Installation](docs/installation.md) 2. Pipeline configuration diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Singularity similarity index 67% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Singularity index 116ed37d0a..7b5483e1de 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/Singularity +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Singularity @@ -2,11 +2,11 @@ From:nfcore/base Bootstrap:docker %labels - DESCRIPTION Singularity image containing all requirements for {{ cookiecutter.pipeline_name }} pipeline + DESCRIPTION Singularity image containing all requirements for {{ cookiecutter.name }} pipeline VERSION {{ cookiecutter.version }} %environment - PATH=/opt/conda/envs/nfcore-{{ cookiecutter.pipeline_slug }}-{{ cookiecutter.version }}/bin:$PATH + PATH=/opt/conda/envs/{{ cookiecutter.name_noslash }}-{{ cookiecutter.version }}/bin:$PATH export PATH %files diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.html b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.html similarity index 78% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.html rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.html index 775aad0b4b..96bcdf29ac 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.html +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.html @@ -5,19 +5,19 @@ - - {{ cookiecutter.pipeline_name }} Pipeline Report + + {{ cookiecutter.name }} Pipeline Report
-

{{ cookiecutter.pipeline_name }} v${version}

+

{{ cookiecutter.name }} v${version}

Run Name: $runName

<% if (!success){ out << """
-

{{ cookiecutter.pipeline_name }} execution completed unsuccessfully!

+

{{ cookiecutter.name }} execution completed unsuccessfully!

The exit status of the task that caused the workflow execution to fail was: $exitStatus.

The full error message was:

${errorReport}
@@ -26,7 +26,7 @@

{{ cookiecutter.pipeline_name }} execu } else { out << """
- {{ cookiecutter.pipeline_name }} execution completed successfully! + {{ cookiecutter.name }} execution completed successfully!
""" } @@ -43,8 +43,8 @@

Pipeline Configuration:

-

{{ cookiecutter.pipeline_name }}

-

https://github.com/nf-core/{{ cookiecutter.pipeline_name }}

+

{{ cookiecutter.name }}

+

https://github.com/{{ cookiecutter.name }}

diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.txt b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.txt similarity index 67% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.txt rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.txt index 19b196f997..e4096abdb4 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/email_template.txt +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.txt @@ -1,13 +1,13 @@ ======================================== - {{ cookiecutter.pipeline_name }} v${version} + {{ cookiecutter.name }} v${version} ======================================== Run Name: $runName <% if (success){ - out << "## {{ cookiecutter.pipeline_name }} execution completed successfully! ##" + out << "## {{ cookiecutter.name }} execution completed successfully! ##" } else { out << """#################################################### -## {{ cookiecutter.pipeline_name }} execution completed unsuccessfully! ## +## {{ cookiecutter.name }} execution completed unsuccessfully! ## #################################################### The exit status of the task that caused the workflow execution to fail was: $exitStatus. The full error message was: @@ -19,7 +19,7 @@ ${errorReport} <% if (!success){ out << """#################################################### -## {{ cookiecutter.pipeline_name }} execution completed unsuccessfully! ## +## {{ cookiecutter.name }} execution completed unsuccessfully! ## #################################################### The exit status of the task that caused the workflow execution to fail was: $exitStatus. The full error message was: @@ -27,7 +27,7 @@ The full error message was: ${errorReport} """ } else { - out << "## {{ cookiecutter.pipeline_name }} execution completed successfully! 
##" + out << "## {{ cookiecutter.name }} execution completed successfully! ##" } %> @@ -47,5 +47,5 @@ Pipeline Configuration: <% out << summary.collect{ k,v -> " - $k: $v" }.join("\n") %> -- -{{ cookiecutter.pipeline_name }} -https://github.com/nf-core/{{ cookiecutter.pipeline_name }} +{{ cookiecutter.name }} +https://github.com/{{ cookiecutter.name }} diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/sendmail_template.txt b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/sendmail_template.txt similarity index 100% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/assets/sendmail_template.txt rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/sendmail_template.txt diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/markdown_to_html.r b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/markdown_to_html.r similarity index 100% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/markdown_to_html.r rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/markdown_to_html.r diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/scrape_software_versions.py b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py similarity index 71% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/scrape_software_versions.py rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py index 7ceaf3becf..249a5e4538 100755 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/bin/scrape_software_versions.py +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py @@ -4,13 +4,13 @@ import re regexes = { - '{{ cookiecutter.pipeline_name }}': ['v_pipeline.txt', r"(\S+)"], + '{{ cookiecutter.name }}': ['v_pipeline.txt', r"(\S+)"], 'Nextflow': ['v_nextflow.txt', r"(\S+)"], 'FastQC': 
['v_fastqc.txt', r"FastQC v(\S+)"], 'MultiQC': ['v_multiqc.txt', r"multiqc, version (\S+)"], } results = OrderedDict() -results['{{ cookiecutter.pipeline_name }}'] = 'N/A' +results['{{ cookiecutter.name }}'] = 'N/A' results['Nextflow'] = 'N/A' results['FastQC'] = 'N/A' results['MultiQC'] = 'N/A' @@ -25,9 +25,9 @@ # Dump to YAML print (''' -id: '{{ cookiecutter.pipeline_name.lower().replace(' ', '-') }}-software-versions' -section_name: '{{ cookiecutter.pipeline_name }} Software Versions' -section_href: 'https://github.com/nf-core/{{ cookiecutter.pipeline_name }}' +id: '{{ cookiecutter.name.lower().replace(' ', '-') }}-software-versions' +section_name: '{{ cookiecutter.name }} Software Versions' +section_href: 'https://github.com/{{ cookiecutter.name }}' plot_type: 'html' description: 'are collected at run time from the software output.' data: | diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/awsbatch.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/awsbatch.config similarity index 100% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/awsbatch.config rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/awsbatch.config diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config similarity index 94% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config index 39f59454f8..82e88b1306 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/base.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config @@ -1,6 +1,6 @@ /* * ------------------------------------------------- - * {{ cookiecutter.pipeline_name }} Nextflow base config file + * {{ cookiecutter.name }} Nextflow base config file * 
------------------------------------------------- * A 'blank slate' config file, appropriate for general * use on most high performace compute environments. diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/igenomes.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/igenomes.config similarity index 100% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/igenomes.config rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/igenomes.config diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/multiqc_config.yaml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/multiqc_config.yaml new file mode 100644 index 0000000000..621736a877 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/multiqc_config.yaml @@ -0,0 +1,7 @@ +report_comment: > + This report has been generated by the {{ cookiecutter.name }} + analysis pipeline. For information about how to interpret these results, please see the + documentation. +report_section_order: + {{ cookiecutter.name.lower().replace(' ', '-') }}-software-versions: + order: -1000 diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/test.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/test.config similarity index 100% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/test.config rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/test.config diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/README.md new file mode 100644 index 0000000000..ea4a6da4b9 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/README.md @@ -0,0 +1,11 @@ +# {{ cookiecutter.name }}: Documentation + +The {{ cookiecutter.name }} documentation is split into the following files: + +1. 
[Installation](installation.md) +2. [Running the pipeline](usage.md) +3. Pipeline configuration + * [Adding your own system](configuration/adding_your_own.md) + * [Reference genomes](configuration/reference_genomes.md) +4. [Output and how to interpret the results](output.md) +5. [Troubleshooting](troubleshooting.md) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/configuration/adding_your_own.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/configuration/adding_your_own.md new file mode 100644 index 0000000000..7a9d8acee5 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/configuration/adding_your_own.md @@ -0,0 +1,86 @@ +# {{ cookiecutter.name }}: Configuration for other clusters + +It is entirely possible to run this pipeline on other clusters, though you will need to set up your own config file so that the pipeline knows how to work with your cluster. + +> If you think that there are other people using the pipeline who would benefit from your configuration (eg. other common cluster setups), please let us know. We can add a new configuration and profile which can used by specifying `-profile ` when running the pipeline. + +If you are the only person to be running this pipeline, you can create your config file as `~/.nextflow/config` and it will be applied every time you run Nextflow. Alternatively, save the file anywhere and reference it when running the pipeline with `-c path/to/config` (see the [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html) for more). + +A basic configuration comes with the pipeline, which runs by default (the `standard` config profile - see [`conf/base.config`](../conf/base.config)). This means that you only need to configure the specifics for your system and overwrite any defaults that you want to change. + +## Cluster Environment +By default, pipeline uses the `local` Nextflow executor - in other words, all jobs are run in the login session. 
If you're using a simple server, this may be fine. If you're using a compute cluster, this is bad as all jobs will run on the head node. + +To specify your cluster environment, add the following line to your config file: + +```nextflow +process.executor = 'YOUR_SYSTEM_TYPE' +``` + +Many different cluster types are supported by Nextflow. For more information, please see the [Nextflow documentation](https://www.nextflow.io/docs/latest/executor.html). + +Note that you may need to specify cluster options, such as a project or queue. To do so, use the `clusterOptions` config option: + +```nextflow +process { + executor = 'SLURM' + clusterOptions = '-A myproject' +} +``` + + +## Software Requirements +To run the pipeline, several software packages are required. How you satisfy these requirements is essentially up to you and depends on your system. If possible, we _highly_ recommend using either Docker or Singularity. + +Please see the [`installation documentation`](../installation.md) for how to run using the below as a one-off. These instructions are about configuring a config file for repeated use. + +### Docker +Docker is a great way to run {{ cookiecutter.name }}, as it manages all software installations and allows the pipeline to be run in an identical software environment across a range of systems. + +Nextflow has [excellent integration](https://www.nextflow.io/docs/latest/docker.html) with Docker, and beyond installing the two tools, not much else is required - nextflow will automatically fetch the [{{ cookiecutter.name_docker }}](https://hub.docker.com/r/{{ cookiecutter.name_docker }}/) image that we have created and is hosted at dockerhub at run time. + +To add docker support to your own config file, add the following: + +```nextflow +docker.enabled = true +process.container = "{{ cookiecutter.name_docker }}" +``` + +Note that the dockerhub organisation name annoyingly can't have a hyphen, so is `nfcore` and not `nf-core`. 
+ + +### Singularity image +Many HPC environments are not able to run Docker due to security issues. +[Singularity](http://singularity.lbl.gov/) is a tool designed to run on such HPC systems which is very similar to Docker. + +To specify singularity usage in your pipeline config file, add the following: + +```nextflow +singularity.enabled = true +process.container = "shub://{{ cookiecutter.name }}" +``` + +If you intend to run the pipeline offline, nextflow will not be able to automatically download the singularity image for you. +Instead, you'll have to do this yourself manually first, transfer the image file and then point to that. + +First, pull the image file where you have an internet connection: + +```bash +singularity pull --name {{ cookiecutter.name_noslash }}.simg shub://{{ cookiecutter.name }} +``` + +Then transfer this file and point the config file to the image: + +```nextflow +singularity.enabled = true +process.container = "/path/to/{{ cookiecutter.name_noslash }}.simg" +``` + + +### Conda +If you're not able to use Docker or Singularity, you can instead use conda to manage the software requirements. +To use conda in your own config file, add the following: + +```nextflow +process.conda = "$baseDir/environment.yml" +``` diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/configuration/reference_genomes.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/configuration/reference_genomes.md new file mode 100644 index 0000000000..a523622486 --- /dev/null +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/configuration/reference_genomes.md @@ -0,0 +1,49 @@ +# {{ cookiecutter.name }}: Reference Genomes Configuration + +The {{ cookiecutter.name }} pipeline needs a reference genome for alignment and annotation. + +These paths can be supplied on the command line at run time (see the [usage docs](../usage.md)), +but for convenience it's often better to save these paths in a nextflow config file. 
+See below for instructions on how to do this. +Read [Adding your own system](adding_your_own.md) to find out how to set up custom config files. + +## Adding paths to a config file +Specifying long paths every time you run the pipeline is a pain. +To make this easier, the pipeline comes configured to understand reference genome keywords which correspond to preconfigured paths, meaning that you can just specify `--genome ID` when running the pipeline. + +Note that this genome key can also be specified in a config file if you always use the same genome. + +To use this system, add paths to your config file using the following template: + +```nextflow +params { + genomes { + 'YOUR-ID' { + fasta = '/genome.fa' + } + 'OTHER-GENOME' { + // [..] + } + } + // Optional - default genome. Ignored if --genome 'OTHER-GENOME' specified on command line + genome = 'YOUR-ID' +} +``` + +You can add as many genomes as you like as long as they have unique IDs. + +## illumina iGenomes +To make the use of reference genomes easier, illumina has developed a centralised resource called [iGenomes](https://support.illumina.com/sequencing/sequencing_software/igenome.html). +Multiple reference index types are held together with consistent structure for multiple genomes. + +We have put a copy of iGenomes up onto AWS S3 hosting and this pipeline is configured to use this by default. +The hosting fees for AWS iGenomes are currently kindly funded by a grant from Amazon. +The pipeline will automatically download the required reference files when you run the pipeline. +For more information about the AWS iGenomes, see https://ewels.github.io/AWS-iGenomes/ + +Downloading the files takes time and bandwidth, so we recommend making a local copy of the iGenomes resource. +Once downloaded, you can customise the variable `params.igenomes_base` in your custom configuration file to point to the reference location. 
+For example: +```nextflow +params.igenomes_base = '/path/to/data/igenomes/' +``` diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/installation.md similarity index 52% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/installation.md index 834dbabd1c..d1e0fc7d89 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/installation.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/installation.md @@ -1,6 +1,6 @@ -# nf-core/{{ cookiecutter.pipeline_slug }} installation +# {{ cookiecutter.name }}: Installation -To start using the nf-core/{{ cookiecutter.pipeline_slug }} pipeline, follow the steps below: +To start using the {{ cookiecutter.name }} pipeline, follow the steps below: 1. [Install Nextflow](#1-install-nextflow) 2. [Install the pipeline](#2-install-the-pipeline) @@ -36,16 +36,17 @@ See [nextflow.io](https://www.nextflow.io/) for further instructions on how to i ## 2) Install the pipeline #### 2.1) Automatic -This pipeline itself needs no installation - NextFlow will automatically fetch it from GitHub if `nf-core/{{ cookiecutter.pipeline_name }}` is specified as the pipeline name. +This pipeline itself needs no installation - NextFlow will automatically fetch it from GitHub if `{{ cookiecutter.name }}` is specified as the pipeline name. #### 2.2) Offline The above method requires an internet connection so that Nextflow can download the pipeline files. 
If you're running on a system that has no internet connection, you'll need to download and transfer the pipeline files manually: ```bash -wget https://github.com/nf-core/{{ cookiecutter.pipeline_name }}/archive/master.zip -unzip master.zip -d /my-pipelines/ -cd /my_data/ -nextflow run /my-pipelines/{{ cookiecutter.pipeline_slug }}-master +wget https://github.com/{{ cookiecutter.name }}/archive/master.zip +mkdir -p ~/my-pipelines/nf-core/ +unzip master.zip -d ~/my-pipelines/nf-core/ +cd ~/my_data/ +nextflow run ~/my-pipelines/{{ cookiecutter.name }}-master ``` To stop nextflow from looking for updates online, you can tell it to run in offline mode by specifying the following environment variable in your ~/.bashrc file: @@ -69,49 +70,35 @@ Be warned of two important points about this default configuration: * See the [nextflow docs](https://www.nextflow.io/docs/latest/executor.html) for information about running with other hardware backends. Most job scheduler systems are natively supported. 2. Nextflow will expect all software to be installed and available on the `PATH` -#### 3.1) Software deps: Docker and Singularity -Running the pipeline with the option `-profile singularity` or `-with-docker` tells Nextflow to enable either [Singularity](http://singularity.lbl.gov/) or Docker for this run. An image containing all of the software requirements will be automatically fetched and used (https://hub.docker.com/r/nf-core/{{ cookiecutter.pipeline_slug }}). +#### 3.1) Software deps: Docker +First, install docker on your system: [Docker Installation Instructions](https://docs.docker.com/engine/installation/) -If running offline with Singularity, you'll need to download and transfer the Singularity image first: +Then, running the pipeline with the option `-profile standard,docker` tells Nextflow to enable Docker for this run. 
An image containing all of the software requirements will be automatically fetched and used from dockerhub (https://hub.docker.com/r/{{ cookiecutter.name_docker }}). -```bash -singularity pull --name nfcore-{{ cookiecutter.pipeline_slug }}-[VERSION].simg shub://nfcore/{{ cookiecutter.pipeline_slug }}:[VERSION] -``` +#### 3.1) Software deps: Singularity +If you're not able to use Docker then [Singularity](http://singularity.lbl.gov/) is a great alternative. +The process is very similar: running the pipeline with the option `-profile standard,singularity` tells Nextflow to enable singularity for this run. An image containing all of the software requirements will be automatically fetched and used from singularity hub. -Once transferred, use `-profile singularity` but specify the path to the image file: +If running offline with Singularity, you'll need to download and transfer the Singularity image first: ```bash -nextflow run /path/to/nf-core-{{ cookiecutter.pipeline_slug }} -profile singularity /path/to/{{ cookiecutter.pipeline_slug }}-[VERSION].simg +singularity pull --name {{ cookiecutter.name_noslash }}.simg shub://{{ cookiecutter.name }} ``` -#### 3.2) Software deps: bioconda - -If you're unable to use either Docker or Singularity but you have conda installed, you can use the bioconda environment that comes with the pipeline. Running this command will create a new conda environment with all of the required software installed: +Once transferred, use `-with-singularity` and specify the path to the image file: ```bash -conda env create -f environment.yml -conda clean -a # Recommended, not essential -source activate nfcore-{{ cookiecutter.pipeline_slug }}-1.3 # Name depends on version +nextflow run /path/to/{{ cookiecutter.name_noslash }} -with-singularity {{ cookiecutter.name_noslash }}.simg ``` -The [`environment.yml`](../environment.yml) file is packaged with the pipeline. 
Note that you may need to download this file from the [GitHub project page](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}) if nextflow is automatically fetching the pipeline files. Ensure that the bioconda environment file version matches the pipeline version that you run. - - -#### 3.3) Configuration profiles +Remember to pull updated versions of the singularity image if you update the pipeline. -Nextflow can be configured to run on a wide range of different computational infrastructures. In addition to the above pipeline-specific parameters it is likely that you will need to define system-specific options. For more information, please see the [Nextflow documentation](https://www.nextflow.io/docs/latest/). -Whilst most parameters can be specified on the command line, it is usually sensible to create a configuration file for your environment. - -If you are the only person to be running this pipeline, you can create your config file as `~/.nextflow/config` and it will be applied every time you run Nextflow. Alternatively, save the file anywhere and reference it when running the pipeline with `-c path/to/config`. - -If you think that there are other people using the pipeline who would benefit from your configuration (eg. other common cluster setups), please let us know. We can add a new configuration and profile which can used by specifying `-profile ` when running the pipeline. - -The pipeline comes with several such config profiles - see the installation appendices and usage documentation for more information. - - -## 4) Reference Genomes -The nf-core/{{ cookiecutter.pipeline_slug }} pipeline needs a reference genome for read alignment. Support for many common genomes is built in if running on UPPMAX or AWS, by using [AWS-iGenomes](https://ewels.github.io/AWS-iGenomes/). +#### 3.2) Software deps: conda +If you're not able to use Docker _or_ Singularity, you can instead use conda to manage the software requirements. 
+This is slower and less reproducible than the above, but is still better than having to install all requirements yourself! +The pipeline ships with a conda environment file and nextflow has built-in support for this. +To use it first ensure that you have conda installed (we recommend [miniconda](https://conda.io/miniconda.html)), then follow the same pattern as above and use the flag `-profile standard,conda` ## Appendices @@ -125,4 +112,4 @@ Note that you will need to specify your UPPMAX project ID when running a pipelin ```nextflow params.project = 'project_ID' // eg. b2017123 -``` \ No newline at end of file +``` diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/output.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/output.md similarity index 97% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/output.md rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/output.md index ec26d0191a..84310d2672 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/output.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/output.md @@ -1,4 +1,4 @@ -# nf-core/{{ cookiecutter.pipeline_slug }} Output +# {{ cookiecutter.name }}: Output This document describes the output produced by the pipeline. Most of the plots are taken from the MultiQC report, which summarises results at the end of the pipeline. 
diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/troubleshooting.md similarity index 92% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/troubleshooting.md index 5fbed04eb6..7368d57dae 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/troubleshooting.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/troubleshooting.md @@ -1,4 +1,4 @@ -# Troubleshooting +# {{ cookiecutter.name }}: Troubleshooting ## Input files not found @@ -23,6 +23,6 @@ The pipeline can't take a list of multiple input files - it takes a glob express ## Extra resources and getting help If you still have an issue with running the pipeline then feel free to contact us. -Have a look at the [pipeline website](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}) to find out how. +Have a look at the [pipeline website](https://github.com/{{ cookiecutter.name }}) to find out how. If you have problems that are related to Nextflow and not our pipeline then check out the [Nextflow gitter channel](https://gitter.im/nextflow-io/nextflow) or the [google group](https://groups.google.com/forum/#!forum/nextflow). 
diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md similarity index 86% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md index 68314bf48a..e6c914b22f 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/usage.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md @@ -1,4 +1,4 @@ -# {{ cookiecutter.pipeline_slug }} Usage +# {{ cookiecutter.name }}: Usage ## Table of contents @@ -50,7 +50,7 @@ NXF_OPTS='-Xms1g -Xmx4g' ## Running the pipeline The typical command for running the pipeline is as follows: ```bash -nextflow run nf-core/{{ cookiecutter.pipeline_name }} --reads '*_R{1,2}.fastq.gz' -profile docker +nextflow run {{ cookiecutter.name }} --reads '*_R{1,2}.fastq.gz' -profile standard,docker ``` This will launch the pipeline with the `docker` configuration profile. See below for more information about profiles. @@ -68,13 +68,13 @@ results # Finished results (configurable, see below) When you run the above command, Nextflow automatically pulls the pipeline code from GitHub and stores it as a cached version. When running the pipeline after this, it will always use the cached version if available - even if the pipeline has been updated since. To make sure that you're running the latest version of the pipeline, make sure that you regularly update the cached version of the pipeline: ```bash -nextflow pull nf-core/{{ cookiecutter.pipeline_name }} +nextflow pull {{ cookiecutter.name }} ``` ### Reproducibility It's a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. 
If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. -First, go to the [{{ cookiecutter.pipeline_name }} releases page](https://github.com/nf-core/{{ cookiecutter.pipeline_name }}/releases) and find the latest version number - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. +First, go to the [{{ cookiecutter.name }} releases page](https://github.com/{{ cookiecutter.name }}/releases) and find the latest version number - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. @@ -82,16 +82,25 @@ This version number will be logged in reports when you run the pipeline, so that ## Main Arguments ### `-profile` -Use this parameter to choose a configuration profile. Each profile is designed for a different compute environment - follow the links below to see instructions for running on that system. Available profiles are: +Use this parameter to choose a configuration profile. Profiles can give configuration presets for different compute environments. Note that multiple profiles can be loaded, for example: `-profile standard,docker` - the order of arguments is important! +* `standard` + * The default profile, used if `-profile` is not specified at all. + * Runs locally and expects all software to be installed and available on the `PATH`. 
* `docker` * A generic configuration profile to be used with [Docker](http://docker.com/) - * Runs using the `local` executor and pulls software from dockerhub: [`{{ cookiecutter.pipeline_slug }}`](http://hub.docker.com/r/{{ cookiecutter.pipeline_slug }}/) + * Pulls software from dockerhub: [`{{ cookiecutter.name_docker }}`](http://hub.docker.com/r/{{ cookiecutter.name_docker }}/) +* `singularity` + * A generic configuration profile to be used with [Singularity](http://singularity.lbl.gov/) + * Pulls software from singularity-hub +* `conda` + * A generic configuration profile to be used with [conda](https://conda.io/docs/) + * Pulls most software from [Bioconda](https://bioconda.github.io/) * `awsbatch` * A generic configuration profile to be used with AWS Batch. -* `standard` - * The default profile, used if `-profile` is not specified at all. Runs locally and expects all software to be installed and available on the `PATH`. - * This profile is mainly designed to be used as a starting point for other configurations and is inherited by most of the other profiles. +* `test` + * A profile with a complete configuration for automated testing + * Includes links to test data so needs no other parameters * `none` * No configuration at all. Useful if you want to build your own config from scratch and want to avoid loading in the default `base` config profile (not recommended). @@ -226,7 +235,5 @@ Should be a string in the format integer-unit. eg. `--max_cpus 1` ### `--plaintext_email` Set to receive plain-text e-mails instead of HTML formatted. -### `--sampleLevel` -Used to turn of the edgeR MDS and heatmap. Set automatically when running on fewer than 3 samples. - ### `--multiqc_config` +Specify a path to a custom MultiQC configuration file. 
diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml similarity index 57% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml index f853553f7d..11e838f656 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/environment.yml +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml @@ -1,4 +1,4 @@ -name: nfcore-{{ cookiecutter.pipeline_slug }}-{{ cookiecutter.version }} +name: {{ cookiecutter.name_noslash }}-{{ cookiecutter.version }} channels: - bioconda - conda-forge diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf similarity index 91% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index 54967ffc22..976d040fa1 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -1,11 +1,11 @@ #!/usr/bin/env nextflow /* ======================================================================================== - {{ cookiecutter.pipeline_name }} + {{ cookiecutter.name }} ======================================================================================== - {{ cookiecutter.pipeline_name }} Analysis Pipeline. + {{ cookiecutter.name }} Analysis Pipeline. 
#### Homepage / Documentation - https://github.com/nf-core/{{ cookiecutter.pipeline_name }} + https://github.com/{{ cookiecutter.name }} ---------------------------------------------------------------------------------------- */ @@ -13,13 +13,13 @@ def helpMessage() { log.info""" ========================================= - {{ cookiecutter.pipeline_name }} v${manifest.pipelineVersion} + {{ cookiecutter.name }} v${manifest.pipelineVersion} ========================================= Usage: The typical command for running the pipeline is as follows: - nextflow run nf-core/{{ cookiecutter.pipeline_name }} --reads '*_R{1,2}.fastq.gz' -profile docker + nextflow run {{ cookiecutter.name }} --reads '*_R{1,2}.fastq.gz' -profile standard,docker Mandatory arguments: --reads Path to input data (must be surrounded with quotes) @@ -128,10 +128,10 @@ log.info """======================================================= | \\| | \\__, \\__/ | \\ |___ \\`-._,-`-, `._,._,\' -{{ cookiecutter.pipeline_name }} v${manifest.pipelineVersion}" +{{ cookiecutter.name }} v${manifest.pipelineVersion}" =======================================================""" def summary = [:] -summary['Pipeline Name'] = '{{ cookiecutter.pipeline_name }}' +summary['Pipeline Name'] = '{{ cookiecutter.name }}' summary['Pipeline Version'] = manifest.pipelineVersion summary['Run Name'] = custom_runName ?: workflow.runName summary['Reads'] = params.reads @@ -164,10 +164,10 @@ def create_workflow_summary(summary) { def yaml_file = workDir.resolve('workflow_summary_mqc.yaml') yaml_file.text = """ - id: 'nf-core-{{ cookiecutter.pipeline_slug }}-summary' + id: '{{ cookiecutter.name_noslash }}-summary' description: " - this information is collected when the pipeline is started." 
- section_name: 'nf-core/{{ cookiecutter.pipeline_name }} Workflow Summary' - section_href: 'https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}' + section_name: '{{ cookiecutter.name }} Workflow Summary' + section_href: 'https://github.com/{{ cookiecutter.name }}' plot_type: 'html' data: |
@@ -274,9 +274,9 @@ process output_documentation { workflow.onComplete { // Set up the e-mail variables - def subject = "[{{ cookiecutter.pipeline_name }}] Successful: $workflow.runName" + def subject = "[{{ cookiecutter.name }}] Successful: $workflow.runName" if(!workflow.success){ - subject = "[{{ cookiecutter.pipeline_name }}] FAILED: $workflow.runName" + subject = "[{{ cookiecutter.name }}] FAILED: $workflow.runName" } def email_fields = [:] email_fields['version'] = manifest.pipelineVersion @@ -324,11 +324,11 @@ workflow.onComplete { if( params.plaintext_email ){ throw GroovyException('Send plaintext e-mail, not HTML') } // Try to send HTML e-mail using sendmail [ 'sendmail', '-t' ].execute() << sendmail_html - log.info "[{{ cookiecutter.pipeline_name }}] Sent summary e-mail to $params.email (sendmail)" + log.info "[{{ cookiecutter.name }}] Sent summary e-mail to $params.email (sendmail)" } catch (all) { // Catch failures and try with plaintext [ 'mail', '-s', subject, params.email ].execute() << email_txt - log.info "[{{ cookiecutter.pipeline_name }}] Sent summary e-mail to $params.email (mail)" + log.info "[{{ cookiecutter.name }}] Sent summary e-mail to $params.email (mail)" } } @@ -342,6 +342,6 @@ workflow.onComplete { def output_tf = new File( output_d, "pipeline_report.txt" ) output_tf.withWriter { w -> w << email_txt } - log.info "[{{ cookiecutter.pipeline_name }}] Pipeline Complete" + log.info "[{{ cookiecutter.name }}] Pipeline Complete" } diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config similarity index 76% rename from nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config rename to nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config index 998ac2c533..984bcc0cf4 100644 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/nextflow.config +++ 
b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config @@ -1,6 +1,6 @@ /* * ------------------------------------------------- - * {{ cookiecutter.pipeline_name }} Nextflow config file + * {{ cookiecutter.name }} Nextflow config file * ------------------------------------------------- * Default config options for all environments. * Cluster-specific config options should be saved @@ -11,7 +11,7 @@ // Global default params, used in configs params { - container = 'nfcore/{{ cookiecutter.pipeline_slug }}:latest' // Container slug. Stable releases should specify release tag! + container = '{{ cookiecutter.name_docker }}:latest' // Container slug. Stable releases should specify release tag! help = false reads = "data/*{1,2}.fastq.gz" @@ -58,25 +58,25 @@ process.shell = ['/bin/bash', '-euo', 'pipefail'] timeline { enabled = true - file = "${params.tracedir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_timeline.html" + file = "${params.tracedir}/pipeline_info/{{ cookiecutter.name.replace(' ', '-') }}_timeline.html" } report { enabled = true - file = "${params.tracedir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_report.html" + file = "${params.tracedir}/pipeline_info/{{ cookiecutter.name.replace(' ', '-') }}_report.html" } trace { enabled = true - file = "${params.tracedir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_trace.txt" + file = "${params.tracedir}/pipeline_info/{{ cookiecutter.name.replace(' ', '-') }}_trace.txt" } dag { enabled = true - file = "${params.tracedir}/pipeline_info/{{ cookiecutter.pipeline_name.replace(' ', '-') }}_dag.svg" + file = "${params.tracedir}/pipeline_info/{{ cookiecutter.name.replace(' ', '-') }}_dag.svg" } manifest { - name = 'nf-core/{{ cookiecutter.pipeline_name }}' - description = '{{ cookiecutter.pipeline_short_description }}' - homePage = 'https://github.com/nf-core/{{ cookiecutter.pipeline_name }}' + name = '{{ cookiecutter.name }}' + description = '{{ 
cookiecutter.description }}' + homePage = 'https://github.com/{{ cookiecutter.name }}' pipelineVersion = '{{ cookiecutter.version }}' mainScript = 'main.nf' nextflowVersion = '>=0.30.0' diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md deleted file mode 100644 index 461168d373..0000000000 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/.github/CONTRIBUTING.md +++ /dev/null @@ -1,25 +0,0 @@ -# nf-core/{{ cookiecutter.pipeline_slug }} Contributing Guidelines - -Hi there! Many thanks for taking an interest in improving nf-core/{{ cookiecutter.pipeline_slug }}. - -We try to manage the required tasks for nf-core/{{ cookiecutter.pipeline_slug }} using GitHub issues, you probably came to this page when creating one. Please use the prefilled template to save time. - -However, don't be put off by this template - other more general issues and suggestions are welcome! Contributions to the code are even more welcome ;) - -> If you need help using nf-core/{{ cookiecutter.pipeline_slug }} then the best place to go is the Gitter chatroom where you can ask us questions directly: https://gitter.im/nf-core/Lobby - -## Contribution workflow -If you'd like to write some code for nf-core/{{ cookiecutter.pipeline_slug }}, the standard workflow -is as follows: - -1. Check that there isn't already an issue about your idea in the - [nf-core/{{ cookiecutter.pipeline_slug }} issues](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}/issues) to avoid - duplicating work. - * Feel free to add a new issue here for the same reason. -2. Fork the [nf-core/{{ cookiecutter.pipeline_slug }} repository](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}) to your GitHub account -3. Make the necessary changes / additions within your forked repository -4. 
Submit a Pull Request against the master branch and wait for the code to be reviewed and merged. - -If you're not used to this workflow with git, you can start with some [basic docs from GitHub](https://help.github.com/articles/fork-a-repo/) or even their [excellent interactive tutorial](https://try.github.io/). - -For further information/help, please consult the [nf-core/{{ cookiecutter.pipeline_slug }} documentation](https://github.com/nf-core/{{ cookiecutter.pipeline_slug }}README.md#documentation) and don't hesitate to get in touch on [Gitter](https://gitter.im/nf-core/Lobby) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CHANGELOG.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CHANGELOG.md deleted file mode 100644 index 04c1fc9830..0000000000 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/CHANGELOG.md +++ /dev/null @@ -1,3 +0,0 @@ - -## {{ cookiecutter.version }} - -Initial release of {{ cookiecutter.pipeline_name }}, created with the NGI-NFcookiecutter template: https://github.com/ewels/NGI-NFcookiecutter. diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/multiqc_config.yaml b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/multiqc_config.yaml deleted file mode 100644 index f4c2ce9005..0000000000 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/conf/multiqc_config.yaml +++ /dev/null @@ -1,7 +0,0 @@ -report_comment: > - This report has been generated by the {{ cookiecutter.pipeline_name }} - analysis pipeline. For information about how to interpret these results, please see the - documentation. 
-report_section_order: - {{ cookiecutter.pipeline_name.lower().replace(' ', '-') }}-software-versions: - order: -1000 diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/README.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/README.md deleted file mode 100644 index a988f4a838..0000000000 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# {{ cookiecutter.pipeline_name }} Documentation - -The {{ cookiecutter.pipeline_name }} documentation is split into the following files: - -1. [Installation](installation.md) -2. Pipeline configuration - * [Local installation](configuration/local.md) - * [Adding your own system](configuration/adding_your_own.md) -3. [Running the pipeline](usage.md) -4. [Output and how to interpret the results](output.md) -5. [Troubleshooting](troubleshooting.md) diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md deleted file mode 100644 index 3b66dc4267..0000000000 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/adding_your_own.md +++ /dev/null @@ -1,124 +0,0 @@ -# {{ cookiecutter.pipeline_name }}: Configuration for other clusters - -It is entirely possible to run this pipeline on other clusters, though you will need to set up your own config file so that the pipeline knows how to work with your cluster. - -> If you think that there are other people using the pipeline who would benefit from your configuration (eg. other common cluster setups), please let us know. We can add a new configuration and profile which can used by specifying `-profile ` when running the pipeline. - -If you are the only person to be running this pipeline, you can create your config file as `~/.nextflow/config` and it will be applied every time you run Nextflow. 
Alternatively, save the file anywhere and reference it when running the pipeline with `-c path/to/config` (see the [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html) for more). - -A basic configuration comes with the pipeline, which runs by default (the `standard` config profile - see [`conf/base.config`](../conf/base.config)). This means that you only need to configure the specifics for your system and overwrite any defaults that you want to change. - -## Cluster Environment -By default, pipeline uses the `local` Nextflow executor - in other words, all jobs are run in the login session. If you're using a simple server, this may be fine. If you're using a compute cluster, this is bad as all jobs will run on the head node. - -To specify your cluster environment, add the following line to your config file: - -```nextflow -process { - executor = 'YOUR_SYSTEM_TYPE' -} -``` - -Many different cluster types are supported by Nextflow. For more information, please see the [Nextflow documentation](https://www.nextflow.io/docs/latest/executor.html). - -Note that you may need to specify cluster options, such as a project or queue. To do so, use the `clusterOptions` config option: - -```nextflow -process { - executor = 'SLURM' - clusterOptions = '-A myproject' -} -``` - - -## Software Requirements -To run the pipeline, several software packages are required. How you satisfy these requirements is essentially up to you and depends on your system. If possible, we _highly_ recommend using either Docker or Singularity. - -### Docker -Docker is a great way to run {{ cookiecutter.pipeline_name }}, as it manages all software installations and allows the pipeline to be run in an identical software environment across a range of systems. - -Nextflow has [excellent integration](https://www.nextflow.io/docs/latest/docker.html) with Docker, and beyond installing the two tools, not much else is required. 
- -First, install docker on your system: [Docker Installation Instructions](https://docs.docker.com/engine/installation/) - -Then, simply run the analysis pipeline: -```bash -nextflow run nf-core/{{ cookiecutter.pipeline_slug }} -profile docker --reads '' -``` - -Nextflow will recognise `nf-core/{{ cookiecutter.pipeline_slug }}` and download the pipeline from GitHub. The `-profile docker` configuration lists the [{{ cookiecutter.pipeline_slug }}](https://hub.docker.com/r/nfcore/{{ cookiecutter.pipeline_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. - -The public docker images are tagged with the same version numbers as the code, which you can use to ensure reproducibility. When running the pipeline, specify the pipeline version with `-r`, for example `-r v1.3`. This uses pipeline code and docker image from this tagged version. - -To add docker support to your own config file (instead of using the `docker` profile, which runs locally), add the following: - -```nextflow -docker { - enabled = true -} -``` - -A test profile comes with the pipeline and is used by the Travis continuous integration (CI) service to test the pipeline for potential errors. Typically, this downloads a small test dataset from [test data repository](https://github.com/nf-core/test-datasets/) and runs the pipeline automatically using docker using [Travis](https://travis-ci.org/nf-core/{{ cookiecutter.pipeline_name }}/) whenever changes are made to the pipeline. Further information on how to add your own test data for a new pipeline can be found at the link mentioned above. - -### Singularity image -Many HPC environments are not able to run Docker due to security issues. [Singularity](http://singularity.lbl.gov/) is a tool designed to run on such HPC systems which is very similar to Docker. Even better, it can use create images directly from dockerhub. -To use the singularity image for a single run, use `-profile standard,singularity`. 
This will download the singularity container from singularity hub dynamically. - - -To specify singularity usage in your pipeline config file, add the following: - -```nextflow -singularity.enabled = true -process.container = {"shub://${params.container.replace('nfcore', 'nf-core')}"} -``` - -If you intend to run the pipeline offline, nextflow will not be able to automatically download the singularity image for you. Instead, you'll have to do this yourself manually first, transfer the image file and then point to that. - -First, pull the image file where you have an internet connection: - -```bash -singularity pull --name nf-core-{{ cookiecutter.pipeline_slug }}.simg shub://nf-core/{{ cookiecutter.pipeline_slug }} -``` - -Then transfer this file and run the pipeline with this path: - -```bash -nextflow run /path/to/{{ cookiecutter.pipeline_slug }} -with-singularity /path/to/nf-core-{{ cookiecutter.pipeline_slug }}.simg -``` - - -### Manual Installation -As a last resort, you may need to install the required software manually. We recommend using [Bioconda](https://bioconda.github.io/) to do this. The following instructions are an example only and will not be updated with the pipeline. 
- -#### 1) Install miniconda in your home directory -``` bash -cd -wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -bash Miniconda3-latest-Linux-x86_64.sh -``` - -#### 2) Add the bioconda conda channel (and others) -```bash -conda config --add channels default -conda config --add channels conda-forge -conda config --add channels bioconda -``` - -#### 3) Create a conda environment, with all necessary packages: -```bash -conda create --name {{ cookiecutter.pipeline_slug }}_py2.7 python=2.7 -source activate {{ cookiecutter.pipeline_slug }}_py2.7 -conda install --yes \ - fastqc \ - multiqc -``` -_(Feel free to adjust versions as required.)_ - -##### 4) Usage -Once created, the conda environment can be activated before running the pipeline and deactivated afterwards: - -```bash -source activate {{ cookiecutter.pipeline_slug }}_py2.7 -# run pipeline -source deactivate -``` diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md deleted file mode 100644 index 441adbb9ae..0000000000 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/local.md +++ /dev/null @@ -1,42 +0,0 @@ -# {{ cookiecutter.pipeline_name }}: Local Configuration - -If running the pipeline in a local environment, we highly recommend using either Docker or Singularity. - -## Docker -Docker is a great way to run {{ cookiecutter.pipeline_name }}, as it manages all software installations and allows the pipeline to be run in an identical software environment across a range of systems. - -Nextflow has [excellent integration](https://www.nextflow.io/docs/latest/docker.html) with Docker, and beyond installing the two tools, not much else is required. The {{ cookiecutter.pipeline_name }} profile comes with a configuration profile for docker, making it very easy to use. 
This also comes with the required presets to use the AWS iGenomes resource, meaning that if using common reference genomes you just specify the reference ID and it will be autaomtically downloaded from AWS S3. - -First, install docker on your system: [Docker Installation Instructions](https://docs.docker.com/engine/installation/) - -Then, simply run the analysis pipeline: -```bash -nextflow run nf-core/{{ cookiecutter.pipeline_slug }} -profile docker --reads '' -``` - -Nextflow will recognise `nf-core/{{ cookiecutter.pipeline_slug }}` and download the pipeline from GitHub. The `-profile docker` configuration lists the [nfcore/{{ cookiecutter.pipeline_slug }}](https://hub.docker.com/r/nfcore/{{ cookiecutter.pipeline_slug }}/) image that we have created and is hosted at dockerhub, and this is downloaded. - -For more information about how to work with reference genomes, see [`docs/configuration/reference_genomes.md`](docs/configuration/reference_genomes.md). - -### Pipeline versions -The public docker images are tagged with the same version numbers as the code, which you can use to ensure reproducibility. When running the pipeline, specify the pipeline version with `-r`, for example `-r v1.3`. This uses pipeline code and docker image from this tagged version. - - -## Singularity image -Many HPC environments are not able to run Docker due to security issues. [Singularity](http://singularity.lbl.gov/) is a tool designed to run on such HPC systems which is very similar to Docker. Even better, it can use create images directly from dockerhub. - -To use the singularity image for a single run, use `-profile singularity`. This will download the docker container from singularity hub dynamically. - -If you intend to run the pipeline offline, nextflow will not be able to automatically download the singularity image for you. Instead, you'll have to do this yourself manually first, transfer the image file and then point to that. 
- -First, pull the image file where you have an internet connection: - -```bash -singularity pull --name nf-core-{{ cookiecutter.pipeline_slug }}.simg shub://nf-core/{{ cookiecutter.pipeline_slug }} -``` - -Then transfer this file and run the pipeline with this path: - -```bash -nextflow run /path/to/nf-core-{{ cookiecutter.pipeline_slug }} -with-singularity nf-core-{{ cookiecutter.pipeline_slug }}.simg -``` diff --git a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/reference_genomes.md b/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/reference_genomes.md deleted file mode 100644 index 5874acdd51..0000000000 --- a/nf_core/pipeline-template/{{cookiecutter.pipeline_slug}}/docs/configuration/reference_genomes.md +++ /dev/null @@ -1,46 +0,0 @@ -# {{ cookiecutter.pipeline_name }}: Reference Genomes Configuration - -The {{ cookiecutter.pipeline_name }} pipeline needs a reference genome for alignment and annotation. If not already available, start by downloading the relevant reference, for example from [illumina iGenomes](https://support.illumina.com/sequencing/sequencing_software/igenome.html). - -The minimal requirements are a FASTA file. - -## Adding paths to a config file -Specifying long paths every time you run the pipeline is a pain. To make this easier, the pipeline comes configured to understand reference genome keywords which correspond to preconfigured paths, meaning that you can just specify `--genome ID` when running the pipeline. - -Note that this genome key can also be specified in a config file if you always use the same genome. - -To use this system, add paths to your config file using the following template: - -```nextflow -params { - genomes { - 'YOUR-ID' { - fasta = '/genome.fa' - } - 'OTHER-GENOME' { - // [..] - } - } - // Optional - default genome. 
Ignored if --genome 'OTHER-GENOME' specified on command line - genome = 'YOUR-ID' -} -``` - -You can add as many genomes as you like as long as they have unique IDs. - -## illumina iGenomes -To make the use of reference genomes easier, illumina has developed a centralised resource called [iGenomes](https://support.illumina.com/sequencing/sequencing_software/igenome.html). Multiple reference index types are held together with consistent structure for multiple genomes. - -If possible, we recommend making this resource available on your cluster. We have put a copy of iGenomes up onto AWS S3 hosting and this pipeline is configured to use this for some profiles (`docker`, `aws`). These profiles will automatically pull the required reference files when you run the pipeline. - -To add iGenomes to your config file, add the following line to the end of your config file: - -```nextflow -includeConfig '/path/to/{{ cookiecutter.pipeline_name }}/conf/igenomes.config' -``` - -This works best when you have a `profile` set up in the pipeline - see [`nextflow.config`](../../nextflow.config). - -The hosting fees for AWS iGenomes are currently funded by a grant from Amazon. We hope that this work will be extended past the end of the grant expiry date (mid 2018), but we can't be sure at this point. 
- -For more information about the AWS iGenomes, see https://ewels.github.io/AWS-iGenomes/ \ No newline at end of file From 0d83ace356f7269186757453eedcd7989a793534 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 14 Aug 2018 17:17:26 +0200 Subject: [PATCH 71/75] Noticed the changelog was waaay out of date --- .../{{cookiecutter.name_noslash}}/CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CHANGELOG.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CHANGELOG.md index 1f720053a7..3516bb6010 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CHANGELOG.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CHANGELOG.md @@ -1,3 +1,3 @@ -## {{ cookiecutter.version }} - -Initial release of {{ cookiecutter.name }}, created with the NGI-NFcookiecutter template: https://github.com/ewels/NGI-NFcookiecutter. +## {{ cookiecutter.name }} version {{ cookiecutter.version }} - +Initial release of {{ cookiecutter.name }}, created with the [nf-core](http://nf-co.re/) template. From 8d2e122e842adcda28283f32b0da543e16374c01 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 14 Aug 2018 17:25:17 +0200 Subject: [PATCH 72/75] Fixed linting dirname error and conda env name lint failure --- .travis.yml | 2 +- docs/lint_errors.md | 6 +++--- nf_core/lint.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2185bb3795..287ec54c39 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,7 +23,7 @@ install: script: - python -m pytest --cov=nf_core . - nf-core create -n testpipeline -d "This pipeline is for testing" - - nf-core lint testpipeline + - nf-core lint nf-core-testpipeline after_success: - codecov diff --git a/docs/lint_errors.md b/docs/lint_errors.md index 82e247d953..c4733641b6 100644 --- a/docs/lint_errors.md +++ b/docs/lint_errors.md @@ -146,9 +146,9 @@ if they are set. 
> These tests only run when your pipeline has a root file called `environment.yml` * The environment `name` must match the pipeline name and version - * The pipeline name is found from the Nextflow config `manifest.homePage`, - which assumes that the URL is in the format `github.com/nf-core/[pipeline-name]` - * Example: For `github.com/nf-core/test` version 1.4, the conda environment name should be `nfcore-test-1.4` + * The pipeline name is defined in the config variable `manifest.name` + * Replace the slash with a hyphen as environment names shouldn't contain that character + * Example: For `nf-core/test` version 1.4, the conda environment name should be `nf-core-test-1.4` Each dependency is checked using the [Anaconda API service](https://api.anaconda.org/docs). Dependency sublists are ignored with the exception of `- pip`: these packages are also checked diff --git a/nf_core/lint.py b/nf_core/lint.py index c4588450d9..62a5ce8dec 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -504,7 +504,7 @@ def check_conda_env_yaml(self): # Check that the environment name matches the pipeline name pipeline_version = self.config['manifest.pipelineVersion'].strip(' \'"') - expected_env_name = 'nfcore-{}-{}'.format(self.pipeline_name.lower(), pipeline_version) + expected_env_name = 'nf-core-{}-{}'.format(self.pipeline_name.lower(), pipeline_version) if self.conda_config['name'] != expected_env_name: self.failed.append((8, "Conda environment name is incorrect ({}, should be {})".format(self.conda_config['name'], expected_env_name))) else: From 8be8f8fa992838945aa2a84df64a415741fabde4 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 14 Aug 2018 17:45:53 +0200 Subject: [PATCH 73/75] Fix the tests with hyphens in new places --- nf_core/pipeline-template/cookiecutter.json | 2 +- nf_core/release.py | 4 ++-- .../minimal_working_example/Dockerfile | 2 +- .../minimal_working_example/Singularity | 2 +- .../minimal_working_example/environment.yml | 2 +- tests/test_lint.py | 14 
+++++++------- 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/nf_core/pipeline-template/cookiecutter.json b/nf_core/pipeline-template/cookiecutter.json index eece1fe294..d0bd8f4dbc 100644 --- a/nf_core/pipeline-template/cookiecutter.json +++ b/nf_core/pipeline-template/cookiecutter.json @@ -1,5 +1,5 @@ { - "name": "example", + "name": "nf-core/example", "description": "This pipeline takes some data and does something with it.", "name_noslash": "{{ cookiecutter.name.replace('/', '-') }}", "name_docker": "{{ cookiecutter.name_docker }}", diff --git a/nf_core/release.py b/nf_core/release.py index 47674c3779..f916f449c0 100644 --- a/nf_core/release.py +++ b/nf_core/release.py @@ -46,8 +46,8 @@ def make_release(lint_obj, new_version): if 'environment.yml' in lint_obj.files: # Update conda environment.yml - nfconfig_pattern = r"name: nfcore-{}-{}".format(lint_obj.pipeline_name.lower(), current_version.replace('.','\.')) - nfconfig_newstr = "name: nfcore-{}-{}".format(lint_obj.pipeline_name.lower(), new_version) + nfconfig_pattern = r"name: nf-core-{}-{}".format(lint_obj.pipeline_name.lower(), current_version.replace('.','\.')) + nfconfig_newstr = "name: nf-core-{}-{}".format(lint_obj.pipeline_name.lower(), new_version) update_file_version("environment.yml", lint_obj, nfconfig_pattern, nfconfig_newstr) def update_file_version(filename, lint_obj, pattern, newstr): diff --git a/tests/lint_examples/minimal_working_example/Dockerfile b/tests/lint_examples/minimal_working_example/Dockerfile index f2ebb853e0..8029a1f8ae 100644 --- a/tests/lint_examples/minimal_working_example/Dockerfile +++ b/tests/lint_examples/minimal_working_example/Dockerfile @@ -5,4 +5,4 @@ LABEL authors="phil.ewels@scilifelab.se" \ COPY environment.yml / RUN conda env create -f /environment.yml && conda clean -a -ENV PATH /opt/conda/envs/nfcore-tools-0.4/bin:$PATH +ENV PATH /opt/conda/envs/nf-core-tools-0.4/bin:$PATH diff --git a/tests/lint_examples/minimal_working_example/Singularity 
b/tests/lint_examples/minimal_working_example/Singularity index c9d1026f26..79493f01ea 100644 --- a/tests/lint_examples/minimal_working_example/Singularity +++ b/tests/lint_examples/minimal_working_example/Singularity @@ -7,7 +7,7 @@ Bootstrap:docker VERSION 0.4 %environment - PATH=/opt/conda/envs/nfcore-tools-0.4/bin:$PATH + PATH=/opt/conda/envs/nf-core-tools-0.4/bin:$PATH export PATH %files diff --git a/tests/lint_examples/minimal_working_example/environment.yml b/tests/lint_examples/minimal_working_example/environment.yml index 16cd7fe0e2..0fbbfe1490 100644 --- a/tests/lint_examples/minimal_working_example/environment.yml +++ b/tests/lint_examples/minimal_working_example/environment.yml @@ -1,6 +1,6 @@ # You can use this file to create a conda environment for this pipeline: # conda env create -f environment.yml -name: nfcore-tools-0.4 +name: nf-core-tools-0.4 channels: - defaults - conda-forge diff --git a/tests/test_lint.py b/tests/test_lint.py index ceadbb39c2..77081b6d86 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -309,7 +309,7 @@ def test_conda_dockerfile_pass(self): lint_obj.files = ['environment.yml'] with open(os.path.join(PATH_WORKING_EXAMPLE, 'Dockerfile'), 'r') as fh: lint_obj.dockerfile = fh.read().splitlines() - lint_obj.conda_config['name'] = 'nfcore-tools-0.4' + lint_obj.conda_config['name'] = 'nf-core-tools-0.4' lint_obj.check_conda_dockerfile() expectations = {"failed": 0, "warned": 0, "passed": 1} self.assess_lint_status(lint_obj, **expectations) @@ -318,7 +318,7 @@ def test_conda_dockerfile_fail(self): """ Tests the conda Dockerfile test fails with a bad example """ lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) lint_obj.files = ['environment.yml'] - lint_obj.conda_config['name'] = 'nfcore-tools-0.4' + lint_obj.conda_config['name'] = 'nf-core-tools-0.4' lint_obj.dockerfile = ['fubar'] lint_obj.check_conda_dockerfile() expectations = {"failed": 4, "warned": 0, "passed": 0} @@ -337,7 +337,7 @@ def 
test_pip_no_version_fail(self): lint_obj.files = ['environment.yml'] lint_obj.pipeline_name = 'tools' lint_obj.config['manifest.pipelineVersion'] = '0.4' - lint_obj.conda_config = {'name': 'nfcore-tools-0.4', 'dependencies': [{'pip': ['multiqc']}]} + lint_obj.conda_config = {'name': 'nf-core-tools-0.4', 'dependencies': [{'pip': ['multiqc']}]} lint_obj.check_conda_env_yaml() expectations = {"failed": 1, "warned": 0, "passed": 1} self.assess_lint_status(lint_obj, **expectations) @@ -348,7 +348,7 @@ def test_pip_package_not_latest_warn(self): lint_obj.files = ['environment.yml'] lint_obj.pipeline_name = 'tools' lint_obj.config['manifest.pipelineVersion'] = '0.4' - lint_obj.conda_config = {'name': 'nfcore-tools-0.4', 'dependencies': [{'pip': ['multiqc=1.4']}]} + lint_obj.conda_config = {'name': 'nf-core-tools-0.4', 'dependencies': [{'pip': ['multiqc=1.4']}]} lint_obj.check_conda_env_yaml() expectations = {"failed": 0, "warned": 1, "passed": 2} self.assess_lint_status(lint_obj, **expectations) @@ -364,7 +364,7 @@ def test_pypi_timeout_warn(self, mock_get): lint_obj.files = ['environment.yml'] lint_obj.pipeline_name = 'tools' lint_obj.config['manifest.pipelineVersion'] = '0.4' - lint_obj.conda_config = {'name': 'nfcore-tools-0.4', 'dependencies': [{'pip': ['multiqc=1.5']}]} + lint_obj.conda_config = {'name': 'nf-core-tools-0.4', 'dependencies': [{'pip': ['multiqc=1.5']}]} lint_obj.check_conda_env_yaml() expectations = {"failed": 0, "warned": 1, "passed": 2} self.assess_lint_status(lint_obj, **expectations) @@ -380,7 +380,7 @@ def test_pypi_connection_error_warn(self, mock_get): lint_obj.files = ['environment.yml'] lint_obj.pipeline_name = 'tools' lint_obj.config['manifest.pipelineVersion'] = '0.4' - lint_obj.conda_config = {'name': 'nfcore-tools-0.4', 'dependencies': [{'pip': ['multiqc=1.5']}]} + lint_obj.conda_config = {'name': 'nf-core-tools-0.4', 'dependencies': [{'pip': ['multiqc=1.5']}]} lint_obj.check_conda_env_yaml() expectations = {"failed": 0, "warned": 1, 
"passed": 2} self.assess_lint_status(lint_obj, **expectations) @@ -391,7 +391,7 @@ def test_pip_dependency_fail(self): lint_obj.files = ['environment.yml'] lint_obj.pipeline_name = 'tools' lint_obj.config['manifest.pipelineVersion'] = '0.4' - lint_obj.conda_config = {'name': 'nfcore-tools-0.4', 'dependencies': [{'pip': ['notpresent=1.5']}]} + lint_obj.conda_config = {'name': 'nf-core-tools-0.4', 'dependencies': [{'pip': ['notpresent=1.5']}]} lint_obj.check_conda_env_yaml() expectations = {"failed": 1, "warned": 0, "passed": 2} self.assess_lint_status(lint_obj, **expectations) From 1aec57c491268d9fe3d3d7b78b2f4092641e4c76 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 14 Aug 2018 18:11:59 +0200 Subject: [PATCH 74/75] v1.1 version bump & changelog update --- CHANGELOG.md | 21 +++++++++++++++++---- setup.py | 2 +- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8dbdf8fca8..34715e38fe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,23 @@ # nf-core/tools -## v1.1dev +## [v1.1](https://github.com/nf-core/tools/releases/tag/1.1) - 2018-08-14 +Very large release containing lots of work from the first nf-core hackathon, held in SciLifeLab Stockholm. 
+ +* The [Cookiecutter template](https://github.com/nf-core/cookiecutter) has been merged into tools + * The old repo above has been archived + * New pipelines are now created using the command `nf-core create` + * The nf-core template and associated linting are now controlled under the same version system +* Large number of template updates and associated linting changes + * New simplified cookiecutter variable usage + * Refactored documentation - simplified and reduced duplication + * Better `manifest` variables instead of `params` for pipeline name and version + * New integrated nextflow version checking + * Updated travis docker pull command to use tagging to allow release tests to pass + * Reverted Docker and Singularity syntax to use `ENV` hack again * Improved Python readme parsing for PyPI -* Update linting and release tools to support new style of Docker & Singularity conda installations -* Merged the cookiecutter template into this tools package -* Added new subcommand to initialise a new pipeline with a local git repo and an initial commit +* Updated Travis tests to check that the correct `dev` branch is being targeted +* New sync tool to automate pipeline updates + * Once initial merges are complete, a nf-core bot account will create PRs for future template updates ## [v1.0.1](https://github.com/nf-core/tools/releases/tag/1.0.1) - 2018-07-18 diff --git a/setup.py b/setup.py index d65bedb773..c649b57786 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages -version = '1.0.1' +version = '1.1' with open('README.md') as f: readme = f.read() From 8e834c3ef9b5800fb4ea22bf6e160304864bf4f3 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 14 Aug 2018 19:29:36 +0200 Subject: [PATCH 75/75] Use config.get to avoid KeyErrors. 
Don't use --release for nf-core/tools --- nf_core/lint.py | 40 ++++++++++++++++++++-------------------- scripts/nf-core | 2 +- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/nf_core/lint.py b/nf_core/lint.py index 62a5ce8dec..13a2fb93b7 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -325,18 +325,18 @@ def check_nextflow_config(self): # Check that the pipeline name starts with nf-core try: - assert self.config['manifest.name'].strip('\'"').startswith('nf-core/') + assert self.config.get('manifest.name', '').strip('\'"').startswith('nf-core/') except (AssertionError, IndexError): - self.failed.append((4, "Config variable 'manifest.name' did not begin with nf-core/:\n {}".format(self.config['manifest.name'].strip('\'"')))) + self.failed.append((4, "Config variable 'manifest.name' did not begin with nf-core/:\n {}".format(self.config.get('manifest.name', '').strip('\'"')))) else: self.passed.append((4, "Config variable 'manifest.name' began with 'nf-core/'")) - self.pipeline_name = self.config['manifest.name'].strip("'").replace('nf-core/', '') + self.pipeline_name = self.config.get('manifest.name', '').strip("'").replace('nf-core/', '') # Check that the homePage is set to the GitHub URL try: - assert self.config['manifest.homePage'].strip('\'"').startswith('https://github.com/nf-core/') + assert self.config.get('manifest.homePage', '').strip('\'"').startswith('https://github.com/nf-core/') except (AssertionError, IndexError): - self.failed.append((4, "Config variable 'manifest.homePage' did not begin with https://github.com/nf-core/:\n {}".format(self.config['manifest.homePage'].strip('\'"')))) + self.failed.append((4, "Config variable 'manifest.homePage' did not begin with https://github.com/nf-core/:\n {}".format(self.config.get('manifest.homePage', '').strip('\'"')))) else: self.passed.append((4, "Config variable 'manifest.homePage' began with 'https://github.com/nf-core/'")) @@ -349,12 +349,12 @@ def check_nextflow_config(self): # Check 
that the minimum nextflowVersion is set properly if 'manifest.nextflowVersion' in self.config: - if self.config['manifest.nextflowVersion'].strip('"\'').startswith('>='): + if self.config.get('manifest.nextflowVersion', '').strip('"\'').startswith('>='): self.passed.append((4, "Config variable 'manifest.nextflowVersion' started with >=")) # Save self.minNextflowVersion for convenience self.minNextflowVersion = re.sub(r'[^0-9\.]', '', self.config.get('manifest.nextflowVersion', '')) else: - self.failed.append((4, "Config variable 'manifest.nextflowVersion' did not start with '>=' : '{}'".format(self.config['manifest.nextflowVersion']).strip('"\''))) + self.failed.append((4, "Config variable 'manifest.nextflowVersion' did not start with '>=' : '{}'".format(self.config.get('manifest.nextflowVersion', '')).strip('"\''))) def check_ci_config(self): """ Check that the Travis or Circle CI YAML config is valid @@ -376,8 +376,8 @@ def check_ci_config(self): else: self.passed.append((5, "Continuous integration runs nf-core lint Tests: '{}'".format(fn))) # Check that we're pulling the right docker image - if self.config.get('params.container'): - docker_notag = re.sub(r':(?:[\.\d]+|latest)$', '', self.config['params.container'].strip('"\'')) + if self.config.get('params.container', ''): + docker_notag = re.sub(r':(?:[\.\d]+|latest)$', '', self.config.get('params.container', '').strip('"\'')) docker_pull_cmd = 'docker pull {}'.format(docker_notag) try: assert(docker_pull_cmd in ciconf.get('before_install', [])) @@ -387,7 +387,7 @@ def check_ci_config(self): self.passed.append((5, "CI is pulling the correct docker image: {}".format(docker_pull_cmd))) # Check that we tag the docker image properly - docker_tag_cmd = 'docker tag {} {}'.format(docker_notag, self.config['params.container'].strip('"\'')) + docker_tag_cmd = 'docker tag {} {}'.format(docker_notag, self.config.get('params.container', '').strip('"\'')) try: assert(docker_tag_cmd in ciconf.get('before_install')) except 
AssertionError: @@ -456,20 +456,20 @@ def check_version_consistency(self): versions = {} # Get the version definitions # Get version from nextflow.config - versions['manifest.pipelineVersion'] = self.config['manifest.pipelineVersion'].strip(' \'"') + versions['manifest.pipelineVersion'] = self.config.get('manifest.pipelineVersion', '').strip(' \'"') # Get version from the docker slug - if self.config.get('params.container') and \ - not ':' in self.config['params.container']: + if self.config.get('params.container', '') and \ + not ':' in self.config.get('params.container', ''): self.failed.append((7, "Docker slug seems not to have " - "a version tag: {}".format(self.config['params.container']))) + "a version tag: {}".format(self.config.get('params.container', '')))) return # Get config container slugs, (if set; one container per workflow) - if self.config.get('params.container'): - versions['params.container'] = self.config['params.container'].strip(' \'"').split(':')[-1] - if self.config.get('process.container'): - versions['process.container'] = self.config['process.container'].strip(' \'"').split(':')[-1] + if self.config.get('params.container', ''): + versions['params.container'] = self.config.get('params.container', '').strip(' \'"').split(':')[-1] + if self.config.get('process.container', ''): + versions['process.container'] = self.config.get('process.container', '').strip(' \'"').split(':')[-1] # Get version from the TRAVIS_TAG env var if os.environ.get('TRAVIS_TAG') and os.environ.get('TRAVIS_REPO_SLUG', '') != 'nf-core/tools': @@ -503,7 +503,7 @@ def check_conda_env_yaml(self): return # Check that the environment name matches the pipeline name - pipeline_version = self.config['manifest.pipelineVersion'].strip(' \'"') + pipeline_version = self.config.get('manifest.pipelineVersion', '').strip(' \'"') expected_env_name = 'nf-core-{}-{}'.format(self.pipeline_name.lower(), pipeline_version) if self.conda_config['name'] != expected_env_name: 
self.failed.append((8, "Conda environment name is incorrect ({}, should be {})".format(self.conda_config['name'], expected_env_name))) @@ -610,7 +610,7 @@ def check_conda_singularityfile(self): expected_strings = [ 'From:nfcore/base', 'Bootstrap:docker', - 'VERSION {}'.format(self.config['manifest.pipelineVersion'].strip(' \'"')), + 'VERSION {}'.format(self.config.get('manifest.pipelineVersion', '').strip(' \'"')), 'PATH=/opt/conda/envs/{}/bin:$PATH'.format(self.conda_config['name']), 'export PATH', 'environment.yml /', diff --git a/scripts/nf-core b/scripts/nf-core index 7ce861a571..417e76170f 100755 --- a/scripts/nf-core +++ b/scripts/nf-core @@ -35,7 +35,7 @@ def nf_core_cli(verbose): @click.option( '--release', is_flag = True, - default = os.environ.get('TRAVIS_BRANCH') == 'master' and os.environ.get('TRAVIS_REPO_SLUG', '').startswith('nf-core/'), + default = os.environ.get('TRAVIS_BRANCH') == 'master' and os.environ.get('TRAVIS_REPO_SLUG', '').startswith('nf-core/') and not os.environ.get('TRAVIS_REPO_SLUG', 'nf-core/tools'), help = "Execute additional checks for release-ready workflows." ) def lint(pipeline_dir, release):