diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 8f2a1bccf..000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,45 +0,0 @@ ---- -name: Bug report -about: Report something that is broken or incorrect -labels: bug ---- - - - -## Description of the bug - - - -## Steps to reproduce - -Steps to reproduce the behaviour: - -1. Command line: -2. See error: - -## Expected behaviour - - - -## System - -- Hardware: -- Executor: -- OS: -- Version of nf-core/tools: -- Python version: - -## Nextflow Installation - -- Version: - -## Additional context - - diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 000000000..431cce198 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,42 @@ +name: Bug report +description: Report something that is broken or incorrect +labels: bug +body: + + - type: markdown + attributes: + value: | + Hi there! + + Thanks for telling us about a problem with the nf-core/tools package. + + - type: textarea + id: description + attributes: + label: Description of the bug + description: A clear and concise description of what the bug is. + validations: + required: true + + - type: textarea + id: command_used + attributes: + label: Command used and terminal output + description: Steps to reproduce the behaviour. Please paste the command and output from your terminal. + render: console + placeholder: | + $ nf-core lint ... + + Some output where something broke + + - type: textarea + id: system + attributes: + label: System information + description: | + * Nextflow version _(eg. 21.10.3)_ + * Hardware _(eg. HPC, Desktop, Cloud)_ + * Executor _(eg. slurm, local, awsbatch)_ + * OS _(eg. CentOS Linux, macOS, Linux Mint)_ + * Version of nf-core/tools _(eg. 1.1, 1.5, 1.8.2)_ + * Python version _(eg. 3.7, 3.8)_ diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 2a0317913..a6a4b5193 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,4 +1,3 @@ -blank_issues_enabled: false contact_links: - name: Join nf-core url: https://nf-co.re/join diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 8c4e9237f..000000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for the nf-core website -labels: enhancement ---- - - - -## Is your feature request related to a problem? Please describe - - - - - -## Describe the solution you'd like - - - -## Describe alternatives you've considered - - - -## Additional context - - diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 000000000..5d4d7b17a --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,11 @@ +name: Feature request +description: Suggest an idea for nf-core/tools +labels: enhancement +body: + - type: textarea + id: description + attributes: + label: Description of feature + description: Please describe your suggestion for a new feature. It might help to describe a problem or use case, plus any alternatives that you have considered. 
+ validations: + required: true diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index cdc354a0e..d71447765 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -1,12 +1,25 @@ name: Create a pipeline and lint it on: [push, pull_request] -# Uncomment if we need an edge release of Nextflow again -# env: NXF_EDGE: 1 +env: + NXF_ANSI_LOG: false + CAPSULE_LOG: none jobs: MakeTestWorkflow: runs-on: ubuntu-latest + env: + NXF_ANSI_LOG: false + strategy: + matrix: + # Nextflow versions + include: + # Test pipeline minimum Nextflow version + - NXF_VER: '21.10.3' + NXF_EDGE: '' + # Test latest edge release of Nextflow + - NXF_VER: '' + NXF_EDGE: '1' steps: - uses: actions/checkout@v2 name: Check out source-code repository @@ -23,7 +36,10 @@ jobs: - name: Install Nextflow env: - CAPSULE_LOG: none + NXF_VER: ${{ matrix.NXF_VER }} + # Uncomment only if the edge release is more recent than the latest stable release + # See https://github.com/nextflow-io/nextflow/issues/2467 + # NXF_EDGE: ${{ matrix.NXF_EDGE }} run: | wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 16c2f1abf..9b13cadd3 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -1,12 +1,25 @@ name: Create a pipeline and test it on: [push, pull_request] -# Uncomment if we need an edge release of Nextflow again -# env: NXF_EDGE: 1 +env: + NXF_ANSI_LOG: false + CAPSULE_LOG: none jobs: RunTestWorkflow: runs-on: ubuntu-latest + env: + NXF_ANSI_LOG: false + strategy: + matrix: + # Nextflow versions + include: + # Test pipeline minimum Nextflow version + - NXF_VER: '21.10.3' + NXF_EDGE: '' + # Test latest edge release of Nextflow + - NXF_VER: '' + NXF_EDGE: '1' steps: - uses: actions/checkout@v2 name: Check out source-code repository @@ -23,7 +36,10 @@ jobs: - name: Install Nextflow env: - CAPSULE_LOG: none + NXF_VER: ${{ matrix.NXF_VER }} + # Uncomment only if the edge release is more recent than the latest stable release + # See https://github.com/nextflow-io/nextflow/issues/2467 + # NXF_EDGE: ${{ matrix.NXF_EDGE }} run: | wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml index 6d8b2457b..e0c26f2f4 100644 --- a/.github/workflows/push_dockerhub_dev.yml +++ b/.github/workflows/push_dockerhub_dev.yml @@ -3,31 +3,28 @@ name: nf-core Docker push (dev) # Runs on nf-core repo releases and push event to 'dev' branch (PR merges) on: push: - branches: - - dev + branches: [dev] jobs: push_dockerhub: name: Push new Docker image to Docker Hub (dev) runs-on: ubuntu-latest # Only run for the nf-core repo, for releases and merged PRs - if: ${{ github.repository == 'nf-core/tools'}} + if: ${{ github.repository == 'nf-core/tools' }} env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} DOCKERHUB_PASS: ${{ secrets.DOCKERHUB_PASS }} strategy: - matrix: - image: [base, tools] fail-fast: false steps: - - name: Check out tools code + - name: Check out code uses: actions/checkout@v2 - name: Build new docker image - run: docker build --no-cache . -t nfcore/${{ matrix.image }}:dev -f ${{ matrix.image }}.Dockerfile + run: docker build --no-cache . 
-t nfcore/tools:dev - name: Push Docker image to DockerHub (dev) run: | echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin - docker push nfcore/${{ matrix.image }}:dev + docker push nfcore/tools:dev diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml index 7a58d0293..2aebb3440 100644 --- a/.github/workflows/push_dockerhub_release.yml +++ b/.github/workflows/push_dockerhub_release.yml @@ -15,19 +15,17 @@ jobs: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} DOCKERHUB_PASS: ${{ secrets.DOCKERHUB_PASS }} strategy: - matrix: - image: [base, tools] fail-fast: false steps: - name: Check out code uses: actions/checkout@v2 - name: Build new docker image - run: docker build --no-cache . -t nfcore/${{ matrix.image }}:latest -f ${{ matrix.image }}.Dockerfile + run: docker build --no-cache . -t nfcore/tools:latest - name: Push Docker image to DockerHub (release) run: | echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin - docker push nfcore/${{ matrix.image }}:latest - docker tag nfcore/${{ matrix.image }}:latest nfcore/${{ matrix.image }}:${{ github.event.release.tag_name }} - docker push nfcore/${{ matrix.image }}:${{ github.event.release.tag_name }} + docker push nfcore/tools:latest + docker tag nfcore/tools:latest nfcore/tools:${{ github.event.release.tag_name }} + docker push nfcore/tools:${{ github.event.release.tag_name }} diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 263a4f565..39aa3acae 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.6, 3.7, 3.8, 3.9] + python-version: ['3.6', '3.7', '3.8', '3.9', '3.10'] steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/python-lint.yml b/.github/workflows/python-lint.yml index cf41eb67d..b75327e05 100644 --- a/.github/workflows/python-lint.yml +++ b/.github/workflows/python-lint.yml @@ -14,7 +14,7 @@ jobs: - uses: actions/checkout@v2 - name: Check code lints with Black - uses: jpetrucciani/black-check@master + uses: psf/black@stable # If the above check failed, post a comment on the PR explaining the failure - name: Post PR comment diff --git a/CHANGELOG.md b/CHANGELOG.md index c49fa0212..93ec4fa4a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,60 @@ # nf-core/tools: Changelog +## [v2.2 - Lead Liger](https://github.com/nf-core/tools/releases/tag/2.2) - [2021-12-14] + +### Template + +* Update repo logos to utilize [GitHub's `#gh-light/dark-mode-only`](https://docs.github.com/en/github/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#specifying-the-theme-an-image-is-shown-to), to switch between logos optimized for light or dark themes. The old repo logos have to be removed (in `docs/images` and `assets/`). +* Deal with authentication for private repositories +* Bump minimum Nextflow version to 21.10.3 +* Convert pipeline template to updated Nextflow DSL2 syntax +* Solve circular import when importing `nf_core.modules.lint` +* Disable cache in `nf_core.utils.fetch_wf_config` while performing `test_wf_use_local_configs`. +* Modify software version channel handling to support multiple software version emissions (e.g. from mulled containers), and multiple software versions. +* Update `dumpsoftwareversions` module to correctly report versions with trailing zeros.
+* Remove `params.hostnames` from the pipeline template ([#1304](https://github.com/nf-core/tools/issues/1304)) +* Update `.gitattributes` to mark installed modules and subworkflows as `linguist-generated` ([#1311](https://github.com/nf-core/tools/issues/1311)) +* Add support for [Julia](https://julialang.org) package environments to `nextflow.config` ([#1317](https://github.com/nf-core/tools/pull/1317)) +* New YAML issue templates for pipeline bug reports and feature requests, with a much richer interface ([#1165](https://github.com/nf-core/tools/pull/1165)) +* Update AWS test GitHub Actions to use v2 of [nf-core/tower-action](https://github.com/nf-core/tower-action) +* Post linting comment even when `linting.yml` fails +* Update `CONTRIBUTING.md` bullets to remove points related to `scrape_software_versions.py` +* Update AWS test to set Nextflow version to 21.10.3 + +### General + +* Made lint check for parameter defaults stricter [[#992](https://github.com/nf-core/tools/issues/992)] + * Default values in `nextflow.config` must match the defaults given in the schema (anything containing `{`, or set in `main.nf`, is ignored) + * Defaults in `nextflow.config` must now match the variable _type_ specified in the schema + * If you want the parameter to not have a default value, use `null` + * Strings set to `false` or an empty string in `nextflow.config` will now fail linting +* Bump minimum Nextflow version to 21.10.3 +* Changed `questionary` `ask()` to `unsafe_ask()` so that `KeyboardInterrupt`s are not caught ([#1237](https://github.com/nf-core/tools/issues/1237)) +* Fixed bug in `nf-core launch` due to revisions specified with `-r` not being added to the Nextflow command. ([#1246](https://github.com/nf-core/tools/issues/1246)) +* Update regex in `readme` test of `nf-core lint` to agree with the pipeline template ([#1260](https://github.com/nf-core/tools/issues/1260)) +* Update 'fix' message in `nf-core lint` to conform to the current command line options. ([#1259](https://github.com/nf-core/tools/issues/1259)) +* Fixed bug in `nf-core list` when `NXF_HOME` is set +* Run the CI tests that create and lint/run the pipeline template with both the minimum and the latest edge release of Nextflow ([#1304](https://github.com/nf-core/tools/issues/1304)) +* New YAML issue templates for tools bug reports and feature requests, with a much richer interface ([#1165](https://github.com/nf-core/tools/pull/1165)) +* Handle syntax errors in Nextflow config nicely when running `nf-core schema build` ([#1267](https://github.com/nf-core/tools/pull/1267)) +* Erase temporary files and folders while performing Python tests (pytest) +* Remove base `Dockerfile` used for DSL1 pipeline container builds +* Run tests with Python 3.10 +* [#1363](https://github.com/nf-core/tools/pull/1363) Fix tools CI workflow nextflow versions. + +### Modules + +* Fixed typo in `module_utils.py`. +* Fixed failing lint test when process section was missing from module. Also added the local failing tests to the warned section of the output table.
([#1235](https://github.com/nf-core/tools/issues/1235)) +* Added `--diff` flag to `nf-core modules update` which shows the diff between the installed files and the new version +* Update `nf-core modules create` help texts which were not changed with the introduction of the `--dir` flag +* Check if README is from modules repo +* Update module template to DSL2 v2.0 (removed `functions.nf` from the module template and updated `main.nf`) ([#1289](https://github.com/nf-core/tools/pull/)) +* Substitute get process/module name custom functions in module `main.nf` using template replacement ([#1284](https://github.com/nf-core/tools/issues/1284)) +* Linting now fails instead of warning if a local copy of a module does not match the remote ([#1313](https://github.com/nf-core/tools/issues/1313)) +* Check test YML file for md5sums corresponding to empty files ([#1302](https://github.com/nf-core/tools/issues/1302)) +* Exit with an error if empty files are found when generating the test YAML file ([#1302](https://github.com/nf-core/tools/issues/1302)) ## [v2.1 - Zinc Zebra](https://github.com/nf-core/tools/releases/tag/2.1) - [2021-07-27] ### Template diff --git a/tools.Dockerfile b/Dockerfile similarity index 100% rename from tools.Dockerfile rename to Dockerfile diff --git a/MANIFEST.in b/MANIFEST.in index 59d5a1a4e..bb3c5d6da 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,5 @@ include LICENSE include README.md -recursive-include nf_core * +graft nf_core/module-template +graft nf_core/pipeline-template +include requirements.txt \ No newline at end of file diff --git a/README.md b/README.md index 3a8fbc47a..65fb66b00 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# ![nf-core/tools](docs/images/nfcore-tools_logo.png) +# ![nf-core/tools](docs/images/nfcore-tools_logo_light.png#gh-light-mode-only) ![nf-core/tools](docs/images/nfcore-tools_logo_dark.png#gh-dark-mode-only) [![Python tests](https://github.com/nf-core/tools/workflows/Python%20tests/badge.svg?branch=master&event=push)](https://github.com/nf-core/tools/actions?query=workflow%3A%22Python+tests%22+branch%3Amaster) [![codecov](https://codecov.io/gh/nf-core/tools/branch/master/graph/badge.svg)](https://codecov.io/gh/nf-core/tools) @@ -194,7 +194,7 @@ $ nf-core list | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 ┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━┓ ┃ Pipeline Name ┃ Stars ┃ Latest Release ┃ Released ┃ Last Pulled ┃ Have latest release? ┃ @@ -219,7 +219,7 @@ $ nf-core list rna rna-seq | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 ┏━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ ┃ Pipeline Name ┃ Stars ┃ Latest Release ┃ Released ┃ Last Pulled ┃ Have latest release? ┃ @@ -248,7 +248,7 @@ $ nf-core list -s stars | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 ┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━┓ ┃ Pipeline Name ┃ Stars ┃ Latest Release ┃ Released ┃ Last Pulled ┃ Have latest release?
┃ @@ -292,7 +292,7 @@ $ nf-core launch rnaseq | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 INFO This tool ignores any pipeline parameter defaults overwritten by Nextflow config files or profiles @@ -372,7 +372,7 @@ $ nf-core download | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 Specify the name of a nf-core pipeline or a GitHub repository name (user/repo). @@ -504,7 +504,7 @@ $ nf-core licences rnaseq | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 1.10 + nf-core/tools version 2.2 INFO Fetching licence information for 25 tools INFO Warning: This tool only prints licence information for the software tools packaged using conda. @@ -558,7 +558,7 @@ $ nf-core create | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 Workflow Name: nextbigthing Description: This pipeline analyses data from the next big 'omics technique @@ -603,7 +603,7 @@ $ nf-core lint |\ | |__ __ / ` / \ |__) |__ } { | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 INFO Testing pipeline: . ╭─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ @@ -713,7 +713,7 @@ $ nf-core schema validate rnaseq nf-params.json | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 @@ -745,7 +745,7 @@ $ nf-core schema build nf-core-testpipeline | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 INFO [✓] Default parameters look valid INFO [✓] Pipeline schema looks valid (found 25 params) @@ -784,7 +784,7 @@ $ nf-core schema lint nextflow_schema.json | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 ERROR [✗] Pipeline schema does not follow nf-core specs: Definition subschema 'input_output_options' not included in schema 'allOf' @@ -800,14 +800,14 @@ Usage is `nf-core bump-version <new_version>`, eg: ```console $ cd path/to/my_pipeline -$ nf-core bump-version . 1.7 +$ nf-core bump-version 1.7 ,--./,-. ___ __ __ __ ___ /,-._.--~\ |\ | |__ __ / ` / \ |__) |__ } { | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 @@ -862,7 +862,7 @@ $ nf-core sync my_pipeline/ | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 @@ -902,6 +902,8 @@ This allows multiple pipelines to use the same code for share tools and gives a The nf-core DSL2 modules repository is at <https://github.com/nf-core/modules> +### Custom remote modules + The modules supercommand comes with two flags for specifying a custom remote: * `--github-repository <github repo>`: Specify the repository from which the modules should be fetched. Defaults to `nf-core/modules`. @@ -909,6 +911,34 @@ The modules supercommand comes with two flags for specifying a custom remote: Note that a custom remote must follow a similar directory structure to that of `nf-core/modules` for the `nf-core modules` commands to work properly. +### Private remote modules + +To get access to your private modules repository, you need to create +the `~/.config/gh/hosts.yml` file, which is the same file used by +[GitHub CLI](https://cli.github.com/) to access private repositories.
+The file is structured as follows: + +```conf +github.com: + oauth_token: <your GitHub token> + user: <your GitHub username> + git_protocol: <ssh or https> +``` + +The easiest way to create this configuration file is through *GitHub CLI*: follow +its [installation instructions](https://cli.github.com/manual/installation) +and then call: + +```bash +gh auth login +``` + +After that, you will be able to list and install your private modules without +providing your GitHub credentials on the command line, by setting the `--github-repository` +and `--branch` options appropriately. +See the documentation on [gh auth login](https://cli.github.com/manual/gh_auth_login) +to get more information. + ### List modules The `nf-core modules list` command provides the subcommands `remote` and `local` for listing modules installed in a remote repository and in the local pipeline respectively. Both subcommands come with the `--key <keyword>` option for filtering the modules by keywords. @@ -926,7 +956,7 @@ $ nf-core modules list remote | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 INFO Modules available from nf-core/modules (master) @@ -957,7 +987,7 @@ $ nf-core modules list local | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 INFO Modules installed in '.': @@ -983,7 +1013,7 @@ $ nf-core modules install | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 ? Tool name: cat/fastq INFO Installing cat/fastq @@ -1010,7 +1040,7 @@ $ nf-core modules update | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 ? Tool name: fastqc INFO Updating 'nf-core/modules/fastqc' @@ -1019,11 +1049,12 @@ INFO Downloaded 3 files to ./modules/nf-core/modules/fastqc You can pass the module name as an optional argument to `nf-core modules update` instead of using the cli prompt, eg: `nf-core modules update fastqc`. You can specify a pipeline directory other than the current working directory by using the `--dir <pipeline directory>`. -There are four additional flags that you can use with this command: +There are five additional flags that you can use with this command: * `--force`: Reinstall module even if it appears to be up to date * `--prompt`: Select the module version using a cli prompt. * `--sha <commit_sha>`: Install the module at a specific commit from the `nf-core/modules` repository. +* `--diff`: Show the diff between the installed files and the new version before installing. * `--all`: Use this flag to run the command on all modules in the pipeline. If you don't want to update certain modules or want to update them to specific versions, you can make use of the `.nf-core.yml` configuration file. For example, you can prevent the `star/align` module installed from `nf-core/modules` from being updated by adding the following to the `.nf-core.yml` file: @@ -1071,7 +1102,7 @@ $ nf-core modules remove | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 ? Tool name: star/align INFO Removing star/align @@ -1102,7 +1133,7 @@ $ nf-core modules create | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 INFO Press enter to use default values (shown in brackets) or type your own responses. ctrl+click underlined text to open links. @@ -1117,12 +1148,11 @@ INFO Where applicable all sample-specific information e.g. 'id', 'single_end Groovy Map called 'meta'.
This information may not be required in some instances, for example indexing reference genome files. Will the module require a meta map of sample information? (yes/no) [y/n] (y): y INFO Created / edited following files: - ./software/star/align/functions.nf ./software/star/align/main.nf ./software/star/align/meta.yml ./tests/software/star/align/main.nf ./tests/software/star/align/test.yml - ./tests/config/pytest_software.yml + ./tests/config/pytest_modules.yml ``` ### Create a module test config file @@ -1140,7 +1170,7 @@ $ nf-core modules create-test-yml | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 INFO Press enter to use default values (shown in brackets) or type your own responses @@ -1186,7 +1216,7 @@ $ nf-core modules lint | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 ? Lint all modules or a single named module? Named module ? Tool name: star/align @@ -1225,7 +1255,7 @@ $ nf-core modules bump-versions -d modules | \| | \__, \__/ | \ |___ \`-._,-`-, `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.2 diff --git a/base.Dockerfile b/base.Dockerfile deleted file mode 100644 index 8d3fe9941..000000000 --- a/base.Dockerfile +++ /dev/null @@ -1,13 +0,0 @@ -FROM continuumio/miniconda3:4.9.2 -LABEL authors="phil.ewels@scilifelab.se,alexander.peltzer@boehringer-ingelheim.com" \ - description="Docker image containing base requirements for the nfcore pipelines" - -# Install procps so that Nextflow can poll CPU usage and -# deep clean the apt cache to reduce image/layer size -RUN apt-get update \ - && apt-get install -y procps \ - && apt-get clean -y && rm -rf /var/lib/apt/lists/* - -# Instruct R processes to use these empty files instead of clashing with a local version -RUN touch .Rprofile -RUN touch .Renviron diff --git a/docs/images/nfcore-tools_logo.png b/docs/images/nfcore-tools_logo.png deleted file mode 100644 index e45813591..000000000 Binary files a/docs/images/nfcore-tools_logo.png and /dev/null differ diff --git a/docs/images/nfcore-tools_logo_dark.png b/docs/images/nfcore-tools_logo_dark.png new file mode 100644 index 000000000..1b9cc02b1 Binary files /dev/null and b/docs/images/nfcore-tools_logo_dark.png differ diff --git a/docs/images/nfcore-tools_logo_light.png b/docs/images/nfcore-tools_logo_light.png new file mode 100644 index 000000000..cc4ccea1c Binary files /dev/null and b/docs/images/nfcore-tools_logo_light.png differ diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 483d70978..a9500509a 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -449,14 +449,19 @@ def install(ctx, tool, dir, prompt, force, sha): @click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the module") @click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") @click.option("-a", "--all", is_flag=True, default=False, help="Update all modules installed in pipeline") -def update(ctx, tool, dir, force, prompt, sha, all): +@click.option( + "-c", "--diff", is_flag=True, default=False, help="Show differences between module versions before updating" +) +def update(ctx, tool, dir, force, prompt, sha, all, diff): """ Update DSL2 modules within a pipeline. Fetches and updates module files from a remote repo e.g. nf-core/modules. 
""" try: - module_install = nf_core.modules.ModuleUpdate(dir, force=force, prompt=prompt, sha=sha, update_all=all) + module_install = nf_core.modules.ModuleUpdate( + dir, force=force, prompt=prompt, sha=sha, update_all=all, diff=diff + ) module_install.modules_repo = ctx.obj["modules_repo_obj"] exit_status = module_install.update(tool) if not exit_status and all: @@ -497,10 +502,10 @@ def create_module(ctx, tool, dir, author, label, meta, no_meta, force, conda_nam """ Create a new DSL2 module from the nf-core template. - If is a pipeline, this function creates a file called + If the specified directory is a pipeline, this function creates a file called 'modules/local/tool_subtool.nf' - If is a clone of nf-core/modules, it creates or modifies files + If the specified directory is a clone of nf-core/modules, it creates or modifies files in 'modules/', 'tests/modules' and 'tests/config/pytest_modules.yml' """ # Combine two bool flags into one variable @@ -663,7 +668,7 @@ def build(dir, no_prompts, web_only, url): schema_obj = nf_core.schema.PipelineSchema() if schema_obj.build_schema(dir, no_prompts, web_only, url) is False: sys.exit(1) - except UserWarning as e: + except (UserWarning, AssertionError) as e: log.error(e) sys.exit(1) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 29f3a39f9..94bc17b66 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -81,9 +81,9 @@ def bump_nextflow_version(pipeline_obj, new_version): pipeline_obj, [ ( - # example: nxf_ver: ['20.04.0', ''] - r"nxf_ver: \[[\'\"]{}[\'\"], [\'\"][\'\"]\]".format(current_version.replace(".", r"\.")), - "nxf_ver: ['{}', '']".format(new_version), + # example: - NXF_VER: '20.04.0' + r"- NXF_VER: [\'\"]{}[\'\"]".format(current_version.replace(".", r"\.")), + "- NXF_VER: '{}'".format(new_version), ) ], ) @@ -97,14 +97,11 @@ def bump_nextflow_version(pipeline_obj, new_version): r"nextflow%20DSL2-%E2%89%A5{}-23aa62.svg".format(current_version.replace(".", r"\.")), "nextflow%20DSL2-%E2%89%A5{}-23aa62.svg".format(new_version), ), - ( - # Replace links to 'nf-co.re' installation page with links to Nextflow installation page - r"https://nf-co.re/usage/installation", - "https://www.nextflow.io/docs/latest/getstarted.html#installation", - ), ( # example: 1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=20.04.0`) - r"1\.\s*Install\s*\[`Nextflow`\]\(y\)\s*\(`>={}`\)".format(current_version.replace(".", r"\.")), + r"1\.\s*Install\s*\[`Nextflow`\]\(https:\/\/www\.nextflow\.io\/docs\/latest\/getstarted\.html#installation\)\s*\(`>={}`\)".format( + current_version.replace(".", r"\.") + ), "1. 
Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>={}`)".format( new_version ), diff --git a/nf_core/create.py b/nf_core/create.py index de30f8d3d..02dd50a1b 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -145,23 +145,22 @@ def render_template(self): def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" - logo_url = f"https://nf-co.re/logo/{self.short_name}" + logo_url = f"https://nf-co.re/logo/{self.short_name}?theme=light" log.debug(f"Fetching logo from {logo_url}") - email_logo_path = f"{self.outdir}/assets/{self.name_noslash}_logo.png" + email_logo_path = f"{self.outdir}/assets/{self.name_noslash}_logo_light.png" os.makedirs(os.path.dirname(email_logo_path), exist_ok=True) log.debug(f"Writing logo to '{email_logo_path}'") - r = requests.get(f"{logo_url}?w=400") + r = requests.get(f"{logo_url}&w=400") with open(email_logo_path, "wb") as fh: fh.write(r.content) - - readme_logo_path = f"{self.outdir}/docs/images/{self.name_noslash}_logo.png" - - log.debug(f"Writing logo to '{readme_logo_path}'") - os.makedirs(os.path.dirname(readme_logo_path), exist_ok=True) - r = requests.get(f"{logo_url}?w=600") - with open(readme_logo_path, "wb") as fh: - fh.write(r.content) + for theme in ["dark", "light"]: + readme_logo_path = f"{self.outdir}/docs/images/{self.name_noslash}_logo_{theme}.png" + log.debug(f"Writing logo to '{readme_logo_path}'") + os.makedirs(os.path.dirname(readme_logo_path), exist_ok=True) + r = requests.get(f"{logo_url}&w=600&theme={theme}") + with open(readme_logo_path, "wb") as fh: + fh.write(r.content) def git_init_pipeline(self): """Initialises the new pipeline as a Git repository and submits first commit.""" diff --git a/nf_core/download.py b/nf_core/download.py index bb8356495..7f03459cf 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -396,6 +396,7 @@ def wf_use_local_configs(self): nfconfig = nfconfig_fh.read() # Replace the target string + log.debug(f"Replacing '{find_str}' with '{repl_str}'") nfconfig = nfconfig.replace(find_str, repl_str) # Append the singularity.cacheDir to the end if we need it @@ -407,6 +408,7 @@ def wf_use_local_configs(self): ) # Write the file out again + log.debug(f"Updating '{nfconfig_fn}'") with open(nfconfig_fn, "w") as nfconfig_fh: nfconfig_fh.write(nfconfig) diff --git a/nf_core/launch.py b/nf_core/launch.py index 36a21b076..3bf257ff0 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -209,7 +209,7 @@ def get_pipeline_schema(self): return False self.pipeline_revision = nf_core.utils.prompt_pipeline_release_branch(wf_releases, wf_branches) - self.nextflow_cmd += " -r {}".format(self.pipeline_revision) + self.nextflow_cmd += " -r {}".format(self.pipeline_revision) # Get schema from name, load it and lint it try: diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py index b61765b16..483872692 100644 --- a/nf_core/lint/__init__.py +++ b/nf_core/lint/__init__.py @@ -20,8 +20,8 @@ import nf_core.utils import nf_core.lint_utils +import nf_core.modules.lint from nf_core.lint_utils import console -from nf_core.modules.lint import ModuleLint log = logging.getLogger(__name__) @@ -44,7 +44,9 @@ def run_linting( # Verify that the requested tests exist if key: - all_tests = set(PipelineLint._get_all_lint_tests(release_mode)).union(set(ModuleLint._get_all_lint_tests())) + all_tests = set(PipelineLint._get_all_lint_tests(release_mode)).union( + set(nf_core.modules.lint.ModuleLint._get_all_lint_tests()) + ) bad_keys = [k for k in key if k
not in all_tests] if len(bad_keys) > 0: raise AssertionError( @@ -66,7 +68,7 @@ def run_linting( lint_obj._list_files() # Create the modules lint object - module_lint_obj = ModuleLint(pipeline_dir) + module_lint_obj = nf_core.modules.lint.ModuleLint(pipeline_dir) # Verify that the pipeline is correctly configured try: @@ -77,7 +79,7 @@ def run_linting( # Run only the tests we want if key: # Select only the module lint tests - module_lint_tests = list(set(key).intersection(set(ModuleLint._get_all_lint_tests()))) + module_lint_tests = list(set(key).intersection(set(nf_core.modules.lint.ModuleLint._get_all_lint_tests()))) else: # If no key is supplied, run the default modules tests module_lint_tests = ("module_changes", "module_version") diff --git a/nf_core/lint/actions_ci.py b/nf_core/lint/actions_ci.py index fd138ed0a..e9adf706b 100644 --- a/nf_core/lint/actions_ci.py +++ b/nf_core/lint/actions_ci.py @@ -130,12 +130,14 @@ def actions_ci(self): # Check that we are testing the minimum nextflow version try: - matrix = ciwf["jobs"]["test"]["strategy"]["matrix"]["nxf_ver"] - assert any([self.minNextflowVersion in matrix]) + matrix = ciwf["jobs"]["test"]["strategy"]["matrix"]["include"] + assert any([i["NXF_VER"] == self.minNextflowVersion for i in matrix]) except (KeyError, TypeError): failed.append("'.github/workflows/ci.yml' does not check minimum NF version") except AssertionError: - failed.append("Minimum NF version in '.github/workflows/ci.yml' different to pipeline's manifest") + failed.append( + f"Minimum pipeline NF version '{self.minNextflowVersion}' is not tested in '.github/workflows/ci.yml'" + ) else: passed.append("'.github/workflows/ci.yml' checks minimum NF version") diff --git a/nf_core/lint/files_exist.py b/nf_core/lint/files_exist.py index 7b97aa3fb..23759b898 100644 --- a/nf_core/lint/files_exist.py +++ b/nf_core/lint/files_exist.py @@ -23,9 +23,9 @@ def files_exist(self): .markdownlint.yml .github/.dockstore.yml .github/CONTRIBUTING.md - .github/ISSUE_TEMPLATE/bug_report.md + .github/ISSUE_TEMPLATE/bug_report.yml .github/ISSUE_TEMPLATE/config.yml - .github/ISSUE_TEMPLATE/feature_request.md + .github/ISSUE_TEMPLATE/feature_request.yml .github/PULL_REQUEST_TEMPLATE.md .github/workflows/branch.yml .github/workflows/ci.yml @@ -34,16 +34,16 @@ def files_exist(self): [LICENSE, LICENSE.md, LICENCE, LICENCE.md] # NB: British / American spelling assets/email_template.html assets/email_template.txt - assets/nf-core-PIPELINE_logo.png + assets/nf-core-PIPELINE_logo_light.png assets/sendmail_template.txt - bin/scrape_software_versions.py conf/modules.config conf/test.config conf/test_full.config CHANGELOG.md CITATIONS.md CODE_OF_CONDUCT.md - docs/images/nf-core-PIPELINE_logo.png + docs/images/nf-core-PIPELINE_logo_light.png + docs/images/nf-core-PIPELINE_logo_dark.png docs/output.md docs/README.md docs/usage.md @@ -52,7 +52,6 @@ def files_exist(self): lib/NfcoreTemplate.groovy lib/Utils.groovy lib/WorkflowMain.groovy - modules/local/get_software_versions.nf nextflow_schema.json nextflow.config README.md @@ -78,6 +77,8 @@ def files_exist(self): bin/markdown_to_html.r conf/aws.config .github/workflows/push_dockerhub.yml + .github/ISSUE_TEMPLATE/bug_report.md + docs/images/nf-core-PIPELINE_logo.png Files that *should not* be present: @@ -109,9 +110,9 @@ def files_exist(self): ["README.md"], [os.path.join(".github", ".dockstore.yml")], [os.path.join(".github", "CONTRIBUTING.md")], - [os.path.join(".github", "ISSUE_TEMPLATE", "bug_report.md")], + [os.path.join(".github", "ISSUE_TEMPLATE", 
"bug_report.yml")], [os.path.join(".github", "ISSUE_TEMPLATE", "config.yml")], - [os.path.join(".github", "ISSUE_TEMPLATE", "feature_request.md")], + [os.path.join(".github", "ISSUE_TEMPLATE", "feature_request.yml")], [os.path.join(".github", "PULL_REQUEST_TEMPLATE.md")], [os.path.join(".github", "workflows", "branch.yml")], [os.path.join(".github", "workflows", "ci.yml")], @@ -120,12 +121,12 @@ def files_exist(self): [os.path.join("assets", "email_template.html")], [os.path.join("assets", "email_template.txt")], [os.path.join("assets", "sendmail_template.txt")], - [os.path.join("assets", f"nf-core-{short_name}_logo.png")], - [os.path.join("bin", "scrape_software_versions.py")], + [os.path.join("assets", f"nf-core-{short_name}_logo_light.png")], [os.path.join("conf", "modules.config")], [os.path.join("conf", "test.config")], [os.path.join("conf", "test_full.config")], - [os.path.join("docs", "images", f"nf-core-{short_name}_logo.png")], + [os.path.join("docs", "images", f"nf-core-{short_name}_logo_light.png")], + [os.path.join("docs", "images", f"nf-core-{short_name}_logo_dark.png")], [os.path.join("docs", "output.md")], [os.path.join("docs", "README.md")], [os.path.join("docs", "README.md")], @@ -135,7 +136,6 @@ def files_exist(self): [os.path.join("lib", "NfcoreTemplate.groovy")], [os.path.join("lib", "Utils.groovy")], [os.path.join("lib", "WorkflowMain.groovy")], - [os.path.join("modules", "local", "get_software_versions.nf")], ] files_warn = [ @@ -156,6 +156,9 @@ def files_exist(self): os.path.join("bin", "markdown_to_html.r"), os.path.join("conf", "aws.config"), os.path.join(".github", "workflows", "push_dockerhub.yml"), + os.path.join(".github", "ISSUE_TEMPLATE", "bug_report.md"), + os.path.join(".github", "ISSUE_TEMPLATE", "feature_request.md"), + os.path.join("docs", "images", f"nf-core-{short_name}_logo.png"), ] files_warn_ifexists = [".travis.yml"] diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py index 37728b8b0..229f5bb94 100644 --- a/nf_core/lint/files_unchanged.py +++ b/nf_core/lint/files_unchanged.py @@ -22,19 +22,20 @@ def files_unchanged(self): .markdownlint.yml .github/.dockstore.yml .github/CONTRIBUTING.md - .github/ISSUE_TEMPLATE/bug_report.md + .github/ISSUE_TEMPLATE/bug_report.yml .github/ISSUE_TEMPLATE/config.yml - .github/ISSUE_TEMPLATE/feature_request.md + .github/ISSUE_TEMPLATE/feature_request.yml .github/PULL_REQUEST_TEMPLATE.md .github/workflows/branch.yml .github/workflows/linting_comment.yml .github/workflows/linting.yml assets/email_template.html assets/email_template.txt - assets/nf-core-PIPELINE_logo.png + assets/nf-core-PIPELINE_logo_light.png assets/sendmail_template.txt CODE_OF_CONDUCT.md - docs/images/nf-core-PIPELINE_logo.png + docs/images/nf-core-PIPELINE_logo_light.png + docs/images/nf-core-PIPELINE_logo_dark.png docs/README.md' lib/nfcore_external_java_deps.jar lib/NfcoreSchema.groovy @@ -81,9 +82,9 @@ def files_unchanged(self): ["LICENSE", "LICENSE.md", "LICENCE", "LICENCE.md"], # NB: British / American spelling [os.path.join(".github", ".dockstore.yml")], [os.path.join(".github", "CONTRIBUTING.md")], - [os.path.join(".github", "ISSUE_TEMPLATE", "bug_report.md")], + [os.path.join(".github", "ISSUE_TEMPLATE", "bug_report.yml")], [os.path.join(".github", "ISSUE_TEMPLATE", "config.yml")], - [os.path.join(".github", "ISSUE_TEMPLATE", "feature_request.md")], + [os.path.join(".github", "ISSUE_TEMPLATE", "feature_request.yml")], [os.path.join(".github", "PULL_REQUEST_TEMPLATE.md")], [os.path.join(".github", "workflows", 
"branch.yml")], [os.path.join(".github", "workflows", "linting_comment.yml")], @@ -91,9 +92,9 @@ def files_unchanged(self): [os.path.join("assets", "email_template.html")], [os.path.join("assets", "email_template.txt")], [os.path.join("assets", "sendmail_template.txt")], - [os.path.join("assets", f"nf-core-{short_name}_logo.png")], - [os.path.join("bin", "scrape_software_versions.py")], - [os.path.join("docs", "images", f"nf-core-{short_name}_logo.png")], + [os.path.join("assets", f"nf-core-{short_name}_logo_light.png")], + [os.path.join("docs", "images", f"nf-core-{short_name}_logo_light.png")], + [os.path.join("docs", "images", f"nf-core-{short_name}_logo_dark.png")], [os.path.join("docs", "README.md")], [os.path.join("lib", "nfcore_external_java_deps.jar")], [os.path.join("lib", "NfcoreSchema.groovy")], @@ -108,7 +109,9 @@ def files_unchanged(self): logging.getLogger("nf_core.create").setLevel(logging.ERROR) # Generate a new pipeline with nf-core create that we can compare to - test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-{}".format(short_name)) + tmp_dir = tempfile.mkdtemp() + + test_pipeline_dir = os.path.join(tmp_dir, "nf-core-{}".format(short_name)) create_obj = nf_core.create.PipelineCreate( self.nf_config["manifest.name"].strip("\"'"), self.nf_config["manifest.description"].strip("\"'"), @@ -193,4 +196,7 @@ def _tf(file_path): except FileNotFoundError: pass + # cleaning up temporary dir + shutil.rmtree(tmp_dir) + return {"passed": passed, "failed": failed, "ignored": ignored, "fixed": fixed, "could_fix": could_fix} diff --git a/nf_core/lint/readme.py b/nf_core/lint/readme.py index 7031bb05f..8df6155c5 100644 --- a/nf_core/lint/readme.py +++ b/nf_core/lint/readme.py @@ -38,7 +38,7 @@ def readme(self): content = fh.read() # Check that there is a readme badge showing the minimum required version of Nextflow - # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.04.0-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) + # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) # and that it has the correct version nf_badge_re = r"\[!\[Nextflow\]\(https://img\.shields\.io/badge/nextflow%20DSL2-%E2%89%A5([\d\.]+)-23aa62\.svg\?labelColor=000000\)\]\(https://www\.nextflow\.io/\)" match = re.search(nf_badge_re, content) @@ -62,8 +62,8 @@ def readme(self): warned.append("README did not have a Nextflow minimum version badge.") # Check that the minimum version mentioned in the quick start section is consistent - # Looking for: "1. Install [`Nextflow`](https://nf-co.re/usage/installation) (`>=21.04.0`)" - nf_version_re = r"1\.\s*Install\s*\[`Nextflow`\]\(https://nf-co.re/usage/installation\)\s*\(`>=(\d*\.\d*\.\d*)`\)" + # Looking for: "1. 
Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.10.3`)" + nf_version_re = r"1\.\s*Install\s*\[`Nextflow`\]\(https://www.nextflow.io/docs/latest/getstarted.html#installation\)\s*\(`>=(\d*\.\d*\.\d*)`\)" match = re.search(nf_version_re, content) if match: nf_quickstart_version = match.group(1) diff --git a/nf_core/lint/schema_params.py b/nf_core/lint/schema_params.py index 20c962c22..436e8caf5 100644 --- a/nf_core/lint/schema_params.py +++ b/nf_core/lint/schema_params.py @@ -29,6 +29,9 @@ def schema_params(self): # Add schema params found in the config but not the schema added_params = self.schema_obj.add_schema_found_configs() + # Invalid default parameters in nextflow.config + invalid_config_default_params = self.schema_obj.invalid_nextflow_config_default_parameters + if len(removed_params) > 0: for param in removed_params: warned.append("Schema param `{}` not found from nextflow config".format(param)) @@ -40,4 +43,8 @@ def schema_params(self): if len(removed_params) == 0 and len(added_params) == 0: passed.append("Schema matched params returned from nextflow config") + if len(invalid_config_default_params) > 0: + for param, msg in invalid_config_default_params.items(): + failed.append(f"Default value for param `{param}` invalid: {msg}") + return {"passed": passed, "warned": warned, "failed": failed} diff --git a/nf_core/lint_utils.py b/nf_core/lint_utils.py index 6e2084922..757a244ed 100644 --- a/nf_core/lint_utils.py +++ b/nf_core/lint_utils.py @@ -38,7 +38,9 @@ def print_fixes(lint_obj, module_lint_obj): """Prints available and applied fixes""" if len(lint_obj.could_fix): - fix_cmd = "nf-core lint {} --fix {}".format(lint_obj.wf_path, " --fix ".join(lint_obj.could_fix)) + fix_cmd = "nf-core lint {} --fix {}".format( + "" if lint_obj.wf_path == "." 
else f"--dir {lint_obj.wf_path}", " --fix ".join(lint_obj.could_fix) + ) console.print( f"\nTip: Some of these linting errors can automatically be resolved with the following command:\n\n[blue] {fix_cmd}\n" ) diff --git a/nf_core/list.py b/nf_core/list.py index 4d8b31f07..5a94253d0 100644 --- a/nf_core/list.py +++ b/nf_core/list.py @@ -334,7 +334,7 @@ def get_local_nf_workflow_details(self): if len(os.environ.get("NXF_ASSETS", "")) > 0: nf_wfdir = os.path.join(os.environ.get("NXF_ASSETS"), self.full_name) elif len(os.environ.get("NXF_HOME", "")) > 0: - nf_wfdir = os.path.join(os.environ.get("NXF_HOME"), "assets") + nf_wfdir = os.path.join(os.environ.get("NXF_HOME"), "assets", self.full_name) else: nf_wfdir = os.path.join(os.getenv("HOME"), ".nextflow", "assets", self.full_name) if os.path.isdir(nf_wfdir): diff --git a/nf_core/module-template/modules/functions.nf b/nf_core/module-template/modules/functions.nf deleted file mode 100644 index da9da093d..000000000 --- a/nf_core/module-template/modules/functions.nf +++ /dev/null @@ -1,68 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } - } -} diff --git a/nf_core/module-template/modules/main.nf b/nf_core/module-template/modules/main.nf index 6e4dcde63..d152e970b 100644 --- a/nf_core/module-template/modules/main.nf +++ b/nf_core/module-template/modules/main.nf @@ -1,14 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' - // TODO nf-core: If in doubt look at other nf-core/modules to see how we are doing things! :) -// https://github.com/nf-core/modules/tree/master/software +// https://github.com/nf-core/modules/tree/master/modules // You can also ask for help via your pull request or on the #modules channel on the nf-core Slack workspace: // https://nf-co.re/join - // TODO nf-core: A module file SHOULD only define input and output files as command-line parameters. -// All other parameters MUST be provided as a string i.e. "options.args" -// where "params.options" is a Groovy Map that MUST be provided via the addParams section of the including workflow. +// All other parameters MUST be provided using the "task.ext" directive, see here: +// https://www.nextflow.io/docs/latest/process.html#ext +// where "task.ext" is a string. // Any parameters that need to be evaluated in the context of a particular sample // e.g. single-end/paired-end data MUST also be defined and evaluated appropriately. // TODO nf-core: Software that can be piped together SHOULD be added to separate module files @@ -18,32 +15,24 @@ include { initOptions; saveFiles; getSoftwareName } from './functions' // TODO nf-core: Optional inputs are not currently supported by Nextflow. However, using an empty // list (`[]`) instead of a file can be used to work around this issue. -params.options = [:] -options = initOptions(params.options) - process {{ tool_name_underscore|upper }} { tag {{ '"$meta.id"' if has_meta else "'$bam'" }} label '{{ process_label }}' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:{{ 'meta' if has_meta else "[:]" }}, publish_by_meta:{{ "['id']" if has_meta else "[]" }}) } - + // TODO nf-core: List required Conda package(s). // Software MUST be pinned to channel (i.e. "bioconda"), version (i.e. "1.10"). // For Conda, the build (i.e. "h9402c20_2") must be EXCLUDED to support installation on different operating systems. // TODO nf-core: See section in main README for further information regarding finding and adding container addresses to the section below. conda (params.enable_conda ? "{{ bioconda if bioconda else 'YOUR-TOOL-HERE' }}" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "{{ singularity_container if singularity_container else 'https://depot.galaxyproject.org/singularity/YOUR-TOOL-HERE' }}" - } else { - container "{{ docker_container if docker_container else 'quay.io/biocontainers/YOUR-TOOL-HERE' }}" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'{{ singularity_container if singularity_container else 'https://depot.galaxyproject.org/singularity/YOUR-TOOL-HERE' }}': + '{{ docker_container if docker_container else 'quay.io/biocontainers/YOUR-TOOL-HERE' }}' }" input: // TODO nf-core: Where applicable all sample-specific information e.g. "id", "single_end", "read_group" // MUST be provided as an input via a Groovy Map called "meta". // This information may not be required in some instances e.g. indexing reference genome files: - // https://github.com/nf-core/modules/blob/master/software/bwa/index/main.nf + // https://github.com/nf-core/modules/blob/master/modules/bwa/index/main.nf // TODO nf-core: Where applicable please provide/convert compressed files as input/output // e.g. "*.fastq.gz" and NOT "*.fastq", "*.bam" and NOT "*.sam" etc. {{ 'tuple val(meta), path(bam)' if has_meta else 'path bam' }} @@ -52,17 +41,18 @@ process {{ tool_name_underscore|upper }} { // TODO nf-core: Named file extensions MUST be emitted for ALL output channels {{ 'tuple val(meta), path("*.bam")' if has_meta else 'path "*.bam"' }}, emit: bam // TODO nf-core: List additional required output channels/values here - path "*.version.txt" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) + def args = task.ext.args ?: '' {% if has_meta -%} - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" {%- endif %} // TODO nf-core: Where possible, a command MUST be provided to obtain the version number of the software e.g. 1.10 // If the software is unable to output a version number on the command-line then it can be manually specified - // e.g. https://github.com/nf-core/modules/blob/master/software/homer/annotatepeaks/main.nf - // TODO nf-core: It MUST be possible to pass additional parameters to the tool as a command-line string via the "$options.args" variable + // e.g. https://github.com/nf-core/modules/blob/master/modules/homer/annotatepeaks/main.nf + // Each software used MUST provide the software name and version number in the YAML version file (versions.yml) + // TODO nf-core: It MUST be possible to pass additional parameters to the tool as a command-line string via the "task.ext.args" directive // TODO nf-core: If the tool supports multi-threading then you MUST provide the appropriate parameter // using the Nextflow "task" variable e.g. "--threads $task.cpus" // TODO nf-core: Please replace the example samtools command below with your module's command @@ -70,7 +60,7 @@ """ samtools \\ sort \\ - $options.args \\ + $args \\ -@ $task.cpus \\ {%- if has_meta %} -o ${prefix}.bam \\ @@ -78,6 +68,9 @@ {%- endif %} $bam - echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + "${task.process}": + {{ tool }}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' ) + END_VERSIONS """ } diff --git a/nf_core/module-template/modules/meta.yml b/nf_core/module-template/modules/meta.yml index be6d3e5f9..d58df5a37 100644 --- a/nf_core/module-template/modules/meta.yml +++ b/nf_core/module-template/modules/meta.yml @@ -37,10 +37,10 @@ output: Groovy Map containing sample information e.g.
[ id:'test', single_end:false ] {% endif -%} - - version: + - versions: type: file - description: File containing software version - pattern: "*.{version.txt}" + description: File containing software versions + pattern: "versions.yml" ## TODO nf-core: Delete / customise this example output - bam: type: file diff --git a/nf_core/module-template/tests/main.nf b/nf_core/module-template/tests/main.nf index 63bb1f779..b172736c3 100644 --- a/nf_core/module-template/tests/main.nf +++ b/nf_core/module-template/tests/main.nf @@ -2,12 +2,14 @@ nextflow.enable.dsl = 2 -include { {{ tool_name_underscore|upper }} } from '../../../{{ "../" if subtool else "" }}modules/{{ tool_dir }}/main.nf' addParams( options: [:] ) +include { {{ tool_name_underscore|upper }} } from '../../../{{ "../" if subtool else "" }}modules/{{ tool_dir }}/main.nf' workflow test_{{ tool_name_underscore }} { {% if has_meta %} - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] {%- else %} input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) {%- endif %} diff --git a/nf_core/module-template/tests/nextflow.config b/nf_core/module-template/tests/nextflow.config new file mode 100644 index 000000000..50f50a7a3 --- /dev/null +++ b/nf_core/module-template/tests/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/nf_core/module-template/tests/test.yml b/nf_core/module-template/tests/test.yml index 337444cb7..8679a4494 100644 --- a/nf_core/module-template/tests/test.yml +++ b/nf_core/module-template/tests/test.yml @@ -1,7 +1,7 @@ ## TODO nf-core: Please run the following command to build this file: # nf-core modules create-test-yml {{ tool }}{%- if subtool %}/{{ subtool }}{%- endif %} - name: {{ tool }}{{ ' '+subtool if subtool else '' }} - command: nextflow run ./tests/modules/{{ tool_dir }} -entry test_{{ tool_name_underscore }} -c tests/config/nextflow.config + command: nextflow run ./tests/modules/{{ tool_dir }} -entry test_{{ tool_name_underscore }} -c ./tests/config/nextflow.config -c ./tests/modules/{{ tool_dir }}/nextflow.config tags: - {{ tool }} {%- if subtool %} diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 9189cbb1f..4d24faa52 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -83,7 +83,7 @@ def bump_versions(self, module=None, all_modules=False, show_uptodate=False): "Tool name:", choices=[m.module_name for m in nfcore_modules], style=nf_core.utils.nfcore_question_style, - ).ask() + ).unsafe_ask() if module: self.show_up_to_date = True diff --git a/nf_core/modules/create.py b/nf_core/modules/create.py index daeb18eaf..a9c071c4c 100644 --- a/nf_core/modules/create.py +++ b/nf_core/modules/create.py @@ -63,10 +63,10 @@ def create(self): modules/modules/tool/subtool/ * main.nf * meta.yml - * functions.nf modules/tests/modules/tool/subtool/ * main.nf * test.yml + * nextflow.config tests/config/pytest_modules.yml The function will attempt to automatically find a Bioconda package called @@ -145,7 +145,9 @@ def create(self): log.info(f"Using Bioconda package: '{self.bioconda}'") break except (ValueError, LookupError) as e: 
- log.warning(f"Could not find Conda dependency using the Anaconda API: '{self.tool}'") + log.warning( + f"Could not find Conda dependency using the Anaconda API: '{self.tool_conda_name if self.tool_conda_name else self.tool}'" + ) if rich.prompt.Confirm.ask(f"[violet]Do you want to enter a different Bioconda package name?"): self.tool_conda_name = rich.prompt.Prompt.ask("[violet]Name of Bioconda package").strip() continue @@ -204,7 +206,7 @@ def create(self): choices=process_label_defaults, style=nf_core.utils.nfcore_question_style, default="process_low", - ).ask() + ).unsafe_ask() if self.has_meta is None: log.info( @@ -279,11 +281,14 @@ def get_repo_type(self, directory): if dir is None or not os.path.exists(directory): raise UserWarning(f"Could not find directory: {directory}") + readme = os.path.join(directory, "README.md") # Determine repository type - if os.path.exists(os.path.join(directory, "main.nf")): - return "pipeline" - elif os.path.exists(os.path.join(directory, "modules")): - return "modules" + if os.path.exists(readme): + with open(readme) as fh: + if fh.readline().rstrip().startswith("# ![nf-core/modules]"): + return "modules" + else: + return "pipeline" else: raise UserWarning( f"This directory does not look like a clone of nf-core/modules or an nf-core pipeline: '{directory}'" @@ -351,10 +356,10 @@ def get_module_dirs(self): ) # Set file paths - can be tool/ or tool/subtool/ so can't do in template directory structure - file_paths[os.path.join("modules", "functions.nf")] = os.path.join(software_dir, "functions.nf") file_paths[os.path.join("modules", "main.nf")] = os.path.join(software_dir, "main.nf") file_paths[os.path.join("modules", "meta.yml")] = os.path.join(software_dir, "meta.yml") file_paths[os.path.join("tests", "main.nf")] = os.path.join(test_dir, "main.nf") file_paths[os.path.join("tests", "test.yml")] = os.path.join(test_dir, "test.yml") + file_paths[os.path.join("tests", "nextflow.config")] = os.path.join(test_dir, "nextflow.config") return file_paths diff --git a/nf_core/modules/install.py b/nf_core/modules/install.py index b0d96c431..c5c33cc2a 100644 --- a/nf_core/modules/install.py +++ b/nf_core/modules/install.py @@ -82,10 +82,10 @@ def install(self, module): current_entry = None # Set the install folder based on the repository name - install_folder = [self.modules_repo.owner, self.modules_repo.repo] + install_folder = [self.dir, "modules", self.modules_repo.owner, self.modules_repo.repo] # Compute the module directory - module_dir = os.path.join(self.dir, "modules", *install_folder, module) + module_dir = os.path.join(*install_folder, module) # Check that the module is not already installed if (current_entry is not None and os.path.exists(module_dir)) and not self.force: @@ -128,7 +128,7 @@ def install(self, module): log.debug(f"Installing module '{module}' at modules hash {version} from {self.modules_repo.name}") # Download module files - if not self.download_module_file(module, version, self.modules_repo, install_folder, module_dir): + if not self.download_module_file(module, version, self.modules_repo, install_folder): return False # Update module.json with newly installed module diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index f9de48a30..af3367765 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -61,11 +61,11 @@ class ModuleLint(ModuleCommand): # Import lint functions from .main_nf import main_nf - from .functions_nf import functions_nf from .meta_yml import meta_yml from 
.module_changes import module_changes from .module_tests import module_tests from .module_todos import module_todos + from .module_deprecations import module_deprecations from .module_version import module_version def __init__(self, dir): @@ -96,7 +96,7 @@ def __init__(self, dir): @staticmethod def _get_all_lint_tests(): - return ["main_nf", "functions_nf", "meta_yml", "module_changes", "module_todos"] + return ["main_nf", "meta_yml", "module_changes", "module_todos", "module_deprecations"] def lint(self, module=None, key=(), all_modules=False, print_results=True, show_passed=False, local=False): """ @@ -230,8 +230,7 @@ def get_installed_modules(self): # Filter local modules if os.path.exists(local_modules_dir): - local_modules = os.listdir(local_modules_dir) - local_modules = sorted([x for x in local_modules if (x.endswith(".nf") and not x == "functions.nf")]) + local_modules = sorted([x for x in local_modules if x.endswith(".nf")]) # nf-core/modules if self.repo_type == "modules": @@ -239,16 +238,21 @@ def get_installed_modules(self): # Get nf-core modules if os.path.exists(nfcore_modules_dir): - for m in sorted([m for m in os.listdir(nfcore_modules_dir) if not m == "lib"]): + for m in sorted(os.listdir(nfcore_modules_dir)): if not os.path.isdir(os.path.join(nfcore_modules_dir, m)): raise ModuleLintException( f"File found in '{nfcore_modules_dir}': '{m}'! This directory should only contain module directories." ) - m_content = os.listdir(os.path.join(nfcore_modules_dir, m)) + + module_dir = os.path.join(nfcore_modules_dir, m) + module_subdir = os.listdir(module_dir) # Not a module, but contains sub-modules - if not "main.nf" in m_content: - for tool in m_content: - nfcore_modules.append(os.path.join(m, tool)) + if "main.nf" not in module_subdir: + for path in module_subdir: + module_subdir_path = os.path.join(nfcore_modules_dir, m, path) + if os.path.isdir(module_subdir_path): + if os.path.exists(os.path.join(module_subdir_path, "main.nf")): + nfcore_modules.append(os.path.join(m, path)) else: nfcore_modules.append(m) @@ -309,7 +313,6 @@ def lint_module(self, mod, local=False): If the module is a nf-core module we check for existence of the files - main.nf - meta.yml - - functions.nf And verify that their content conform to the nf-core standards. 
If the linting is run for modules in the central nf-core/modules repo @@ -321,7 +324,7 @@ def lint_module(self, mod, local=False): if local: self.main_nf(mod) self.passed += [LintResult(mod, *m) for m in mod.passed] - self.warned += [LintResult(mod, *m) for m in mod.warned] + self.warned += [LintResult(mod, *m) for m in (mod.warned + mod.failed)] # Otherwise run all the lint tests else: diff --git a/nf_core/modules/lint/functions_nf.py b/nf_core/modules/lint/functions_nf.py deleted file mode 100644 index 600a1ae7f..000000000 --- a/nf_core/modules/lint/functions_nf.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python -import logging -import os -import nf_core - -log = logging.getLogger(__name__) - - -def functions_nf(module_lint_object, module): - """ - Lint a functions.nf file - Verifies that the file exists and contains all necessary functions - """ - local_copy = None - template_copy = None - try: - with open(module.function_nf, "r") as fh: - lines = fh.readlines() - module.passed.append(("functions_nf_exists", "'functions.nf' exists", module.function_nf)) - except FileNotFoundError as e: - module.failed.append(("functions_nf_exists", "'functions.nf' does not exist", module.function_nf)) - return - - # Test whether all required functions are present - required_functions = ["getSoftwareName", "initOptions", "getPathFromList", "saveFiles"] - lines = "\n".join(lines) - contains_all_functions = True - for f in required_functions: - if not "def " + f in lines: - module.failed.append(("functions_nf_func_exist", "Function is missing: `{f}`", module.function_nf)) - contains_all_functions = False - if contains_all_functions: - module.passed.append(("functions_nf_func_exist", "All functions present", module.function_nf)) - - # Compare functions.nf file to the most recent template - # Get file content of the module functions.nf - try: - local_copy = open(module.function_nf, "r").read() - except FileNotFoundError as e: - log.error(f"Could not open {module.function_nf}") - - # Get the template file - template_copy_path = os.path.join(os.path.dirname(nf_core.__file__), "module-template/modules/functions.nf") - try: - template_copy = open(template_copy_path, "r").read() - except FileNotFoundError as e: - log.error(f"Could not open {template_copy_path}") - - # Compare the files - if local_copy and template_copy: - if local_copy != template_copy: - module.failed.append( - ("function_nf_comparison", "New version of functions.nf available", module.function_nf) - ) - else: - module.passed.append(("function_nf_comparison", "functions.nf is up to date", module.function_nf)) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 018dc99af..065910d3b 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -26,13 +26,17 @@ def main_nf(module_lint_object, module): module.failed.append(("main_nf_exists", "Module file does not exist", module.main_nf)) return - # Check that options are defined - initoptions_re = re.compile(r"\s*options\s*=\s*initOptions\s*\(\s*params\.options\s*\)\s*") - paramsoptions_re = re.compile(r"\s*params\.options\s*=\s*\[:\]\s*") - if any(initoptions_re.match(l) for l in lines) and any(paramsoptions_re.match(l) for l in lines): - module.passed.append(("main_nf_options", "'options' variable specified", module.main_nf)) - else: - module.warned.append(("main_nf_options", "'options' variable not specified", module.main_nf)) + deprecated_i = ["initOptions", "saveFiles", "getSoftwareName", "getProcessName", "publishDir"] + lines_j = 
"\n".join(lines) + for i in deprecated_i: + if i in lines_j: + module.failed.append( + ( + "deprecated_dsl2", + f"`{i}` specified. No longer required for the latest nf-core/modules syntax!", + module.main_nf, + ) + ) # Go through module main.nf file and switch state according to current section # Perform section-specific linting @@ -80,30 +84,6 @@ def main_nf(module_lint_object, module): else: module.failed.append(("main_nf_meta_output", "'meta' map not emitted in output channel(s)", module.main_nf)) - # if meta is specified, it should also be used as "saveAs ... meta:meta, publish_by_meta:['id']" - save_as = [pl for pl in process_lines if "saveAs" in pl] - if len(save_as) > 0 and re.search("\s*meta\s*:\s*meta", save_as[0]): - module.passed.append(("main_nf_meta_saveas", "'meta:meta' specified in saveAs function", module.main_nf)) - else: - module.failed.append(("main_nf_meta_saveas", "'meta:meta' unspecified in saveAs function", module.main_nf)) - - if len(save_as) > 0 and re.search("\s*publish_by_meta\s*:\s*\['id'\]", save_as[0]): - module.passed.append( - ( - "main_nf_publish_meta_saveas", - "'publish_by_meta:['id']' specified in saveAs function", - module.main_nf, - ) - ) - else: - module.failed.append( - ( - "main_nf_publish_meta_saveas", - "'publish_by_meta:['id']' unspecified in saveAs function", - module.main_nf, - ) - ) - # Check that a software version is emitted if "version" in outputs: module.passed.append(("main_nf_version_emitted", "Module emits software version", module.main_nf)) @@ -116,19 +96,19 @@ def main_nf(module_lint_object, module): def check_script_section(self, lines): """ Lint the script section - Checks whether 'def sotware' and 'def prefix' are defined + Checks whether 'def prefix' is defined and whether getProcessName is used for `versions.yml`. 
""" script = "".join(lines) - # check for software - if re.search("\s*def\s*software\s*=\s*getSoftwareName", script): - self.passed.append(("main_nf_version_script", "Software version specified in script section", self.main_nf)) + # check that process name is used for `versions.yml` + if re.search("\$\{\s*task\.process\s*\}", script): + self.passed.append(("main_nf_version_script", "Process name used for versions.yml", self.main_nf)) else: - self.warned.append(("main_nf_version_script", "Software version unspecified in script section", self.main_nf)) + self.warned.append(("main_nf_version_script", "Process name not used for versions.yml", self.main_nf)) # check for prefix (only if module has a meta map as input) if self.has_meta: - if re.search("\s*prefix\s*=\s*options.suffix", script): + if re.search("\s*prefix\s*=\s*task.ext.prefix", script): self.passed.append(("main_nf_meta_prefix", "'prefix' specified in script section", self.main_nf)) else: self.failed.append(("main_nf_meta_prefix", "'prefix' unspecified in script section", self.main_nf)) @@ -141,6 +121,13 @@ def check_process_section(self, lines): Specifically checks for correct software versions and containers """ + # Check that we have a process section + if len(lines) == 0: + self.failed.append(("process_exist", "Process definition does not exist", self.main_nf)) + return + else: + self.passed.append(("process_exist", "Process definition exists", self.main_nf)) + # Checks that build numbers of bioconda, singularity and docker container are matching build_id = "build" singularity_tag = "singularity" diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py index 57f3b9a03..44c250142 100644 --- a/nf_core/modules/lint/module_changes.py +++ b/nf_core/modules/lint/module_changes.py @@ -11,12 +11,12 @@ def module_changes(module_lint_object, module): """ Checks whether installed nf-core modules have changed compared to the original repository - Downloads the 'main.nf', 'functions.nf' and 'meta.yml' files for every module + Downloads the 'main.nf' and 'meta.yml' files for every module and compares them to the local copies If the module has a 'git_sha', the file content is checked against this sha """ - files_to_check = ["main.nf", "functions.nf", "meta.yml"] + files_to_check = ["main.nf", "meta.yml"] # Loop over nf-core modules module_base_url = f"https://raw.githubusercontent.com/{module_lint_object.modules_repo.name}/{module_lint_object.modules_repo.branch}/modules/{module.module_name}/" @@ -52,7 +52,7 @@ def module_changes(module_lint_object, module): module.warned.append( ( "check_local_copy", - "Local copy of module outdated", + "Local copy of module does not match remote", f"{os.path.join(module.module_dir, f)}", ) ) diff --git a/nf_core/modules/lint/module_deprecations.py b/nf_core/modules/lint/module_deprecations.py new file mode 100644 index 000000000..0a2990d9d --- /dev/null +++ b/nf_core/modules/lint/module_deprecations.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python +import logging +import os + +log = logging.getLogger(__name__) + + +def module_deprecations(module_lint_object, module): + """ + Check that the modules are up to the latest nf-core standard + """ + module.wf_path = module.module_dir + if "functions.nf" in os.listdir(module.module_dir): + module.failed.append( + ( + "module_deprecations", + f"Deprecated file `functions.nf` found. 
No longer required for the latest nf-core/modules syntax!", + module.module_dir, + ) + ) diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index fad25d20a..7bef0112d 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -52,11 +52,31 @@ def module_tests(module_lint_object, module): if not tag in [module.module_name, module.module_name.split("/")[0]]: all_tags_correct = False + # Look for md5sums of empty files + for tfile in test.get("files", []): + if tfile.get("md5sum") == "d41d8cd98f00b204e9800998ecf8427e": + module.warned.append( + ( + "test_yml_md5sum", + "md5sum for empty file found: d41d8cd98f00b204e9800998ecf8427e", + module.test_yml, + ) + ) + if tfile.get("md5sum") == "7029066c27ac6f5ef18d660d5741979a": + module.warned.append( + ( + "test_yml_md5sum", + "md5sum for compressed empty file found: 7029066c27ac6f5ef18d660d5741979a", + module.test_yml, + ) + ) + if all_tags_correct: module.passed.append(("test_yml_tags", "tags adhere to guidelines", module.test_yml)) else: module.failed.append(("test_yml_tags", "tags do not adhere to guidelines", module.test_yml)) + # Test that the file exists module.passed.append(("test_yml_exists", "Test `test.yml` exists", module.test_yml)) except FileNotFoundError: module.failed.append(("test_yml_exists", "Test `test.yml` does not exist", module.test_yml)) diff --git a/nf_core/modules/list.py b/nf_core/modules/list.py index 355b952c4..743670908 100644 --- a/nf_core/modules/list.py +++ b/nf_core/modules/list.py @@ -103,7 +103,8 @@ def pattern_msg(keywords): if module_entry: version_sha = module_entry["git_sha"] try: - message, date = nf_core.modules.module_utils.get_commit_info(version_sha) + # pass repo_name to get info on modules even outside nf-core/modules + message, date = nf_core.modules.module_utils.get_commit_info(version_sha, repo_name) except LookupError as e: log.warning(e) date = "[red]Not Available" diff --git a/nf_core/modules/module_utils.py b/nf_core/modules/module_utils.py index 00d32090e..dd9afc958 100644 --- a/nf_core/modules/module_utils.py +++ b/nf_core/modules/module_utils.py @@ -46,6 +46,7 @@ def get_module_git_log(module_name, modules_repo=None, per_page=30, page_nbr=1, update breaking backwards compatibility. Args: module_name (str): Name of module + modules_repo (ModulesRepo): A ModulesRepo object configured for the repository in question per_page (int): Number of commits per page returned by API page_nbr (int): Page number of the retrieved commits since (str): Only show commits later than this timestamp. @@ -57,7 +58,7 @@ def get_module_git_log(module_name, modules_repo=None, per_page=30, page_nbr=1, if modules_repo is None: modules_repo = ModulesRepo() api_url = f"https://api.github.com/repos/{modules_repo.name}/commits" - api_url += f"?sha{modules_repo.branch}" + api_url += f"?sha={modules_repo.branch}" if module_name is not None: api_url += f"&path=modules/{module_name}" api_url += f"&page={page_nbr}" @@ -84,19 +85,21 @@ def get_module_git_log(module_name, modules_repo=None, per_page=30, page_nbr=1, ) -def get_commit_info(commit_sha): +def get_commit_info(commit_sha, repo_name="nf-core/modules"): """ Fetches metadata about the commit (dates, message, etc.) Args: - module_name (str): Name of module commit_sha (str): The SHA of the requested commit + repo_name (str): module repos name (def. 
{0}) Returns: message (str): The commit message for the requested commit date (str): The commit date for the requested commit Raises: LookupError: If the call to the API fails. - """ - api_url = f"https://api.github.com/repos/nf-core/modules/commits/{commit_sha}?stats=false" + """.format( + repo_name + ) + api_url = f"https://api.github.com/repos/{repo_name}/commits/{commit_sha}?stats=false" log.debug(f"Fetching commit metadata for commit at {commit_sha}") response = requests.get(api_url, auth=nf_core.utils.github_api_auto_auth()) if response.status_code == 200: @@ -225,7 +228,7 @@ def iterate_commit_log_page(module_name, module_path, modules_repo, commit_shas) are identical to remote files """ - files_to_check = ["main.nf", "functions.nf", "meta.yml"] + files_to_check = ["main.nf", "meta.yml"] local_file_contents = [None, None, None] for i, file in enumerate(files_to_check): try: @@ -251,7 +254,7 @@ def local_module_equal_to_commit(local_files, module_name, modules_repo, commit_ bool: Whether all local files are identical to remote version """ - files_to_check = ["main.nf", "functions.nf", "meta.yml"] + files_to_check = ["main.nf", "meta.yml"] files_are_equal = [False, False, False] remote_copies = [None, None, None] @@ -259,7 +262,7 @@ def local_module_equal_to_commit(local_files, module_name, modules_repo, commit_ for i, file in enumerate(files_to_check): # Download remote copy and compare api_url = f"{module_base_url}/{file}" - r = requests.get(url=api_url) + r = requests.get(url=api_url, auth=nf_core.utils.github_api_auto_auth()) if r.status_code != 200: log.debug(f"Could not download remote copy of file module {module_name}/{file}") log.debug(api_url) @@ -304,7 +307,7 @@ def get_installed_modules(dir, repo_type="modules"): # Filter local modules if os.path.exists(local_modules_dir): local_modules = os.listdir(local_modules_dir) - local_modules = sorted([x for x in local_modules if (x.endswith(".nf") and not x == "functions.nf")]) + local_modules = sorted([x for x in local_modules if x.endswith(".nf")]) # nf-core/modules if repo_type == "modules": @@ -344,10 +347,12 @@ def get_repo_type(dir): raise LookupError("Could not find directory: {}".format(dir)) # Determine repository type - if os.path.exists(os.path.join(dir, "main.nf")): - return "pipeline" - elif os.path.exists(os.path.join(dir, "modules")): - return "modules" + if os.path.exists(os.path.join(dir, "README.md")): + with open(os.path.join(dir, "README.md")) as fh: + if fh.readline().rstrip().startswith("# ![nf-core/modules]"): + return "modules" + else: + return "pipeline" else: raise LookupError("Could not determine repository type of '{}'".format(dir)) @@ -365,7 +370,7 @@ def verify_pipeline_dir(dir): modules_is_software = False for repo_name in repo_names: api_url = f"https://api.github.com/repos/{repo_name}/contents" - response = requests.get(api_url) + response = requests.get(api_url, auth=nf_core.utils.github_api_auto_auth()) if response.status_code == 404: missing_remote.append(repo_name) if repo_name == "nf-core/software": @@ -396,7 +401,7 @@ def prompt_module_version_sha(module, modules_repo, installed_sha=None): log.warning(e) next_page_commits = None - while git_sha is "": + while git_sha == "": commits = next_page_commits try: next_page_commits = get_module_git_log( diff --git a/nf_core/modules/modules_command.py b/nf_core/modules/modules_command.py index e7ba1a5c0..866bffabb 100644 --- a/nf_core/modules/modules_command.py +++ b/nf_core/modules/modules_command.py @@ -161,6 +161,7 @@ def 
modules_json_up_to_date(self): modules_repo.get_modules_file_tree() install_folder = [modules_repo.owner, modules_repo.repo] except LookupError as e: + log.warn(f"Could not get module's file tree for '{repo}': {e}") remove_from_mod_json[repo] = list(modules.keys()) continue @@ -169,6 +170,9 @@ def modules_json_up_to_date(self): if sha is None: if repo not in remove_from_mod_json: remove_from_mod_json[repo] = [] + log.warn( + f"Could not find git SHA for module '{module}' in '{repo}' - removing from modules.json" + ) remove_from_mod_json[repo].append(module) continue module_dir = os.path.join(self.dir, "modules", *install_folder, module) @@ -228,8 +232,8 @@ def _s(some_list): return "" if len(some_list) == 1 else "s" log.info( - f"Could not determine 'git_sha' for module{_s(failed_to_find_commit_sha)}: '{', '.join(failed_to_find_commit_sha)}'." - f"\nPlease try to install a newer version of {'this' if len(failed_to_find_commit_sha) == 1 else 'these'} module{_s(failed_to_find_commit_sha)}." + f"Could not determine 'git_sha' for module{_s(failed_to_find_commit_sha)}: {', '.join(failed_to_find_commit_sha)}." + f"\nPlease try to install a newer version of {'this' if len(failed_to_find_commit_sha) == 1 else 'these'} module{_s(failed_to_find_commit_sha)}." ) self.dump_modules_json(fresh_mod_json) @@ -253,19 +257,20 @@ def clear_module_dir(self, module_name, module_dir): log.error("Could not remove module: {}".format(e)) return False - def download_module_file(self, module_name, module_version, modules_repo, install_folder, module_dir): + def download_module_file(self, module_name, module_version, modules_repo, install_folder, dry_run=False): """Downloads the files of a module from the remote repo""" files = modules_repo.get_module_file_urls(module_name, module_version) log.debug("Fetching module files:\n - {}".format("\n - ".join(files.keys()))) for filename, api_url in files.items(): split_filename = filename.split("/") - dl_filename = os.path.join(self.dir, "modules", *install_folder, *split_filename[1:]) + dl_filename = os.path.join(*install_folder, *split_filename[1:]) try: self.modules_repo.download_gh_file(dl_filename, api_url) except (SystemError, LookupError) as e: log.error(e) return False - log.info("Downloaded {} files to {}".format(len(files), module_dir)) + if not dry_run: + log.info("Downloaded {} files to {}".format(len(files), os.path.join(*install_folder, module_name))) return True def load_modules_json(self): diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index d6d2871ec..a59e3262a 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -16,12 +16,21 @@ class ModulesRepo(object): so that this can be used in the same way by all sub-commands. 
""" - def __init__(self, repo="nf-core/modules", branch="master"): + def __init__(self, repo="nf-core/modules", branch=None): self.name = repo self.branch = branch + # Don't bother fetching default branch if we're using nf-core + if not self.branch and self.name == "nf-core/modules": + self.branch = "master" + # Verify that the repo seems to be correctly configured - if self.name != "nf-core/modules" or self.branch != "master": + if self.name != "nf-core/modules" or self.branch: + + # Get the default branch if not set + if not self.branch: + self.get_default_branch() + try: self.verify_modules_repo() except LookupError: @@ -31,6 +40,16 @@ def __init__(self, repo="nf-core/modules", branch="master"): self.modules_file_tree = {} self.modules_avail_module_names = [] + def get_default_branch(self): + """Get the default branch for a GitHub repo""" + api_url = f"https://api.github.com/repos/{self.name}" + response = requests.get(api_url, auth=nf_core.utils.github_api_auto_auth()) + if response.status_code == 200: + self.branch = response.json()["default_branch"] + log.debug(f"Found default branch to be '{self.branch}'") + else: + raise LookupError(f"Could not find repository '{self.name}' on GitHub") + def verify_modules_repo(self): # Check if name seems to be well formed @@ -39,7 +58,7 @@ def verify_modules_repo(self): # Check if repository exist api_url = f"https://api.github.com/repos/{self.name}/branches" - response = requests.get(api_url) + response = requests.get(api_url, auth=nf_core.utils.github_api_auto_auth()) if response.status_code == 200: branches = [branch["name"] for branch in response.json()] if self.branch not in branches: @@ -48,7 +67,7 @@ def verify_modules_repo(self): raise LookupError(f"Repository '{self.name}' is not available on GitHub") api_url = f"https://api.github.com/repos/{self.name}/contents?ref={self.branch}" - response = requests.get(api_url) + response = requests.get(api_url, auth=nf_core.utils.github_api_auto_auth()) if response.status_code == 200: dir_names = [entry["name"] for entry in response.json() if entry["type"] == "dir"] if "modules" not in dir_names: diff --git a/nf_core/modules/nfcore_module.py b/nf_core/modules/nfcore_module.py index e6e490feb..f828142fd 100644 --- a/nf_core/modules/nfcore_module.py +++ b/nf_core/modules/nfcore_module.py @@ -26,7 +26,6 @@ def __init__(self, module_dir, repo_type, base_dir, nf_core_module=True): # Initialize the important files self.main_nf = os.path.join(self.module_dir, "main.nf") self.meta_yml = os.path.join(self.module_dir, "meta.yml") - self.function_nf = os.path.join(self.module_dir, "functions.nf") if self.repo_type == "pipeline": self.module_name = module_dir.split("nf-core/modules" + os.sep)[1] else: diff --git a/nf_core/modules/remove.py b/nf_core/modules/remove.py index f657c361a..996966e7e 100644 --- a/nf_core/modules/remove.py +++ b/nf_core/modules/remove.py @@ -46,12 +46,12 @@ def remove(self, module): else: repo_name = questionary.autocomplete( "Repo name:", choices=self.module_names.keys(), style=nf_core.utils.nfcore_question_style - ).ask() + ).unsafe_ask() if module is None: module = questionary.autocomplete( "Tool name:", choices=self.module_names[repo_name], style=nf_core.utils.nfcore_question_style - ).ask() + ).unsafe_ask() # Set the remove folder based on the repository name remove_folder = os.path.split(repo_name) diff --git a/nf_core/modules/test_yml_builder.py b/nf_core/modules/test_yml_builder.py index 9b05a9707..d917a8105 100644 --- a/nf_core/modules/test_yml_builder.py +++ 
b/nf_core/modules/test_yml_builder.py @@ -8,8 +8,10 @@ from rich.syntax import Syntax import errno +import gzip import hashlib import logging +import operator import os import questionary import re @@ -18,7 +20,6 @@ import subprocess import tempfile import yaml -import operator import nf_core.utils @@ -46,6 +47,7 @@ def __init__( self.module_test_main = None self.entry_points = [] self.tests = [] + self.errors = [] def run(self): """Run build steps""" @@ -57,6 +59,9 @@ def run(self): self.scrape_workflow_entry_points() self.build_all_tests() self.print_test_yml() + if len(self.errors) > 0: + errors = "\n - ".join(self.errors) + raise UserWarning(f"Ran, but found errors:\n - {errors}") def check_inputs(self): """Do more complex checks about supplied flags.""" @@ -187,6 +192,27 @@ def build_single_test(self, entry_point): return ep_test + def check_if_empty_file(self, fname): + """Check if the file is empty, or compressed empty""" + if os.path.getsize(fname) == 0: + return True + try: + with open(fname, "rb") as fh: + g_f = gzip.GzipFile(fileobj=fh, mode="rb") + if g_f.read() == b"": + return True + except Exception as e: + # Python 3.8+ + if hasattr(gzip, "BadGzipFile"): + if isinstance(e, gzip.BadGzipFile): + pass + # Python 3.7 + elif isinstance(e, OSError): + pass + else: + raise e + return False + def _md5(self, fname): """Generate md5 sum for file""" hash_md5 = hashlib.md5() @@ -196,16 +222,23 @@ def _md5(self, fname): md5sum = hash_md5.hexdigest() return md5sum - def create_test_file_dict(self, results_dir): + def create_test_file_dict(self, results_dir, is_repeat=False): """Walk through directory and collect md5 sums""" test_files = [] for root, dir, file in os.walk(results_dir): for elem in file: elem = os.path.join(root, elem) - elem_md5 = self._md5(elem) + test_file = {"path": elem} + # Check that this isn't an empty file + if self.check_if_empty_file(elem): + if not is_repeat: + self.errors.append(f"Empty file, skipping md5sum: '{os.path.basename(elem)}'") + else: + elem_md5 = self._md5(elem) + test_file["md5sum"] = elem_md5 # Switch out the results directory path with the expected 'output' directory elem = elem.replace(results_dir, "output") - test_files.append({"path": elem, "md5sum": elem_md5}) + test_files.append(test_file) test_files = sorted(test_files, key=operator.itemgetter("path")) @@ -237,11 +270,11 @@ def get_md5_sums(self, entry_point, command, results_dir=None, results_dir_repea # If test was repeated, compare the md5 sums if results_dir_repeat: - test_files_repeat = self.create_test_file_dict(results_dir=results_dir_repeat) + test_files_repeat = self.create_test_file_dict(results_dir=results_dir_repeat, is_repeat=True) # Compare both test.yml files for i in range(len(test_files)): - if not test_files[i]["md5sum"] == test_files_repeat[i]["md5sum"]: + if test_files[i].get("md5sum") and not test_files[i].get("md5sum") == test_files_repeat[i]["md5sum"]: test_files[i].pop("md5sum") test_files[i][ "contains" diff --git a/nf_core/modules/update.py b/nf_core/modules/update.py index 2bddfec76..dc69c5b15 100644 --- a/nf_core/modules/update.py +++ b/nf_core/modules/update.py @@ -1,6 +1,11 @@ import os +import shutil import questionary import logging +import tempfile +import difflib +from rich.console import Console +from rich.syntax import Syntax import nf_core.utils import nf_core.modules.module_utils @@ -13,12 +18,13 @@ class ModuleUpdate(ModuleCommand): - def __init__(self, pipeline_dir, force=False, prompt=False, sha=None, update_all=False): + def __init__(self, 
pipeline_dir, force=False, prompt=False, sha=None, update_all=False, diff=False): super().__init__(pipeline_dir) self.force = force self.prompt = prompt self.sha = sha self.update_all = update_all + self.diff = diff def update(self, module): if self.repo_type == "modules": @@ -40,7 +46,7 @@ def update(self, module): "Update all modules or a single named module?", choices=choices, style=nf_core.utils.nfcore_question_style, - ).ask() + ).unsafe_ask() == "All modules" ) @@ -94,12 +100,15 @@ def update(self, module): log.info("Module's update entry in '.nf-core.yml' is set to False") return False elif isinstance(config_entry, str): + sha = config_entry if self.sha: log.warning( - "Found entry in '.nf-core.yml' for module " + f"Found entry in '.nf-core.yml' for module '{module}' " "which will override version specified with '--sha'" ) - sha = config_entry + else: + log.info(f"Found entry in '.nf-core.yml' for module '{module}'") + log.info(f"Updating module to ({sha})") else: log.error("Module's update entry in '.nf-core.yml' is of wrong type") return False @@ -173,6 +182,7 @@ def update(self, module): exit_value = True for modules_repo, module, sha in repos_mods_shas: + dry_run = self.diff if not module_exist_in_repo(module, modules_repo): warn_msg = f"Module '{module}' not found in remote '{modules_repo.name}' ({modules_repo.branch})" if self.update_all: @@ -187,10 +197,10 @@ def update(self, module): current_entry = None # Set the install folder based on the repository name - install_folder = [modules_repo.owner, modules_repo.repo] + install_folder = [self.dir, "modules", modules_repo.owner, modules_repo.repo] # Compute the module directory - module_dir = os.path.join(self.dir, "modules", *install_folder, module) + module_dir = os.path.join(*install_folder, module) if sha: version = sha @@ -225,17 +235,78 @@ def update(self, module): log.info(f"'{modules_repo.name}/{module}' is already up to date") continue - log.info(f"Updating '{modules_repo.name}/{module}'") - log.debug(f"Updating module '{module}' to {version} from {modules_repo.name}") + if not dry_run: + log.info(f"Updating '{modules_repo.name}/{module}'") + log.debug(f"Updating module '{module}' to {version} from {modules_repo.name}") - log.debug(f"Removing old version of module '{module}'") - self.clear_module_dir(module, module_dir) + log.debug(f"Removing old version of module '{module}'") + self.clear_module_dir(module, module_dir) + + if dry_run: + # Set the install folder to a temporary directory + install_folder = ["/tmp", next(tempfile._get_candidate_names())] # Download module files - if not self.download_module_file(module, version, modules_repo, install_folder, module_dir): + if not self.download_module_file(module, version, modules_repo, install_folder, dry_run=dry_run): exit_value = False continue - # Update module.json with newly installed module - self.update_modules_json(modules_json, modules_repo.name, module, version) + if dry_run: + console = Console(force_terminal=nf_core.utils.rich_force_colors()) + files = os.listdir(os.path.join(*install_folder, module)) + temp_folder = os.path.join(*install_folder, module) + log.info( + f"Changes in module '{module}' between ({current_entry['git_sha'] if current_entry is not None else '?'}) and ({version if version is not None else 'latest'})" + ) + + for file in files: + temp_path = os.path.join(temp_folder, file) + curr_path = os.path.join(module_dir, file) + if os.path.exists(temp_path) and os.path.exists(curr_path): + with open(temp_path, "r") as fh: + new_lines = 
fh.readlines() + with open(curr_path, "r") as fh: + old_lines = fh.readlines() + if new_lines == old_lines: + # The files are identical + log.info(f"'{os.path.join(module, file)}' is unchanged") + else: + log.info(f"Changes in '{os.path.join(module, file)}':") + # Compute the diff + diff = difflib.unified_diff( + old_lines, + new_lines, + fromfile=f"{os.path.join(module, file)} (installed)", + tofile=f"{os.path.join(module, file)} (new)", + ) + + # Pretty print the diff using the pygments diff lexer + console.print(Syntax("".join(diff), "diff", theme="ansi_light")) + + elif os.path.exists(temp_path): + # The file was created between the commits + log.info(f"Created file '{file}'") + + elif os.path.exists(curr_path): + # The file was removed between the commits + log.info(f"Removed file '{file}'") + + # Ask the user if they want to install the module + dry_run = not questionary.confirm("Update module?", default=False).unsafe_ask() + if not dry_run: + # The new module files are already installed + # we just need to clear the directory and move the + # new files from the temporary directory + self.clear_module_dir(module, module_dir) + os.mkdir(module_dir) + for file in files: + path = os.path.join(temp_folder, file) + if os.path.exists(path): + shutil.move(path, os.path.join(module_dir, file)) + log.info(f"Updating '{modules_repo.name}/{module}'") + log.debug(f"Updating module '{module}' to {version} from {modules_repo.name}") + + if not dry_run: + # Update module.json with newly installed module + self.update_modules_json(modules_json, modules_repo.name, module, version) return exit_value diff --git a/nf_core/pipeline-template/.gitattributes b/nf_core/pipeline-template/.gitattributes index 7fe55006f..050bb1203 100644 --- a/nf_core/pipeline-template/.gitattributes +++ b/nf_core/pipeline-template/.gitattributes @@ -1 +1,3 @@ *.config linguist-language=nextflow +modules/nf-core/** linguist-generated +subworkflows/nf-core/** linguist-generated diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index bf43ef3fd..edc89666a 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -68,16 +68,13 @@ If you wish to contribute a new step, please use the following coding standards: 1. Define the corresponding input channel into your new process from the expected previous process channel 2. Write the process block (see below). 3. Define the output channel if needed (see below). -4. Add any new flags/options to `nextflow.config` with a default (see below). -5. Add any new flags/options to `nextflow_schema.json` with help text (with `nf-core schema build`). -6. Add any new flags/options to the help message (for integer/text parameters, print to help the corresponding `nextflow.config` parameter). -7. Add sanity checks for all relevant parameters. -8. Add any new software to the `scrape_software_versions.py` script in `bin/` and the version command to the `scrape_software_versions` process in `main.nf`. -9. Do local tests that the new code works properly and as expected. -10. Add a new test command in `.github/workflow/ci.yml`. -11. If applicable add a [MultiQC](https://https://multiqc.info/) module. -12. Update MultiQC config `assets/multiqc_config.yaml` so relevant suffixes, name clean up, General Statistics Table column order, and module figures are in the right order. -13. Optional: Add any descriptions of MultiQC report sections and output files to `docs/output.md`. +4. 
Add any new parameters to `nextflow.config` with a default (see below). +5. Add any new parameters to `nextflow_schema.json` with help text (via the `nf-core schema build` tool). +6. Add sanity checks and validation for all relevant parameters. +7. Perform local tests to validate that the new code works as expected. +8. If applicable, add a new test command in `.github/workflow/ci.yml`. +9. Update MultiQC config `assets/multiqc_config.yaml` so relevant suffixes, file name clean up and module plots are in the appropriate order. If applicable, add a [MultiQC](https://multiqc.info/) module. +10. Add a description of the output files and if relevant any appropriate images from the MultiQC report to `docs/output.md`. ### Default values @@ -102,27 +99,6 @@ Please use the following naming schemes, to make it easy to understand what is g If you are using a new feature from core Nextflow, you may bump the minimum required version of nextflow in the pipeline with: `nf-core bump-version --nextflow . [min-nf-version]` -### Software version reporting - -If you add a new tool to the pipeline, please ensure you add the information of the tool to the `get_software_version` process. - -Add to the script block of the process, something like the following: - -```bash -<YOUR_TOOL> --version &> v_<YOUR_TOOL>.txt 2>&1 || true -``` - -or - -```bash -<YOUR_TOOL> --help | head -n 1 &> v_<YOUR_TOOL>.txt 2>&1 || true -``` - -You then need to edit the script `bin/scrape_software_versions.py` to: - -1. Add a Python regex for your tool's `--version` output (as in stored in the `v_<YOUR_TOOL>.txt` file), to ensure the version is reported as a `v` and the version number e.g. `v2.1.1` -2. Add a HTML entry to the `OrderedDict` for formatting in MultiQC. - ### Images and figures For overview images and other documents we follow the nf-core [style guidelines and examples](https://nf-co.re/developers/design_guidelines). diff --git a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.md b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 5c5100c97..000000000 --- a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,63 +0,0 @@ ---- -name: Bug report -about: Report something that is broken or incorrect -labels: bug ---- - - - -## Check Documentation - -I have checked the following places for your error: - -- [ ] [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) -- [ ] [{{ name }} pipeline documentation](https://nf-co.re/{{ short_name }}/usage) - -## Description of the bug - - - -## Steps to reproduce - -Steps to reproduce the behaviour: - -1. Command line: -2. 
See error: - -## Expected behaviour - - - -## Log files - -Have you provided the following extra information/files: - -- [ ] The command used to run the pipeline -- [ ] The `.nextflow.log` file - -## System - -- Hardware: -- Executor: -- OS: -- Version - -## Nextflow Installation - -- Version: - -## Container engine - -- Engine: -- version: - -## Additional context - - diff --git a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 000000000..e904de573 --- /dev/null +++ b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,52 @@ + +name: Bug report +description: Report something that is broken or incorrect +labels: bug +body: + + - type: markdown + attributes: + value: | + Before you post this issue, please check the documentation: + + - [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) + - [{{ name }} pipeline documentation](https://nf-co.re/{{ short_name }}/usage) + + - type: textarea + id: description + attributes: + label: Description of the bug + description: A clear and concise description of what the bug is. + validations: + required: true + + - type: textarea + id: command_used + attributes: + label: Command used and terminal output + description: Steps to reproduce the behaviour. Please paste the command you used to launch the pipeline and the output from your terminal. + render: console + placeholder: | + $ nextflow run ... + + Some output where something broke + + - type: textarea + id: files + attributes: + label: Relevant files + description: | + Please drag and drop the relevant files here. Create a `.zip` archive if the extension is not allowed. + Your verbose log file `.nextflow.log` is often useful _(this is a hidden file in the directory where you launched the pipeline)_ as well as custom Nextflow configuration files. + + - type: textarea + id: system + attributes: + label: System information + description: | + * Nextflow version _(eg. 21.10.3)_ + * Hardware _(eg. HPC, Desktop, Cloud)_ + * Executor _(eg. slurm, local, awsbatch)_ + * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter or Charliecloud)_ + * OS _(eg. CentOS Linux, macOS, Linux Mint)_ + * Version of {{ name }} _(eg. 1.1, 1.5, 1.8.2)_ diff --git a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/config.yml b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/config.yml index a582ac2fb..19cd8f7c5 100644 --- a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/config.yml +++ b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/config.yml @@ -1,4 +1,3 @@ -blank_issues_enabled: false contact_links: - name: Join nf-core url: https://nf-co.re/join diff --git a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/feature_request.md b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 1727d53f0..000000000 --- a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for the {{ name }} pipeline -labels: enhancement ---- - - - -## Is your feature request related to a problem? 
Please describe - - - - - -## Describe the solution you'd like - - - -## Describe alternatives you've considered - - - -## Additional context - - diff --git a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/feature_request.yml b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 000000000..ad3a715de --- /dev/null +++ b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,11 @@ +name: Feature request +description: Suggest an idea for the {{ name }} pipeline +labels: enhancement +body: + - type: textarea + id: description + attributes: + label: Description of feature + description: Please describe your suggestion for a new feature. It might help to describe a problem or use case, plus any alternatives that you have considered. + validations: + required: true diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml index 9ec98b29f..8e0ab65b2 100644 --- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml +++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml @@ -14,14 +14,14 @@ jobs: runs-on: ubuntu-latest steps: - name: Launch workflow via tower - uses: nf-core/tower-action@master + uses: nf-core/tower-action@v2 # TODO nf-core: You can customise AWS full pipeline tests as required # Add full size test data (but still relatively small datasets for few samples) # on the `test_full.config` test runs with only one set of parameters {% raw %} with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} - bearer_token: ${{ secrets.TOWER_BEARER_TOKEN }} + access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} pipeline: ${{ github.repository }} revision: ${{ github.sha }} @@ -30,5 +30,5 @@ jobs: { "outdir": "s3://{% raw %}${{ secrets.AWS_S3_BUCKET }}{% endraw %}/{{ short_name }}/{% raw %}results-${{ github.sha }}{% endraw %}" } - profiles: '[ "test_full", "aws_tower" ]' - + profiles: test_full,aws_tower + pre_run_script: 'export NXF_VER=21.10.3' diff --git a/nf_core/pipeline-template/.github/workflows/awstest.yml b/nf_core/pipeline-template/.github/workflows/awstest.yml index fff75db64..ffa04f14d 100644 --- a/nf_core/pipeline-template/.github/workflows/awstest.yml +++ b/nf_core/pipeline-template/.github/workflows/awstest.yml @@ -11,11 +11,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Launch workflow via tower - uses: nf-core/tower-action@master + uses: nf-core/tower-action@v2 {% raw %} with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} - bearer_token: ${{ secrets.TOWER_BEARER_TOKEN }} + access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} pipeline: ${{ github.repository }} revision: ${{ github.sha }} @@ -24,5 +24,5 @@ jobs: { "outdir": "s3://{% raw %}${{ secrets.AWS_S3_BUCKET }}{% endraw %}/{{ short_name }}/{% raw %}results-${{ github.sha }}{% endraw %}" } - profiles: '[ "test", "aws_tower" ]' - + profiles: test,aws_tower + pre_run_script: 'export NXF_VER=21.10.3' diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 458e98d75..d8a7240f7 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -8,8 +8,9 @@ on: release: types: [published] -# Uncomment if we need an edge release of Nextflow again -# env: NXF_EDGE: 1 +env: + NXF_ANSI_LOG: false + CAPSULE_LOG: none jobs: test: @@ -17,20 +18,28 @@ jobs: # Only run on push if 
this is the nf-core dev branch (merged PRs) if: {% raw %}${{{% endraw %} github.event_name != 'push' || (github.event_name == 'push' && github.repository == '{{ name }}') {% raw %}}}{% endraw %} runs-on: ubuntu-latest - env: - NXF_VER: {% raw %}${{ matrix.nxf_ver }}{% endraw %} - NXF_ANSI_LOG: false strategy: matrix: - # Nextflow versions: check pipeline minimum and current latest - nxf_ver: ['21.04.0', ''] + # Nextflow versions + include: + # Test pipeline minimum Nextflow version + - NXF_VER: '21.10.3' + NXF_EDGE: '' + # Test latest edge release of Nextflow + - NXF_VER: '' + NXF_EDGE: '1' steps: - name: Check out pipeline code uses: actions/checkout@v2 - name: Install Nextflow env: - CAPSULE_LOG: none + {% raw -%} + NXF_VER: ${{ matrix.NXF_VER }} + # Uncomment only if the edge release is more recent than the latest stable release + # See https://github.com/nextflow-io/nextflow/issues/2467 + # NXF_EDGE: ${{ matrix.NXF_EDGE }} + {%- endraw %} run: | wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ diff --git a/nf_core/pipeline-template/.github/workflows/linting_comment.yml b/nf_core/pipeline-template/.github/workflows/linting_comment.yml index 0c718c0d9..68a0feee1 100644 --- a/nf_core/pipeline-template/.github/workflows/linting_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/linting_comment.yml @@ -15,6 +15,7 @@ jobs: uses: dawidd6/action-download-artifact@v2 with: workflow: linting.yml + workflow_conclusion: completed - name: Get PR number id: pr_number diff --git a/nf_core/pipeline-template/CITATIONS.md b/nf_core/pipeline-template/CITATIONS.md index e84b92989..323c681a0 100644 --- a/nf_core/pipeline-template/CITATIONS.md +++ b/nf_core/pipeline-template/CITATIONS.md @@ -12,7 +12,7 @@ * [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) -* [MultiQC](https://www.ncbi.nlm.nih.gov/pubmed/27312411/) +* [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) > Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. 
## Software packaging/containerisation tools diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index c502f2ea5..aa1e007ad 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -1,11 +1,11 @@ -# ![{{ name }}](docs/images/{{ name_noslash }}_logo.png) +# ![{{ name }}](docs/images/{{ name_noslash }}_logo_light.png#gh-light-mode-only) ![{{ name }}](docs/images/{{ name_noslash }}_logo_dark.png#gh-dark-mode-only) [![GitHub Actions CI Status](https://github.com/{{ name }}/workflows/nf-core%20CI/badge.svg)](https://github.com/{{ name }}/actions?query=workflow%3A%22nf-core+CI%22) [![GitHub Actions Linting Status](https://github.com/{{ name }}/workflows/nf-core%20linting/badge.svg)](https://github.com/{{ name }}/actions?query=workflow%3A%22nf-core+linting%22) [![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/{{ short_name }}/results) [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.04.0-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) @@ -33,18 +33,21 @@ On release, automated continuous integration tests run the pipeline on a full-si ## Quick Start -1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.04.0`) +1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.10.3`) 2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_ 3. Download the pipeline and test it on a minimal dataset with a single command: ```console - nextflow run {{ name }} -profile test,<docker/singularity/podman/shifter/charliecloud/conda/institute> + nextflow run {{ name }} -profile test,YOURPROFILE ``` + Note that some form of configuration will be needed so that Nextflow knows how to fetch the required software. This is usually done in the form of a config profile (`YOURPROFILE` in the example command above). You can chain multiple config profiles in a comma-separated string. + + > * The pipeline comes with config profiles called `docker`, `singularity`, `podman`, `shifter`, `charliecloud` and `conda` which instruct the pipeline to use the named tool for software management. For example, `-profile test,docker`. > * Please check [nf-core/configs](https://github.com/nf-core/configs#documentation) to see if a custom config file to run nf-core pipelines already exists for your Institute. If so, you can simply use `-profile <institute>` in your command. 
This will enable either `docker` or `singularity` and set the appropriate execution settings for your local compute environment. - > * If you are using `singularity` then the pipeline will auto-detect this and attempt to download the Singularity images directly as opposed to performing a conversion from Docker images. If you are persistently observing issues downloading Singularity images directly due to timeout or network issues then please use the `--singularity_pull_docker_container` parameter to pull and convert the Docker image instead. Alternatively, it is highly recommended to use the [`nf-core download`](https://nf-co.re/tools/#downloading-pipelines-for-offline-use) command to pre-download all of the required containers before running the pipeline and to set the [`NXF_SINGULARITY_CACHEDIR` or `singularity.cacheDir`](https://www.nextflow.io/docs/latest/singularity.html?#singularity-docker-hub) Nextflow options to be able to store and re-use the images from a central location for future pipeline runs. + > * If you are using `singularity` and are persistently observing issues downloading Singularity images directly due to timeout or network issues, then you can use the `--singularity_pull_docker_container` parameter to pull and convert the Docker image instead. Alternatively, you can use the [`nf-core download`](https://nf-co.re/tools/#downloading-pipelines-for-offline-use) command to download images first, before running the pipeline. Setting the [`NXF_SINGULARITY_CACHEDIR` or `singularity.cacheDir`](https://www.nextflow.io/docs/latest/singularity.html?#singularity-docker-hub) Nextflow options enables you to store and re-use the images from a central location for future pipeline runs. > * If you are using `conda`, it is highly recommended to use the [`NXF_CONDA_CACHEDIR` or `conda.cacheDir`](https://www.nextflow.io/docs/latest/conda.html) settings to store the environments in a central location for future pipeline runs. 4. Start running your own analysis! diff --git a/nf_core/pipeline-template/assets/multiqc_config.yaml b/nf_core/pipeline-template/assets/multiqc_config.yaml index e3f940c2e..bbc9a14e1 100644 --- a/nf_core/pipeline-template/assets/multiqc_config.yaml +++ b/nf_core/pipeline-template/assets/multiqc_config.yaml @@ -1,7 +1,7 @@ report_comment: > This report has been generated by the {{ name }} analysis pipeline. For information about how to interpret these results, please see the - documentation. + documentation. report_section_order: software_versions: order: -1000 diff --git a/nf_core/pipeline-template/assets/sendmail_template.txt b/nf_core/pipeline-template/assets/sendmail_template.txt index 1abf5b0ab..3e59cd2d6 100644 --- a/nf_core/pipeline-template/assets/sendmail_template.txt +++ b/nf_core/pipeline-template/assets/sendmail_template.txt @@ -12,9 +12,9 @@ $email_html Content-Type: image/png;name="{{ name_noslash }}_logo.png" Content-Transfer-Encoding: base64 Content-ID: -Content-Disposition: inline; filename="{{ name_noslash }}_logo.png" +Content-Disposition: inline; filename="{{ name_noslash }}_logo_light.png" -<% out << new File("$projectDir/assets/{{ name_noslash }}_logo.png"). +<% out << new File("$projectDir/assets/{{ name_noslash }}_logo_light.png"). bytes. encodeBase64(). toString(). 
diff --git a/nf_core/pipeline-template/bin/scrape_software_versions.py b/nf_core/pipeline-template/bin/scrape_software_versions.py deleted file mode 100755 index 241dc8b7a..000000000 --- a/nf_core/pipeline-template/bin/scrape_software_versions.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python -from __future__ import print_function -import os - -results = {} -version_files = [x for x in os.listdir(".") if x.endswith(".version.txt")] -for version_file in version_files: - - software = version_file.replace(".version.txt", "") - if software == "pipeline": - software = "{{ name }}" - - with open(version_file) as fin: - version = fin.read().strip() - results[software] = version - -# Dump to YAML -print( - """ -id: 'software_versions' -section_name: '{{ name }} Software Versions' -section_href: 'https://github.com/{{ name }}' -plot_type: 'html' -description: 'are collected at run time from the software output.' -data: | -
-""" -) -for k, v in sorted(results.items()): - print("
{}
{}
".format(k, v)) -print("
") - -# Write out as tsv file: -with open("software_versions.tsv", "w") as f: - for k, v in sorted(results.items()): - f.write("{}\t{}\n".format(k, v)) diff --git a/nf_core/pipeline-template/conf/base.config b/nf_core/pipeline-template/conf/base.config index 3fb7b48fc..e0557b9e9 100644 --- a/nf_core/pipeline-template/conf/base.config +++ b/nf_core/pipeline-template/conf/base.config @@ -54,4 +54,7 @@ process { errorStrategy = 'retry' maxRetries = 2 } + withName:CUSTOM_DUMPSOFTWAREVERSIONS { + cache = false + } } diff --git a/nf_core/pipeline-template/conf/modules.config b/nf_core/pipeline-template/conf/modules.config index 0b1bfdec2..a0506a4db 100644 --- a/nf_core/pipeline-template/conf/modules.config +++ b/nf_core/pipeline-template/conf/modules.config @@ -1,32 +1,41 @@ /* ======================================================================================== - Config file for defining DSL2 per module options + Config file for defining DSL2 per module options and publishing paths ======================================================================================== Available keys to override module options: - args = Additional arguments appended to command in module. - args2 = Second set of arguments appended to command in module (multi-tool modules). - args3 = Third set of arguments appended to command in module (multi-tool modules). - publish_dir = Directory to publish results. - publish_by_meta = Groovy list of keys available in meta map to append as directories to "publish_dir" path - If publish_by_meta = true - Value of ${meta['id']} is appended as a directory to "publish_dir" path - If publish_by_meta = ['id', 'custompath'] - If "id" is in meta map and "custompath" isn't then "${meta['id']}/custompath/" - is appended as a directory to "publish_dir" path - If publish_by_meta = false / null - No directories are appended to "publish_dir" path - publish_files = Groovy map where key = "file_ext" and value = "directory" to publish results for that file extension - The value of "directory" is appended to the standard "publish_dir" path as defined above. - If publish_files = null (unspecified) - All files are published. - If publish_files = false - No files are published. - suffix = File name suffix for output files. + ext.args = Additional arguments appended to command in module. + ext.args2 = Second set of arguments appended to command in module (multi-tool modules). + ext.args3 = Third set of arguments appended to command in module (multi-tool modules). + ext.prefix = File name prefix for output files. ---------------------------------------------------------------------------------------- */ -params { - modules { - 'fastqc' { - args = "--quiet" - } - 'multiqc' { - args = "" - } +process { + + publishDir = [ + path: { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }, + mode: 'copy', + saveAs: { filename -> filename.equals('versions.yml') ? null : filename } + ] + + withName: SAMPLESHEET_CHECK { + publishDir = [ + path: { "${params.outdir}/pipeline_info" }, + mode: 'copy', + saveAs: { filename -> filename.equals('versions.yml') ? 
null : filename } + ] + } + + withName: FASTQC { + ext.args = '--quiet' } + + withName: CUSTOM_DUMPSOFTWAREVERSIONS { + publishDir = [ + path: { "${params.outdir}/pipeline_info" }, + mode: 'copy', + pattern: '*_versions.yml' + ] + } + } diff --git a/nf_core/pipeline-template/conf/test.config b/nf_core/pipeline-template/conf/test.config index e4419fd0d..eb2a725c8 100644 --- a/nf_core/pipeline-template/conf/test.config +++ b/nf_core/pipeline-template/conf/test.config @@ -16,8 +16,8 @@ params { // Limit resources so that this can run on GitHub Actions max_cpus = 2 - max_memory = 6.GB - max_time = 6.h + max_memory = '6.GB' + max_time = '6.h' // Input data // TODO nf-core: Specify the paths to your test data on nf-core/test-datasets diff --git a/nf_core/pipeline-template/docs/output.md b/nf_core/pipeline-template/docs/output.md index 9646e1229..4ef9a4ea0 100644 --- a/nf_core/pipeline-template/docs/output.md +++ b/nf_core/pipeline-template/docs/output.md @@ -60,7 +60,7 @@ Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQ * `pipeline_info/` * Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`, `execution_trace.txt` and `pipeline_dag.dot`/`pipeline_dag.svg`. - * Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.tsv`. + * Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameters are used when running the pipeline. * Reformatted samplesheet files used as input to the pipeline: `samplesheet.valid.csv`. diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index b4a708d8e..485af3af4 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -181,42 +181,6 @@ process { > **NB:** We specify just the process name i.e. `STAR_ALIGN` in the config file and not the full task name string that is printed to screen in the error message or on the terminal whilst the pipeline is running i.e. `RNASEQ:ALIGN_STAR:STAR_ALIGN`. You may get a warning suggesting that the process selector isn't recognised but you can ignore that if the process name has been specified correctly. This is something that needs to be fixed upstream in core Nextflow. -### Tool-specific options - -For the ultimate flexibility, we have implemented and are using Nextflow DSL2 modules in a way where it is possible for both developers and users to change tool-specific command-line arguments (e.g. providing an additional command-line argument to the `STAR_ALIGN` process) as well as publishing options (e.g. saving files produced by the `STAR_ALIGN` process that aren't saved by default by the pipeline). In the majority of instances, as a user you won't have to change the default options set by the pipeline developer(s), however, there may be edge cases where creating a simple custom config file can improve the behaviour of the pipeline if for example it is failing due to a weird error that requires setting a tool-specific parameter to deal with smaller / larger genomes.
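For comparison with the removed documentation that follows, the `conf/modules.config` rewrite above replaces the old `params.modules` map with Nextflow's `ext` mechanism: tool arguments become `ext.args` values attached to process selectors, and publishing is plain `publishDir`. A minimal sketch of a user-side override under the new system (the process name and flags are illustrative only, echoing the STAR example below):

```nextflow
// custom.config – hypothetical override using the new ext.args system.
// Values set here replace the pipeline's defaults for that process, so
// repeat any default arguments you still want to keep.
process {
    withName: 'STAR_ALIGN' {
        ext.args = '--quantMode TranscriptomeSAM --outFilterMismatchNmax 16'
    }
}
```

The removed text below walks through how the old `options.args`/`addParams` chain achieved the same thing.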
- -The command-line arguments passed to STAR in the `STAR_ALIGN` module are a combination of: - -* Mandatory arguments or those that need to be evaluated within the scope of the module, as supplied in the [`script`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/modules/nf-core/software/star/align/main.nf#L49-L55) section of the module file. - -* An [`options.args`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/modules/nf-core/software/star/align/main.nf#L56) string of non-mandatory parameters that is set to be empty by default in the module but can be overwritten when including the module in the sub-workflow / workflow context via the `addParams` Nextflow option. - -The nf-core/rnaseq pipeline has a sub-workflow (see [terminology](https://github.com/nf-core/modules#terminology)) specifically to align reads with STAR and to sort, index and generate some basic stats on the resulting BAM files using SAMtools. At the top of this file we import the `STAR_ALIGN` module via the Nextflow [`include`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/subworkflows/nf-core/align_star.nf#L10) keyword and by default the options passed to the module via the `addParams` option are set as an empty Groovy map [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/subworkflows/nf-core/align_star.nf#L5); this in turn means `options.args` will be set to empty by default in the module file too. This is an intentional design choice and allows us to implement well-written sub-workflows composed of a chain of tools that by default run with the bare minimum parameter set for any given tool in order to make it much easier to share across pipelines and to provide the flexibility for users and developers to customise any non-mandatory arguments. - -When including the sub-workflow above in the main pipeline workflow we use the same `include` statement, however, we now have the ability to overwrite options for each of the tools in the sub-workflow including the [`align_options`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/workflows/rnaseq.nf#L225) variable that will be used specifically to overwrite the optional arguments passed to the `STAR_ALIGN` module. In this case, the options to be provided to `STAR_ALIGN` have been assigned sensible defaults by the developer(s) in the pipeline's [`modules.config`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/modules.config#L70-L74) and can be accessed and customised in the [workflow context](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/workflows/rnaseq.nf#L201-L204) too before eventually passing them to the sub-workflow as a Groovy map called `star_align_options`. These options will then be propagated from `workflow -> sub-workflow -> module`. - -As mentioned at the beginning of this section it may also be necessary for users to overwrite the options passed to modules to be able to customise specific aspects of the way in which a particular tool is executed by the pipeline. Given that all of the default module options are stored in the pipeline's `modules.config` as a [`params` variable](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/modules.config#L24-L25) it is also possible to overwrite any of these options via a custom config file. 
- -Say for example we want to append an additional, non-mandatory parameter (i.e. `--outFilterMismatchNmax 16`) to the arguments passed to the `STAR_ALIGN` module. Firstly, we need to copy across the default `args` specified in the [`modules.config`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/modules.config#L71) and create a custom config file that is a composite of the default `args` as well as the additional options you would like to provide. This is very important because Nextflow will overwrite the default value of `args` with the one you provide via the custom config. - -As you will see in the example below, we have: - -* appended `--outFilterMismatchNmax 16` to the default `args` used by the module. -* changed the default `publish_dir` value to where the files will eventually be published in the main results directory. -* appended `'bam':''` to the default value of `publish_files` so that the BAM files generated by the process will also be saved in the top-level results directory for the module. Note: `'out':'log'` means any file/directory ending in `out` will now be saved in a separate directory called `my_star_directory/log/`. - -```nextflow -params { - modules { - 'star_align' { - args = "--quantMode TranscriptomeSAM --twopassMode Basic --outSAMtype BAM Unsorted --readFilesCommand zcat --runRNGseed 0 --outFilterMultimapNmax 20 --alignSJDBoverhangMin 1 --outSAMattributes NH HI AS NM MD --quantTranscriptomeBan Singleend --outFilterMismatchNmax 16" - publish_dir = "my_star_directory" - publish_files = ['out':'log', 'tab':'log', 'bam':''] - } - } -} -``` - ### Updating containers The [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html) implementation of this pipeline uses one container per process which makes it much easier to maintain and update software dependencies. If for some reason you need to use a different version of a particular tool with the pipeline then you just need to identify the `process` name and override the Nextflow `container` definition for that process using the `withName` declaration. For example, in the [nf-core/viralrecon](https://nf-co.re/viralrecon) pipeline a tool called [Pangolin](https://github.com/cov-lineages/pangolin) has been used during the COVID-19 pandemic to assign lineages to SARS-CoV-2 genome sequenced samples. Given that the lineage assignments change quite frequently it doesn't make sense to re-release the nf-core/viralrecon pipeline every time a new version of Pangolin has been released. However, you can override the default container used by the pipeline by creating a custom config file and passing it as a command-line argument via `-c custom.config`. diff --git a/nf_core/pipeline-template/lib/NfcoreSchema.groovy b/nf_core/pipeline-template/lib/NfcoreSchema.groovy index 8d6920dd6..40ab65f20 100755 --- a/nf_core/pipeline-template/lib/NfcoreSchema.groovy +++ b/nf_core/pipeline-template/lib/NfcoreSchema.groovy @@ -105,9 +105,13 @@ class NfcoreSchema { // Collect expected parameters from the schema def expectedParams = [] + def enums = [:] for (group in schemaParams) { for (p in group.value['properties']) { expectedParams.push(p.key) + if (group.value['properties'][p.key].containsKey('enum')) { + enums[p.key] = group.value['properties'][p.key]['enum'] + } } } @@ -155,7 +159,7 @@ class NfcoreSchema { println '' log.error 'ERROR: Validation of pipeline parameters failed!' 
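To make the container override described under `### Updating containers` above concrete, here is a minimal sketch of such a `custom.config`. The Pangolin container tag is illustrative; look up the current tag on [Biocontainers](https://quay.io/organization/biocontainers) rather than copying it verbatim:

```nextflow
// custom.config – hypothetical container override for one process.
process {
    withName: 'PANGOLIN' {
        container = 'quay.io/biocontainers/pangolin:3.0.5--pyhdfd78af_0'  // assumed tag
    }
}
```

This would then be picked up with `nextflow run nf-core/viralrecon -profile docker -c custom.config`, leaving every other process on its released container.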
JSONObject exceptionJSON = e.toJSON() - printExceptions(exceptionJSON, params_json, log) + printExceptions(exceptionJSON, params_json, log, enums) println '' has_error = true } @@ -202,7 +206,7 @@ class NfcoreSchema { } def type = '[' + group_params.get(param).type + ']' def description = group_params.get(param).description - def defaultValue = group_params.get(param).default ? " [default: " + group_params.get(param).default.toString() + "]" : '' + def defaultValue = group_params.get(param).default != null ? " [default: " + group_params.get(param).default.toString() + "]" : '' def description_default = description + colors.dim + defaultValue + colors.reset // Wrap long description texts // Loosely based on https://dzone.com/articles/groovy-plain-text-word-wrap @@ -260,13 +264,12 @@ class NfcoreSchema { // Get pipeline parameters defined in JSON Schema def Map params_summary = [:] - def blacklist = ['hostnames'] def params_map = paramsLoad(getSchemaPath(workflow, schema_filename=schema_filename)) for (group in params_map.keySet()) { def sub_params = new LinkedHashMap() def group_params = params_map.get(group) // This gets the parameters of that particular group for (param in group_params.keySet()) { - if (params.containsKey(param) && !blacklist.contains(param)) { + if (params.containsKey(param)) { def params_value = params.get(param) def schema_value = group_params.get(param).default def param_type = group_params.get(param).type @@ -330,7 +333,7 @@ class NfcoreSchema { // // Loop over nested exceptions and print the causingException // - private static void printExceptions(ex_json, params_json, log) { + private static void printExceptions(ex_json, params_json, log, enums, limit=5) { def causingExceptions = ex_json['causingExceptions'] if (causingExceptions.length() == 0) { def m = ex_json['message'] =~ /required key \[([^\]]+)\] not found/ @@ -346,11 +349,20 @@ class NfcoreSchema { else { def param = ex_json['pointerToViolation'] - ~/^#\// def param_val = params_json[param].toString() - log.error "* --${param}: ${ex_json['message']} (${param_val})" + if (enums.containsKey(param)) { + def error_msg = "* --${param}: '${param_val}' is not a valid choice (Available choices" + if (enums[param].size() > limit) { + log.error "${error_msg} (${limit} of ${enums[param].size()}): ${enums[param][0..limit-1].join(', ')}, ... 
)" + } else { + log.error "${error_msg}: ${enums[param].join(', ')})" + } + } else { + log.error "* --${param}: ${ex_json['message']} (${param_val})" + } } } for (ex in causingExceptions) { - printExceptions(ex, params_json, log) + printExceptions(ex, params_json, log, enums) } } diff --git a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy index 44551e0a3..2fc0a9b9b 100755 --- a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy +++ b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy @@ -19,27 +19,16 @@ class NfcoreTemplate { } // - // Check params.hostnames + // Warn if a -profile or Nextflow config has not been provided to run the pipeline // - public static void hostName(workflow, params, log) { - Map colors = logColours(params.monochrome_logs) - if (params.hostnames) { - try { - def hostname = "hostname".execute().text.trim() - params.hostnames.each { prof, hnames -> - hnames.each { hname -> - if (hostname.contains(hname) && !workflow.profile.contains(prof)) { - log.info "=${colors.yellow}====================================================${colors.reset}=\n" + - "${colors.yellow}WARN: You are running with `-profile $workflow.profile`\n" + - " but your machine hostname is ${colors.white}'$hostname'${colors.reset}.\n" + - " ${colors.yellow_bold}Please use `-profile $prof${colors.reset}`\n" + - "=${colors.yellow}====================================================${colors.reset}=" - } - } - } - } catch (Exception e) { - log.warn "[$workflow.manifest.name] Could not determine 'hostname' - skipping check. Reason: ${e.message}." - } + public static void checkConfigProvided(workflow, log) { + if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) { + log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" + + "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + + " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + + " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + + " (3) Using your own local custom config e.g. 
`-c /path/to/your/custom.config`\n\n" + + "Please refer to the quick start section and usage docs for the pipeline.\n " } } @@ -168,7 +157,6 @@ class NfcoreTemplate { log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" } } else { - hostName(workflow, params, log) log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" } } diff --git a/nf_core/pipeline-template/lib/Utils.groovy b/nf_core/pipeline-template/lib/Utils.groovy index 18173e985..1b88aec0e 100755 --- a/nf_core/pipeline-template/lib/Utils.groovy +++ b/nf_core/pipeline-template/lib/Utils.groovy @@ -37,11 +37,4 @@ class Utils { "===================================================================================" } } - - // - // Join module args with appropriate spacing - // - public static String joinModuleArgs(args_list) { - return ' ' + args_list.join(' ') - } } diff --git a/nf_core/pipeline-template/lib/WorkflowMain.groovy b/nf_core/pipeline-template/lib/WorkflowMain.groovy index 597129cb5..3181f592c 100755 --- a/nf_core/pipeline-template/lib/WorkflowMain.groovy +++ b/nf_core/pipeline-template/lib/WorkflowMain.groovy @@ -61,6 +61,9 @@ class WorkflowMain { // Print parameter summary log to screen log.info paramsSummaryLog(workflow, params, log) + // Check that a -profile or Nextflow config has been provided to run the pipeline + NfcoreTemplate.checkConfigProvided(workflow, log) + // Check that conda channels are set-up correctly if (params.enable_conda) { Utils.checkCondaChannels(log) @@ -69,9 +72,6 @@ class WorkflowMain { // Check AWS batch settings NfcoreTemplate.awsBatch(workflow, params) - // Check the hostnames against configured profiles - NfcoreTemplate.hostName(workflow, params, log) - // Check input has been provided if (!params.input) { log.error "Please provide an input samplesheet to the pipeline e.g. 
'--input samplesheet.csv'" diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 3cb20a2e0..853ad6a1e 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -3,12 +3,15 @@ "homePage": "https://github.com/{{ name }}", "repos": { "nf-core/modules": { + "custom/dumpsoftwareversions": { + "git_sha": "20d8250d9f39ddb05dfb437603aaf99b5c0b2b41" + }, "fastqc": { - "git_sha": "e937c7950af70930d1f34bb961403d9d2aa81c7d" + "git_sha": "9d0cad583b9a71a6509b754fdf589cbfbed08961" }, "multiqc": { - "git_sha": "e937c7950af70930d1f34bb961403d9d2aa81c7d" + "git_sha": "20d8250d9f39ddb05dfb437603aaf99b5c0b2b41" } } } -} +} \ No newline at end of file diff --git a/nf_core/pipeline-template/modules/local/functions.nf b/nf_core/pipeline-template/modules/local/functions.nf deleted file mode 100644 index da9da093d..000000000 --- a/nf_core/pipeline-template/modules/local/functions.nf +++ /dev/null @@ -1,68 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } - } -} diff --git a/nf_core/pipeline-template/modules/local/get_software_versions.nf b/nf_core/pipeline-template/modules/local/get_software_versions.nf deleted file mode 100644 index 8af8af173..000000000 --- a/nf_core/pipeline-template/modules/local/get_software_versions.nf +++ /dev/null @@ -1,33 +0,0 @@ -// Import generic module functions -include { saveFiles } from './functions' - -params.options = [:] - -process GET_SOFTWARE_VERSIONS { - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'pipeline_info', meta:[:], publish_by_meta:[]) } - - conda (params.enable_conda ? "conda-forge::python=3.8.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/python:3.8.3" - } else { - container "quay.io/biocontainers/python:3.8.3" - } - - cache false - - input: - path versions - - output: - path "software_versions.tsv" , emit: tsv - path 'software_versions_mqc.yaml', emit: yaml - - script: // This script is bundled with the pipeline, in {{ name }}/bin/ - """ - echo $workflow.manifest.version > pipeline.version.txt - echo $workflow.nextflow.version > nextflow.version.txt - scrape_software_versions.py &> software_versions_mqc.yaml - """ -} diff --git a/nf_core/pipeline-template/modules/local/samplesheet_check.nf b/nf_core/pipeline-template/modules/local/samplesheet_check.nf index 9bada69b9..ccfdfecc2 100644 --- a/nf_core/pipeline-template/modules/local/samplesheet_check.nf +++ b/nf_core/pipeline-template/modules/local/samplesheet_check.nf @@ -1,31 +1,27 @@ -// Import generic module functions -include { saveFiles } from './functions' - -params.options = [:] - process SAMPLESHEET_CHECK { tag "$samplesheet" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'pipeline_info', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "conda-forge::python=3.8.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/python:3.8.3" - } else { - container "quay.io/biocontainers/python:3.8.3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/python:3.8.3' : + 'quay.io/biocontainers/python:3.8.3' }" input: path samplesheet output: - path '*.csv' + path '*.csv' , emit: csv + path "versions.yml", emit: versions script: // This script is bundled with the pipeline, in {{ name }}/bin/ """ check_samplesheet.py \\ $samplesheet \\ samplesheet.valid.csv + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + python: \$(python --version | sed 's/Python //g') + END_VERSIONS """ } diff --git a/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf b/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf new file mode 100644 index 000000000..934bb4672 --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf @@ -0,0 +1,21 @@ +process CUSTOM_DUMPSOFTWAREVERSIONS { + label 'process_low' + + // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container + conda (params.enable_conda ? "bioconda::multiqc=1.11" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0' }" + + input: + path versions + + output: + path "software_versions.yml" , emit: yml + path "software_versions_mqc.yml", emit: mqc_yml + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + template 'dumpsoftwareversions.py' +} diff --git a/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml b/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml new file mode 100644 index 000000000..5b5b8a602 --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml @@ -0,0 +1,34 @@ +name: custom_dumpsoftwareversions +description: Custom module used to dump software versions within the nf-core pipeline template +keywords: + - custom + - version +tools: + - custom: + description: Custom module used to dump software versions within the nf-core pipeline template + homepage: https://github.com/nf-core/tools + documentation: https://github.com/nf-core/tools + licence: ['MIT'] +input: + - versions: + type: file + description: YML file containing software versions + pattern: "*.yml" + +output: + - yml: + type: file + description: Standard YML file containing software versions + pattern: "software_versions.yml" + - mqc_yml: + type: file + description: MultiQC custom content YML file containing software versions + pattern: "software_versions_mqc.yml" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@drpatelh" + - "@grst" diff --git a/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py new file mode 100644 index 000000000..d13903925 --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python + +import yaml +import platform +from textwrap import dedent + + +def _make_versions_html(versions): + html = [ + dedent( + """\\ + + + + + + + + + + """ + ) + ] + for process, tmp_versions in sorted(versions.items()): + html.append("") + for i, (tool, 
version) in enumerate(sorted(tmp_versions.items())): + html.append( + dedent( + f"""\\ + <tr> + <td><samp>{process if (i == 0) else ''}</samp></td> + <td><samp>{tool}</samp></td> + <td><samp>{version}</samp></td> + </tr> + """ + ) + ) + html.append("</tbody>") + html.append("</table>
") + return "\\n".join(html) + + +versions_this_module = {} +versions_this_module["${task.process}"] = { + "python": platform.python_version(), + "yaml": yaml.__version__, +} + +with open("$versions") as f: + versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module + +# aggregate versions by the module name (derived from fully-qualified process name) +versions_by_module = {} +for process, process_versions in versions_by_process.items(): + module = process.split(":")[-1] + try: + assert versions_by_module[module] == process_versions, ( + "We assume that software versions are the same between all modules. " + "If you see this error-message it means you discovered an edge-case " + "and should open an issue in nf-core/tools. " + ) + except KeyError: + versions_by_module[module] = process_versions + +versions_by_module["Workflow"] = { + "Nextflow": "$workflow.nextflow.version", + "$workflow.manifest.name": "$workflow.manifest.version", +} + +versions_mqc = { + "id": "software_versions", + "section_name": "${workflow.manifest.name} Software Versions", + "section_href": "https://github.com/${workflow.manifest.name}", + "plot_type": "html", + "description": "are collected at run time from the software output.", + "data": _make_versions_html(versions_by_module), +} + +with open("software_versions.yml", "w") as f: + yaml.dump(versions_by_module, f, default_flow_style=False) +with open("software_versions_mqc.yml", "w") as f: + yaml.dump(versions_mqc, f, default_flow_style=False) + +with open("versions.yml", "w") as f: + yaml.dump(versions_this_module, f, default_flow_style=False) diff --git a/nf_core/pipeline-template/modules/nf-core/modules/fastqc/functions.nf b/nf_core/pipeline-template/modules/nf-core/modules/fastqc/functions.nf deleted file mode 100644 index da9da093d..000000000 --- a/nf_core/pipeline-template/modules/nf-core/modules/fastqc/functions.nf +++ /dev/null @@ -1,68 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } - } -} diff --git a/nf_core/pipeline-template/modules/nf-core/modules/fastqc/main.nf b/nf_core/pipeline-template/modules/nf-core/modules/fastqc/main.nf index 39c327b26..d250eca07 100644 --- a/nf_core/pipeline-template/modules/nf-core/modules/fastqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/modules/fastqc/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTQC { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fastqc=0.11.9" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0" - } else { - container "quay.io/biocontainers/fastqc:0.11.9--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' : + 'quay.io/biocontainers/fastqc:0.11.9--0' }" input: tuple val(meta), path(reads) @@ -24,24 +13,32 @@ process FASTQC { output: tuple val(meta), path("*.html"), emit: html tuple val(meta), path("*.zip") , emit: zip - path "*.version.txt" , emit: version + path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' // Add soft-links to original FastQs for consistent naming in pipeline - def software = getSoftwareName(task.process) - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz - fastqc $options.args --threads $task.cpus ${prefix}.fastq.gz - fastqc --version | sed -e "s/FastQC v//g" > ${software}.version.txt + fastqc $args --threads $task.cpus ${prefix}.fastq.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + END_VERSIONS """ } else { """ [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz [ ! 
-f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz - fastqc $options.args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz - fastqc --version | sed -e "s/FastQC v//g" > ${software}.version.txt + fastqc $args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + END_VERSIONS """ } } diff --git a/nf_core/pipeline-template/modules/nf-core/modules/fastqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/modules/fastqc/meta.yml index 8eb9953dc..b09553a3c 100644 --- a/nf_core/pipeline-template/modules/nf-core/modules/fastqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/modules/fastqc/meta.yml @@ -15,6 +15,7 @@ tools: overrepresented sequences. homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ documentation: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/ + licence: ['GPL-2.0-only'] input: - meta: type: map @@ -40,10 +41,10 @@ output: type: file description: FastQC report archive pattern: "*_{fastqc.zip}" - - version: + - versions: type: file - description: File containing software version - pattern: "*.{version.txt}" + description: File containing software versions + pattern: "versions.yml" authors: - "@drpatelh" - "@grst" diff --git a/nf_core/pipeline-template/modules/nf-core/modules/multiqc/functions.nf b/nf_core/pipeline-template/modules/nf-core/modules/multiqc/functions.nf deleted file mode 100644 index da9da093d..000000000 --- a/nf_core/pipeline-template/modules/nf-core/modules/multiqc/functions.nf +++ /dev/null @@ -1,68 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } - } -} diff --git a/nf_core/pipeline-template/modules/nf-core/modules/multiqc/main.nf b/nf_core/pipeline-template/modules/nf-core/modules/multiqc/main.nf index da7808002..3dceb162a 100644 --- a/nf_core/pipeline-template/modules/nf-core/modules/multiqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/modules/multiqc/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MULTIQC { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::multiqc=1.10.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/multiqc:1.10.1--py_0" - } else { - container "quay.io/biocontainers/multiqc:1.10.1--py_0" - } + conda (params.enable_conda ? 'bioconda::multiqc=1.11' : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0' }" input: path multiqc_files @@ -24,12 +13,16 @@ process MULTIQC { path "*multiqc_report.html", emit: report path "*_data" , emit: data path "*_plots" , optional:true, emit: plots - path "*.version.txt" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) + def args = task.ext.args ?: '' """ - multiqc -f $options.args . - multiqc --version | sed -e "s/multiqc, version //g" > ${software}.version.txt + multiqc -f $args . + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) + END_VERSIONS """ } diff --git a/nf_core/pipeline-template/modules/nf-core/modules/multiqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/modules/multiqc/meta.yml index 532a8bb1e..63c75a450 100644 --- a/nf_core/pipeline-template/modules/nf-core/modules/multiqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/modules/multiqc/meta.yml @@ -11,6 +11,7 @@ tools: It's a general use tool, perfect for summarising the output from numerous bioinformatics tools. 
homepage: https://multiqc.info/ documentation: https://multiqc.info/docs/ + licence: ['GPL-3.0-or-later'] input: - multiqc_files: type: file @@ -29,10 +30,10 @@ output: type: file description: Plots created by MultiQC pattern: "*_data" - - version: + - versions: type: file - description: File containing software version - pattern: "*.{version.txt}" + description: File containing software versions + pattern: "versions.yml" authors: - "@abhi18av" - "@bunop" diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 3f23f45b3..d5e3946ca 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -26,7 +26,6 @@ params { // Boilerplate options outdir = './results' tracedir = "${params.outdir}/pipeline_info" - publish_dir_mode = 'copy' email = null email_on_fail = null plaintext_email = false @@ -34,14 +33,12 @@ params { help = false validate_params = true show_hidden_params = false - schema_ignore_params = 'genomes,modules' + schema_ignore_params = 'genomes' enable_conda = false - singularity_pull_docker_container = false // Config options custom_config_version = 'master' custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" - hostnames = [:] config_profile_description = null config_profile_contact = null config_profile_url = null @@ -58,9 +55,6 @@ params { // Load base.config by default for all pipelines includeConfig 'conf/base.config' -// Load modules.config for DSL2 module specific options -includeConfig 'conf/modules.config' - // Load nf-core custom profiles from different Institutions try { includeConfig "${params.custom_config_base}/nfcore_custom.config" @@ -68,13 +62,6 @@ try { System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config") } -// Load igenomes.config if required -if (!params.igenomes_ignore) { - includeConfig 'conf/igenomes.config' -} else { - params.genomes = [:] -} - profiles { debug { process.beforeScript = 'echo $HOSTNAME' } conda { @@ -126,11 +113,22 @@ profiles { test_full { includeConfig 'conf/test_full.config' } } +// Load igenomes.config if required +if (!params.igenomes_ignore) { + includeConfig 'conf/igenomes.config' +} else { + params.genomes = [:] +} + // Export these variables to prevent local Python/R libraries from conflicting with those in the container +// The JULIA depot path has been adjusted to a fixed path `/usr/local/share/julia` that needs to be used for packages in the container. +// See https://apeltzer.github.io/post/03-julia-lang-nextflow/ for details on that. Once we have a common agreement on where to keep Julia packages, this is adjustable. 
+ env { PYTHONNOUSERSITE = 1 R_PROFILE_USER = "/.Rprofile" R_ENVIRON_USER = "/.Renviron" + JULIA_DEPOT_PATH = "/usr/local/share/julia" } // Capture exit codes from upstream processes when piping @@ -160,10 +158,13 @@ manifest { homePage = 'https://github.com/{{ name }}' description = '{{ description }}' mainScript = 'main.nf' - nextflowVersion = '!>=21.04.0' + nextflowVersion = '!>=21.10.3' version = '{{ version }}' } +// Load modules.config for DSL2 module specific options +includeConfig 'conf/modules.config' + // Function to ensure that resource requirements don't go beyond // a maximum limit def check_max(obj, type) { diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index aacda0004..9ccc78f36 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -104,12 +104,6 @@ "help_text": "If you're running offline, Nextflow will not be able to fetch the institutional config files from the internet. If you don't need them, then this is not a problem. If you do need them, you should download the files from the repo and tell Nextflow where to find them with this parameter.", "fa_icon": "fas fa-users-cog" }, - "hostnames": { - "type": "string", - "description": "Institutional configs hostname.", - "hidden": true, - "fa_icon": "fas fa-users-cog" - }, "config_profile_name": { "type": "string", "description": "Institutional config name.", @@ -184,22 +178,6 @@ "fa_icon": "fas fa-question-circle", "hidden": true }, - "publish_dir_mode": { - "type": "string", - "default": "copy", - "description": "Method used to save pipeline results to output directory.", - "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.", - "fa_icon": "fas fa-copy", - "enum": [ - "symlink", - "rellink", - "link", - "copy", - "copyNoFollow", - "move" - ], - "hidden": true - }, "email_on_fail": { "type": "string", "description": "Email address for completion summary, only when pipeline fails.", @@ -260,13 +238,6 @@ "description": "Run this workflow with Conda. You can also use '-profile conda' instead of providing this parameter.", "hidden": true, "fa_icon": "fas fa-bacon" - }, - "singularity_pull_docker_container": { - "type": "boolean", - "description": "Instead of directly downloading Singularity images for use with Singularity, force the workflow to pull and convert Docker containers instead.", - "hidden": true, - "fa_icon": "fas fa-toolbox", - "help_text": "This may be useful for example if you are unable to directly pull Singularity containers to run the pipeline due to http/https proxy issues." 
} } } diff --git a/nf_core/pipeline-template/subworkflows/local/input_check.nf b/nf_core/pipeline-template/subworkflows/local/input_check.nf index b664bc8ca..cddcbb3ce 100644 --- a/nf_core/pipeline-template/subworkflows/local/input_check.nf +++ b/nf_core/pipeline-template/subworkflows/local/input_check.nf @@ -2,9 +2,7 @@ // Check input samplesheet and get read channels // -params.options = [:] - -include { SAMPLESHEET_CHECK } from '../../modules/local/samplesheet_check' addParams( options: params.options ) +include { SAMPLESHEET_CHECK } from '../../modules/local/samplesheet_check' workflow INPUT_CHECK { take: @@ -12,12 +10,14 @@ workflow INPUT_CHECK { main: SAMPLESHEET_CHECK ( samplesheet ) + .csv .splitCsv ( header:true, sep:',' ) .map { create_fastq_channels(it) } .set { reads } emit: - reads // channel: [ val(meta), [ reads ] ] + reads // channel: [ val(meta), [ reads ] ] + versions = SAMPLESHEET_CHECK.out.versions // channel: [ versions.yml ] } // Function to get list of [ meta, [ fastq_1, fastq_2 ] ] diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index fe1882b42..460a3cf20 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -32,18 +32,10 @@ ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath(params.multi ======================================================================================== */ -// Don't overwrite global params.modules, create a copy instead and use that within the main script. -def modules = params.modules.clone() - -// -// MODULE: Local to the pipeline -// -include { GET_SOFTWARE_VERSIONS } from '../modules/local/get_software_versions' addParams( options: [publish_files : ['tsv':'']] ) - // // SUBWORKFLOW: Consisting of a mix of local and nf-core/modules // -include { INPUT_CHECK } from '../subworkflows/local/input_check' addParams( options: [:] ) +include { INPUT_CHECK } from '../subworkflows/local/input_check' /* ======================================================================================== @@ -51,14 +43,12 @@ include { INPUT_CHECK } from '../subworkflows/local/input_check' addParams( opti ======================================================================================== */ -def multiqc_options = modules['multiqc'] -multiqc_options.args += params.multiqc_title ? 
Utils.joinModuleArgs(["--title \"$params.multiqc_title\""]) : '' - // // MODULE: Installed directly from nf-core/modules // -include { FASTQC } from '../modules/nf-core/modules/fastqc/main' addParams( options: modules['fastqc'] ) -include { MULTIQC } from '../modules/nf-core/modules/multiqc/main' addParams( options: multiqc_options ) +include { FASTQC } from '../modules/nf-core/modules/fastqc/main' +include { MULTIQC } from '../modules/nf-core/modules/multiqc/main' +include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/modules/custom/dumpsoftwareversions/main' /* ======================================================================================== @@ -71,7 +61,7 @@ def multiqc_report = [] workflow {{ short_name|upper }} { - ch_software_versions = Channel.empty() + ch_versions = Channel.empty() // // SUBWORKFLOW: Read in samplesheet, validate and stage input files @@ -79,6 +69,7 @@ workflow {{ short_name|upper }} { INPUT_CHECK ( ch_input ) + ch_versions = ch_versions.mix(INPUT_CHECK.out.versions) // // MODULE: Run FastQC @@ -86,21 +77,10 @@ workflow {{ short_name|upper }} { FASTQC ( INPUT_CHECK.out.reads ) - ch_software_versions = ch_software_versions.mix(FASTQC.out.version.first().ifEmpty(null)) + ch_versions = ch_versions.mix(FASTQC.out.versions.first()) - // - // MODULE: Pipeline reporting - // - ch_software_versions - .map { it -> if (it) [ it.baseName, it ] } - .groupTuple() - .map { it[1][0] } - .flatten() - .collect() - .set { ch_software_versions } - - GET_SOFTWARE_VERSIONS ( - ch_software_versions.map { it }.collect() + CUSTOM_DUMPSOFTWAREVERSIONS ( + ch_versions.unique().collectFile(name: 'collated_versions.yml') ) // @@ -113,14 +93,14 @@ workflow {{ short_name|upper }} { ch_multiqc_files = ch_multiqc_files.mix(Channel.from(ch_multiqc_config)) ch_multiqc_files = ch_multiqc_files.mix(ch_multiqc_custom_config.collect().ifEmpty([])) ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) - ch_multiqc_files = ch_multiqc_files.mix(GET_SOFTWARE_VERSIONS.out.yaml.collect()) + ch_multiqc_files = ch_multiqc_files.mix(CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect()) ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}.ifEmpty([])) MULTIQC ( ch_multiqc_files.collect() ) - multiqc_report = MULTIQC.out.report.toList() - ch_software_versions = ch_software_versions.mix(MULTIQC.out.version.ifEmpty(null)) + multiqc_report = MULTIQC.out.report.toList() + ch_versions = ch_versions.mix(MULTIQC.out.versions) } /* diff --git a/nf_core/schema.py b/nf_core/schema.py index fddf6d050..c3825b4ba 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -17,6 +17,7 @@ import webbrowser import yaml import copy +import re import nf_core.list, nf_core.utils @@ -37,6 +38,7 @@ def __init__(self): self.schema_params = [] self.input_params = {} self.pipeline_params = {} + self.invalid_nextflow_config_default_parameters = {} self.pipeline_manifest = {} self.schema_from_scratch = False self.no_prompts = False @@ -81,7 +83,19 @@ def load_lint_schema(self): num_params = self.validate_schema() self.get_schema_defaults() self.validate_default_params() - log.info("[green][✓] Pipeline schema looks valid[/] [dim](found {} params)".format(num_params)) + if len(self.invalid_nextflow_config_default_parameters) > 0: + log.info( + "[red][✗] Invalid default parameters found:\n --{}\n\nNOTE: Use null in config for no default.".format( + "\n --".join( + [ + f"{param}: {msg}" + for param, msg in 
self.invalid_nextflow_config_default_parameters.items() + ] + ) + ) + ) + else: + log.info("[green][✓] Pipeline schema looks valid[/] [dim](found {} params)".format(num_params)) except json.decoder.JSONDecodeError as e: error_msg = "[bold red]Could not parse schema JSON:[/] {}".format(e) log.error(error_msg) @@ -114,7 +128,8 @@ def sanitise_param_default(self, param): # For everything else, an empty string is an empty string if isinstance(param["default"], str) and param["default"].strip() == "": - return "" + param["default"] = "" + return param # Integers if param["type"] == "integer": @@ -206,6 +221,9 @@ def validate_default_params(self): """ Check that all default parameters in the schema are valid Ignores 'required' flag, as required parameters might have no defaults + + Additional check that all parameters have defaults in nextflow.config and that + these are valid and adhere to guidelines """ try: assert self.schema is not None @@ -221,7 +239,89 @@ def validate_default_params(self): log.error("[red][✗] Pipeline schema not found") except jsonschema.exceptions.ValidationError as e: raise AssertionError("Default parameters are invalid: {}".format(e.message)) - log.info("[green][✓] Default parameters look valid") + log.info("[green][✓] Default parameters match schema validation") + + # Make sure every default parameter exists in the nextflow.config and is of correct type + if self.pipeline_params == {}: + self.get_wf_params() + + # Collect parameters to ignore + if "schema_ignore_params" in self.pipeline_params: + params_ignore = self.pipeline_params.get("schema_ignore_params", "").strip("\"'").split(",") + else: + params_ignore = [] + + # Go over group keys + for group_key, group in schema_no_required["definitions"].items(): + group_properties = group.get("properties") + for param in group_properties: + if param in params_ignore: + continue + if param in self.pipeline_params: + self.validate_config_default_parameter(param, group_properties[param], self.pipeline_params[param]) + else: + self.invalid_nextflow_config_default_parameters[param] = "Not in pipeline parameters" + + # Go over ungrouped params if any exist + ungrouped_properties = self.schema.get("properties") + if ungrouped_properties: + for param in ungrouped_properties: + if param in params_ignore: + continue + if param in self.pipeline_params: + self.validate_config_default_parameter( + param, ungrouped_properties[param], self.pipeline_params[param] + ) + else: + self.invalid_nextflow_config_default_parameters[param] = "Not in pipeline parameters" + + def validate_config_default_parameter(self, param, schema_param, config_default): + """ + Assure that default parameters in the nextflow.config are correctly set + by comparing them to their type in the schema + """ + + # If we have a default in the schema, check it matches the config + if "default" in schema_param and ( + (schema_param["type"] == "boolean" and str(config_default).lower() != str(schema_param["default"]).lower()) + and (str(schema_param["default"]) != str(config_default).strip('"').strip("'")) + ): + # Check that we are not deferring the execution of this parameter in the schema default with squiggly brakcets + if schema_param["type"] != "string" or "{" not in schema_param["default"]: + self.invalid_nextflow_config_default_parameters[ + param + ] = f"Schema default (`{schema_param['default']}`) does not match the config default (`{config_default}`)" + return + + # if default is null, we're good + if config_default == "null": + return + + # Check variable types 
in nextflow.config + if schema_param["type"] == "string": + if str(config_default) in ["false", "true", "''"]: + self.invalid_nextflow_config_default_parameters[ + param + ] = f"String should not be set to `{config_default}`" + if schema_param["type"] == "boolean": + if str(config_default) not in ["false", "true"]: + self.invalid_nextflow_config_default_parameters[ + param + ] = f"Booleans should only be true or false, not `{config_default}`" + if schema_param["type"] == "integer": + try: + int(config_default) + except ValueError: + self.invalid_nextflow_config_default_parameters[ + param + ] = f"Does not look like an integer: `{config_default}`" + if schema_param["type"] == "number": + try: + float(config_default) + except ValueError: + self.invalid_nextflow_config_default_parameters[ + param + ] = f"Does not look like a number (float): `{config_default}`" def validate_schema(self, schema=None): """ @@ -536,18 +636,13 @@ def build_schema_param(self, p_val): if p_val == "null": p_val = None - # NB: Only test "True" for booleans, as it is very common to initialise - # an empty param as false when really we expect a string at a later date.. - if p_val == "True": - p_val = True + # Booleans + if p_val == "True" or p_val == "False": + p_val = p_val == "True" # Convert to bool p_type = "boolean" p_schema = {"type": p_type, "default": p_val} - # Assume that false and empty strings shouldn't be a default - if p_val == "false" or p_val == "" or p_val is None: - del p_schema["default"] - return p_schema def launch_web_builder(self):
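
Taken together, these schema.py changes mean that linting no longer stops at JSON Schema validation: every default in the schema is now also cross-checked against the value actually set in nextflow.config, with offending parameters collected rather than aborting on the first failure. A minimal sketch of how the new checks might be driven through the public API (the pipeline path is hypothetical, and the exact call sequence is an assumption based on the methods shown above):

    import nf_core.schema

    schema_obj = nf_core.schema.PipelineSchema()
    schema_obj.get_schema_path("/path/to/pipeline")  # hypothetical pipeline directory
    schema_obj.load_lint_schema()

    # Parameters whose config defaults are missing or mistyped are collected
    # as {param: reason} instead of raising on the first hit:
    for param, msg in schema_obj.invalid_nextflow_config_default_parameters.items():
        print(f"--{param}: {msg}")

Parameters named in the pipeline's schema_ignore_params setting are skipped, as the loops in validate_default_params show.
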
""" + log.debug(f"Got '{wf_path}' as path") + config = dict() cache_fn = None cache_basedir = None @@ -267,12 +270,16 @@ def fetch_wf_config(wf_path): for l in fh: match = re.match(r"^\s*(params\.[a-zA-Z0-9_]+)\s*=", l) if match: - config[match.group(1)] = "false" + config[match.group(1)] = "null" except FileNotFoundError as e: log.debug("Could not open {} to look for parameter declarations - {}".format(main_nf, e)) # If we can, save a cached copy - if cache_path: + # HINT: during testing phase (in test_download, for example) we don't want + # to save configuration copy in $HOME, otherwise the tests/test_download.py::DownloadTest::test_wf_use_local_configs + # will fail after the first attempt. It's better to not save temporary data + # in others folders than tmp when doing tests in general + if cache_path and cache_config: log.debug("Saving config cache: {}".format(cache_path)) with open(cache_path, "w") as fh: json.dump(config, fh, indent=4) @@ -290,7 +297,7 @@ def nextflow_cmd(cmd): raise AssertionError("It looks like Nextflow is not installed. It is required for most nf-core functions.") except subprocess.CalledProcessError as e: raise AssertionError( - f"Command '{cmd}' returned non-zero error code '{e.returncode}':\n[red]> {e.stderr.decode()}" + f"Command '{cmd}' returned non-zero error code '{e.returncode}':\n[red]> {e.stderr.decode()}{e.stdout.decode()}" ) diff --git a/pyproject.toml b/pyproject.toml index 266acbdcb..1bd66769d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,10 @@ +[build-system] +build-backend = 'setuptools.build_meta' +requires = [ + 'setuptools>=40.6.0', + 'wheel' +] + [tool.black] line-length = 120 target_version = ['py36','py37','py38'] @@ -7,3 +14,4 @@ markers = [ "datafiles: load datafiles" ] testpaths = ["tests"] +norecursedirs = [ '.*', 'build', 'dist', '*.egg', 'data', '__pycache__', '.github', 'nf_core', 'docs'] \ No newline at end of file diff --git a/setup.py b/setup.py index 5be47e737..8b910389f 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages -version = "2.1" +version = "2.2" with open("README.md") as f: readme = f.read() @@ -33,7 +33,6 @@ license="MIT", entry_points={"console_scripts": ["nf-core=nf_core.__main__:run_nf_core"]}, install_requires=required, - setup_requires=["twine>=1.11.0", "setuptools>=38.6."], packages=find_packages(exclude=("docs")), include_package_data=True, zip_safe=False, diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/lint/actions_ci.py b/tests/lint/actions_ci.py index 13cdfbbc0..332908997 100644 --- a/tests/lint/actions_ci.py +++ b/tests/lint/actions_ci.py @@ -23,7 +23,7 @@ def test_actions_ci_fail_wrong_nf(self): self.lint_obj._load() self.lint_obj.minNextflowVersion = "1.2.3" results = self.lint_obj.actions_ci() - assert results["failed"] == ["Minimum NF version in '.github/workflows/ci.yml' different to pipeline's manifest"] + assert results["failed"] == ["Minimum pipeline NF version '1.2.3' is not tested in '.github/workflows/ci.yml'"] def test_actions_ci_fail_wrong_docker_ver(self): diff --git a/tests/modules/bump_versions.py b/tests/modules/bump_versions.py index 5defa7d74..df891cd4c 100644 --- a/tests/modules/bump_versions.py +++ b/tests/modules/bump_versions.py @@ -8,15 +8,15 @@ def test_modules_bump_versions_single_module(self): """Test updating a single module""" - # Change the star/align version to an older version - main_nf_path = os.path.join(self.nfcore_modules, "modules", "star", 
"align", "main.nf") + # Change the bpipe/test version to an older version + main_nf_path = os.path.join(self.nfcore_modules, "modules", "bpipe", "test", "main.nf") with open(main_nf_path, "r") as fh: content = fh.read() new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content) with open(main_nf_path, "w") as fh: fh.write(new_content) version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) - version_bumper.bump_versions(module="star/align") + version_bumper.bump_versions(module="bpipe/test") assert len(version_bumper.failed) == 0 @@ -37,13 +37,13 @@ def test_modules_bump_versions_fail(self): def test_modules_bump_versions_fail_unknown_version(self): """Fail because of an unknown version""" - # Change the star/align version to an older version - main_nf_path = os.path.join(self.nfcore_modules, "modules", "star", "align", "main.nf") + # Change the bpipe/test version to an older version + main_nf_path = os.path.join(self.nfcore_modules, "modules", "bpipe", "test", "main.nf") with open(main_nf_path, "r") as fh: content = fh.read() - new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=xxx", content) + new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content) with open(main_nf_path, "w") as fh: fh.write(new_content) version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) - version_bumper.bump_versions(module="star/align") + version_bumper.bump_versions(module="bpipe/test") assert "Conda package had unknown version" in version_bumper.failed[0][0] diff --git a/tests/modules/create_test_yml.py b/tests/modules/create_test_yml.py index db286181c..1666cd764 100644 --- a/tests/modules/create_test_yml.py +++ b/tests/modules/create_test_yml.py @@ -1,13 +1,14 @@ import os -import tempfile import pytest import nf_core.modules +from ..utils import with_temporary_folder -def test_modules_custom_yml_dumper(self): + +@with_temporary_folder +def test_modules_custom_yml_dumper(self, out_dir): """Try to create a yml file with the custom yml dumper""" - out_dir = tempfile.mkdtemp() yml_output_path = os.path.join(out_dir, "test.yml") meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", False, "./", False, True) meta_builder.test_yml_output_path = yml_output_path @@ -16,9 +17,9 @@ def test_modules_custom_yml_dumper(self): assert os.path.isfile(yml_output_path) -def test_modules_test_file_dict(self): +@with_temporary_folder +def test_modules_test_file_dict(self, test_file_dir): """Creat dict of test files and create md5 sums""" - test_file_dir = tempfile.mkdtemp() meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", False, "./", False, True) with open(os.path.join(test_file_dir, "test_file.txt"), "w") as fh: fh.write("this line is just for testing") @@ -27,9 +28,9 @@ def test_modules_test_file_dict(self): assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" -def test_modules_create_test_yml_get_md5(self): +@with_temporary_folder +def test_modules_create_test_yml_get_md5(self, test_file_dir): """Get md5 sums from a dummy output""" - test_file_dir = tempfile.mkdtemp() meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", False, "./", False, True) with open(os.path.join(test_file_dir, "test_file.txt"), "w") as fh: fh.write("this line is just for testing") @@ -41,18 +42,18 @@ def test_modules_create_test_yml_get_md5(self): def test_modules_create_test_yml_entry_points(self): """Test extracting test entry points from a main.nf file""" - 
meta_builder = nf_core.modules.ModulesTestYmlBuilder("star/align", False, "./", False, True) - meta_builder.module_test_main = os.path.join(self.nfcore_modules, "tests", "modules", "star", "align", "main.nf") + meta_builder = nf_core.modules.ModulesTestYmlBuilder("bpipe/test", False, "./", False, True) + meta_builder.module_test_main = os.path.join(self.nfcore_modules, "tests", "modules", "bpipe", "test", "main.nf") meta_builder.scrape_workflow_entry_points() - assert meta_builder.entry_points[0] == "test_star_align" + assert meta_builder.entry_points[0] == "test_bpipe_test" def test_modules_create_test_yml_check_inputs(self): """Test the check_inputs() function - raise UserWarning because test.yml exists""" cwd = os.getcwd() os.chdir(self.nfcore_modules) - meta_builder = nf_core.modules.ModulesTestYmlBuilder("star/align", False, "./", False, True) - meta_builder.module_test_main = os.path.join(self.nfcore_modules, "tests", "modules", "star", "align", "main.nf") + meta_builder = nf_core.modules.ModulesTestYmlBuilder("bpipe/test", False, "./", False, True) + meta_builder.module_test_main = os.path.join(self.nfcore_modules, "tests", "modules", "bpipe", "test", "main.nf") with pytest.raises(UserWarning) as excinfo: meta_builder.check_inputs() os.chdir(cwd) diff --git a/tests/modules/install.py b/tests/modules/install.py index 41307e366..e4c94f6bd 100644 --- a/tests/modules/install.py +++ b/tests/modules/install.py @@ -1,7 +1,8 @@ -import tempfile import pytest import os +from ..utils import with_temporary_folder + def test_modules_install_nopipeline(self): """Test installing a module - no pipeline given""" @@ -9,9 +10,10 @@ def test_modules_install_nopipeline(self): assert self.mods_install.install("foo") is False -def test_modules_install_emptypipeline(self): +@with_temporary_folder +def test_modules_install_emptypipeline(self, tmpdir): """Test installing a module - empty dir given""" - self.mods_install.dir = tempfile.mkdtemp() + self.mods_install.dir = tmpdir with pytest.raises(UserWarning) as excinfo: self.mods_install.install("foo") assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) diff --git a/tests/modules/lint.py b/tests/modules/lint.py index de29371c5..0f60377d5 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -6,26 +6,27 @@ def test_modules_lint_trimgalore(self): self.mods_install.install("trimgalore") module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) module_lint.lint(print_results=False, module="trimgalore") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 - assert len(module_lint.failed) == 0 def test_modules_lint_empty(self): """Test linting a pipeline with no modules installed""" self.mods_remove.remove("fastqc") self.mods_remove.remove("multiqc") + self.mods_remove.remove("custom/dumpsoftwareversions") module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) module_lint.lint(print_results=False, all_modules=True) + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) == 0 assert len(module_lint.warned) == 0 - assert len(module_lint.failed) == 0 def test_modules_lint_new_modules(self): """lint all modules in nf-core/modules repo clone""" module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=True, all_modules=True) + assert len(module_lint.failed) == 0, f"Linting 
failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 - assert len(module_lint.failed) == 0 diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py index bd6c5a1a5..1697fcfd0 100644 --- a/tests/test_bump_version.py +++ b/tests/test_bump_version.py @@ -2,7 +2,6 @@ """Some tests covering the bump_version code. """ import os -import tempfile import yaml import nf_core.bump_version @@ -10,10 +9,13 @@ import nf_core.utils -def test_bump_pipeline_version(datafiles): +# pass tmp_path as argument, which is a pytest feature +# see: https://docs.pytest.org/en/latest/how-to/tmp_path.html#the-tmp-path-fixture +def test_bump_pipeline_version(datafiles, tmp_path): """Test that making a release with the working example files works""" + # Get a workflow and configs - test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") create_obj = nf_core.create.PipelineCreate( "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir ) @@ -30,10 +32,10 @@ def test_bump_pipeline_version(datafiles): assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1" -def test_dev_bump_pipeline_version(datafiles): +def test_dev_bump_pipeline_version(datafiles, tmp_path): """Test that making a release works with a dev name and a leading v""" # Get a workflow and configs - test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") create_obj = nf_core.create.PipelineCreate( "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir ) @@ -50,9 +52,9 @@ def test_dev_bump_pipeline_version(datafiles): assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.2dev" -def test_bump_nextflow_version(datafiles): +def test_bump_nextflow_version(datafiles, tmp_path): # Get a workflow and configs - test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") create_obj = nf_core.create.PipelineCreate( "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir ) @@ -61,24 +63,24 @@ def test_bump_nextflow_version(datafiles): pipeline_obj._load() # Bump the version number - nf_core.bump_version.bump_nextflow_version(pipeline_obj, "21.04.0") + nf_core.bump_version.bump_nextflow_version(pipeline_obj, "21.10.3") new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) # Check nextflow.config new_pipeline_obj._load_pipeline_config() - assert new_pipeline_obj.nf_config["manifest.nextflowVersion"].strip("'\"") == "!>=21.04.0" + assert new_pipeline_obj.nf_config["manifest.nextflowVersion"].strip("'\"") == "!>=21.10.3" # Check .github/workflows/ci.yml with open(new_pipeline_obj._fp(".github/workflows/ci.yml")) as fh: ci_yaml = yaml.safe_load(fh) - assert ci_yaml["jobs"]["test"]["strategy"]["matrix"]["nxf_ver"][0] == "21.04.0" + assert ci_yaml["jobs"]["test"]["strategy"]["matrix"]["include"][0]["NXF_VER"] == "21.10.3" # Check README.md with open(new_pipeline_obj._fp("README.md")) as fh: readme = fh.read().splitlines() assert ( "[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A5{}-23aa62.svg?labelColor=000000)](https://www.nextflow.io/)".format( - "21.04.0" + "21.10.3" ) in readme )
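
The tmp_path argument noted in the comment above is a built-in pytest fixture: every test that requests it receives a unique pathlib.Path which pytest creates beforehand and cleans up afterwards, which is why the tempfile.mkdtemp() calls and the manual cleanup code can be dropped. A minimal illustration of the pattern (the test name is illustrative):

    import os

    def test_creates_pipeline_dir(tmp_path):
        # tmp_path is a pathlib.Path injected by pytest; no teardown needed
        pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline")
        os.makedirs(pipeline_dir)
        assert os.path.isdir(pipeline_dir)
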
diff --git a/tests/test_create.py b/tests/test_create.py index 5fb1e53a6..cce494b52 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -3,12 +3,14 @@ """ import os import nf_core.create -import tempfile import unittest +from .utils import with_temporary_folder + class NfcoreCreateTest(unittest.TestCase): - def setUp(self): + @with_temporary_folder + def setUp(self, tmp_path): self.pipeline_name = "nf-core/test" self.pipeline_description = "just for 4w3s0m3 tests" self.pipeline_author = "Chuck Norris" @@ -21,7 +23,7 @@ def setUp(self): version=self.pipeline_version, no_git=False, force=True, - outdir=tempfile.mkdtemp(), + outdir=tmp_path, ) def test_pipeline_creation(self): diff --git a/tests/test_download.py b/tests/test_download.py index a4ae8e205..2dcc7b8cc 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -14,6 +14,8 @@ import tempfile import unittest +from .utils import with_temporary_folder, with_temporary_file + class DownloadTest(unittest.TestCase): @@ -73,8 +75,8 @@ def test_get_release_hash_non_existent_release(self): # # Tests for 'download_wf_files' # - def test_download_wf_files(self): - outdir = tempfile.mkdtemp() + @with_temporary_folder + def test_download_wf_files(self, outdir): download_obj = DownloadWorkflow(pipeline="nf-core/methylseq", revision="1.6") download_obj.outdir = outdir download_obj.wf_sha = "b3e5e3b95aaf01d98391a62a10a3990c0a4de395" @@ -87,8 +89,8 @@ def test_download_wf_files(self): # # Tests for 'download_configs' # - def test_download_configs(self): - outdir = tempfile.mkdtemp() + @with_temporary_folder + def test_download_configs(self, outdir): download_obj = DownloadWorkflow(pipeline="nf-core/methylseq", revision="1.6") download_obj.outdir = outdir download_obj.download_configs() @@ -97,30 +99,32 @@ def test_download_configs(self): # # Tests for 'wf_use_local_configs' # - def test_wf_use_local_configs(self): + @with_temporary_folder + def test_wf_use_local_configs(self, tmp_path): # Get a workflow and configs - test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") create_obj = nf_core.create.PipelineCreate( "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir ) create_obj.init_pipeline() - test_outdir = tempfile.mkdtemp() - download_obj = DownloadWorkflow(pipeline="dummy", revision="1.2.0", outdir=test_outdir) - shutil.copytree(test_pipeline_dir, os.path.join(test_outdir, "workflow")) - download_obj.download_configs() + with tempfile.TemporaryDirectory() as test_outdir: + download_obj = DownloadWorkflow(pipeline="dummy", revision="1.2.0", outdir=test_outdir) + shutil.copytree(test_pipeline_dir, os.path.join(test_outdir, "workflow")) + download_obj.download_configs() - # Test the function - download_obj.wf_use_local_configs() - wf_config = nf_core.utils.fetch_wf_config(os.path.join(test_outdir, "workflow")) - assert wf_config["params.custom_config_base"] == f"'{test_outdir}/workflow/../configs/'" + # Test the function + download_obj.wf_use_local_configs() + wf_config = nf_core.utils.fetch_wf_config(os.path.join(test_outdir, "workflow"), cache_config=False) + assert wf_config["params.custom_config_base"] == f"'{test_outdir}/workflow/../configs/'" # # Tests for 'find_container_images' # + @with_temporary_folder @mock.patch("nf_core.utils.fetch_wf_config") - def test_find_container_images(self, mock_fetch_wf_config): - download_obj = DownloadWorkflow(pipeline="dummy", outdir=tempfile.mkdtemp()) + def test_find_container_images(self, tmp_path, mock_fetch_wf_config): + download_obj = 
DownloadWorkflow(pipeline="dummy", outdir=tmp_path) mock_fetch_wf_config.return_value = { "process.mapping.container": "cutting-edge-container", "process.nocontainer": "not-so-cutting-edge", @@ -132,60 +136,50 @@ def test_find_container_images(self, mock_fetch_wf_config): # # Tests for 'validate_md5' # - def test_matching_md5sums(self): + @with_temporary_file + def test_matching_md5sums(self, tmpfile): download_obj = DownloadWorkflow(pipeline="dummy") test_hash = hashlib.md5() test_hash.update(b"test") val_hash = test_hash.hexdigest() - tmpfilehandle, tmpfile = tempfile.mkstemp() - with open(tmpfile[1], "w") as f: + with open(tmpfile.name, "w") as f: f.write("test") - download_obj.validate_md5(tmpfile[1], val_hash) - - # Clean up - os.remove(tmpfile[1]) + download_obj.validate_md5(tmpfile.name, val_hash) + @with_temporary_file @pytest.mark.xfail(raises=IOError, strict=True) - def test_mismatching_md5sums(self): + def test_mismatching_md5sums(self, tmpfile): download_obj = DownloadWorkflow(pipeline="dummy") test_hash = hashlib.md5() test_hash.update(b"other value") val_hash = test_hash.hexdigest() - tmpfilehandle, tmpfile = tempfile.mkstemp() - with open(tmpfile, "w") as f: + with open(tmpfile.name, "w") as f: f.write("test") - download_obj.validate_md5(tmpfile[1], val_hash) - - # Clean up - os.remove(tmpfile) # # Tests for 'singularity_pull_image' # # If Singularity is not installed, will log an error and exit # If Singularity is installed, should raise an OSError due to non-existent image + @with_temporary_folder @pytest.mark.xfail(raises=OSError) @mock.patch("rich.progress.Progress.add_task") - def test_singularity_pull_image(self, mock_rich_progress): - tmp_dir = tempfile.mkdtemp() + def test_singularity_pull_image(self, tmp_dir, mock_rich_progress): download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_dir) download_obj.singularity_pull_image("a-container", tmp_dir, None, mock_rich_progress) - # Clean up - shutil.rmtree(tmp_dir) - # # Tests for the main entry method 'download_workflow' # + @with_temporary_folder @mock.patch("nf_core.download.DownloadWorkflow.singularity_pull_image") @mock.patch("shutil.which") - def test_download_workflow_with_success(self, mock_download_image, mock_singularity_installed): - - tmp_dir = tempfile.mkdtemp() + def test_download_workflow_with_success(self, tmp_dir, mock_download_image, mock_singularity_installed): os.environ["NXF_SINGULARITY_CACHEDIR"] = "foo" download_obj = DownloadWorkflow( @@ -197,6 +191,3 @@ def test_download_workflow_with_success(self, mock_download_image, mock_singular ) download_obj.download_workflow() - - # Clean up - shutil.rmtree(tmp_dir) diff --git a/tests/test_launch.py b/tests/test_launch.py index 090414029..8029213f0 100644 --- a/tests/test_launch.py +++ b/tests/test_launch.py @@ -11,6 +11,8 @@ import tempfile import unittest +from .utils import with_temporary_folder, with_temporary_file + class TestLaunch(unittest.TestCase): """Class for launch tests""" @@ -20,9 +22,21 @@ def setUp(self): # Set up the schema root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") - self.nf_params_fn = os.path.join(tempfile.mkdtemp(), "nf-params.json") + # cannot use a context manager here, since the temporary file would + # no longer exist outside of setUp + self.tmp_dir = tempfile.mkdtemp() + self.nf_params_fn = os.path.join(self.tmp_dir, "nf-params.json") self.launcher = 
nf_core.launch.Launch(self.template_dir, params_out=self.nf_params_fn) + def tearDown(self): + """Clean up temporary files and folders""" + + if os.path.exists(self.nf_params_fn): + os.remove(self.nf_params_fn) + + if os.path.exists(self.tmp_dir): + os.rmdir(self.tmp_dir) + @mock.patch.object(nf_core.launch.Launch, "prompt_web_gui", side_effect=[True]) @mock.patch.object(nf_core.launch.Launch, "launch_web_gui") def test_launch_pipeline(self, mock_webbrowser, mock_lauch_web_gui): @@ -52,9 +66,10 @@ def test_get_pipeline_schema(self): self.launcher.get_pipeline_schema() assert len(self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]) > 2 - def test_make_pipeline_schema(self): + @with_temporary_folder + def test_make_pipeline_schema(self, tmp_path): """Make a copy of the template workflow, but delete the schema file, then try to load it""" - test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "wf") + test_pipeline_dir = os.path.join(tmp_path, "wf") shutil.copytree(self.template_dir, test_pipeline_dir) os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json")) self.launcher = nf_core.launch.Launch(test_pipeline_dir, params_out=self.nf_params_fn) @@ -74,12 +89,12 @@ def test_get_pipeline_defaults(self): assert len(self.launcher.schema_obj.input_params) > 0 assert self.launcher.schema_obj.input_params["outdir"] == "./results" - def test_get_pipeline_defaults_input_params(self): + @with_temporary_file + def test_get_pipeline_defaults_input_params(self, tmp_file): """Test fetching default inputs from the pipeline schema with an input params file supplied""" - tmp_filehandle, tmp_filename = tempfile.mkstemp() - with os.fdopen(tmp_filehandle, "w") as fh: + with open(tmp_file.name, "w") as fh: json.dump({"outdir": "fubar"}, fh) - self.launcher.params_in = tmp_filename + self.launcher.params_in = tmp_file.name self.launcher.get_pipeline_schema() self.launcher.set_schema_inputs() assert len(self.launcher.schema_obj.input_params) > 0 diff --git a/tests/test_lint.py b/tests/test_lint.py index c6d5dc351..78c8c26ae 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -16,6 +16,8 @@ import nf_core.create import nf_core.lint +from .utils import with_temporary_folder + class TestLint(unittest.TestCase): """Class for lint tests""" @@ -25,7 +27,9 @@ def setUp(self): Use nf_core.create() to make a pipeline that we can use for testing """ - self.test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + + self.tmp_dir = tempfile.mkdtemp() + self.test_pipeline_dir = os.path.join(self.tmp_dir, "nf-core-testpipeline") self.create_obj = nf_core.create.PipelineCreate( "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir ) @@ -33,11 +37,17 @@ def setUp(self): # Base lint object on this directory self.lint_obj = nf_core.lint.PipelineLint(self.test_pipeline_dir) + def tearDown(self): + """Clean up temporary files and folders""" + + if os.path.exists(self.tmp_dir): + shutil.rmtree(self.tmp_dir) + def _make_pipeline_copy(self): """Make a copy of the test pipeline that can be edited Returns: Path to new temp directory with pipeline""" - new_pipeline = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + new_pipeline = os.path.join(self.tmp_dir, "nf-core-testpipeline-copy") shutil.copytree(self.test_pipeline_dir, new_pipeline) return new_pipeline @@ -72,9 +82,9 @@ def test_load_lint_config_not_found(self): def test_load_lint_config_ignore_all_tests(self): """Try to load a linting config file that ignores all 
tests""" + # Make a copy of the test pipeline and create a lint object - new_pipeline = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") - shutil.copytree(self.test_pipeline_dir, new_pipeline) + new_pipeline = self._make_pipeline_copy() lint_obj = nf_core.lint.PipelineLint(new_pipeline) # Make a config file listing all test names @@ -93,7 +103,8 @@ def test_load_lint_config_ignore_all_tests(self): assert len(lint_obj.failed) == 0 assert len(lint_obj.ignored) == len(lint_obj.lint_tests) - def test_json_output(self): + @with_temporary_folder + def test_json_output(self, tmp_dir): """ Test creation of a JSON file with lint results @@ -122,7 +133,7 @@ def test_json_output(self): self.lint_obj.warned.append(("test_three", "This test gave a warning")) # Make a temp dir for the JSON output - json_fn = os.path.join(tempfile.mkdtemp(), "lint_results.json") + json_fn = os.path.join(tmp_dir, "lint_results.json") self.lint_obj._save_json_results(json_fn) # Load created JSON file and check its contents @@ -176,46 +187,46 @@ def test_sphinx_rst_files(self): ####################### # SPECIFIC LINT TESTS # ####################### - from lint.actions_awsfulltest import ( + from .lint.actions_awsfulltest import ( test_actions_awsfulltest_warn, test_actions_awsfulltest_pass, test_actions_awsfulltest_fail, ) - from lint.actions_awstest import test_actions_awstest_pass, test_actions_awstest_fail - from lint.files_exist import ( + from .lint.actions_awstest import test_actions_awstest_pass, test_actions_awstest_fail + from .lint.files_exist import ( test_files_exist_missing_config, test_files_exist_missing_main, test_files_exist_depreciated_file, test_files_exist_pass, ) - from lint.actions_ci import ( + from .lint.actions_ci import ( test_actions_ci_pass, test_actions_ci_fail_wrong_nf, test_actions_ci_fail_wrong_docker_ver, test_actions_ci_fail_wrong_trigger, ) - from lint.actions_schema_validation import ( + from .lint.actions_schema_validation import ( test_actions_schema_validation_missing_jobs, test_actions_schema_validation_missing_on, ) - from lint.merge_markers import test_merge_markers_found + from .lint.merge_markers import test_merge_markers_found - from lint.nextflow_config import ( + from .lint.nextflow_config import ( test_nextflow_config_example_pass, test_nextflow_config_bad_name_fail, test_nextflow_config_dev_in_release_mode_failed, ) - from lint.files_unchanged import ( + from .lint.files_unchanged import ( test_files_unchanged_pass, test_files_unchanged_fail, ) - from lint.version_consistency import test_version_consistency + from .lint.version_consistency import test_version_consistency - from lint.modules_json import test_modules_json_pass + from .lint.modules_json import test_modules_json_pass # TODO nf-core: Assess and strip out if no longer required for DSL2 diff --git a/tests/test_modules.py b/tests/test_modules.py index 2401dfa76..fb59537c6 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -10,17 +10,20 @@ import unittest -def create_modules_repo_dummy(): +def create_modules_repo_dummy(tmp_dir): """Create a dummy copy of the nf-core/modules repo""" - tmp_dir = tempfile.mkdtemp() + root_dir = os.path.join(tmp_dir, "modules") os.makedirs(os.path.join(root_dir, "modules")) os.makedirs(os.path.join(root_dir, "tests", "modules")) os.makedirs(os.path.join(root_dir, "tests", "config")) with open(os.path.join(root_dir, "tests", "config", "pytest_modules.yml"), "w") as fh: fh.writelines(["test:", "\n - modules/test/**", "\n - tests/modules/test/**"]) + with 
open(os.path.join(root_dir, "README.md"), "w") as fh: + fh.writelines(["# ![nf-core/modules](docs/images/nfcore-modules_logo.png)", "\n"]) - module_create = nf_core.modules.ModuleCreate(root_dir, "star/align", "@author", "process_medium", False, False) + # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules + module_create = nf_core.modules.ModuleCreate(root_dir, "bpipe/test", "@author", "process_medium", False, False) module_create.create() return root_dir @@ -31,10 +34,12 @@ class TestModules(unittest.TestCase): def setUp(self): """Create a new PipelineSchema and Launch objects""" + self.tmp_dir = tempfile.mkdtemp() + # Set up the schema root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") - self.pipeline_dir = os.path.join(tempfile.mkdtemp(), "mypipeline") + self.pipeline_dir = os.path.join(self.tmp_dir, "mypipeline") shutil.copytree(self.template_dir, self.pipeline_dir) # Set up install objects @@ -53,7 +58,13 @@ def setUp(self): # self.mods_remove_alt.modules_repo = nf_core.modules.ModulesRepo(repo="ewels/nf-core-modules", branch="master") # Set up the nf-core/modules repo dummy - self.nfcore_modules = create_modules_repo_dummy() + self.nfcore_modules = create_modules_repo_dummy(self.tmp_dir) + + def tearDown(self): + """Clean up temporary files and folders""" + + if os.path.exists(self.tmp_dir): + shutil.rmtree(self.tmp_dir) def test_modulesrepo_class(self): """Initialise a modules repo object""" @@ -65,13 +76,13 @@ def test_modulesrepo_class(self): # Test of the individual modules commands. # ############################################ - from modules.list import ( + from .modules.list import ( test_modules_list_remote, test_modules_list_pipeline, test_modules_install_and_list_pipeline, ) - from modules.install import ( + from .modules.install import ( test_modules_install_nopipeline, test_modules_install_emptypipeline, test_modules_install_nomodule, @@ -79,21 +90,21 @@ def test_modulesrepo_class(self): test_modules_install_trimgalore_twice, ) - from modules.remove import ( + from .modules.remove import ( test_modules_remove_trimgalore, test_modules_remove_trimgalore_uninstalled, ) - from modules.lint import test_modules_lint_trimgalore, test_modules_lint_empty, test_modules_lint_new_modules + from .modules.lint import test_modules_lint_trimgalore, test_modules_lint_empty, test_modules_lint_new_modules - from modules.create import ( + from .modules.create import ( test_modules_create_succeed, test_modules_create_fail_exists, test_modules_create_nfcore_modules, test_modules_create_nfcore_modules_subtool, ) - from modules.create_test_yml import ( + from .modules.create_test_yml import ( test_modules_custom_yml_dumper, test_modules_test_file_dict, test_modules_create_test_yml_get_md5, @@ -101,7 +112,7 @@ def test_modulesrepo_class(self): test_modules_create_test_yml_check_inputs, ) - from modules.bump_versions import ( + from .modules.bump_versions import ( test_modules_bump_versions_single_module, test_modules_bump_versions_all_modules, test_modules_bump_versions_fail, diff --git a/tests/test_schema.py b/tests/test_schema.py index 175b23880..3a060a516 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -15,6 +15,8 @@ import unittest import yaml +from .utils import with_temporary_file, with_temporary_folder + class TestSchema(unittest.TestCase): """Class for schema tests""" @@ -24,11 +26,16 @@ def setUp(self): self.schema_obj = 
nf_core.schema.PipelineSchema() self.root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) # Copy the template to a temp directory so that we can use that for tests - self.template_dir = os.path.join(tempfile.mkdtemp(), "wf") + self.tmp_dir = tempfile.mkdtemp() + self.template_dir = os.path.join(self.tmp_dir, "wf") template_dir = os.path.join(self.root_repo_dir, "nf_core", "pipeline-template") shutil.copytree(template_dir, self.template_dir) self.template_schema = os.path.join(self.template_dir, "nextflow_schema.json") + def tearDown(self): + if os.path.exists(self.tmp_dir): + shutil.rmtree(self.tmp_dir) + def test_load_lint_schema(self): """Check linting with the pipeline template directory""" self.schema_obj.get_schema_path(self.template_dir) @@ -46,13 +53,13 @@ def test_load_lint_schema_notjson(self): self.schema_obj.get_schema_path(os.path.join(self.template_dir, "nextflow.config")) self.schema_obj.load_lint_schema() + @with_temporary_file @pytest.mark.xfail(raises=AssertionError, strict=True) - def test_load_lint_schema_noparams(self): + def test_load_lint_schema_noparams(self, tmp_file): """ Check that linting raises properly if a JSON file is given without any params """ - # Make a temporary file to write schema to - tmp_file = tempfile.NamedTemporaryFile() + # write schema to a temporary file with open(tmp_file.name, "w") as fh: json.dump({"type": "fubar"}, fh) self.schema_obj.get_schema_path(tmp_file.name) @@ -88,29 +95,29 @@ def test_load_schema(self): self.schema_obj.schema_filename = self.template_schema self.schema_obj.load_schema() - def test_save_schema(self): + @with_temporary_file + def test_save_schema(self, tmp_file): """Try to save a schema""" # Load the template schema self.schema_obj.schema_filename = self.template_schema self.schema_obj.load_schema() # Make a temporary file to write schema to - tmp_file = tempfile.NamedTemporaryFile() self.schema_obj.schema_filename = tmp_file.name self.schema_obj.save_schema() - def test_load_input_params_json(self): + @with_temporary_file + def test_load_input_params_json(self, tmp_file): """Try to load a JSON file with params for a pipeline run""" - # Make a temporary file to write schema to - tmp_file = tempfile.NamedTemporaryFile() + # write params to a temporary file + with open(tmp_file.name, "w") as fh: json.dump({"input": "fubar"}, fh) self.schema_obj.load_input_params(tmp_file.name) - def test_load_input_params_yaml(self): + @with_temporary_file + def test_load_input_params_yaml(self, tmp_file): """Try to load a YAML file with params for a pipeline run""" - # Make a temporary file to write schema to - tmp_file = tempfile.NamedTemporaryFile() + # write params to a temporary file + with open(tmp_file.name, "w") as fh: yaml.dump({"input": "fubar"}, fh) self.schema_obj.load_input_params(tmp_file.name) @@ -293,14 +300,15 @@ def test_build_schema(self): """ param = self.schema_obj.build_schema(self.template_dir, True, False, None) - def test_build_schema_from_scratch(self): + @with_temporary_folder + def test_build_schema_from_scratch(self, tmp_dir): """ Build a new schema param from a pipeline with no existing file Run code to ensure it doesn't crash. Individual functions tested separately. 
Pretty much a copy of test_launch.py test_make_pipeline_schema """ - test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "wf") + test_pipeline_dir = os.path.join(tmp_dir, "wf") shutil.copytree(self.template_dir, test_pipeline_dir) os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json")) diff --git a/tests/test_sync.py b/tests/test_sync.py index ce7d07dc7..727db7010 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -5,6 +5,7 @@ import nf_core.create import nf_core.sync +import git import json import mock import os @@ -12,22 +13,27 @@ import tempfile import unittest +from .utils import with_temporary_folder + class TestModules(unittest.TestCase): """Class for modules tests""" def setUp(self): - self.make_new_pipeline() - - def make_new_pipeline(self): """Create a new pipeline to test""" - self.pipeline_dir = os.path.join(tempfile.mkdtemp(), "test_pipeline") + self.tmp_dir = tempfile.mkdtemp() + self.pipeline_dir = os.path.join(self.tmp_dir, "test_pipeline") self.create_obj = nf_core.create.PipelineCreate("testing", "test pipeline", "tester", outdir=self.pipeline_dir) self.create_obj.init_pipeline() - def test_inspect_sync_dir_notgit(self): + def tearDown(self): + if os.path.exists(self.tmp_dir): + shutil.rmtree(self.tmp_dir) + + @with_temporary_folder + def test_inspect_sync_dir_notgit(self, tmp_dir): """Try syncing an empty directory""" - psync = nf_core.sync.PipelineSync(tempfile.mkdtemp()) + psync = nf_core.sync.PipelineSync(tmp_dir) try: psync.inspect_sync_dir() raise UserWarning("Should have hit an exception") diff --git a/tests/test_utils.py b/tests/test_utils.py index 36d533afe..b62a8c979 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -12,6 +12,9 @@ import requests import tempfile import unittest +import shutil + +from .utils import with_temporary_folder class TestUtils(unittest.TestCase): @@ -22,7 +25,8 @@ def setUp(self): Use nf_core.create() to make a pipeline that we can use for testing """ - self.test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + self.tmp_dir = tempfile.mkdtemp() + self.test_pipeline_dir = os.path.join(self.tmp_dir, "nf-core-testpipeline") self.create_obj = nf_core.create.PipelineCreate( "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir ) @@ -30,6 +34,10 @@ def setUp(self): # Base Pipeline object on this directory self.pipeline_obj = nf_core.utils.Pipeline(self.test_pipeline_dir) + def tearDown(self): + if os.path.exists(self.tmp_dir): + shutil.rmtree(self.tmp_dir) + def test_check_if_outdated_1(self): current_version = "1.0" remote_version = "2.0" @@ -89,10 +97,10 @@ def test_list_files_git(self): self.pipeline_obj._list_files() assert os.path.join(self.test_pipeline_dir, "main.nf") in self.pipeline_obj.files - def test_list_files_no_git(self): + @with_temporary_folder + def test_list_files_no_git(self, tmpdir): """Test listing pipeline files without `git-ls`""" - # Create directory with a test file - tmpdir = tempfile.mkdtemp() + # Create a test file in a temporary directory tmp_fn = os.path.join(tmpdir, "testfile") open(tmp_fn, "a").close() pipeline_obj = nf_core.utils.Pipeline(tmpdir) diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 000000000..1f4052570 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Helper functions for tests +""" + +import functools +import tempfile + + +def with_temporary_folder(func): + """ + Call the decorated function under the tempfile.TemporaryDirectory + context manager. Pass the temporary directory name to the decorated + function + """ + + @functools.wraps(func) + def wrapper(*args, **kwargs): + with tempfile.TemporaryDirectory() as tmpdirname: + return func(*args, tmpdirname, **kwargs) + + return wrapper + + +def with_temporary_file(func): + """ + Call the decorated function under the tempfile.NamedTemporaryFile + context manager. Pass the opened file handle to the decorated function + """ + + @functools.wraps(func) + def wrapper(*args, **kwargs): + with tempfile.NamedTemporaryFile() as tmpfile: + return func(*args, tmpfile, **kwargs) + + return wrapper
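
For reference, this is how the two decorators are used throughout the test modules above: the temporary resource is appended as an extra positional argument after self, and the context manager guarantees cleanup even if the test fails. A short sketch following the repo's pattern of module-level test functions that are later imported into a TestCase (hence the self argument; the test bodies are illustrative):

    import os

    from .utils import with_temporary_folder, with_temporary_file


    @with_temporary_folder
    def test_with_a_folder(self, tmp_dir):
        # tmp_dir is the TemporaryDirectory path; it is removed on return
        assert os.path.isdir(tmp_dir)


    @with_temporary_file
    def test_with_a_file(self, tmp_file):
        # tmp_file is an open NamedTemporaryFile; reopen it via tmp_file.name
        with open(tmp_file.name, "w") as fh:
            fh.write("just for testing")
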