diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000000..1a13a981a8 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,28 @@ +{ + "name": "nfcore", + "image": "nfcore/gitpod:latest", + "postCreateCommand": "python -m pip install --upgrade -r ../requirements-dev.txt -e ../ && pre-commit install --install-hooks", + "remoteUser": "gitpod", + + // Configure tool-specific properties. + "customizations": { + // Configure properties specific to VS Code. + "vscode": { + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/opt/conda/bin/python", + "python.linting.enabled": true, + "python.linting.pylintEnabled": true, + "python.formatting.autopep8Path": "/opt/conda/bin/autopep8", + "python.formatting.yapfPath": "/opt/conda/bin/yapf", + "python.linting.flake8Path": "/opt/conda/bin/flake8", + "python.linting.pycodestylePath": "/opt/conda/bin/pycodestyle", + "python.linting.pydocstylePath": "/opt/conda/bin/pydocstyle", + "python.linting.pylintPath": "/opt/conda/bin/pylint" + }, + + // Add the IDs of extensions you want installed when the container is created. + "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"] + } + } +} diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 800ba7ab10..fc73294b5a 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -127,3 +127,19 @@ You can replicate this process locally with the following commands: nf-core create -n testpipeline -d "This pipeline is for testing" nf-core lint nf-core-testpipeline ``` + +## GitHub Codespaces + +This repo includes a devcontainer configuration which will create a GitHub Codespace for Nextflow development! This is an online development environment that runs in your browser, complete with VSCode and a terminal. + +To get started: + +- Open the repo in [Codespaces](https://github.com/nf-core/tools/codespaces) +- Tools installed + - nf-core + - Nextflow + +Devcontainer specs: + +- [DevContainer config](.devcontainer/devcontainer.json) +- [Dockerfile](.devcontainer/Dockerfile) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 72b71ddd68..6cf8ffd921 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -33,7 +33,7 @@ body: attributes: label: System information description: | - * Nextflow version _(eg. 21.10.3)_ + * Nextflow version _(eg. 22.10.1)_ * Hardware _(eg. HPC, Desktop, Cloud)_ * Executor _(eg. slurm, local, awsbatch)_ * OS _(eg. CentOS Linux, macOS, Linux Mint)_ diff --git a/.github/RELEASE_CHECKLIST.md b/.github/RELEASE_CHECKLIST.md index e086d286aa..f2d2591f16 100644 --- a/.github/RELEASE_CHECKLIST.md +++ b/.github/RELEASE_CHECKLIST.md @@ -2,16 +2,17 @@ 1. Check issue milestones to see outstanding issues to resolve if possible or transfer to the milestones for the next release e.g. [`v1.9`](https://github.com/nf-core/tools/issues?q=is%3Aopen+is%3Aissue+milestone%3A1.9) 2. Most importantly, pick an undeniably outstanding [name](http://www.codenamegenerator.com/) for the release where _Prefix_ = _Metal_ and _Dictionary_ = _Animal_. -3. Check whether the GitHub Actions workflow scripts need updating of the Nextflow versions +3. Check the [pipeline health page](https://nf-co.re/pipeline_health) to make sure that all repos look sane (missing `TEMPLATE` branches etc.) 4.
Create a PR to `dev` to bump the version in `CHANGELOG.md` and `setup.py`. 5. Make sure all CI tests are passing! 6. Create a PR from `dev` to `master` 7. Make sure all CI tests are passing again (additional tests are run on PRs to `master`) 8. Request review (2 approvals required) -9. Merge the PR into `master` -10. Wait for CI tests on the commit to passed -11. (Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. -12. Create a new release copying the `CHANGELOG` for that release into the description section. +9. Run `rich-codex` to regenerate docs screengrabs (actions `workflow_dispatch` button) +10. Merge the PR into `master` +11. Wait for CI tests on the commit to pass +12. (Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. +13. Create a new release copying the `CHANGELOG` for that release into the description section. ## After release diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 277baf1425..d09bdfd822 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -1,5 +1,11 @@ name: Create a pipeline and run nf-core linting -on: [push, pull_request] +on: + push: + branches: + - dev + pull_request: + release: + types: [published] # Cancel if a newer run is started concurrency: @@ -17,16 +23,16 @@ jobs: strategy: matrix: NXF_VER: - - "21.10.3" + - "22.10.1" - "latest-everything" steps: # Get the repo code - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 name: Check out source-code repository # Set up nf-core/tools - name: Set up Python 3.8 - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: 3.8 @@ -42,7 +48,7 @@ jobs: version: ${{ matrix.NXF_VER }} # Install the Prettier linting tools - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install Prettier run: npm install -g prettier @@ -80,7 +86,7 @@ jobs: # Run nf-core linting - name: nf-core lint - run: nf-core --log-file log.txt lint --dir nf-core-testpipeline --fail-ignored --fail-warned + run: nf-core --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned # Run the other nf-core commands - name: nf-core list @@ -96,13 +102,13 @@ jobs: run: nf-core --log-file log.txt bump-version --dir nf-core-testpipeline/ 1.1 - name: nf-core lint in release mode - run: nf-core --log-file log.txt lint --dir nf-core-testpipeline --fail-ignored --fail-warned --release + run: nf-core --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned --release - name: nf-core modules install run: nf-core --log-file log.txt modules install fastqc --dir nf-core-testpipeline/ --force - name: nf-core modules install gitlab - run: nf-core --log-file log.txt modules --git-remote https://gitlab.com/nf-core/modules-test.git install fastqc --dir nf-core-testpipeline/ + run: nf-core --log-file log.txt modules --git-remote https://gitlab.com/nf-core/modules-test.git --branch main install fastqc --force --dir nf-core-testpipeline/ - name: nf-core modules list local run: nf-core --log-file log.txt modules list local --dir nf-core-testpipeline/ @@ -115,7 +121,7 @@ jobs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: nf-core-log-file path: log.txt diff --git a/.github/workflows/create-test-wf.yml
b/.github/workflows/create-test-wf.yml index 6b2116d2f4..2da7901d16 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -1,5 +1,11 @@ name: Create a pipeline and test it -on: [push, pull_request] +on: + push: + branches: + - dev + pull_request: + release: + types: [published] # Cancel if a newer run is started concurrency: @@ -17,14 +23,14 @@ jobs: strategy: matrix: NXF_VER: - - "21.10.3" + - "22.10.1" - "latest-everything" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 name: Check out source-code repository - name: Set up Python 3.7 - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: 3.7 @@ -45,7 +51,7 @@ jobs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: nf-core-log-file path: log.txt diff --git a/.github/workflows/deploy-pypi.yml b/.github/workflows/deploy-pypi.yml index 391a8ef94f..8657283417 100644 --- a/.github/workflows/deploy-pypi.yml +++ b/.github/workflows/deploy-pypi.yml @@ -13,11 +13,11 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 name: Check out source-code repository - name: Set up Python 3.7 - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: 3.7 @@ -31,6 +31,6 @@ jobs: - name: Publish dist to PyPI if: github.repository == 'nf-core/tools' - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@release/v1 with: password: ${{ secrets.pypi_password }} diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml index 4409f1903b..30cf965af0 100644 --- a/.github/workflows/fix-linting.yml +++ b/.github/workflows/fix-linting.yml @@ -24,7 +24,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install Prettier run: npm install -g prettier @prettier/plugin-php @@ -39,11 +39,11 @@ jobs: options: "--color" - name: Set up Python 3.8 - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.8 - name: python-isort - uses: isort/isort-action@v0.1.0 + uses: isort/isort-action@v1.0.0 with: isortVersion: "latest" requirementsFiles: "requirements.txt requirements-dev.txt" @@ -55,5 +55,5 @@ jobs: git config push.default upstream git add . 
git status - git commit -m "[automated] Fix linting with Prettier" + git commit -m "[automated] Fix code linting" git push diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml index af2d41aecf..869d8898d9 100644 --- a/.github/workflows/lint-code.yml +++ b/.github/workflows/lint-code.yml @@ -1,5 +1,11 @@ name: Lint tools code formatting -on: [push, pull_request] +on: + push: + branches: + - dev + pull_request: + release: + types: [published] # Cancel if a newer run is started concurrency: @@ -10,9 +16,9 @@ jobs: EditorConfig: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install editorconfig-checker run: npm install -g editorconfig-checker @@ -24,9 +30,9 @@ jobs: Prettier: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install Prettier run: npm install -g prettier @@ -37,7 +43,7 @@ jobs: PythonBlack: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Check code lints with Black uses: psf/black@stable @@ -68,14 +74,14 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out source-code repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Python 3.8 - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.8 - name: python-isort - uses: isort/isort-action@v0.1.0 + uses: isort/isort-action@v1.1.0 with: isortVersion: "latest" requirementsFiles: "requirements.txt requirements-dev.txt" diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml index 88efe88b9d..dea28cdd35 100644 --- a/.github/workflows/push_dockerhub_dev.yml +++ b/.github/workflows/push_dockerhub_dev.yml @@ -23,7 +23,7 @@ jobs: fail-fast: false steps: - name: Check out code - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Build nfcore/tools:dev docker image run: docker build --no-cache . -t nfcore/tools:dev diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml index 71245244d8..857b241022 100644 --- a/.github/workflows/push_dockerhub_release.yml +++ b/.github/workflows/push_dockerhub_release.yml @@ -23,7 +23,7 @@ jobs: fail-fast: false steps: - name: Check out code - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Build nfcore/tools:latest docker image run: docker build --no-cache . -t nfcore/tools:latest @@ -38,5 +38,5 @@ jobs: docker tag nfcore/tools:latest nfcore/tools:${{ github.event.release.tag_name }} docker push nfcore/tools:${{ github.event.release.tag_name }} docker push nfcore/gitpod:latest - docker tag nfcore/gitpod:latest nfcore/tools:${{ github.event.release.tag_name }} + docker tag nfcore/gitpod:latest nfcore/gitpod:${{ github.event.release.tag_name }} docker push nfcore/gitpod:${{ github.event.release.tag_name }} diff --git a/.github/workflows/pytest-frozen-ubuntu-20.04.yml b/.github/workflows/pytest-frozen-ubuntu-20.04.yml new file mode 100644 index 0000000000..6d49145ed7 --- /dev/null +++ b/.github/workflows/pytest-frozen-ubuntu-20.04.yml @@ -0,0 +1,52 @@ +name: Python tests Ubuntu-20.04 (frozen) +# This workflow is triggered on pushes and PRs to the repository. 
+# Only run if we changed a Python file on: + push: + branches: + - dev + pull_request: + release: + types: [published] + +# Cancel if a newer run is started +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + pytest: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + name: Check out source-code repository + + - name: Set up Python 3.8 + uses: actions/setup-python@v3 + with: + python-version: "3.8" + + - name: Install python dependencies + run: | + python -m pip install --upgrade pip -r requirements-dev.txt + pip install -e . + + - name: Downgrade git to the Ubuntu official repository's version + run: | + sudo apt remove git git-man + sudo add-apt-repository --remove ppa:git-core/ppa + sudo apt install git + + - name: Install Nextflow + uses: nf-core/setup-nextflow@v1 + with: + version: "latest-everything" + + - name: Test with pytest + run: python3 -m pytest tests/ --color=yes --cov-report=xml --cov-config=.github/.coveragerc --cov=nf_core + + - uses: codecov/codecov-action@v1 + name: Upload code coverage report + if: success() + with: + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 0828d93315..00cd3c813e 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -3,7 +3,11 @@ name: Python tests # Only run if we changed a Python file on: push: + branches: + - dev pull_request: + release: + types: [published] # Cancel if a newer run is started concurrency: @@ -15,14 +19,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 name: Check out source-code repository - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/rich-codex.yml b/.github/workflows/rich-codex.yml index d2894e9dad..65bc1dd9cc 100644 --- a/.github/workflows/rich-codex.yml +++ b/.github/workflows/rich-codex.yml @@ -1,9 +1,5 @@ name: Generate images for docs on: - push: - branches-ignore: - - "master" - - "dev" workflow_dispatch: jobs: rich_codex: @@ -11,9 +7,8 @@ steps: - name: Check out the repo uses: actions/checkout@v3 - - name: Set up Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: 3.x cache: pip @@ -29,8 +24,7 @@ uses: ewels/rich-codex@v1 env: COLUMNS: 100 - NFCORE_LINT_HIDE_PROGRESS: true - NFCORE_MODULES_LINT_HIDE_PROGRESS: true + HIDE_PROGRESS: "true" with: commit_changes: "true" clean_img_paths: docs/images/*.svg diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index 2d79807a0b..f3dec0e086 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -27,7 +27,7 @@ jobs: else curl -O https://nf-co.re/pipeline_names.json fi - echo "::set-output name=matrix::$(cat pipeline_names.json)" + echo "matrix=$(cat pipeline_names.json)" >> $GITHUB_OUTPUT sync: needs: get-pipelines strategy: matrix: ${{fromJson(needs.get-pipelines.outputs.matrix)}} fail-fast: false steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 name: Check out nf-core/tools - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 name: Check out nf-core/${{ matrix.pipeline }} with: repository: nf-core/${{ matrix.pipeline }} @@ -49,7 +49,7
@@ jobs: fetch-depth: "0" - name: Set up Python 3.8 - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: 3.8 @@ -78,7 +78,7 @@ jobs: - name: Upload sync log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: sync_log_${{ matrix.pipeline }} path: sync_log_${{ matrix.pipeline }}.txt diff --git a/.github/workflows/tools-api-docs-dev.yml b/.github/workflows/tools-api-docs-dev.yml index 8192c93ef2..0c84d38aea 100644 --- a/.github/workflows/tools-api-docs-dev.yml +++ b/.github/workflows/tools-api-docs-dev.yml @@ -1,6 +1,12 @@ name: nf-core/tools dev API docs # Run on push and PR to test that docs build -on: [pull_request, push] +on: + push: + branches: + - dev + pull_request: + release: + types: [published] # Cancel if a newer run is started concurrency: @@ -14,10 +20,10 @@ jobs: steps: - name: Check out source-code repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Python 3.7 - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: 3.7 diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml index 6dca273742..98c4f997fb 100644 --- a/.github/workflows/tools-api-docs-release.yml +++ b/.github/workflows/tools-api-docs-release.yml @@ -19,10 +19,10 @@ jobs: - ${{ github.event.release.tag_name }} steps: - name: Check out source-code repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Python 3.7 - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: 3.7 diff --git a/.gitpod.yml b/.gitpod.yml index 64994f3c08..263fcc41db 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -1,7 +1,10 @@ image: nfcore/gitpod:latest tasks: - - name: install current state of nf-core/tools - command: python -m pip install --upgrade -r requirements-dev.txt -e . + - name: install current state of nf-core/tools and setup pre-commit + command: | + python -m pip install -e . 
+ python -m pip install -r requirements-dev.txt + pre-commit install --install-hooks vscode: extensions: # based on nf-core.nf-core-extensionpack - codezombiech.gitignore # Language support for .gitignore files diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 948eb523f1..b7aeeb5bc9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,19 +1,13 @@ repos: - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 23.1.0 hooks: - id: black - language_version: python3.10 - repo: https://github.com/pycqa/isort - rev: 5.10.1 + rev: 5.12.0 hooks: - id: isort - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v2.6.2" + rev: "v2.7.1" hooks: - id: prettier - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: "v4.3.0" - hooks: - - id: name-tests-test - args: [--pytest-test-first] diff --git a/.prettierignore b/.prettierignore index 10f3c7f4e7..4cd77bb4ed 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,4 +1,7 @@ email_template.html +adaptivecard.json +slackreport.json docs/api/_build testing - +nf_core/module-template/modules/meta.yml +nf_core/module-template/tests/test.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index 4b4fb550df..67e6ada896 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,187 @@ # nf-core/tools: Changelog -## v2.5dev +## v2.8dev + +### Template + +- Turn on automatic clean-up of intermediate files in `work/` on successful pipeline completion in full-test config ([#2163](https://github.com/nf-core/tools/pull/2163)) [Contributed by @jfy133] + +- Add documentation to `usage.md` on how to use `params.yml` files, based on nf-core/ampliseq text ([#2173](https://github.com/nf-core/tools/pull/2173/)) [Contributed by @jfy133, @d4straub] +- Make jobs automatically resubmit for a much wider range of exit codes (now `104` and `130..145`) ([#2170](https://github.com/nf-core/tools/pull/2170)) + +### Linting + +### Modules + +- Add an `--empty-template` option to create a module without TODO statements or examples ([#2175](https://github.com/nf-core/tools/pull/2175) & [#2177](https://github.com/nf-core/tools/pull/2177)) + +### Subworkflows + +- Fix a problem where a module included in a subworkflow had a name change from TOOL to TOOL/SUBTOOL ([#2177](https://github.com/nf-core/tools/pull/2177)) +- Fix `nf-core subworkflows test` not running subworkflow tests ([#2181](https://github.com/nf-core/tools/pull/2181)) + +### General + +- `nf-core modules/subworkflows info` now prints the include statement for the module/subworkflow ([#2182](https://github.com/nf-core/tools/pull/2182)). + +## [v2.7.2 - Mercury Eagle Patch](https://github.com/nf-core/tools/releases/tag/2.7.2) - [2022-12-19] + +### Template + +- Fix the syntax of github_output in GitHub actions ([#2114](https://github.com/nf-core/tools/pull/2114)) +- Fix a bug introduced in 2.7 that made pipelines hang ([#2132](https://github.com/nf-core/tools/issues/2132)) +- Explicitly disable `conda` when using a container profile ([#2140](https://github.com/nf-core/tools/pull/2140)) + +### Linting + +- Allow specifying containers in less than three lines ([#2121](https://github.com/nf-core/tools/pull/2121)) +- Run prettier after dumping a json schema file ([#2124](https://github.com/nf-core/tools/pull/2124)) + +### General + +- Only check that a pipeline name doesn't contain dashes if the name is provided by prompt or `--name`. Don't check if a template file is used. ([#2123](https://github.com/nf-core/tools/pull/2123)) +- Deprecate `--enable_conda` parameter.
Use `conda.enable` instead ([#2131](https://github.com/nf-core/tools/pull/2131)) +- Handle `json.load()` exceptions ([#2134](https://github.com/nf-core/tools/pull/2134)) + +## [v2.7.1 - Mercury Eagle Patch](https://github.com/nf-core/tools/releases/tag/2.7.1) - [2022-12-08] + +- Patch release to fix pipeline sync ([#2110](https://github.com/nf-core/tools/pull/2110)) + +## [v2.7 - Mercury Eagle](https://github.com/nf-core/tools/releases/tag/2.7) - [2022-12-07] + +Another big release with lots of new features and bug fixes. Thanks to all contributors! + +**Highlights** + +- New `nf-core subworkflows` subcommand for creating, removing, testing, updating and finding subworkflows; see the [documentation](https://nf-co.re/tools/#subworkflows) for more information. +- Every pipeline now has its own GitHub codespace template, which can be used to develop the pipeline directly in the browser. +- Improved handling of modules and subworkflows from repos other than nf-core/modules. +- Pre-commit is now installed as a dependency, which allows us, among other things, to run prettier on the fly even if it is not manually installed. +- Shell completion for nf-core commands, more information [here](https://nf-co.re/tools#shell-completion). + +### Template + +#### Features + +- Ignore files in the `bin/` directory when running prettier ([#2080](https://github.com/nf-core/tools/pull/1957)). +- Add GitHub codespaces template ([#1957](https://github.com/nf-core/tools/pull/1957)) +- `nextflow run --version` will now print the workflow version from the manifest and exit ([#1951](https://github.com/nf-core/tools/pull/1951)). +- Add profile for running `docker` with the ARM chips (including Apple silicon) ([#1942](https://github.com/nf-core/tools/pull/1942) and [#2034](https://github.com/nf-core/tools/pull/2034)). +- Flip execution order of parameter summary printing and parameter validation to prevent 'hiding' of parameter errors ([#2033](https://github.com/nf-core/tools/pull/2033)). +- Change colour of 'pipeline completed successfully, but some processes failed' from red to yellow ([#2096](https://github.com/nf-core/tools/pull/2096)). + +#### Bug fixes + +- Fix lint warnings for `samplesheet_check.nf` module ([#1875](https://github.com/nf-core/tools/pull/1875)). +- Check that the workflow name provided with a template doesn't contain dashes ([#1822](https://github.com/nf-core/tools/pull/1822)) +- Remove `CITATION.cff` file from pipeline template, to prevent pipeline Zenodo entries from referencing the nf-core publication instead of the pipeline ([#2059](https://github.com/nf-core/tools/pull/2059)). + +### Linting + +#### Features + +- Add `--sort-by` option to linting which allows ordering module lint warnings/errors by either test name or module name ([#2077](https://github.com/nf-core/tools/pull/2077)). + +#### Bug fixes + +- Don't lint pipeline name if `manifest.name` is set in `.nf-core.yml` ([#2035](https://github.com/nf-core/tools/pull/2035)) +- Don't check for `docker pull` commands in `actions_ci` lint test (leftover from DSL1) ([#2055](https://github.com/nf-core/tools/pull/2055)). + +### General + +#### Features + +- Use pre-commit to run prettier if prettier is not available ([#1983](https://github.com/nf-core/tools/pull/1983)) and initialize pre-commit in gitpod and codespaces ([#1957](https://github.com/nf-core/tools/pull/1957)).
+- Refactor CLI flag `--hide-progress` to be at the top-level group, like `--verbose` ([#2016](https://github.com/nf-core/tools/pull/2016)) +- `nf-core sync` now supports the template YAML file using `-t/--template-yaml` ([#1880](https://github.com/nf-core/tools/pull/1880)). +- The default branch can now be specified when creating a new pipeline repo [#1959](https://github.com/nf-core/tools/pull/1959). +- Only warn when checking that the pipeline directory contains a `main.nf` and a `nextflow.config` file if the pipeline is not an nf-core pipeline [#1964](https://github.com/nf-core/tools/pull/1964) +- Bump promoted Python version from 3.7 to 3.8 ([#1971](https://github.com/nf-core/tools/pull/1971)). +- Extended the chat notifications to Slack ([#1829](https://github.com/nf-core/tools/pull/1829)). +- Don't print source file + line number on logging messages (except when verbose) ([#2015](https://github.com/nf-core/tools/pull/2015)) +- Automatically format `test.yml` content with Prettier ([#2078](https://github.com/nf-core/tools/pull/2078)) +- Automatically format `modules.json` content with Prettier ([#2074](https://github.com/nf-core/tools/pull/2074)) +- Add shell completion for nf-core tools commands([#2070](https://github.com/nf-core/tools/pull/2070)) + +#### Bug fixes, maintenance and tests + +- Fix error in tagging GitPod docker images during releases ([#1874](https://github.com/nf-core/tools/pull/1874)). +- Fix bug when updating modules from old version in old folder structure ([#1908](https://github.com/nf-core/tools/pull/1908)). +- Don't remove local copy of modules repo, only update it with fetch ([#1881](https://github.com/nf-core/tools/pull/1881)). +- Improve test coverage of `sync.py` and `__main__.py` ([#1936](https://github.com/nf-core/tools/pull/1936), [#1965](https://github.com/nf-core/tools/pull/1965)). +- Add file `versions.yml` when generating `test.yml` with `nf-core modules create-test-yml` but don't check for md5sum [#1963](https://github.com/nf-core/tools/pull/1963). +- Mock biocontainers and anaconda api calls in modules and subworkflows tests [#1967](https://github.com/nf-core/tools/pull/1967) +- Run tests with Python 3.11 ([#1970](https://github.com/nf-core/tools/pull/1970)). +- Run test with a realistic version of git ([#2043](https://github.com/nf-core/tools/pull/2043)). +- Fix incorrect file deletion in `nf-core launch` when `--params_in` has the same name as `--params_out` ([#1986](https://github.com/nf-core/tools/pull/1986)). +- Updated GitHub actions ([#1998](https://github.com/nf-core/tools/pull/1998), [#2001](https://github.com/nf-core/tools/pull/2001)) +- Code maintenance ([#1818](https://github.com/nf-core/tools/pull/1818), [#2032](https://github.com/nf-core/tools/pull/2032), [#2073](https://github.com/nf-core/tools/pull/2073)). +- Track from where modules and subworkflows are installed ([#1999](https://github.com/nf-core/tools/pull/1999)). +- Substitute ModulesCommand and SubworkflowsCommand by ComponentsCommand ([#2000](https://github.com/nf-core/tools/pull/2000)). +- Prevent installation with unsupported Python versions ([#2075](https://github.com/nf-core/tools/pull/2075)). +- Allow other remote URLs not starting with `http` ([#2061](https://github.com/nf-core/tools/pull/2061)) + +### Modules + +- Update patch file paths if the modules directory has the old structure ([#1878](https://github.com/nf-core/tools/pull/1878)). 
+- Don't write to `modules.json` file when applying a patch file during `nf-core modules update` ([#2017](https://github.com/nf-core/tools/pull/2017)). + +### Subworkflows + +- Add subworkflow commands `create-test-yml`, `create` and `install` ([#1897](https://github.com/nf-core/tools/pull/1897)). +- Update subworkflows install so it installs also imported modules and subworkflows ([#1904](https://github.com/nf-core/tools/pull/1904)). +- `check_up_to_date()` function from `modules_json.py` also checks for subworkflows ([#1934](https://github.com/nf-core/tools/pull/1934)). +- Add tests for `nf-core subworkflows install` command ([#1996](https://github.com/nf-core/tools/pull/1996)). +- Function `create()` from `modules_json.py` adds also subworkflows to `modules.json` file ([#2005](https://github.com/nf-core/tools/pull/2005)). +- Add `nf-core subworkflows update` command ([#2019](https://github.com/nf-core/tools/pull/2019)). + +## [v2.6 - Tin Octopus](https://github.com/nf-core/tools/releases/tag/2.6) - [2022-10-04] + +### Template + +- Add template for subworkflows +- Add `actions/upload-artifact` step to the awstest workflows, to expose the debug log file +- Add `prettier` as a requirement to Gitpod Dockerimage +- Bioconda incompatible conda channel setups now result in more informative error messages ([#1812](https://github.com/nf-core/tools/pull/1812)) +- Improve template customisation documentation ([#1821](https://github.com/nf-core/tools/pull/1821)) +- Update MultiQC module, update supplying MultiQC default and custom config and logo files to module +- Add a 'recommend' methods description text to MultiQC to help pipeline users report pipeline usage in publications ([#1749](https://github.com/nf-core/tools/pull/1749)) +- Fix template spacing modified by JINJA ([#1830](https://github.com/nf-core/tools/pull/1830)) +- Fix MultiQC execution on template ([#1855](https://github.com/nf-core/tools/pull/1855)) +- Don't skip including `base.config` when skipping nf-core/configs + +### Linting + +- Pipelines: Check that the old renamed `lib` files are not still present: + - `Checks.groovy` -> `Utils.groovy` + - `Completion.groovy` -> `NfcoreTemplate.groovy` + - `Workflow.groovy` -> `WorkflowMain.groovy` + +### General + +- Add function to enable chat notifications on MS Teams, accompanied by `hook_url` param to enable it. +- Schema: Remove `allOf` if no definition groups are left. 
+- Use contextlib to temporarily change working directories ([#1819](https://github.com/nf-core/tools/pull/1819)) +- More helpful error messages if `nf-core download` can't parse a singularity image download +- Add `nf-core subworkflows create` command + +### Modules + +- If something is wrong with the local repo cache, offer to delete it and try again ([#1850](https://github.com/nf-core/tools/issues/1850)) +- Restructure code to work with the directory restructuring in [modules](https://github.com/nf-core/modules/pull/2141) ([#1859](https://github.com/nf-core/tools/pull/1859)) +- Make `label: process_single` default when creating a new module + +## [v2.5.1 - Gold Otter Patch](https://github.com/nf-core/tools/releases/tag/2.5.1) - [2022-08-31] + +- Patch release to fix black linting in pipelines ([#1789](https://github.com/nf-core/tools/pull/1789)) +- Add isort options to pyproject.toml ([#1792](https://github.com/nf-core/tools/pull/1792)) +- Lint pyproject.toml file exists and content ([#1795](https://github.com/nf-core/tools/pull/1795)) +- Update GitHub PyPI package release action to v1 ([#1785](https://github.com/nf-core/tools/pull/1785)) + +### Template + +- Update GitHub actions to use nodejs16 ([#1944](https://github.com/nf-core/tools/pull/1944)) + +## [v2.5 - Gold Otter](https://github.com/nf-core/tools/releases/tag/2.5) - [2022-08-30] ### Template @@ -13,7 +194,7 @@ - Make `nf-core create` fail if Git default branch name is dev or TEMPLATE ([#1705](https://github.com/nf-core/tools/pull/1705)) - Convert `console` snippets to `bash` snippets in the template where applicable ([#1729](https://github.com/nf-core/tools/pull/1729)) - Add `branch` field to module entries in `modules.json` to record what branch a module was installed from ([#1728](https://github.com/nf-core/tools/issues/1728)) -- Remove shcema validation from `lib` folder and use Nextflow nf-validator plugin instead ([#1771](https://github.com/nf-core/tools/pull/1771/)) +- Add customisation option to remove all GitHub support with `nf-core create` ([#1766](https://github.com/nf-core/tools/pull/1766)) ### Linting @@ -200,6 +381,7 @@ Please note that there are many excellent integrations for Prettier available, f - Add `--publish_dir_mode` back into the pipeline template ([nf-core/rnaseq#752](https://github.com/nf-core/rnaseq/issues/752#issuecomment-1039451607)) - Add optional loading of of pipeline-specific institutional configs to `nextflow.config` - Make `--outdir` a mandatory parameter ([nf-core/tools#1415](https://github.com/nf-core/tools/issues/1415)) +- Add pipeline description and authors between triple quotes to avoid errors with apostrophes ([#2066](https://github.com/nf-core/tools/pull/2066), [#2104](https://github.com/nf-core/tools/pull/2104)) ### General @@ -288,7 +470,7 @@ Please note that there are many excellent integrations for Prettier available, f ### Modules -- Fixed typo in `module_utils.py`. +- Fixed typo in `modules_utils.py`. - Fixed failing lint test when process section was missing from module. Also added the local failing tests to the warned section of the output table. 
([#1235](https://github.com/nf-core/tools/issues/1235)) - Added `--diff` flag to `nf-core modules update` which shows the diff between the installed files and the versions - Update `nf-core modules create` help texts which were not changed with the introduction of the `--dir` flag diff --git a/CITATION.cff b/CITATION.cff index 4533e2f28c..017666c018 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -13,8 +13,8 @@ authors: given-names: Johannes - family-names: Wilm given-names: Andreas - - family-names: Ulysse Garcia - given-names: Maxime + - family-names: Garcia + given-names: Maxime Ulysse - family-names: Di Tommaso given-names: Paolo - family-names: Nahnsen @@ -39,8 +39,8 @@ prefered-citation: given-names: Johannes - family-names: Wilm given-names: Andreas - - family-names: Ulysse Garcia - given-names: Maxime + - family-names: Garcia + given-names: Maxime Ulysse - family-names: Di Tommaso given-names: Paolo - family-names: Nahnsen diff --git a/MANIFEST.in b/MANIFEST.in index 56005ebc33..7db3ca4353 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,4 +2,6 @@ include LICENSE include README.md graft nf_core/module-template graft nf_core/pipeline-template +graft nf_core/subworkflow-template include requirements.txt +include nf_core/.pre-commit-prettier-config.yaml diff --git a/README.md b/README.md index 7703a91c83..fa4fc96d0e 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,7 @@ A python package with helper tools for the nf-core community. ## Table of contents - [`nf-core` tools installation](#installation) +- [`nf-core` tools update](#update-tools) - [`nf-core list` - List available pipelines](#listing-pipelines) - [`nf-core launch` - Run a pipeline with interactive parameter prompts](#launch-a-pipeline) - [`nf-core download` - Download pipeline for offline use](#downloading-pipelines-for-offline-use) @@ -43,6 +44,17 @@ A python package with helper tools for the nf-core community. - [`modules bump-versions` - Bump software versions of modules](#bump-bioconda-and-container-versions-of-modules-in) - [`modules mulled` - Generate the name for a multi-tool container image](#generate-the-name-for-a-multi-tool-container-image) +- [`nf-core subworkflows` - commands for dealing with subworkflows](#subworkflows) + - [`subworkflows list` - List available subworkflows](#list-subworkflows) + - [`subworkflows list remote` - List remote subworkflows](#list-remote-subworkflows) + - [`subworkflows list local` - List installed subworkflows](#list-installed-subworkflows) + - [`subworkflows info` - Show information about a subworkflow](#show-information-about-a-subworkflow) + - [`subworkflows install` - Install subworkflows in a pipeline](#install-subworkflows-in-a-pipeline) + - [`subworkflows update` - Update subworkflows in a pipeline](#update-subworkflows-in-a-pipeline) + - [`subworkflows remove` - Remove a subworkflow from a pipeline](#remove-a-subworkflow-from-a-pipeline) + - [`subworkflows create` - Create a subworkflow from the template](#create-a-new-subworkflow) + - [`subworkflows create-test-yml` - Create the `test.yml` file for a subworkflow](#create-a-subworkflow-test-config-file) + - [`subworkflows test` - Run the tests for a subworkflow](#run-the-tests-for-a-subworkflow-using-pytest) - [Citation](#citation) The nf-core tools package is written in Python and can be imported and used within other packages. 
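As a quick illustration of that, the functions behind the CLI can be called directly from Python. A minimal sketch (the `nf_core.list.list_workflows` call mirrors what the `nf-core list` command does; exact module layout and signatures may differ between versions, so check the Tools Python API docs):

```python
# Minimal sketch: use nf-core/tools as a library rather than via the CLI.
# Assumes the nf_core.list module used by `nf-core list`; consult the
# Tools Python API docs for the exact signature in your version.
import nf_core.list

# Render the same overview of available pipelines that `nf-core list` prints
print(nf_core.list.list_workflows())
```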
@@ -55,7 +67,7 @@ For documentation of the internal Python functions, please refer to the [Tools P You can install `nf-core/tools` from [bioconda](https://bioconda.github.io/recipes/nf-core/README.html). First, install conda and configure the channels to use bioconda -(see the [bioconda documentation](https://bioconda.github.io/user/install.html)). +(see the [bioconda documentation](https://bioconda.github.io/index.html#usage)). Then, just run the conda installation command: ```bash @@ -65,7 +77,7 @@ conda install nf-core Alternatively, you can create a new environment with both nf-core/tools and nextflow: ```bash -conda create --name nf-core python=3.7 nf-core nextflow +conda create --name nf-core python=3.8 nf-core nextflow conda activate nf-core ``` @@ -186,6 +198,39 @@ If you would prefer to skip this check, set the environment variable `NFCORE_NO_ export NFCORE_NO_VERSION_CHECK=1 ``` +### Update tools + +It is advisable to keep nf-core/tools updated to the most recent version. The update command depends on how it was installed; for example, if you installed it with conda you can use: + +```bash +conda update nf-core +``` + +If you used pip: + +```bash +pip install --upgrade nf-core +``` + +Please refer to the respective documentation for further details on managing packages, for example [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-pkgs.html#updating-packages) or [pip](https://packaging.python.org/en/latest/tutorials/installing-packages/#upgrading-packages). + +### Activate shell completions for nf-core/tools + +Auto-completion for the `nf-core` command is available for bash, zsh and fish. To activate it, add the following lines to the respective shell config files. + +| shell | shell config file | command | | ----- | ----------------------------------------- | -------------------------------------------------- | | bash | `~/.bashrc` | `eval "$(_NF_CORE_COMPLETE=bash_source nf-core)"` | | zsh | `~/.zshrc` | `eval "$(_NF_CORE_COMPLETE=zsh_source nf-core)"` | | fish | `~/.config/fish/completions/nf-core.fish` | `eval (env _NF_CORE_COMPLETE=fish_source nf-core)` | + +After a restart of the shell session you should have auto-completion for the `nf-core` command and all its sub-commands and options. + +> **NB:** The added line will run the command `nf-core` on every shell startup (which will also slow the startup down), so you should either have nf-core/tools installed globally, +> or wrap the line inside `if type nf-core > /dev/null; then ` \ `fi` for bash and zsh, or `if command -v nf-core &> /dev/null eval (env _NF_CORE_COMPLETE=fish_source nf-core) end` for fish. You then need to source the config in your environment for the completions to be activated. + +> **NB:** If you see the error `command not found: compdef`, make sure that your config file contains the line `autoload -Uz compinit && compinit` before the eval line. + ## Listing pipelines The command `nf-core list` shows all available nf-core pipelines along with their latest version, when that was published and how recently the pipeline code was pulled to your local system (if at all). @@ -216,7 +261,7 @@ Archived pipelines are not returned by default. To include them, use the `--show ## Launch a pipeline Some nextflow pipelines have a considerable number of command line flags that can be used. -To help with this, you can use the `nf-core launch` command +To help with this, you can use the `nf-core launch` command.
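For example, to be walked through the parameters of a pipeline interactively (the pipeline name here is purely illustrative; any nf-core pipeline name or local workflow path should work):

```bash
nf-core launch rnaseq
```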
You can choose between a web-based graphical interface or an interactive command-line wizard tool to enter the pipeline parameters for your run. Both interfaces show documentation alongside each parameter and validate your inputs. @@ -299,6 +344,8 @@ You can run the pipeline by simply providing the directory path for the `workflo nextflow run /path/to/download/nf-core-rnaseq-dev/workflow/ --input mydata.csv --outdir results # usual parameters here ``` +> Note that if you downloaded Singularity images, you will need to use `-profile singularity` or have it enabled in your config file. + ### Downloaded nf-core configs The pipeline files are automatically updated (`params.custom_config_base` is set to `../configs`), so that the local copy of institutional configs are available when running the pipeline. @@ -397,22 +444,29 @@ Note that if the required arguments for `nf-core create` are not given, it will The `nf-core create` command comes with a number of options that allow you to customize the creation of a pipeline if you intend to not publish it as an nf-core pipeline. This can be done in two ways: by using interactive prompts, or by supplying a `template.yml` file using the `--template-yaml ` option. -Both options allow you to specify a custom pipeline prefix, as well as selecting parts of the template to be excluded during pipeline creation. +Both options allow you to specify a custom pipeline prefix to use instead of the common `nf-core`, as well as to select parts of the template to be excluded during pipeline creation. The interactive prompts will guide you through the pipeline creation process. An example of a `template.yml` file is shown below. ```yaml -name: cool-pipe +name: coolpipe description: A cool pipeline author: me -prefix: cool-pipes-company +prefix: myorg skip: + - github - ci - github_badges - igenomes - nf_core_configs ``` -This will create a pipeline called `cool-pipe` in the directory `cool-pipes-company-cool-pipe` with `me` as the author. It will exclude the GitHub CI from the pipeline, remove GitHub badges from the `README.md` file, remove pipeline options related to iGenomes and exclude `nf_core/configs` options. +This will create a pipeline called `coolpipe` in the directory `myorg-coolpipe` (`-`) with `me` as the author. It will exclude all possible parts of the template: + +- `github`: removes all files required for GitHub hosting of the pipeline. Specifically, the `.github` folder and `.gitignore` file. +- `ci`: removes the GitHub continuous integration tests from the pipeline. Specifically, the `.github/workflows/` folder. +- `github_badges`: removes GitHub badges from the `README.md` file. +- `igenomes`: removes pipeline options related to iGenomes, including the `conf/igenomes.config` file and all references to it. +- `nf_core_configs`: excludes `nf_core/configs` repository options, which make multiple config profiles for various institutional clusters available. To run the pipeline creation silently (i.e. without any prompts) with the nf-core template, you can use the `--plain` option.
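As a sketch of the non-interactive route, assuming the YAML above is saved as `template.yml` in the current directory, the pipeline can then be created from it directly (name, description and author are taken from the file instead of prompts):

```bash
nf-core create --template-yaml template.yml
```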
@@ -424,7 +478,6 @@ This is the same test that is used on the automated continuous integration tests For example, the current version looks something like this: ![`nf-core modules create-test-yml fastqc --no-prompts --force`](docs/images/nf-core-modules-create-test.svg) @@ -832,7 +924,7 @@ before_command: sed 's/1.13a/1.10/g' modules/multiqc/main.nf > modules/multiqc/m ### Run the tests for a module using pytest -To run unit tests of a module that you have installed or the test created by the command [`nf-core mdoules create-test-yml`](#create-a-module-test-config-file), you can use `nf-core modules test` command. This command runs the tests specified in `modules/tests/software///test.yml` file using [pytest](https://pytest-workflow.readthedocs.io/en/stable/). +To run unit tests of a module that you have installed or the test created by the command [`nf-core modules create-test-yml`](#create-a-module-test-config-file), you can use the `nf-core modules test` command. This command runs the tests specified in the `modules/tests/software///test.yml` file using [pytest](https://pytest-workflow.readthedocs.io/en/stable/). You can specify the module name in the form TOOL/SUBTOOL in command line or provide it later by prompts. @@ -880,6 +972,244 @@ after_command: cd ../../ && rm -rf tmp ![`nf-core modules mulled pysam==0.16.0.1 biopython==1.78`](docs/images/nf-core-modules-mulled.svg) +## Subworkflows + +After the launch of nf-core modules, we now also provide nf-core subworkflows to fully utilize the power of DSL2 modularization. +Subworkflows are chains of multiple module definitions that can be imported into any pipeline. +This allows multiple pipelines to use the same code for the same tasks, and gives a greater degree of reusability and unit testing. + +To allow us to test modules and subworkflows together, we put the nf-core DSL2 subworkflows into the `subworkflows` directory of the [nf-core/modules](https://github.com/nf-core/modules) repository. + +### Custom remote subworkflows + +The subworkflows supercommand released in nf-core/tools version 2.7 comes with two flags for specifying a custom remote repository: + +- `--git-remote `: Specify the repository from which the subworkflows should be fetched as a git URL. Defaults to the GitHub repository of `nf-core/modules`. +- `--branch `: Specify the branch from which the subworkflows should be fetched. Defaults to the default branch of your repository. + +For example, if you want to install the `bam_stats_samtools` subworkflow from the repository `nf-core/modules-test` hosted at `gitlab.com` in the branch `subworkflows`, you can use the following command: + +```bash +nf-core subworkflows --git-remote git@gitlab.com:nf-core/modules-test.git --branch subworkflows install bam_stats_samtools +``` + +Note that a custom remote must follow a similar directory structure to that of `nf-core/modules` for the `nf-core subworkflows` commands to work properly. + +You will be prompted for the directory where subworkflows are installed, or it will be obtained from `org_path` in the `.nf-core.yml` file if available. If your subworkflows are located at `subworkflows/my-folder/SUBWORKFLOW_NAME`, your `.nf-core.yml` file should have: + +```yaml +org_path: my-folder +``` + +Please avoid installing the same tools from two different remotes, as this can lead to further errors. + +During initialisation, the subworkflows commands will try to pull changes from the remote repositories. If you want to disable this, for example for performance reasons or to run the commands offline, you can use the flag `--no-pull`. Note, however, that the commands will still need to clone repositories that have not been used previously.
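As a rough sketch of combining these flags (the repository URL is illustrative only, and the group-level placement of `--no-pull` is assumed to match the `--git-remote`/`--branch` usage shown above):

```bash
# List remote subworkflows from a custom remote without pulling changes first
nf-core subworkflows --git-remote https://github.com/myorg/my-modules.git --no-pull list remote
```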
+ +### Private remote repositories + +You can use the subworkflows command with private remote repositories. Make sure that your local `git` is correctly configured with your private remote +and then specify the remote the same way you would with a public remote repository. + +### List subworkflows + +The `nf-core subworkflows list` command provides the subcommands `remote` and `local` for listing subworkflows installed in a remote repository and in the local pipeline, respectively. Both subcommands allow you to use a pattern to filter the subworkflows by keyword, e.g. `nf-core subworkflows list `. + +#### List remote subworkflows + +To list all subworkflows available on [nf-core/modules](https://github.com/nf-core/modules), you can use +`nf-core subworkflows list remote`, which will print all available subworkflows to the terminal. + + + +![`nf-core subworkflows list remote`](docs/images/nf-core-subworkflows-list-remote.svg) + +#### List installed subworkflows + +To list subworkflows installed in a local pipeline directory you can use `nf-core subworkflows list local`. This will list the subworkflows installed in the current working directory by default. If you want to specify another directory, use the `--dir ` flag. + + + +![`nf-core subworkflows list local`](docs/images/nf-core-subworkflows-list-local.svg) + +### Show information about a subworkflow + +For quick help about how a subworkflow works, use `nf-core subworkflows info `. +This shows documentation about the subworkflow on the command line, similar to what's available on the +[nf-core website](https://nf-co.re/subworkflows). + + + +![`nf-core subworkflows info bam_rseqc`](docs/images/nf-core-subworkflows-info.svg) + +### Install subworkflows in a pipeline + +You can install subworkflows from [nf-core/modules](https://github.com/nf-core/modules) in your pipeline using `nf-core subworkflows install`. +A subworkflow installed this way will be installed to the `./subworkflows/nf-core` directory. + + + +![`nf-core subworkflows install bam_rseqc`](docs/images/nf-core-subworkflows-install.svg) + +You can pass the subworkflow name as an optional argument to `nf-core subworkflows install` like above or select it from a list of available subworkflows by only running `nf-core subworkflows install`. + +There are four additional flags that you can use when installing a subworkflow: + +- `--dir`: Pipeline directory, the default is the current working directory. +- `--force`: Overwrite a previously installed version of the subworkflow. +- `--prompt`: Select the subworkflow version using a CLI prompt. +- `--sha `: Install the subworkflow at a specific commit. + +### Update subworkflows in a pipeline + +You can update subworkflows installed from a remote repository in your pipeline using `nf-core subworkflows update`. + + + +![`nf-core subworkflows update --all --no-preview`](docs/images/nf-core-subworkflows-update.svg) + +You can pass the subworkflow name as an optional argument to `nf-core subworkflows update` like above or select it from the list of available subworkflows by only running `nf-core subworkflows update`. + +There are eight additional flags that you can use with this command: + +- `--dir`: Pipeline directory, the default is the current working directory. +- `--force`: Reinstall the subworkflow even if it appears to be up to date. +- `--prompt`: Select the subworkflow version using a CLI prompt.
+- `--sha `: Install the subworkflow at a specific commit from the `nf-core/modules` repository. +- `--preview/--no-preview`: Show the diff between the installed files and the new version before installing. +- `--save-diff `: Save diffs to a file instead of updating in place. The diffs can then be applied with `git apply `. +- `--all`: Use this flag to run the command on all subworkflows in the pipeline. +- `--update-deps`: Use this flag to automatically update all dependencies of a subworkflow. + +If you don't want to update certain subworkflows or want to update them to specific versions, you can make use of the `.nf-core.yml` configuration file. For example, you can prevent the `bam_rseqc` subworkflow installed from `nf-core/modules` from being updated by adding the following to the `.nf-core.yml` file: + +```yaml +update: + https://github.com/nf-core/modules.git: + nf-core: + bam_rseqc: False +``` + +If you want this subworkflow to be updated only to a specific version (or downgraded), you could instead specify the version: + +```yaml +update: + https://github.com/nf-core/modules.git: + nf-core: + bam_rseqc: "36a77f7c6decf2d1fb9f639ae982bc148d6828aa" +``` + +This also works at the repository level. For example, if you want to exclude all modules and subworkflows installed from `nf-core/modules` from being updated you could add: + +```yaml +update: + https://github.com/nf-core/modules.git: + nf-core: False +``` + +or if you want all subworkflows in `nf-core/modules` at a specific version: + +```yaml +update: + https://github.com/nf-core/modules.git: + nf-core: "e937c7950af70930d1f34bb961403d9d2aa81c7" +``` + +Note that the subworkflow versions specified in the `.nf-core.yml` file have higher precedence than versions specified with the command line flags, thus aiding you in writing reproducible pipelines. + +### Remove a subworkflow from a pipeline + +To delete a subworkflow from your pipeline, run `nf-core subworkflows remove`. + + + +![`nf-core subworkflows remove bam_rseqc`](docs/images/nf-core-subworkflows-remove.svg) + +You can pass the subworkflow name as an optional argument to `nf-core subworkflows remove` like above or select it from the list of available subworkflows by only running `nf-core subworkflows remove`. To specify the pipeline directory, use `--dir `. + +### Create a new subworkflow + +This command creates a new nf-core subworkflow from the nf-core subworkflow template. +This ensures that your subworkflow follows the nf-core guidelines. +The template contains extensive `TODO` messages to walk you through the changes you need to make to the template. +See the [subworkflow documentation](https://nf-co.re/docs/contributing/subworkflows) for more details about creating a new subworkflow, including rules about nomenclature and a step-by-step guide. + +You can create a new subworkflow using `nf-core subworkflows create`. + +This command can be used both when writing a subworkflow for the shared [nf-core/modules](https://github.com/nf-core/modules) repository, +and when creating local subworkflows for a pipeline. + +Which type of repository you are working in is detected by the `repository_type` flag in a `.nf-core.yml` file in the root directory, +set to either `pipeline` or `modules`. +The command will automatically look through parent directories for this file to set the root path, so that you can run the command in a subdirectory. +It will start in the current working directory, or whatever is specified with `--dir `.
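As a concrete sketch, the detection file described above can be as small as a single line at the repository root (using one of the two documented values):

```yaml
# .nf-core.yml - tells nf-core tools which kind of repository this is
repository_type: pipeline
```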
+ +The `nf-core subworkflows create` command will prompt you with the relevant questions in order to create all of the necessary subworkflow files. + + + +![`cd modules && nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force`](docs/images/nf-core-subworkflows-create.svg) + +### Create a subworkflow test config file + +All subworkflows on [nf-core/modules](https://github.com/nf-core/modules) have a strict requirement of being unit tested using minimal test data. +To help developers build new subworkflows, the `nf-core subworkflows create-test-yml` command automates the creation of the YAML file required to document the output file `md5sum` and other information generated by the testing. +After you have written a minimal Nextflow script to test your subworkflow in `/tests/subworkflow//main.nf`, this command will run the tests for you and create the `/tests/subworkflow///test.yml` file. + + + +![`nf-core subworkflows create-test-yml bam_stats_samtools --no-prompts --force`](docs/images/nf-core-subworkflows-create-test.svg) + +### Run the tests for a subworkflow using pytest + +To run unit tests of a subworkflow that you have installed or the test created by the command [`nf-core subworkflows create-test-yml`](#create-a-subworkflow-test-config-file), you can use the `nf-core subworkflows test` command. This command runs the tests specified in the `tests/subworkflows//test.yml` file using [pytest](https://pytest-workflow.readthedocs.io/en/stable/). + +You can specify the subworkflow name on the command line or provide it later when prompted. + + + +![`nf-core subworkflows test bam_rseqc --no-prompts`](docs/images/nf-core-subworkflows-test.svg) + ## Citation If you use `nf-core tools` in your work, please cite the `nf-core` publication as follows: diff --git a/docs/api/_src/api/subworkflows.md b/docs/api/_src/api/subworkflows.md new file mode 100644 index 0000000000..438ccd0185 --- /dev/null +++ b/docs/api/_src/api/subworkflows.md @@ -0,0 +1,9 @@ +# nf_core.subworkflows + +```{eval-rst} +.. automodule:: nf_core.subworkflows + :members: + :undoc-members: + :show-inheritance: + :private-members: +``` diff --git a/docs/api/_src/module_lint_tests/module_changes.md b/docs/api/_src/module_lint_tests/module_changes.md new file mode 100644 index 0000000000..ce2d428ca9 --- /dev/null +++ b/docs/api/_src/module_lint_tests/module_changes.md @@ -0,0 +1,5 @@ +# module_changes + +```{eval-rst} +.. automethod:: nf_core.modules.lint.ModuleLint.module_changes +``` diff --git a/docs/api/_src/module_lint_tests/module_patch.md b/docs/api/_src/module_lint_tests/module_patch.md new file mode 100644 index 0000000000..dc98d97a6f --- /dev/null +++ b/docs/api/_src/module_lint_tests/module_patch.md @@ -0,0 +1,5 @@ +# module_patch + +```{eval-rst} +.. automethod:: nf_core.modules.lint.ModuleLint.module_patch +``` diff --git a/docs/api/_src/module_lint_tests/module_tests.md b/docs/api/_src/module_lint_tests/module_tests.md new file mode 100644 index 0000000000..d10934c0d6 --- /dev/null +++ b/docs/api/_src/module_lint_tests/module_tests.md @@ -0,0 +1,5 @@ +# module_tests + +```{eval-rst} +.. automethod:: nf_core.modules.lint.ModuleLint.module_tests +``` diff --git a/docs/api/_src/module_lint_tests/module_version.md b/docs/api/_src/module_lint_tests/module_version.md new file mode 100644 index 0000000000..a088b4cc66 --- /dev/null +++ b/docs/api/_src/module_lint_tests/module_version.md @@ -0,0 +1,5 @@ +# module_version + +```{eval-rst} +..
automethod:: nf_core.modules.lint.ModuleLint.module_version +``` diff --git a/docs/api/_src/pipeline_lint_tests/modules_structure.md b/docs/api/_src/pipeline_lint_tests/modules_structure.md new file mode 100644 index 0000000000..faa39ca77f --- /dev/null +++ b/docs/api/_src/pipeline_lint_tests/modules_structure.md @@ -0,0 +1,5 @@ +# modules_structure + +```{eval-rst} +.. automethod:: nf_core.lint.PipelineLint.modules_structure +``` diff --git a/docs/api/make_lint_md.py b/docs/api/make_lint_md.py index e0265c707d..9b5a706473 100644 --- a/docs/api/make_lint_md.py +++ b/docs/api/make_lint_md.py @@ -21,7 +21,6 @@ def make_docs(docs_basedir, lint_tests, md_template): else: with open(fn, "w") as fh: fh.write(md_template.format(test_name)) - print(test_name) for fn in existing_docs: os.remove(fn) diff --git a/docs/images/nf-core-bump-version.svg b/docs/images/nf-core-bump-version.svg index 6e8b89d1c2..89843856b2 100644 --- a/docs/images/nf-core-bump-version.svg +++ b/docs/images/nf-core-bump-version.svg @@ -19,78 +19,77 @@ font-weight: 700; } - .terminal-1189121654-matrix { + .terminal-2980173434-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1189121654-title { + .terminal-2980173434-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1189121654-r1 { fill: #c5c8c6 } -.terminal-1189121654-r2 { fill: #98a84b } -.terminal-1189121654-r3 { fill: #9a9b99 } -.terminal-1189121654-r4 { fill: #608ab1 } -.terminal-1189121654-r5 { fill: #d0b344 } -.terminal-1189121654-r6 { fill: #868887 } -.terminal-1189121654-r7 { fill: #cc555a } + .terminal-2980173434-r1 { fill: #c5c8c6 } +.terminal-2980173434-r2 { fill: #98a84b } +.terminal-2980173434-r3 { fill: #9a9b99 } +.terminal-2980173434-r4 { fill: #608ab1 } +.terminal-2980173434-r5 { fill: #d0b344 } +.terminal-2980173434-r6 { fill: #cc555a } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -102,26 +101,26 @@ - + - - $ nf-core bump-version 1.1 - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.5.dev0 - https://nf-co.re - - -INFO     Changing version number from '1.0dev' to '1.1'bump_version.py:35 -INFO     Updated version in 'nextflow.config'bump_version.py:164 - - version         = '1.0dev' - + version = '1.1' - - + + $ nf-core bump-version 1.1 + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + +INFO     Changing version number from '1.0dev' to '1.1' +INFO     Updated version in 'nextflow.config' + - version         = '1.0dev' + + version = '1.1' + + diff --git a/docs/images/nf-core-create.svg b/docs/images/nf-core-create.svg index a453d8c57c..17b28b0b67 100644 --- a/docs/images/nf-core-create.svg +++ b/docs/images/nf-core-create.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - + - + - - $ nf-core create -n nextbigthing -d "This pipeline analyses data from the next big omics technique"  --a "Big Steve" --plain - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.5.dev0 - https://nf-co.re - - -INFO     Creating new nf-core pipeline: 'nf-core/nextbigthing'create.py:226 -INFO     Initialising pipeline git repository                                          create.py:518 -INFO     Done. Remember to add a remote and push to GitHub:                            create.py:525 - cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing - git remote add origin git@github.com:USERNAME/REPO_NAME.git  - git push --all origin                                        -INFO     This will also push your newly created dev branch and the TEMPLATE branch for create.py:531 -         syncing.                                                                       -INFO    !!!!!! IMPORTANT !!!!!!create.py:217 - -If you are interested in adding your pipeline to the nf-core community, -PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE! - -Please read: https://nf-co.re/developers/adding_pipelines#join-the-community + + $ nf-core create -n nextbigthing -d "This pipeline analyses data from the next big omics technique"  +-a "Big Steve" --plain + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + +INFO     Creating new nf-core pipeline: 'nf-core/nextbigthing' +INFO     Initialising pipeline git repository                                                        +INFO     Done. Remember to add a remote and push to GitHub:                                          + cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing + git remote add origin git@github.com:USERNAME/REPO_NAME.git  + git push --all origin                                        +INFO     This will also push your newly created dev branch and the TEMPLATE branch for syncing.      +INFO    !!!!!! IMPORTANT !!!!!! + +If you are interested in adding your pipeline to the nf-core community, +PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE! 
+ +Please read: https://nf-co.re/developers/adding_pipelines#join-the-community diff --git a/docs/images/nf-core-download-tree.svg b/docs/images/nf-core-download-tree.svg index 24a0f671fe..a06c743fdf 100644 --- a/docs/images/nf-core-download-tree.svg +++ b/docs/images/nf-core-download-tree.svg @@ -19,123 +19,123 @@ font-weight: 700; } - .terminal-187724284-matrix { + .terminal-3113317903-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-187724284-title { + .terminal-3113317903-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-187724284-r1 { fill: #c5c8c6 } + .terminal-3113317903-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -147,43 +147,43 @@ - + - - $ tree -L 2 nf-core-rnaseq/ -nf-core-rnaseq/ -├── configs -│   ├── CITATION.cff -│   ├── LICENSE -│   ├── README.md -│   ├── bin -│   ├── conf -│   ├── configtest.nf -│   ├── docs -│   ├── nextflow.config -│   ├── nfcore_custom.config -│   └── pipeline -└── workflow -    ├── CHANGELOG.md -    ├── CITATIONS.md -    ├── CODE_OF_CONDUCT.md -    ├── LICENSE -    ├── README.md -    ├── assets -    ├── bin -    ├── conf -    ├── docs -    ├── lib -    ├── main.nf -    ├── modules -    ├── modules.json -    ├── nextflow.config -    ├── nextflow_schema.json -    ├── subworkflows -    ├── tower.yml -    └── workflows - -14 directories, 16 files + + $ tree -L 2 nf-core-rnaseq/ +nf-core-rnaseq/ +├── configs +│   ├── CITATION.cff +│   ├── LICENSE +│   ├── README.md +│   ├── bin +│   ├── conf +│   ├── configtest.nf +│   ├── docs +│   ├── nextflow.config +│   ├── nfcore_custom.config +│   └── pipeline +└── workflow +    ├── CHANGELOG.md +    ├── CITATIONS.md +    ├── CODE_OF_CONDUCT.md +    ├── LICENSE +    ├── README.md +    ├── assets +    ├── bin +    ├── conf +    ├── docs +    ├── lib +    ├── main.nf +    ├── modules +    ├── modules.json +    ├── nextflow.config +    ├── nextflow_schema.json +    ├── subworkflows +    ├── tower.yml +    └── workflows + +14 directories, 16 files diff --git a/docs/images/nf-core-download.svg b/docs/images/nf-core-download.svg index 5502d4fc46..d26c6011d5 100644 --- a/docs/images/nf-core-download.svg +++ b/docs/images/nf-core-download.svg @@ -19,77 +19,76 @@ font-weight: 700; } - .terminal-2172378614-matrix { + .terminal-512307359-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2172378614-title { + .terminal-512307359-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2172378614-r1 { fill: #c5c8c6 } -.terminal-2172378614-r2 { fill: #98a84b } -.terminal-2172378614-r3 { fill: #9a9b99 } -.terminal-2172378614-r4 { fill: #608ab1 } -.terminal-2172378614-r5 { fill: #d0b344 } -.terminal-2172378614-r6 { fill: #868887 } + .terminal-512307359-r1 { fill: #c5c8c6 } +.terminal-512307359-r2 { fill: #98a84b } +.terminal-512307359-r3 { fill: #9a9b99 } +.terminal-512307359-r4 { fill: #608ab1 } +.terminal-512307359-r5 { fill: #d0b344 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -101,26 +100,26 @@ - + - - $ nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -c none - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.5.dev0 - https://nf-co.re - - -INFO     Saving 'nf-core/rnaseq'download.py:158 -          Pipeline revision: '3.8' -          Pull containers: 'none' -          Output directory: 'nf-core-rnaseq' -INFO     Downloading workflow files from GitHub                                      download.py:161 -INFO     Downloading centralised configs from GitHub                                 download.py:165 + + $ nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -c none + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + +INFO     Saving 'nf-core/rnaseq' +          Pipeline revision: '3.8' +          Pull containers: 'none' +          Output directory: 'nf-core-rnaseq' +INFO     Downloading workflow files from GitHub                                                      +INFO     Downloading centralised configs from GitHub                                                 diff --git a/docs/images/nf-core-launch-rnaseq.svg b/docs/images/nf-core-launch-rnaseq.svg index 96ecbd3426..c36c892e5a 100644 --- a/docs/images/nf-core-launch-rnaseq.svg +++ b/docs/images/nf-core-launch-rnaseq.svg @@ -19,73 +19,72 @@ font-weight: 700; } - .terminal-2229446587-matrix { + .terminal-3166875143-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2229446587-title { + .terminal-3166875143-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2229446587-r1 { fill: #c5c8c6 } -.terminal-2229446587-r2 { fill: #98a84b } -.terminal-2229446587-r3 { fill: #9a9b99 } -.terminal-2229446587-r4 { fill: #608ab1 } -.terminal-2229446587-r5 { fill: #d0b344 } -.terminal-2229446587-r6 { fill: #868887 } -.terminal-2229446587-r7 { fill: #c5c8c6;font-weight: bold } -.terminal-2229446587-r8 { fill: #68a0b3;font-weight: bold } + .terminal-3166875143-r1 { fill: #c5c8c6 } +.terminal-3166875143-r2 { fill: #98a84b } +.terminal-3166875143-r3 { fill: #9a9b99 } +.terminal-3166875143-r4 { fill: #608ab1 } +.terminal-3166875143-r5 { fill: #d0b344 } +.terminal-3166875143-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-3166875143-r7 { fill: #68a0b3;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -97,24 +96,24 @@ - + - - $ nf-core launch rnaseq -r 3.8.1 - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.5.dev0 - https://nf-co.re - - -INFO     NOTE: This tool ignores any pipeline parameter defaults overwritten by        launch.py:131 -         Nextflow config files or profiles                                              - -INFO     Downloading workflow: nf-core/rnaseq (3.8.1)list.py:67 + + $ nf-core launch rnaseq -r 3.8.1 + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + +INFO     NOTE: This tool ignores any pipeline parameter defaults overwritten by Nextflow config      +         files or profiles                                                                           + +INFO     Downloading workflow: nf-core/rnaseq (3.8.1) diff --git a/docs/images/nf-core-licences.svg b/docs/images/nf-core-licences.svg index 5f84e722e9..f96e135487 100644 --- a/docs/images/nf-core-licences.svg +++ b/docs/images/nf-core-licences.svg @@ -19,109 +19,108 @@ font-weight: 700; } - .terminal-290135093-matrix { + .terminal-3773418102-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-290135093-title { + .terminal-3773418102-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-290135093-r1 { fill: #c5c8c6 } -.terminal-290135093-r2 { fill: #98a84b } -.terminal-290135093-r3 { fill: #9a9b99 } -.terminal-290135093-r4 { fill: #608ab1 } -.terminal-290135093-r5 { fill: #d0b344 } -.terminal-290135093-r6 { fill: #68a0b3;font-weight: bold } -.terminal-290135093-r7 { fill: #868887 } -.terminal-290135093-r8 { fill: #c5c8c6;font-weight: bold } + .terminal-3773418102-r1 { fill: #c5c8c6 } +.terminal-3773418102-r2 { fill: #98a84b } +.terminal-3773418102-r3 { fill: #9a9b99 } +.terminal-3773418102-r4 { fill: #608ab1 } +.terminal-3773418102-r5 { fill: #d0b344 } +.terminal-3773418102-r6 { fill: #68a0b3;font-weight: bold } +.terminal-3773418102-r7 { fill: #c5c8c6;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -133,36 +132,36 @@ - + - - $ nf-core licences deepvariant - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.5.dev0 - https://nf-co.re - - -INFO     Fetching licence information for 8 tools                                     licences.py:77 -INFO     Warning: This tool only prints licence information for the software tools    licences.py:98 -         packaged using conda.                                                         -INFO     The pipeline may use other software and dependencies not described here.     licences.py:99 -┏━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┓ -Package NameVersionLicence -┡━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━┩ -│ lbzip2       │ 2.5     │ GPL v3  │ -│ deepvariant  │ 0.7.0   │ MIT     │ -│ htslib       │ 1.9     │ MIT     │ -│ picard       │ 2.18.7  │ MIT     │ -│ pip          │ 10.0.1  │ MIT     │ -│ samtools     │ 1.9     │ MIT     │ -│ python       │ 2.7.15  │ PSF     │ -│ bzip2        │ 1.0.6   │ bzip2   │ -└──────────────┴─────────┴─────────┘ + + $ nf-core licences deepvariant + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + +INFO     Fetching licence information for 8 tools                                                    +INFO     Warning: This tool only prints licence information for the software tools packaged using    +         conda.                                                                                      +INFO     The pipeline may use other software and dependencies not described here.                    +┏━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┓ +Package NameVersionLicence +┡━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━┩ +│ lbzip2       │ 2.5     │ GPL v3  │ +│ deepvariant  │ 0.7.0   │ MIT     │ +│ htslib       │ 1.9     │ MIT     │ +│ picard       │ 2.18.7  │ MIT     │ +│ pip          │ 10.0.1  │ MIT     │ +│ samtools     │ 1.9     │ MIT     │ +│ python       │ 2.7.15  │ PSF     │ +│ bzip2        │ 1.0.6   │ bzip2   │ +└──────────────┴─────────┴─────────┘ diff --git a/docs/images/nf-core-lint.svg b/docs/images/nf-core-lint.svg index 6e0bf2af48..f31644df1e 100644 --- a/docs/images/nf-core-lint.svg +++ b/docs/images/nf-core-lint.svg @@ -1,4 +1,8 @@ +<<<<<<< HEAD +======= + +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 +<<<<<<< HEAD @@ -261,6 +379,40 @@ [!]   5 Test Warnings [✗]   3 Tests Failed ╰───────────────────────╯ +======= + + + + $ nf-core lint + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + + +INFO     Testing pipeline: . + + +╭─[?] 1 Pipeline Test Ignored────────────────────────────────────────────────────────────────────╮ + +pipeline_todos: pipeline_todos + +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ + +╭───────────────────────╮ +LINT RESULTS SUMMARY  +├───────────────────────┤ +[✔] 181 Tests Passed +[?]   1 Test Ignored +[!]   
0 Test Warnings +[✗]   0 Tests Failed +╰───────────────────────╯ +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 diff --git a/docs/images/nf-core-list-rna.svg b/docs/images/nf-core-list-rna.svg index cbd098b947..30c4496b25 100644 --- a/docs/images/nf-core-list-rna.svg +++ b/docs/images/nf-core-list-rna.svg @@ -19,19 +19,28 @@ font-weight: 700; } +<<<<<<< HEAD .terminal-4236301330-matrix { +======= + .terminal-3844799147-matrix { +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } +<<<<<<< HEAD .terminal-4236301330-title { +======= + .terminal-3844799147-title { +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 font-size: 18px; font-weight: bold; font-family: arial; } +<<<<<<< HEAD .terminal-4236301330-r1 { fill: #c5c8c6 } .terminal-4236301330-r2 { fill: #98a84b } .terminal-4236301330-r3 { fill: #9a9b99 } @@ -112,6 +121,88 @@ +======= + .terminal-3844799147-r1 { fill: #c5c8c6 } +.terminal-3844799147-r2 { fill: #98a84b } +.terminal-3844799147-r3 { fill: #9a9b99 } +.terminal-3844799147-r4 { fill: #608ab1 } +.terminal-3844799147-r5 { fill: #d0b344 } +.terminal-3844799147-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-3844799147-r7 { fill: #868887 } + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 @@ -123,6 +214,7 @@ +<<<<<<< HEAD @@ -150,6 +242,35 @@ │ scflow               │    12 │            dev │            - │           - │ -                   │ │ spatialtranscriptom… │    10 │            dev │            - │           - │ -                   │ └──────────────────────┴───────┴────────────────┴──────────────┴─────────────┴─────────────────────┘ +======= + + + + $ nf-core list rna rna-seq + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + +┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ +Have latest         +Pipeline Name       StarsLatest Release    ReleasedLast Pulledrelease?            
+┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ +│ smrnaseq             │    44 │          2.1.0 │ 2 months ago │           - │ -                   │ +│ rnaseq               │   542 │            3.9 │ 3 months ago │           - │ -                   │ +│ rnafusion            │    85 │          2.1.0 │ 5 months ago │           - │ -                   │ +│ dualrnaseq           │     8 │          1.0.0 │  2 years ago │           - │ -                   │ +│ circrna              │    23 │            dev │            - │           - │ -                   │ +│ lncpipe              │    23 │            dev │            - │           - │ -                   │ +│ scflow               │    15 │            dev │            - │           - │ -                   │ +│ spatialtranscriptom… │    12 │            dev │            - │           - │ -                   │ +└──────────────────────┴───────┴────────────────┴──────────────┴─────────────┴─────────────────────┘ +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 diff --git a/docs/images/nf-core-list-stars.svg b/docs/images/nf-core-list-stars.svg index da3f02c7db..4696bbf1f3 100644 --- a/docs/images/nf-core-list-stars.svg +++ b/docs/images/nf-core-list-stars.svg @@ -19,19 +19,28 @@ font-weight: 700; } +<<<<<<< HEAD .terminal-1473353797-matrix { +======= + .terminal-1326709712-matrix { +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } +<<<<<<< HEAD .terminal-1473353797-title { +======= + .terminal-1326709712-title { +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 font-size: 18px; font-weight: bold; font-family: arial; } +<<<<<<< HEAD .terminal-1473353797-r1 { fill: #c5c8c6 } .terminal-1473353797-r2 { fill: #98a84b } .terminal-1473353797-r3 { fill: #9a9b99 } @@ -101,6 +110,77 @@ +======= + .terminal-1326709712-r1 { fill: #c5c8c6 } +.terminal-1326709712-r2 { fill: #98a84b } +.terminal-1326709712-r3 { fill: #9a9b99 } +.terminal-1326709712-r4 { fill: #608ab1 } +.terminal-1326709712-r5 { fill: #d0b344 } +.terminal-1326709712-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-1326709712-r7 { fill: #868887 } +.terminal-1326709712-r8 { fill: #868887;font-style: italic; } + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 @@ -112,6 +192,7 @@ +<<<<<<< HEAD @@ -135,6 +216,31 @@ │ chipseq              │   121 │          1.2.2 │  1 years ago │           - │ -                   │ │ atacseq              │   116 │          1.2.2 │ 4 months ago │           - │ -                   │ [..truncated..] +======= + + + + $ nf-core list -s stars + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + +┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ +Have latest         +Pipeline Name       StarsLatest Release    ReleasedLast Pulledrelease?            
+┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ +│ rnaseq               │   542 │            3.9 │ 3 months ago │           - │ -                   │ +│ sarek                │   211 │          3.1.1 │  4 weeks ago │           - │ -                   │ +│ chipseq              │   133 │          2.0.0 │ 3 months ago │           - │ -                   │ +│ atacseq              │   124 │            2.0 │  3 weeks ago │           - │ -                   │ +[..truncated..] +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 diff --git a/docs/images/nf-core-list.svg b/docs/images/nf-core-list.svg index c5e7e17dd8..65be0d1310 100644 --- a/docs/images/nf-core-list.svg +++ b/docs/images/nf-core-list.svg @@ -19,19 +19,28 @@ font-weight: 700; } +<<<<<<< HEAD .terminal-2711598360-matrix { +======= + .terminal-4187913691-matrix { +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } +<<<<<<< HEAD .terminal-2711598360-title { +======= + .terminal-4187913691-title { +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 font-size: 18px; font-weight: bold; font-family: arial; } +<<<<<<< HEAD .terminal-2711598360-r1 { fill: #c5c8c6 } .terminal-2711598360-r2 { fill: #98a84b } .terminal-2711598360-r3 { fill: #9a9b99 } @@ -104,6 +113,80 @@ +======= + .terminal-4187913691-r1 { fill: #c5c8c6 } +.terminal-4187913691-r2 { fill: #98a84b } +.terminal-4187913691-r3 { fill: #9a9b99 } +.terminal-4187913691-r4 { fill: #608ab1 } +.terminal-4187913691-r5 { fill: #d0b344 } +.terminal-4187913691-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-4187913691-r7 { fill: #868887 } +.terminal-4187913691-r8 { fill: #868887;font-style: italic; } + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 @@ -115,6 +198,7 @@ +<<<<<<< HEAD @@ -139,6 +223,32 @@ │ eager                │    71 │          2.4.5 │  4 weeks ago │           - │ -                   │ │ viralrecon           │    74 │            2.5 │ 2 months ago │           - │ -                   │ [..truncated..] +======= + + + + $ nf-core list + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + +┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ +Have latest         +Pipeline Name       StarsLatest Release    ReleasedLast Pulledrelease?            +┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ +│ methylseq            │    95 │          2.3.0 │   2 days ago │           - │ -                   │ +│ ampliseq             │   102 │          2.4.1 │  2 weeks ago │           - │ -                   │ +│ airrflow             │    23 │          2.4.0 │  2 weeks ago │           - │ -                   │ +│ mhcquant             │    21 │          2.4.0 │  2 weeks ago │           - │ -                   │ +│ atacseq              │   124 │            2.0 │  3 weeks ago │           - │ -                   │ +[..truncated..] 
+>>>>>>> 7f305024edcfe6e58109daa5a644c905e314bab8 diff --git a/docs/images/nf-core-modules-bump-version.svg b/docs/images/nf-core-modules-bump-version.svg index e76e7a0a12..227de6bb26 100644 --- a/docs/images/nf-core-modules-bump-version.svg +++ b/docs/images/nf-core-modules-bump-version.svg @@ -19,90 +19,90 @@ font-weight: 700; } - .terminal-1308324414-matrix { + .terminal-2041051160-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1308324414-title { + .terminal-2041051160-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1308324414-r1 { fill: #c5c8c6 } -.terminal-1308324414-r2 { fill: #98a84b } -.terminal-1308324414-r3 { fill: #9a9b99 } -.terminal-1308324414-r4 { fill: #608ab1 } -.terminal-1308324414-r5 { fill: #d0b344 } -.terminal-1308324414-r6 { fill: #98a84b;font-weight: bold } -.terminal-1308324414-r7 { fill: #c5c8c6;font-weight: bold } + .terminal-2041051160-r1 { fill: #c5c8c6 } +.terminal-2041051160-r2 { fill: #98a84b } +.terminal-2041051160-r3 { fill: #9a9b99 } +.terminal-2041051160-r4 { fill: #608ab1 } +.terminal-2041051160-r5 { fill: #d0b344 } +.terminal-2041051160-r6 { fill: #98a84b;font-weight: bold } +.terminal-2041051160-r7 { fill: #c5c8c6;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -114,30 +114,30 @@ - + - - $ nf-core modules bump-versions fastqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.5.dev0 - https://nf-co.re - - - - -╭──────────────────────────────────────────────────────────────────────────────────────────────────╮ -[!] 1 Module version up to date. -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭──────────────────────────────────────────┬───────────────────────────────────────────────────────╮ -Module name                             Update Message                                        -├──────────────────────────────────────────┼───────────────────────────────────────────────────────┤ - fastqc                                    Module version up to date: fastqc                      -╰──────────────────────────────────────────┴───────────────────────────────────────────────────────╯ + + $ nf-core modules bump-versions fastqc + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + + + +╭──────────────────────────────────────────────────────────────────────────────────────────────────╮ +[!] 1 Module version up to date. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭──────────────────────────────────────────┬───────────────────────────────────────────────────────╮ +Module name                             Update Message                                        +├──────────────────────────────────────────┼───────────────────────────────────────────────────────┤ + fastqc                                    Module version up to date: fastqc                      +╰──────────────────────────────────────────┴───────────────────────────────────────────────────────╯ diff --git a/docs/images/nf-core-modules-create-test.svg b/docs/images/nf-core-modules-create-test.svg index 5711bd8431..57e29b961f 100644 --- a/docs/images/nf-core-modules-create-test.svg +++ b/docs/images/nf-core-modules-create-test.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - - - - - - + - + - - $ nf-core modules create-test-yml fastqc --no-prompts --force - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.5.dev0 - https://nf-co.re - - -INFO     Looking for test workflow entry points:                             test_yml_builder.py:123 -'tests/modules/fastqc/main.nf' -──────────────────────────────────────────────────────────────────────────────────────────────────── -INFO     Building test meta for entry point 'test_fastqc'test_yml_builder.py:157 -INFO     Setting env var '$PROFILE' to an empty string as not set.           test_yml_builder.py:301 -         Tests will run with Docker by default. To use Singularity set        -'export PROFILE=singularity' in your shell before running this       -         command.                                                             -INFO     Running 'fastqc' test with command:                                 test_yml_builder.py:325 -nextflow run ./tests/modules/fastqc -entry test_fastqc -c  -./tests/config/nextflow.config  -c  -./tests/modules/fastqc/nextflow.config --outdir /tmp/tmpzotojksy --work-dir /tmp/tmps14qhvf6 + + $ nf-core modules create-test-yml fastqc --no-prompts --force + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + + +INFO     Looking for test workflow entry points: 'tests/modules/nf-core/fastqc/main.nf' +──────────────────────────────────────────────────────────────────────────────────────────────────── +INFO     Building test meta for entry point 'test_fastqc_single_end' +INFO     Running 'fastqc' test with command:                                                         +nextflow run ./tests/modules/nf-core/fastqc -entry test_fastqc_single_end -c  +./tests/config/nextflow.config -c ./tests/modules/nf-core/fastqc/nextflow.config --outdir  +/tmp/tmp17isquh4 -work-dir /tmp/tmpseaglcoa diff --git a/docs/images/nf-core-modules-create.svg b/docs/images/nf-core-modules-create.svg index 13e19b5e7a..6b3ce27a41 100644 --- a/docs/images/nf-core-modules-create.svg +++ b/docs/images/nf-core-modules-create.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - + - - $ nf-core modules create fastqc --author @nf-core-bot  --label process_low --meta --force - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.5.dev0 - https://nf-co.re - - -INFO     Repository type: modulescreate.py:93 -INFO    Press enter to use default values (shown in brackets)or type your own create.py:97 -responses. ctrl+click underlined text to open links. -INFO     Using Bioconda package: 'bioconda::fastqc=0.11.9'create.py:165 -INFO     Using Docker container: 'quay.io/biocontainers/fastqc:0.11.9--hdfd78af_1'create.py:191 -INFO     Using Singularity container:                                                  create.py:192 -'https://depot.galaxyproject.org/singularity/fastqc:0.11.9--hdfd78af_1' -INFO     Created / edited following files:                                             create.py:270 -           ./modules/fastqc/main.nf -           ./modules/fastqc/meta.yml -           ./tests/modules/fastqc/main.nf -           ./tests/modules/fastqc/test.yml -           ./tests/modules/fastqc/nextflow.config -           ./tests/config/pytest_modules.yml + + $ nf-core modules create fastqc --author @nf-core-bot  --label process_low --meta --force + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + + +INFO     Repository type: modules +INFO    Press enter to use default values (shown in brackets)or type your own responses.  +ctrl+click underlined text to open links. 
+INFO     Using Bioconda package: 'bioconda::fastqc=0.11.9' diff --git a/docs/images/nf-core-modules-info.svg b/docs/images/nf-core-modules-info.svg index 5c3a82569d..320672f91a 100644 --- a/docs/images/nf-core-modules-info.svg +++ b/docs/images/nf-core-modules-info.svg @@ -19,163 +19,163 @@ font-weight: 700; } - .terminal-378639408-matrix { + .terminal-3462197848-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-378639408-title { + .terminal-3462197848-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-378639408-r1 { fill: #c5c8c6 } -.terminal-378639408-r2 { fill: #98a84b } -.terminal-378639408-r3 { fill: #9a9b99 } -.terminal-378639408-r4 { fill: #608ab1 } -.terminal-378639408-r5 { fill: #d0b344 } -.terminal-378639408-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-378639408-r7 { fill: #98a84b;font-weight: bold } -.terminal-378639408-r8 { fill: #868887 } -.terminal-378639408-r9 { fill: #d08442 } -.terminal-378639408-r10 { fill: #868887;font-style: italic; } -.terminal-378639408-r11 { fill: #98729f } + .terminal-3462197848-r1 { fill: #c5c8c6 } +.terminal-3462197848-r2 { fill: #98a84b } +.terminal-3462197848-r3 { fill: #9a9b99 } +.terminal-3462197848-r4 { fill: #608ab1 } +.terminal-3462197848-r5 { fill: #d0b344 } +.terminal-3462197848-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-3462197848-r7 { fill: #98a84b;font-weight: bold } +.terminal-3462197848-r8 { fill: #868887 } +.terminal-3462197848-r9 { fill: #d08442 } +.terminal-3462197848-r10 { fill: #868887;font-style: italic; } +.terminal-3462197848-r11 { fill: #98729f } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -187,53 +187,53 @@ - + - - $ nf-core modules info abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.5.dev0 - https://nf-co.re - - - -╭─ Module: abacas  ────────────────────────────────────────────────────────────────────────────────╮ -│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ -│ 🔧 Tools: abacas                                                                                 │ -│ 📖 Description: contiguate draft genome assembly                                                 │ -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -                  ╷                                                                   ╷              -📥 Inputs        Description                                                             Pattern -╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ - meta  (map)     │Groovy Map containing sample information e.g. 
[ id:'test',         │ -                  │single_end:false ]                                                 │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - scaffold  (file)│Fasta file containing scaffold                                     │*.{fasta,fa} -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - fasta  (file)   │FASTA reference file                                               │*.{fasta,fa} -                  ╵                                                                   ╵              -                  ╷                                                                   ╷              -📤 Outputs       Description                                                             Pattern -╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ - meta  (map)     │Groovy Map containing sample information e.g. [ id:'test',         │ -                  │single_end:false ]                                                 │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - results  (files)│List containing abacas output files [ 'test.abacas.bin',           │ *.{abacas}* -                  │'test.abacas.fasta', 'test.abacas.gaps', 'test.abacas.gaps.tab',   │ -                  │'test.abacas.nucmer.delta', 'test.abacas.nucmer.filtered.delta',   │ -                  │'test.abacas.nucmer.tiling', 'test.abacas.tab',                    │ -                  │'test.abacas.unused.contigs.out', 'test.abacas.MULTIFASTA.fa' ]    │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - versions  (file)│File containing software versions                                  │versions.yml -                  ╵                                                                   ╵              - - 💻  Installation command: nf-core modules install abacas - + + $ nf-core modules info abacas + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + + +╭─ Module: abacas  ────────────────────────────────────────────────────────────────────────────────╮ +│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ +│ 🔧 Tools: abacas                                                                                 │ +│ 📖 Description: contiguate draft genome assembly                                                 │ +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ +                  ╷                                                                   ╷              +📥 Inputs        Description                                                             Pattern +╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ + meta  (map)     │Groovy Map containing sample information e.g. 
[ id:'test',         │ +                  │single_end:false ]                                                 │ +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + scaffold  (file)│Fasta file containing scaffold                                     │*.{fasta,fa} +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + fasta  (file)   │FASTA reference file                                               │*.{fasta,fa} +                  ╵                                                                   ╵              +                  ╷                                                                   ╷              +📤 Outputs       Description                                                             Pattern +╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ + meta  (map)     │Groovy Map containing sample information e.g. [ id:'test',         │ +                  │single_end:false ]                                                 │ +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + results  (files)│List containing abacas output files [ 'test.abacas.bin',           │ *.{abacas}* +                  │'test.abacas.fasta', 'test.abacas.gaps', 'test.abacas.gaps.tab',   │ +                  │'test.abacas.nucmer.delta', 'test.abacas.nucmer.filtered.delta',   │ +                  │'test.abacas.nucmer.tiling', 'test.abacas.tab',                    │ +                  │'test.abacas.unused.contigs.out', 'test.abacas.MULTIFASTA.fa' ]    │ +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + versions  (file)│File containing software versions                                  │versions.yml +                  ╵                                                                   ╵              + + 💻  Installation command: nf-core modules install abacas + diff --git a/docs/images/nf-core-modules-install.svg b/docs/images/nf-core-modules-install.svg index ce016fe92b..6fb029df0a 100644 --- a/docs/images/nf-core-modules-install.svg +++ b/docs/images/nf-core-modules-install.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + - + - + - - $ nf-core modules install abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.5.dev0 - https://nf-co.re - - - -INFO     Installing 'abacas'install.py:116 -INFO     Include statement: include { ABACAS } from                                   install.py:125 -'../modules/nf-core/modules/abacas/main' + + $ nf-core modules install abacas + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + + +INFO     Installing 'abacas' +INFO     Use the following statement to include this module:                                         + + include { ABACAS } from '../modules/nf-core/abacas/main'                                            + diff --git a/docs/images/nf-core-modules-lint.svg b/docs/images/nf-core-modules-lint.svg index e74151121d..b39f7aa4fd 100644 --- a/docs/images/nf-core-modules-lint.svg +++ b/docs/images/nf-core-modules-lint.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - - - - $ nf-core modules lint multiqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.5.dev0 - https://nf-co.re - - - -INFO     Linting modules repo: '.'__init__.py:206 -INFO     Linting module: 'multiqc'__init__.py:210 - -╭─[!] 1 Module Test Warning ──────────────────────────────────────────────────────────────────────╮ -                                           ╷                         ╷                             -Module name                              File path              Test message               -╶──────────────────────────────────────────┼─────────────────────────┼───────────────────────────╴ -multiqcmodules/multiqc/main.nfConda update:  -bioconda::multiqc 1.10 ->  -1.13a -                                           ╵                         ╵                             -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭──────────────────────╮ -LINT RESULTS SUMMARY -├──────────────────────┤ -[✔]  22 Tests Passed -[!]   1 Test Warning -[✗]   0 Tests Failed -╰──────────────────────╯ + + + + $ nf-core modules lint multiqc + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + + +INFO     Linting modules repo: '.' +INFO     Linting module: 'multiqc' + +╭───────────────────────╮ +LINT RESULTS SUMMARY +├───────────────────────┤ +[✔]  23 Tests Passed  +[!]   
0 Test Warnings +[✗]   0 Tests Failed  +╰───────────────────────╯ diff --git a/docs/images/nf-core-modules-list-local.svg b/docs/images/nf-core-modules-list-local.svg index 61c394913e..fab2cecf7e 100644 --- a/docs/images/nf-core-modules-list-local.svg +++ b/docs/images/nf-core-modules-list-local.svg @@ -19,109 +19,108 @@ font-weight: 700; } - .terminal-578959072-matrix { + .terminal-2617511112-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-578959072-title { + .terminal-2617511112-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-578959072-r1 { fill: #c5c8c6 } -.terminal-578959072-r2 { fill: #98a84b } -.terminal-578959072-r3 { fill: #9a9b99 } -.terminal-578959072-r4 { fill: #608ab1 } -.terminal-578959072-r5 { fill: #d0b344 } -.terminal-578959072-r6 { fill: #868887 } -.terminal-578959072-r7 { fill: #c5c8c6;font-weight: bold } -.terminal-578959072-r8 { fill: #868887;font-style: italic; } + .terminal-2617511112-r1 { fill: #c5c8c6 } +.terminal-2617511112-r2 { fill: #98a84b } +.terminal-2617511112-r3 { fill: #9a9b99 } +.terminal-2617511112-r4 { fill: #608ab1 } +.terminal-2617511112-r5 { fill: #d0b344 } +.terminal-2617511112-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-2617511112-r7 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -133,36 +132,36 @@ - + - - $ nf-core modules list local - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.5.dev0 - https://nf-co.re - - - -INFO     Modules installed in '.':                                                       list.py:124 - -┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓ -Module Name         Repository     Version SHA        Message             Date       -┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩ -│ custom/dumpsoftware… │ nf-core/modules │ e745e167c1020928ef… │ Fix formatting in    │ 2022-02-15 │ -│                      │                 │                     │ yaml files, add      │            │ -│                      │                 │                     │ yamllint config      │            │ -│                      │                 │                     │ (#1279)              │            │ -│ fastqc               │ nf-core/modules │ e745e167c1020928ef… │ Fix formatting in    │ 2022-02-15 │ -│                      │                 │                     │ yaml files, add      │            │ -│                      │                 │                     │ yamllint config      │            │ -│                      │                 │                     │ (#1279)              │            │ -│ multiqc              │ nf-core/modules │ e745e167c1020928ef… │ Fix formatting in    │ 2022-02-15 │ -[..truncated..] + + $ nf-core modules list local + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.7.2 - https://nf-co.re + + + +INFO     Modules installed in '.':                                                                   + +┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓ +Module Name        Repository        Version SHA        Message           Date       +┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩ +│ custom/dumpsoftwar… │ https://github.co… │ c8e35eb2055c099720… │ Bulk change conda  │ 2022-12-13 │ +│                     │                    │                     │ syntax for all     │            │ +│                     │                    │                     │ modules (#2654)    │            │ +│ fastqc              │ https://github.co… │ c8e35eb2055c099720… │ Bulk change conda  │ 2022-12-13 │ +│                     │                    │                     │ syntax for all     │            │ +│                     │                    │                     │ modules (#2654)    │            │ +│ multiqc             │ https://github.co… │ c8e35eb2055c099720… │ Bulk change conda  │ 2022-12-13 │ +│                     │                    │                     │ syntax for all     │            │ +│                     │                    │                     │ modules (#2654)    │            │ +[..truncated..] diff --git a/docs/images/nf-core-modules-list-remote.svg b/docs/images/nf-core-modules-list-remote.svg index 96b91a83f9..cd2d1df6e5 100644 --- a/docs/images/nf-core-modules-list-remote.svg +++ b/docs/images/nf-core-modules-list-remote.svg @@ -19,109 +19,109 @@ font-weight: 700; } - .terminal-277652753-matrix { + .terminal-3444989756-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-277652753-title { + .terminal-3444989756-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-277652753-r1 { fill: #c5c8c6 } -.terminal-277652753-r2 { fill: #98a84b } -.terminal-277652753-r3 { fill: #9a9b99 } -.terminal-277652753-r4 { fill: #608ab1 } -.terminal-277652753-r5 { fill: #d0b344 } -.terminal-277652753-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-277652753-r7 { fill: #868887 } -.terminal-277652753-r8 { fill: #868887;font-style: italic; } + .terminal-3444989756-r1 { fill: #c5c8c6 } +.terminal-3444989756-r2 { fill: #98a84b } +.terminal-3444989756-r3 { fill: #9a9b99 } +.terminal-3444989756-r4 { fill: #608ab1 } +.terminal-3444989756-r5 { fill: #d0b344 } +.terminal-3444989756-r6 { fill: #1984e9;text-decoration: underline; } +.terminal-3444989756-r7 { fill: #c5c8c6;font-weight: bold } +.terminal-3444989756-r8 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -133,36 +133,36 @@ - + - - $ nf-core modules list remote - -                                          ,--./,-. 
[rich-codex screengrab regenerated: `$ nf-core modules list remote` now reports nf-core/tools version 2.7.2 (previously 2.5.dev0) and "Modules available from https://github.com/nf-core/modules.git (master)"; the module table begins abacas, abricate/run, abricate/summary, adapterremoval, adapterremovalfixprefix, agat/convertspgff2gtf, agat/convertspgxf2gxf, agat/spstatistics, agat/sqstatbasic, ... (truncated)]
diff --git a/docs/images/nf-core-modules-mulled.svg b/docs/images/nf-core-modules-mulled.svg
index b7ac11336c..2ac4b28f23 100644
[rich-codex screengrab regenerated for nf-core/tools 2.7.2: `$ nf-core modules mulled pysam==0.16.0.1 biopython==1.78` reports "Found docker image on quay.io! ✨" and the mulled container hash mulled-v2-3a59640f3fe1ed11819984087d31d68600200c3f:185a25ca79923df85b58f42deb48f5ac4481e91f-0; the log-source suffixes (mulled.py:68, __main__.py:826) are no longer printed]
diff --git a/docs/images/nf-core-modules-patch.svg b/docs/images/nf-core-modules-patch.svg
index 8dea1566a8..729bb1d519 100644
[rich-codex screengrab regenerated for nf-core/tools 2.7.2: `$ nf-core modules patch fastqc` previously showed the changes in a locally modified fastqc module (label 'process_medium' changed to 'process_low') and a patch file written to 'modules/nf-core/modules/fastqc/fastqc.diff'; it now reports "ERROR Module 'modules/nf-core/fastqc' is unchanged. No patch to compute"]
diff --git a/docs/images/nf-core-modules-remove.svg b/docs/images/nf-core-modules-remove.svg
index 1deb2efd38..ce72c00e13 100644
[rich-codex screengrab regenerated for nf-core/tools 2.7.2: `$ nf-core modules remove abacas` now reports "Removed files for 'abacas' and its dependencies 'abacas'." instead of "Removing abacas"]
diff --git a/docs/images/nf-core-modules-test.svg b/docs/images/nf-core-modules-test.svg
index 20b7f7bb7f..c11314a7d1 100644
[rich-codex screengrab regenerated for nf-core/tools 2.7.2: `$ nf-core modules test samtools/view --no-prompts` reports "Running pytest for module 'samtools/view'"]
diff --git a/docs/images/nf-core-modules-update.svg b/docs/images/nf-core-modules-update.svg
index 20db553630..161c6b2a56 100644
[rich-codex screengrab regenerated for nf-core/tools 2.7.2: `$ nf-core modules update --all --no-preview` now reports 'modules/nf-core/abacas', 'modules/nf-core/custom/dumpsoftwareversions', 'modules/nf-core/fastqc' and 'modules/nf-core/multiqc' as already up to date (the new 'modules/nf-core/' directory layout), followed by "Updates complete ✨"]
diff --git a/docs/images/nf-core-schema-build.svg b/docs/images/nf-core-schema-build.svg
index bf05c1cb93..6ebf92dc2e 100644
[rich-codex screengrab regenerated for nf-core/tools 2.7.2: `$ nf-core schema build --no-prompts` now reports "Default parameters match schema validation", "Pipeline schema looks valid (found 30 params)" and "Writing schema with 31 params: './nextflow_schema.json'" (previously 27 and 30 params)]
diff --git a/docs/images/nf-core-schema-lint.svg b/docs/images/nf-core-schema-lint.svg
index 5dc2da7da5..17d7fb5213 100644
[rich-codex screengrab regenerated for nf-core/tools 2.7.2: `$ nf-core schema lint nextflow_schema.json` reports "Default parameters match schema validation" and "Pipeline schema looks valid (found 31 params)"]
diff --git a/docs/images/nf-core-schema-validate.svg b/docs/images/nf-core-schema-validate.svg
index 41376f5bdb..3133ec66b8 100644
[rich-codex screengrab regenerated for nf-core/tools 2.7.2: `$ nf-core schema validate nf-core-rnaseq/workflow nf-params.json` reports "Default parameters match schema validation", "Pipeline schema looks valid (found 93 params)" and "Input parameters look valid"]
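Conceptually, the three `nf-core schema` commands above all revolve around validating `nextflow_schema.json` (a JSON Schema document) against pipeline parameters. A minimal sketch of that validation step, assuming the `jsonschema` package; the real implementation lives in `nf_core/schema.py` and does considerably more:

```python
# Minimal sketch of the idea behind `nf-core schema validate`: check a
# nf-params.json file against the pipeline's nextflow_schema.json.
import json

import jsonschema


def validate_params(schema_path: str, params_path: str) -> None:
    """Raise jsonschema.ValidationError if the params don't match the schema."""
    with open(schema_path) as fh:
        schema = json.load(fh)
    with open(params_path) as fh:
        params = json.load(fh)
    jsonschema.validate(instance=params, schema=schema)
```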
diff --git a/docs/images/nf-core-subworkflows-create-test.svg b/docs/images/nf-core-subworkflows-create-test.svg
new file mode 100644
index 0000000000..5e8841a50e
[new rich-codex screengrab, nf-core/tools 2.7.2: `$ nf-core subworkflows create-test-yml bam_stats_samtools --no-prompts --force` warns "'repository_type' not defined in .nf-core.yml" and "Input is not a terminal (fd=0)", falls back to the interactive "Is this repository an nf-core pipeline or a fork of nf-core/modules?" prompt, and aborts]
diff --git a/docs/images/nf-core-subworkflows-create.svg b/docs/images/nf-core-subworkflows-create.svg
new file mode 100644
index 0000000000..763a5a5d94
[new rich-codex screengrab, nf-core/tools 2.7.2: `$ nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --label process_low --meta --force` fails with "No such option: --label" ("Did you mean --help?")]
diff --git a/docs/images/nf-core-subworkflows-info.svg b/docs/images/nf-core-subworkflows-info.svg
new file mode 100644
index 0000000000..b00f918056
[new rich-codex screengrab, nf-core/tools 2.7.2: `$ nf-core subworkflows info bam_rseqc` crashes with a Python traceback through click and nf_core/components/info.py, ending in "TypeError: 'NoneType' object is not subscriptable"]
diff --git a/docs/images/nf-core-subworkflows-install.svg b/docs/images/nf-core-subworkflows-install.svg
new file mode 100644
index 0000000000..a440ce839e
[new rich-codex screengrab, nf-core/tools 2.7.2: `$ nf-core subworkflows install bam_rseqc` warns "'repository_type' not defined in .nf-core.yml", falls back to the repository-type prompt, and aborts]
diff --git a/docs/images/nf-core-subworkflows-list-local.svg b/docs/images/nf-core-subworkflows-list-local.svg
new file mode 100644
index 0000000000..208e28e40e
[new rich-codex screengrab, nf-core/tools 2.7.2: `$ nf-core subworkflows list local` warns "'repository_type' not defined in .nf-core.yml" and falls back to the repository-type prompt (output truncated)]
diff --git a/docs/images/nf-core-subworkflows-list-remote.svg b/docs/images/nf-core-subworkflows-list-remote.svg
new file mode 100644
index 0000000000..4e2ff9f8a5
[new rich-codex screengrab, nf-core/tools 2.7.2: `$ nf-core subworkflows list remote` reports "Subworkflows available from https://github.com/nf-core/modules.git (master)"; the table begins bam_dedup_stats_samtools_umitools, bam_markduplicates_picard, bam_qc_picard, bam_rseqc, bam_sort_stats_samtools, bam_stats_samtools, bcl_demultiplex, bed_scatter_bedtools, bedgraph_bedclip_bedgraphtobigwig, ... (truncated)]
diff --git a/docs/images/nf-core-subworkflows-remove.svg b/docs/images/nf-core-subworkflows-remove.svg
new file mode 100644
index 0000000000..33b93f5c19
[new rich-codex screengrab, nf-core/tools 2.7.2: `$ nf-core subworkflows remove bam_rseqc` warns "'repository_type' not defined in .nf-core.yml", falls back to the repository-type prompt, and aborts]
diff --git a/docs/images/nf-core-subworkflows-test.svg b/docs/images/nf-core-subworkflows-test.svg
new file mode 100644
index 0000000000..44d0a5fc48
[new rich-codex screengrab, nf-core/tools 2.7.2: `$ nf-core subworkflows test bam_rseqc --no-prompts` warns "'repository_type' not defined in .nf-core.yml", falls back to the repository-type prompt, and aborts]
diff --git a/docs/images/nf-core-subworkflows-update.svg b/docs/images/nf-core-subworkflows-update.svg
new file mode 100644
index 0000000000..4769128e73
[new rich-codex screengrab, nf-core/tools 2.7.2: `$ nf-core subworkflows update --all --no-preview` warns "'repository_type' not defined in .nf-core.yml", falls back to the repository-type prompt, and aborts]
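All of the aborted subworkflow screengrabs above fail at the same point: the commands cannot tell whether they are running inside a pipeline or a clone of nf-core/modules, because `repository_type` is not set in `.nf-core.yml`, so they fall back to an interactive prompt. A rough sketch of that lookup, assuming PyYAML; the function name and fallback behaviour are illustrative, not the actual nf_core.utils implementation:

```python
# Sketch of the check behind the repeated warning in the screengrabs above:
# read repository_type ("pipeline" or "modules") from .nf-core.yml before
# falling back to an interactive prompt.
from pathlib import Path
from typing import Optional

import yaml


def get_repository_type(directory: str = ".") -> Optional[str]:
    """Return 'pipeline', 'modules', or None if .nf-core.yml doesn't say."""
    config_file = Path(directory) / ".nf-core.yml"
    if not config_file.is_file():
        return None  # caller would warn and prompt interactively
    config = yaml.safe_load(config_file.read_text()) or {}
    return config.get("repository_type")
```

Setting `repository_type: pipeline` (or `modules`) in `.nf-core.yml` is what lets these commands run non-interactively, e.g. in CI.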
diff --git a/docs/images/nf-core-sync.svg b/docs/images/nf-core-sync.svg
index 1b6f3a9e83..99c164c146 100644
[rich-codex screengrab regenerated for nf-core/tools 2.7.2: `$ nf-core sync` reports "Pipeline directory: /home/runner/work/tools/tools/tmp/nf-core-nextbigthing", "Original pipeline repository branch is 'master'", "Deleting all files in 'TEMPLATE' branch" and "Making a new template pipeline using pipeline variables"]
diff --git a/nf_core/.pre-commit-prettier-config.yaml b/nf_core/.pre-commit-prettier-config.yaml
new file mode 100644
index 0000000000..0c31cdb99f
--- /dev/null
+++ b/nf_core/.pre-commit-prettier-config.yaml
@@ -0,0 +1,5 @@
+repos:
+  - repo: https://github.com/pre-commit/mirrors-prettier
+    rev: "v2.7.1"
+    hooks:
+      - id: prettier
diff --git a/nf_core/__init__.py b/nf_core/__init__.py
index 42c3f188f3..e333335280 100644
--- a/nf_core/__init__.py
+++ b/nf_core/__init__.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
 """
 Main nf_core module file.
 Shouldn't do much, as everything is under subcommands.
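The new `.pre-commit-prettier-config.yaml` ships a pinned mirrors-prettier hook with the package, so tooling can check formatting without requiring a project-level pre-commit config. A sketch of one way to exercise it (not necessarily how nf-core/tools itself invokes it), using pre-commit's `-c/--config` flag:

```python
# Run the bundled prettier hook through pre-commit against a pipeline dir.
import subprocess


def run_prettier_check(pipeline_dir: str, config_path: str) -> bool:
    """Return True if the prettier hook passes on every file in pipeline_dir."""
    result = subprocess.run(
        ["pre-commit", "run", "prettier", "--config", config_path, "--all-files"],
        cwd=pipeline_dir,
    )
    return result.returncode == 0
```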
diff --git a/nf_core/__main__.py b/nf_core/__main__.py old mode 100755 new mode 100644 index 0746e80c0e..841cb9f7e7 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -20,6 +20,7 @@ import nf_core.list import nf_core.modules import nf_core.schema +import nf_core.subworkflows import nf_core.sync import nf_core.utils @@ -41,7 +42,7 @@ }, { "name": "Commands for developers", - "commands": ["create", "lint", "modules", "schema", "bump-version", "sync"], + "commands": ["create", "lint", "modules", "subworkflows", "schema", "bump-version", "sync"], }, ], "nf-core modules": [ @@ -54,6 +55,16 @@ "commands": ["create", "create-test-yml", "lint", "bump-versions", "mulled", "test"], }, ], + "nf-core subworkflows": [ + { + "name": "For pipelines", + "commands": ["info", "install", "list", "remove", "update"], + }, + { + "name": "Developing new subworkflows", + "commands": ["create", "create-test-yml"], + }, + ], } click.rich_click.OPTION_GROUPS = { "nf-core modules list local": [{"options": ["--dir", "--json", "--help"]}], @@ -68,48 +79,39 @@ def run_nf_core(): - # Print nf-core header - stderr.print(f"\n[green]{' ' * 42},--.[grey39]/[green],-.", highlight=False) - stderr.print("[blue] ___ __ __ __ ___ [green]/,-._.--~\\", highlight=False) - stderr.print(r"[blue] |\ | |__ __ / ` / \ |__) |__ [yellow] } {", highlight=False) - stderr.print(r"[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-,", highlight=False) - stderr.print("[green] `._,._,'\n", highlight=False) - stderr.print( - f"[grey39] nf-core/tools version {nf_core.__version__} - [link=https://nf-co.re]https://nf-co.re[/]", - highlight=False, - ) - try: - is_outdated, _, remote_vers = nf_core.utils.check_if_outdated() - if is_outdated: - stderr.print( - f"[bold bright_yellow] There is a new version of nf-core/tools available! ({remote_vers})", - highlight=False, - ) - except Exception as e: - log.debug(f"Could not check latest version: {e}") - stderr.print("\n") - - # Lanch the click cli + # print nf-core header if environment variable is not set + if os.environ.get("_NF_CORE_COMPLETE") is None: + # Print nf-core header + stderr.print(f"\n[green]{' ' * 42},--.[grey39]/[green],-.", highlight=False) + stderr.print("[blue] ___ __ __ __ ___ [green]/,-._.--~\\", highlight=False) + stderr.print(r"[blue] |\ | |__ __ / ` / \ |__) |__ [yellow] } {", highlight=False) + stderr.print(r"[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-,", highlight=False) + stderr.print("[green] `._,._,'\n", highlight=False) + stderr.print( + f"[grey39] nf-core/tools version {nf_core.__version__} - [link=https://nf-co.re]https://nf-co.re[/]", + highlight=False, + ) + try: + is_outdated, _, remote_vers = nf_core.utils.check_if_outdated() + if is_outdated: + stderr.print( + f"[bold bright_yellow] There is a new version of nf-core/tools available! 
({remote_vers})",
+                    highlight=False,
+                )
+        except Exception as e:
+            log.debug(f"Could not check latest version: {e}")
+        stderr.print("\n")
+    # Launch the click cli
     nf_core_cli(auto_envvar_prefix="NFCORE")
 
 
-# taken from https://github.com/pallets/click/issues/108#issuecomment-194465429
-_common_options = [
-    click.option("--hide-progress", is_flag=True, default=False, help="Don't show progress bars."),
-]
-
-
-def common_options(func):
-    for option in reversed(_common_options):
-        func = option(func)
-    return func
-
-
 @click.group(context_settings=dict(help_option_names=["-h", "--help"]))
 @click.version_option(nf_core.__version__)
 @click.option("-v", "--verbose", is_flag=True, default=False, help="Print verbose output to the console.")
+@click.option("--hide-progress", is_flag=True, default=False, help="Don't show progress bars.")
 @click.option("-l", "--log-file", help="Save a verbose log to a file.", metavar="")
-def nf_core_cli(verbose, log_file):
+@click.pass_context
+def nf_core_cli(ctx, verbose, hide_progress, log_file):
     """
     nf-core/tools provides a set of helper tools for use with nf-core Nextflow pipelines.
 
@@ -124,6 +126,7 @@ def nf_core_cli(verbose, log_file):
             level=logging.DEBUG if verbose else logging.INFO,
             console=rich.console.Console(stderr=True, force_terminal=nf_core.utils.rich_force_colors()),
             show_time=False,
+            show_path=verbose,  # True if verbose, false otherwise
             markup=True,
         )
     )
@@ -135,6 +138,11 @@ def nf_core_cli(verbose, log_file):
         log_fh.setFormatter(logging.Formatter("[%(asctime)s] %(name)-20s [%(levelname)-7s] %(message)s"))
         log.addHandler(log_fh)
 
+    ctx.obj = {
+        "verbose": verbose,
+        "hide_progress": hide_progress or verbose,  # Always hide progress bar with verbose logging
+    }
+
 
 # nf-core list
 @nf_core_cli.command()
@@ -242,12 +250,13 @@ def download(pipeline, revision, outdir, compress, force, container, singularity
 @click.option("--json", is_flag=True, default=False, help="Print output in JSON")
 def licences(pipeline, json):
     """
-    List software licences for a given workflow.
+    List software licences for a given workflow (DSL1 only).
 
-    Checks the pipeline environment.yml file which lists all conda software packages.
+    Checks the pipeline environment.yml file, which lists all conda software packages. This file is not available for DSL2 workflows, so this command only supports DSL1 workflows (for now).
     Each of these is queried against the anaconda.org API to find the licence.
     Package name, version and licence is printed to the command line.
     """
+
     lic = nf_core.licences.WorkflowLicences(pipeline)
     lic.as_json = json
     try:
@@ -316,8 +325,15 @@ def create(name, description, author, version, no_git, force, outdir, template_y
 @click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures")
 @click.option("--markdown", type=str, metavar="", help="File to write linting results to (Markdown)")
 @click.option("--json", type=str, metavar="", help="File to write linting results to (JSON)")
-@common_options
-def lint(dir, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, hide_progress):
+@click.option(
+    "--sort-by",
+    type=click.Choice(["module", "test"]),
+    default="test",
+    help="Sort lint output by module or test name.",
+    show_default=True,
+)
+@click.pass_context
+def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by):
     """
     Check pipeline code against nf-core guidelines.
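The refactor above replaces the copied `common_options` decorator trick with click's idiomatic pattern: declare `--hide-progress` once on the top-level group, stash it in `ctx.obj`, and let subcommands read it from the context. A self-contained sketch of the pattern (names shortened for the example):

```python
# Declare a flag once on the group, derive settings into ctx.obj,
# and read them from any subcommand.
import click


@click.group()
@click.option("-v", "--verbose", is_flag=True, default=False)
@click.option("--hide-progress", is_flag=True, default=False)
@click.pass_context
def cli(ctx, verbose, hide_progress):
    # Progress bars are always hidden when verbose logging is enabled.
    ctx.obj = {"verbose": verbose, "hide_progress": hide_progress or verbose}


@cli.command()
@click.pass_context
def lint(ctx):
    click.echo(f"hide_progress = {ctx.obj['hide_progress']}")


if __name__ == "__main__":
    cli()
```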
@@ -339,7 +355,17 @@ def lint(dir, release, fix, key, show_passed, fail_ignored, fail_warned, markdow
     # Run the lint tests!
     try:
         lint_obj, module_lint_obj = nf_core.lint.run_linting(
-            dir, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, hide_progress
+            dir,
+            release,
+            fix,
+            key,
+            show_passed,
+            fail_ignored,
+            fail_warned,
+            sort_by,
+            markdown,
+            json,
+            ctx.obj["hide_progress"],
         )
         if len(lint_obj.failed) + len(module_lint_obj.failed) > 0:
             sys.exit(1)
@@ -383,6 +409,38 @@ def modules(ctx, git_remote, branch, no_pull):
     ctx.obj["modules_repo_no_pull"] = no_pull
 
 
+# nf-core subworkflows click command
+@nf_core_cli.group()
+@click.option(
+    "-g",
+    "--git-remote",
+    type=str,
+    default=nf_core.modules.modules_repo.NF_CORE_MODULES_REMOTE,
+    help="Remote git repo to fetch files from",
+)
+@click.option("-b", "--branch", type=str, default=None, help="Branch of git repository hosting modules.")
+@click.option(
+    "-N",
+    "--no-pull",
+    is_flag=True,
+    default=False,
+    help="Do not pull in latest changes to local clone of modules repository.",
+)
+@click.pass_context
+def subworkflows(ctx, git_remote, branch, no_pull):
+    """
+    Commands to manage Nextflow DSL2 subworkflows (chains of modules).
+    """
+    # Ensure that ctx.obj exists and is a dict (in case the group is invoked directly)
+    ctx.ensure_object(dict)
+
+    # Place the arguments in a context object
+    ctx.obj["modules_repo_url"] = git_remote
+    ctx.obj["modules_repo_branch"] = branch
+    ctx.obj["modules_repo_no_pull"] = no_pull
+
+
 # nf-core modules list subcommands
 @modules.group()
 @click.pass_context
@@ -410,7 +468,7 @@ def remote(ctx, keywords, json):
             ctx.obj["modules_repo_branch"],
             ctx.obj["modules_repo_no_pull"],
         )
-        stdout.print(module_list.list_modules(keywords, json))
+        stdout.print(module_list.list_components(keywords, json))
     except (UserWarning, LookupError) as e:
         log.critical(e)
         sys.exit(1)
@@ -428,7 +486,7 @@ def remote(ctx, keywords, json):
         default=".",
         help=r"Pipeline directory. [dim]\[default: Current working directory][/]",
     )
-def local(ctx, keywords, json, dir):
+def local(ctx, keywords, json, dir):  # pylint: disable=redefined-builtin
     """
     List modules installed locally in a pipeline
     """
@@ -440,7 +498,7 @@ def local(ctx, keywords, json, dir):
             ctx.obj["modules_repo_branch"],
             ctx.obj["modules_repo_no_pull"],
         )
-        stdout.print(module_list.list_modules(keywords, json))
+        stdout.print(module_list.list_components(keywords, json))
     except (UserWarning, LookupError) as e:
         log.error(e)
         sys.exit(1)
@@ -514,7 +572,14 @@ def install(ctx, tool, dir, prompt, force, sha):
         default=None,
         help="Save diffs to a file instead of updating in place",
     )
-def update(ctx, tool, dir, force, prompt, sha, all, preview, save_diff):
+@click.option(
+    "-u",
+    "--update-deps",
+    is_flag=True,
+    default=False,
+    help="Automatically update all linked modules and subworkflows without asking for confirmation",
+)
+def update(ctx, tool, dir, force, prompt, sha, all, preview, save_diff, update_deps):
     """
     Update DSL2 modules within a pipeline.
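The `list_modules()` to `list_components()` renames above (and `get_module_info()` to `get_component_info()` further down) reflect that modules and subworkflows now share component-level plumbing. A hypothetical sketch of that abstraction; the class layout, method bodies and example data are illustrative, not nf-core's actual code:

```python
# One listing implementation parameterised by component type.
import json
from typing import Iterable, List


class ComponentList:
    def __init__(self, component_type: str, remote_url: str, branch: str = "master"):
        self.component_type = component_type  # "modules" or "subworkflows"
        self.remote_url = remote_url
        self.branch = branch

    def list_components(self, keywords: Iterable[str] = (), as_json: bool = False) -> str:
        names = self._component_names()
        hits = [n for n in names if all(k.lower() in n.lower() for k in keywords)]
        if as_json:
            return json.dumps({self.component_type: hits}, indent=4)
        return "\n".join(hits)

    def _component_names(self) -> List[str]:
        # Placeholder data; the real code queries a clone of the remote repo.
        return ["fastqc", "multiqc", "bam_stats_samtools"]
```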
@@ -529,6 +594,7 @@ def update(ctx, tool, dir, force, prompt, sha, all, preview, save_diff): all, preview, save_diff, + update_deps, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], @@ -612,7 +678,16 @@ def remove(ctx, dir, tool): @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite any files if they already exist") @click.option("-c", "--conda-name", type=str, default=None, help="Name of the conda package to use") @click.option("-p", "--conda-package-version", type=str, default=None, help="Version of conda package to use") -def create_module(ctx, tool, dir, author, label, meta, no_meta, force, conda_name, conda_package_version): +@click.option( + "-i", + "--empty-template", + is_flag=True, + default=False, + help="Create a module from the template without TODOs or examples", +) +def create_module( + ctx, tool, dir, author, label, meta, no_meta, force, conda_name, conda_package_version, empty_template +): """ Create a new DSL2 module from the nf-core template. @@ -634,7 +709,7 @@ def create_module(ctx, tool, dir, author, label, meta, no_meta, force, conda_nam # Run function try: module_create = nf_core.modules.ModuleCreate( - dir, tool, author, label, has_meta, force, conda_name, conda_package_version + dir, tool, author, label, has_meta, force, conda_name, conda_package_version, empty_template ) module_create.create() except UserWarning as e: @@ -661,7 +736,15 @@ def create_test_yml(ctx, tool, run_tests, output, force, no_prompts): the required `test.yml` file based on the output files. """ try: - meta_builder = nf_core.modules.ModulesTestYmlBuilder(tool, run_tests, output, force, no_prompts) + meta_builder = nf_core.modules.ModulesTestYmlBuilder( + module_name=tool, + run_tests=run_tests, + test_yml_output_path=output, + force_overwrite=force, + no_prompts=no_prompts, + remote_url=ctx.obj["modules_repo_url"], + branch=ctx.obj["modules_repo_branch"], + ) meta_builder.run() except (UserWarning, LookupError) as e: log.critical(e) @@ -678,9 +761,17 @@ def create_test_yml(ctx, tool, run_tests, output, force, no_prompts): @click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") @click.option("--local", is_flag=True, help="Run additional lint tests for local modules") @click.option("--passed", is_flag=True, help="Show passed tests") +@click.option( + "--sort-by", + type=click.Choice(["module", "test"]), + default="test", + help="Sort lint output by module or test name.", + show_default=True, +) @click.option("--fix-version", is_flag=True, help="Fix the module version if a newer version is available") -@common_options -def lint(ctx, tool, dir, key, all, fail_warned, local, passed, fix_version, hide_progress): +def lint( + ctx, tool, dir, key, all, fail_warned, local, passed, sort_by, fix_version +): # pylint: disable=redefined-outer-name """ Lint one or more modules in a directory. 
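The new `--sort-by` option added to both pipeline and module linting chooses whether results are ordered by test name (the default) or by module name. An illustrative sketch of the sorting step; the `LintResult` fields here are assumptions for the example, not nf-core's actual attributes:

```python
# Order lint results by module name or by test name before rendering.
from operator import attrgetter
from typing import List, NamedTuple


class LintResult(NamedTuple):
    module_name: str
    test_name: str
    message: str


def sort_results(results: List[LintResult], sort_by: str = "test") -> List[LintResult]:
    key = "module_name" if sort_by == "module" else "test_name"
    return sorted(results, key=attrgetter(key))
```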
@@ -693,20 +784,20 @@ def lint(ctx, tool, dir, key, all, fail_warned, local, passed, fix_version, hide
     try:
         module_lint = nf_core.modules.ModuleLint(
             dir,
-            fail_warned,
-            ctx.obj["modules_repo_url"],
-            ctx.obj["modules_repo_branch"],
-            ctx.obj["modules_repo_no_pull"],
-            hide_progress,
+            fail_warned=fail_warned,
+            remote_url=ctx.obj["modules_repo_url"],
+            branch=ctx.obj["modules_repo_branch"],
+            no_pull=ctx.obj["modules_repo_no_pull"],
+            hide_progress=ctx.obj["hide_progress"],
         )
         module_lint.lint(
             module=tool,
             key=key,
             all_modules=all,
-            hide_progress=hide_progress,
             print_results=True,
             local=local,
             show_passed=passed,
+            sort_by=sort_by,
             fix_version=fix_version,
         )
         if len(module_lint.failed) > 0:
@@ -750,7 +841,7 @@ def info(ctx, tool, dir):
             ctx.obj["modules_repo_branch"],
             ctx.obj["modules_repo_no_pull"],
         )
-        stdout.print(module_info.get_module_info())
+        stdout.print(module_info.get_component_info())
     except (UserWarning, LookupError) as e:
         log.error(e)
         sys.exit(1)
@@ -776,7 +867,7 @@ def bump_versions(ctx, tool, dir, all, show_all):
             ctx.obj["modules_repo_no_pull"],
         )
         version_bumper.bump_versions(module=tool, all_modules=all, show_uptodate=show_all)
-    except nf_core.modules.module_utils.ModuleException as e:
+    except nf_core.modules.modules_utils.ModuleException as e:
         log.error(e)
         sys.exit(1)
     except (UserWarning, LookupError) as e:
@@ -847,6 +938,385 @@ def test_module(ctx, tool, no_prompts, pytest_args):
         sys.exit(1)
 
 
+# nf-core subworkflows create
+@subworkflows.command("create")
+@click.pass_context
+@click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
+@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="")
+@click.option("-a", "--author", type=str, metavar="", help="Subworkflow author's GitHub username prefixed with '@'")
+@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite any files if they already exist")
+def create_subworkflow(ctx, subworkflow, dir, author, force):
+    """
+    Create a new subworkflow from the nf-core template.
+
+    If the specified directory is a pipeline, this function creates a file called
+    'subworkflows/local/.nf'
+
+    If the specified directory is a clone of nf-core/modules, it creates or modifies files
+    in 'subworkflows/', 'tests/subworkflows' and 'tests/config/pytest_modules.yml'
+    """
+
+    # Run function
+    try:
+        subworkflow_create = nf_core.subworkflows.SubworkflowCreate(dir, subworkflow, author, force)
+        subworkflow_create.create()
+    except UserWarning as e:
+        log.critical(e)
+        sys.exit(1)
+    except LookupError as e:
+        log.error(e)
+        sys.exit(1)
+
+
+# nf-core subworkflows create-test-yml
+@subworkflows.command("create-test-yml")
+@click.pass_context
+@click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
+@click.option("-t", "--run-tests", is_flag=True, default=False, help="Run the test workflows")
+@click.option("-o", "--output", type=str, help="Path for output YAML file")
+@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output YAML file if it already exists")
+@click.option("-p", "--no-prompts", is_flag=True, default=False, help="Use defaults without prompting")
+def create_test_yml(ctx, subworkflow, run_tests, output, force, no_prompts):
+    """
+    Auto-generate a test.yml file for a new subworkflow.
+
+    Given the name of a subworkflow, runs the Nextflow test command and automatically generates
+    the required `test.yml` file based on the output files.
+ """ + try: + meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( + subworkflow=subworkflow, + run_tests=run_tests, + test_yml_output_path=output, + force_overwrite=force, + no_prompts=no_prompts, + remote_url=ctx.obj["modules_repo_url"], + branch=ctx.obj["modules_repo_branch"], + ) + meta_builder.run() + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +# nf-core subworkflows list subcommands +@subworkflows.group() +@click.pass_context +def list(ctx): + """ + List subworkflows in a local pipeline or remote repository. + """ + pass + + +# nf-core subworkflows list remote +@list.command() +@click.pass_context +@click.argument("keywords", required=False, nargs=-1, metavar="") +@click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") +def remote(ctx, keywords, json): + """ + List subworkflows in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. + """ + try: + subworkflows_list = nf_core.subworkflows.SubworkflowList( + None, + True, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + stdout.print(subworkflows_list.list_subworkflows(keywords, json)) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +# nf-core subworkflows list local +@list.command() +@click.pass_context +@click.argument("keywords", required=False, nargs=-1, metavar="") +@click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: Current working directory][/]", +) +def local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin + """ + List subworkflows installed locally in a pipeline + """ + try: + subworkflows_list = nf_core.subworkflows.SubworkflowList( + dir, + False, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + stdout.print(subworkflows_list.list_subworkflows(keywords, json)) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +# nf-core subworkflows info +@subworkflows.command() +@click.pass_context +@click.argument("tool", type=str, required=False, metavar="subworkflow name") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: Current working directory][/]", +) +def info(ctx, tool, dir): + """ + Show developer usage information about a given subworkflow. + + Parses information from a subworkflow's [i]meta.yml[/] and renders help + on the command line. A handy equivalent to searching the + [link=https://nf-co.re/modules]nf-core website[/]. + + If run from a pipeline and a local copy of the subworkflow is found, the command + will print this usage info. + If not, usage from the remote subworkflows repo will be shown. 
+ """ + try: + subworkflow_info = nf_core.subworkflows.SubworkflowInfo( + dir, + tool, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + stdout.print(subworkflow_info.get_component_info()) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +# nf-core subworkflows test +@subworkflows.command("test") +@click.pass_context +@click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") +@click.option("-p", "--no-prompts", is_flag=True, default=False, help="Use defaults without prompting") +@click.option("-a", "--pytest_args", type=str, required=False, multiple=True, help="Additional pytest arguments") +def test_subworkflow(ctx, subworkflow, no_prompts, pytest_args): + """ + Run subworkflow tests locally. + + Given the name of a subworkflow, runs the Nextflow test command. + """ + try: + meta_builder = nf_core.subworkflows.SubworkflowsTest(subworkflow, no_prompts, pytest_args) + meta_builder.run() + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +# nf-core subworkflows install +@subworkflows.command() +@click.pass_context +@click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) +@click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the subworkflow") +@click.option( + "-f", "--force", is_flag=True, default=False, help="Force reinstallation of subworkflow if it already exists" +) +@click.option("-s", "--sha", type=str, metavar="", help="Install subworkflow at commit SHA") +def install(ctx, subworkflow, dir, prompt, force, sha): + """ + Install DSL2 subworkflow within a pipeline. + + Fetches and installs subworkflow files from a remote repo e.g. nf-core/modules. + """ + try: + subworkflow_install = nf_core.subworkflows.SubworkflowInstall( + dir, + force, + prompt, + sha, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + exit_status = subworkflow_install.install(subworkflow) + if not exit_status and all: + sys.exit(1) + except (UserWarning, LookupError) as e: + log.error(e) + raise + sys.exit(1) + + +# nf-core subworkflows list subcommands +@subworkflows.group() +@click.pass_context +def list(ctx): + """ + List modules in a local pipeline or remote repository. + """ + pass + + +# nf-core subworkflows list remote +@list.command() +@click.pass_context +@click.argument("keywords", required=False, nargs=-1, metavar="") +@click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") +def remote(ctx, keywords, json): + """ + List subworkflows in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. 
+    """
+    try:
+        subworkflow_list = nf_core.subworkflows.SubworkflowList(
+            None,
+            True,
+            ctx.obj["modules_repo_url"],
+            ctx.obj["modules_repo_branch"],
+            ctx.obj["modules_repo_no_pull"],
+        )
+        stdout.print(subworkflow_list.list_components(keywords, json))
+    except (UserWarning, LookupError) as e:
+        log.critical(e)
+        sys.exit(1)
+
+
+# nf-core subworkflows list local
+@list.command()
+@click.pass_context
+@click.argument("keywords", required=False, nargs=-1, metavar="<filter keywords>")
+@click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout")
+@click.option(
+    "-d",
+    "--dir",
+    type=click.Path(exists=True),
+    default=".",
+    help=r"Pipeline directory. [dim]\[default: Current working directory][/]",
+)
+def local(ctx, keywords, json, dir):  # pylint: disable=redefined-builtin
+    """
+    List subworkflows installed locally in a pipeline
+    """
+    try:
+        subworkflow_list = nf_core.subworkflows.SubworkflowList(
+            dir,
+            False,
+            ctx.obj["modules_repo_url"],
+            ctx.obj["modules_repo_branch"],
+            ctx.obj["modules_repo_no_pull"],
+        )
+        stdout.print(subworkflow_list.list_components(keywords, json))
+    except (UserWarning, LookupError) as e:
+        log.error(e)
+        sys.exit(1)
+
+
+# nf-core subworkflows remove
+@subworkflows.command()
+@click.pass_context
+@click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
+@click.option(
+    "-d",
+    "--dir",
+    type=click.Path(exists=True),
+    default=".",
+    help=r"Pipeline directory. [dim]\[default: current working directory][/]",
+)
+def remove(ctx, dir, subworkflow):
+    """
+    Remove a subworkflow from a pipeline.
+    """
+    try:
+        subworkflow_remove = nf_core.subworkflows.SubworkflowRemove(
+            dir,
+            ctx.obj["modules_repo_url"],
+            ctx.obj["modules_repo_branch"],
+            ctx.obj["modules_repo_no_pull"],
+        )
+        subworkflow_remove.remove(subworkflow)
+    except (UserWarning, LookupError) as e:
+        log.critical(e)
+        sys.exit(1)
+
+
+# nf-core subworkflows update
+@subworkflows.command()
+@click.pass_context
+@click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
+@click.option(
+    "-d",
+    "--dir",
+    type=click.Path(exists=True),
+    default=".",
+    help=r"Pipeline directory. [dim]\[default: current working directory][/]",
+)
+@click.option("-f", "--force", is_flag=True, default=False, help="Force update of subworkflow")
+@click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the subworkflow")
+@click.option("-s", "--sha", type=str, metavar="<commit sha>", help="Update subworkflow to commit SHA")
+@click.option("-a", "--all", is_flag=True, default=False, help="Update all subworkflows installed in pipeline")
+@click.option(
+    "-x/-y",
+    "--preview/--no-preview",
+    is_flag=True,
+    default=None,
+    help="Preview / no preview of changes before applying",
+)
+@click.option(
+    "-D",
+    "--save-diff",
+    type=str,
+    metavar="<filename>",
+    default=None,
+    help="Save diffs to a file instead of updating in place",
+)
+@click.option(
+    "-u",
+    "--update-deps",
+    is_flag=True,
+    default=False,
+    help="Automatically update all linked modules and subworkflows without asking for confirmation",
+)
+def update(ctx, subworkflow, dir, force, prompt, sha, all, preview, save_diff, update_deps):
+    """
+    Update DSL2 subworkflow within a pipeline.
+
+    Fetches and updates subworkflow files from a remote repo e.g. nf-core/modules.
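+
+    Example (illustrative):
+
+        nf-core subworkflows update --all --update-deps
+
+    updates every subworkflow recorded in 'modules.json' and, because of
+    '--update-deps', also updates linked modules and subworkflows without
+    asking for confirmation.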
+ """ + try: + subworkflow_install = nf_core.subworkflows.SubworkflowUpdate( + dir, + force, + prompt, + sha, + all, + preview, + save_diff, + update_deps, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + exit_status = subworkflow_install.update(subworkflow) + if not exit_status and all: + sys.exit(1) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + # nf-core schema subcommands @nf_core_cli.group() def schema(): @@ -985,8 +1455,7 @@ def docs(schema_path, output, format, force, columns): # Assume we're in a pipeline dir root if schema path not set schema_obj.get_schema_path(schema_path) schema_obj.load_schema() - if not output: - stdout.print(schema_obj.print_documentation(output, format, force, columns.split(","))) + schema_obj.print_documentation(output, format, force, columns.split(",")) # nf-core bump-version @@ -1046,7 +1515,8 @@ def bump_version(new_version, dir, nextflow): @click.option("-p", "--pull-request", is_flag=True, default=False, help="Make a GitHub pull-request with the changes.") @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") -def sync(dir, from_branch, pull_request, github_repository, username): +@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") +def sync(dir, from_branch, pull_request, github_repository, username, template_yaml): """ Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. @@ -1063,7 +1533,7 @@ def sync(dir, from_branch, pull_request, github_repository, username): nf_core.utils.is_pipeline_directory(dir) # Sync the given pipeline dir - sync_obj = nf_core.sync.PipelineSync(dir, from_branch, pull_request, github_repository, username) + sync_obj = nf_core.sync.PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml) try: sync_obj.sync() except (nf_core.sync.SyncException, nf_core.sync.PullRequestException) as e: diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 53766678b0..129016fa38 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """Bumps the version number in all appropriate files for a nf-core pipeline. 
""" @@ -134,12 +133,10 @@ def update_file_version(filename, pipeline_obj, patterns): replacements = [] for pattern in patterns: - found_match = False newcontent = [] for line in content.splitlines(): - # Match the pattern matches_pattern = re.findall(rf"^.*{pattern[0]}.*$", line) if matches_pattern: diff --git a/nf_core/components/__init__.py b/nf_core/components/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py new file mode 100644 index 0000000000..31ab1a71fb --- /dev/null +++ b/nf_core/components/components_command.py @@ -0,0 +1,281 @@ +import logging +import mmap +import os +import shutil +from pathlib import Path + +import yaml + +from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_repo import ModulesRepo + +from .components_utils import get_repo_info + +log = logging.getLogger(__name__) + + +class ComponentCommand: + """ + Base class for the 'nf-core modules' and 'nf-core subworkflows' commands + """ + + def __init__(self, component_type, dir, remote_url=None, branch=None, no_pull=False, hide_progress=False): + """ + Initialise the ComponentClass object + """ + self.component_type = component_type + self.dir = dir + self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) + self.hide_progress = hide_progress + self._configure_repo_and_paths() + + def _configure_repo_and_paths(self, nf_dir_req=True): + """ + Determine the repo type and set some default paths. + If this is a modules repo, determine the org_path too. + + Args: + nf_dir_req (bool, optional): Whether this command requires being run in the nf-core modules repo or a nf-core pipeline repository. Defaults to True. + """ + try: + if self.dir: + self.dir, self.repo_type, self.org = get_repo_info(self.dir, use_prompt=nf_dir_req) + else: + self.repo_type = None + self.org = "" + except UserWarning: + if nf_dir_req: + raise + self.repo_type = None + self.org = "" + self.default_modules_path = Path("modules", self.org) + self.default_tests_path = Path("tests", "modules", self.org) + self.default_subworkflows_path = Path("subworkflows", self.org) + self.default_subworkflows_tests_path = Path("tests", "subworkflows", self.org) + + def get_local_components(self): + """ + Get the local modules/subworkflows in a pipeline + """ + local_component_dir = Path(self.dir, self.component_type, "local") + return [ + str(path.relative_to(local_component_dir)) for path in local_component_dir.iterdir() if path.suffix == ".nf" + ] + + def get_components_clone_modules(self): + """ + Get the modules/subworkflows repository available in a clone of nf-core/modules + """ + if self.component_type == "modules": + component_base_path = Path(self.dir, self.default_modules_path) + elif self.component_type == "subworkflows": + component_base_path = Path(self.dir, self.default_subworkflows_path) + return [ + str(Path(dir).relative_to(component_base_path)) + for dir, _, files in os.walk(component_base_path) + if "main.nf" in files + ] + + def has_valid_directory(self): + """Check that we were given a pipeline or clone of nf-core/modules""" + if self.repo_type == "modules": + return True + if self.dir is None or not os.path.exists(self.dir): + log.error(f"Could not find directory: {self.dir}") + return False + main_nf = os.path.join(self.dir, "main.nf") + nf_config = os.path.join(self.dir, "nextflow.config") + if not os.path.exists(main_nf) and not os.path.exists(nf_config): + if 
Path(self.dir).resolve().parts[-1].startswith("nf-core"):
+                raise UserWarning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.dir}'")
+            log.warning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.dir}'")
+        return True
+
+    def has_modules_file(self):
+        """Checks whether a 'modules.json' file exists and creates one if it is missing"""
+        modules_json_path = os.path.join(self.dir, "modules.json")
+        if not os.path.exists(modules_json_path):
+            log.info("Creating missing 'modules.json' file.")
+            ModulesJson(self.dir).create()
+
+    def clear_component_dir(self, component_name, component_dir):
+        """
+        Removes all files in the module/subworkflow directory
+
+        Args:
+            component_name (str): The name of the module/subworkflow
+            component_dir (str): The path to the module/subworkflow in the module repository
+
+        """
+
+        try:
+            shutil.rmtree(component_dir)
+            # remove all empty directories
+            for dir_path, dir_names, filenames in os.walk(self.dir, topdown=False):
+                if not dir_names and not filenames:
+                    try:
+                        os.rmdir(dir_path)
+                    except OSError:
+                        pass
+                    else:
+                        log.debug(f"Deleted directory: '{dir_path}'")
+
+            log.debug(f"Successfully removed {self.component_type[:-1]} {component_name}")
+            return True
+        except OSError as e:
+            log.error(f"Could not remove {self.component_type[:-1]} {component_name}: {e}")
+            return False
+
+    def components_from_repo(self, install_dir):
+        """
+        Gets the modules/subworkflows installed from a certain repository
+
+        Args:
+            install_dir (str): The name of the directory where modules/subworkflows are installed
+
+        Returns:
+            [str]: The names of the modules/subworkflows
+        """
+        repo_dir = Path(self.dir, self.component_type, install_dir)
+        if not repo_dir.exists():
+            raise LookupError(f"Nothing installed from {install_dir} in pipeline")
+
+        return [
+            str(Path(dir_path).relative_to(repo_dir)) for dir_path, _, files in os.walk(repo_dir) if "main.nf" in files
+        ]
+
+    def install_component_files(self, component_name, component_version, modules_repo, install_dir):
+        """
+        Installs a module/subworkflow into the given directory
+
+        Args:
+            component_name (str): The name of the module/subworkflow
+            component_version (str): Git SHA for the version of the module/subworkflow to be installed
+            modules_repo (ModulesRepo): A correctly configured ModulesRepo object
+            install_dir (str): The path to where the module/subworkflow should be installed (should be the 'modules/' or 'subworkflows/' dir of the pipeline)
+
+        Returns:
+            (bool): Whether the operation was successful or not
+        """
+        return modules_repo.install_component(component_name, install_dir, component_version, self.component_type)
+
+    def load_lint_config(self):
+        """Parse a pipeline lint config file.
+
+        Look for a file called either `.nf-core-lint.yml` or
+        `.nf-core-lint.yaml` in the pipeline root directory and parse it.
+        (`.yml` takes precedence).
+
+        Add parsed config to the `self.lint_config` class attribute.
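+
+        A minimal config might look like this (keys are illustrative lint
+        test names):
+
+            pipeline_todos: false
+            files_unchanged:
+                - .github/CONTRIBUTING.md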
+ """ + config_fn = os.path.join(self.dir, ".nf-core-lint.yml") + + # Pick up the file if it's .yaml instead of .yml + if not os.path.isfile(config_fn): + config_fn = os.path.join(self.dir, ".nf-core-lint.yaml") + + # Load the YAML + try: + with open(config_fn, "r") as fh: + self.lint_config = yaml.safe_load(fh) + except FileNotFoundError: + log.debug(f"No lint config file found: {config_fn}") + + def check_modules_structure(self): + """ + Check that the structure of the modules directory in a pipeline is the correct one: + modules/nf-core/TOOL/SUBTOOL + + Prior to nf-core/tools release 2.6 the directory structure had an additional level of nesting: + modules/nf-core/modules/TOOL/SUBTOOL + """ + if self.repo_type == "pipeline": + wrong_location_modules = [] + for directory, _, files in os.walk(Path(self.dir, "modules")): + if "main.nf" in files: + module_path = Path(directory).relative_to(Path(self.dir, "modules")) + parts = module_path.parts + # Check that there are modules installed directly under the 'modules' directory + if parts[1] == "modules": + wrong_location_modules.append(module_path) + # If there are modules installed in the wrong location + if len(wrong_location_modules) > 0: + log.info("The modules folder structure is outdated. Reinstalling modules.") + # Remove the local copy of the modules repository + log.info(f"Updating '{self.modules_repo.local_repo_dir}'") + self.modules_repo.setup_local_repo( + self.modules_repo.remote_url, self.modules_repo.branch, self.hide_progress + ) + # Move wrong modules to the right directory + for module in wrong_location_modules: + modules_dir = Path("modules").resolve() + correct_dir = Path(modules_dir, self.modules_repo.repo_path, Path(*module.parts[2:])) + wrong_dir = Path(modules_dir, module) + shutil.move(wrong_dir, correct_dir) + log.info(f"Moved {wrong_dir} to {correct_dir}.") + shutil.rmtree(Path(self.dir, "modules", self.modules_repo.repo_path, "modules")) + # Regenerate modules.json file + modules_json = ModulesJson(self.dir) + modules_json.check_up_to_date() + + def check_patch_paths(self, patch_path, module_name): + """ + Check that paths in patch files are updated to the new modules path + """ + if patch_path.exists(): + log.info(f"Modules {module_name} contains a patch file.") + rewrite = False + with open(patch_path, "r") as fh: + lines = fh.readlines() + for index, line in enumerate(lines): + # Check if there are old paths in the patch file and replace + if f"modules/{self.modules_repo.repo_path}/modules/{module_name}/" in line: + rewrite = True + lines[index] = line.replace( + f"modules/{self.modules_repo.repo_path}/modules/{module_name}/", + f"modules/{self.modules_repo.repo_path}/{module_name}/", + ) + if rewrite: + log.info(f"Updating paths in {patch_path}") + with open(patch_path, "w") as fh: + for line in lines: + fh.write(line) + # Update path in modules.json if the file is in the correct format + modules_json = ModulesJson(self.dir) + modules_json.load() + if modules_json.has_git_url_and_modules(): + modules_json.modules_json["repos"][self.modules_repo.remote_url]["modules"][ + self.modules_repo.repo_path + ][module_name]["patch"] = str(patch_path.relative_to(Path(self.dir).resolve())) + modules_json.dump() + + def check_if_in_include_stmts(self, component_path): + """ + Checks for include statements in the main.nf file of the pipeline and a list of line numbers where the component is included + Args: + component_path (str): The path to the module/subworkflow + + Returns: + (list): A list of dictionaries, with the 
workflow file and the line number where the component is included + """ + include_stmts = {} + if self.repo_type == "pipeline": + workflow_files = Path(self.dir, "workflows").glob("*.nf") + for workflow_file in workflow_files: + with open(workflow_file, "r") as fh: + # Check if component path is in the file using mmap + with mmap.mmap(fh.fileno(), 0, access=mmap.ACCESS_READ) as s: + if s.find(component_path.encode()) != -1: + # If the component path is in the file, check for include statements + for i, line in enumerate(fh): + if line.startswith("include") and component_path in line: + if str(workflow_file) not in include_stmts: + include_stmts[str(workflow_file)] = [] + include_stmts[str(workflow_file)].append( + {"line_number": i + 1, "line": line.rstrip()} + ) + + return include_stmts + else: + log.debug("Not a pipeline repository, skipping check for include statements") + return include_stmts diff --git a/nf_core/modules/module_test.py b/nf_core/components/components_test.py similarity index 54% rename from nf_core/modules/module_test.py rename to nf_core/components/components_test.py index 927fd6b693..c2b9abf569 100644 --- a/nf_core/modules/module_test.py +++ b/nf_core/components/components_test.py @@ -1,8 +1,3 @@ -#!/usr/bin/env python -""" -The ModulesTest class runs the tests locally -""" - import logging import os import sys @@ -13,23 +8,23 @@ import questionary import rich -import nf_core.modules.module_utils +import nf_core.modules.modules_utils import nf_core.utils -from nf_core.modules.modules_command import ModuleCommand +from nf_core.components.components_command import ComponentCommand from nf_core.modules.modules_json import ModulesJson log = logging.getLogger(__name__) -class ModulesTest(ModuleCommand): +class ComponentsTest(ComponentCommand): """ - Class to run module pytests. + Class to run module and subworkflow pytests. ... Attributes ---------- - module_name : str + component_name : str name of the tool to run tests for no_prompts : bool flat indicating if prompts are used @@ -41,7 +36,7 @@ class ModulesTest(ModuleCommand): run(): Run test steps _check_inputs(): - Check inputs. Ask for module_name if not provided and check that the directory exists + Check inputs. 
Ask for component_name if not provided and check that the directory exists _set_profile(): Set software profile _run_pytests(self): @@ -50,19 +45,19 @@ class ModulesTest(ModuleCommand): def __init__( self, - module_name=None, + component_type, + component_name=None, no_prompts=False, pytest_args="", remote_url=None, branch=None, no_pull=False, ): - self.module_name = module_name + super().__init__(component_type=component_type, dir=".", remote_url=remote_url, branch=branch, no_pull=no_pull) + self.component_name = component_name self.no_prompts = no_prompts self.pytest_args = pytest_args - super().__init__(".", remote_url, branch, no_pull) - def run(self): """Run test steps""" if not self.no_prompts: @@ -76,30 +71,38 @@ def run(self): def _check_inputs(self): """Do more complex checks about supplied flags.""" + # Check modules directory structure + self.check_modules_structure() # Retrieving installed modules if self.repo_type == "modules": - installed_modules = self.get_modules_clone_modules() + installed_components = self.get_components_clone_modules() else: modules_json = ModulesJson(self.dir) modules_json.check_up_to_date() - installed_modules = modules_json.get_all_modules().get(self.modules_repo.fullname) + installed_components = modules_json.get_all_components(self.component_type).get( + self.modules_repo.remote_url + ) - # Get the tool name if not specified - if self.module_name is None: + # Get the component name if not specified + if self.component_name is None: if self.no_prompts: raise UserWarning( - "Tool name not provided and prompts deactivated. Please provide the tool name as TOOL/SUBTOOL or TOOL." + f"{self.component_type[:-1].title()} name not provided and prompts deactivated. Please provide the {self.component_type[:-1]} name{' as TOOL/SUBTOOL or TOOL' if self.component_type == 'modules' else ''}." 
) - if not installed_modules: + if not installed_components: + if self.component_type == "modules": + dir_structure_message = f"modules/{self.modules_repo.repo_path}/TOOL/SUBTOOL/ and tests/modules/{self.modules_repo.repo_path}/TOOLS/SUBTOOL/" + elif self.component_type == "subworkflows": + dir_structure_message = f"subworkflows/{self.modules_repo.repo_path}/SUBWORKFLOW/ and tests/subworkflows/{self.modules_repo.repo_path}/SUBWORKFLOW/" raise UserWarning( - f"No installed modules were found from '{self.modules_repo.remote_url}'.\n" - f"Are you running the tests inside the nf-core/modules main directory?\n" - f"Otherwise, make sure that the directory structure is modules/TOOL/SUBTOOL/ and tests/modules/TOOLS/SUBTOOL/" + f"No installed {self.component_type} were found from '{self.modules_repo.remote_url}'.\n" + f"Are you running the tests inside the repository root directory?\n" + f"Make sure that the directory structure is {dir_structure_message}" ) - self.module_name = questionary.autocomplete( - "Tool name:", - choices=installed_modules, + self.component_name = questionary.autocomplete( + f"{self.component_type[:-1]} name:", + choices=installed_components, style=nf_core.utils.nfcore_question_style, ).unsafe_ask() @@ -108,27 +111,27 @@ def _check_inputs(self): def _validate_folder_structure(self): """Validate that the modules follow the correct folder structure to run the tests: - - modules/TOOL/SUBTOOL/ - - tests/modules/TOOL/SUBTOOL/ - + - modules/nf-core/TOOL/SUBTOOL/ + - tests/modules/nf-core/TOOL/SUBTOOL/ + or + - subworkflows/nf-core/SUBWORKFLOW/ + - tests/subworkflows/nf-core/SUBWORKFLOW/ """ - basedir = "modules/nf-core" - - if self.repo_type == "modules": - module_path = Path("modules") / self.module_name - test_path = Path("tests/modules") / self.module_name - else: - module_path = Path(f"{basedir}/modules") / self.module_name - test_path = Path(f"{basedir}/tests/modules") / self.module_name - - if not (self.dir / module_path).is_dir(): + if self.component_type == "modules": + component_path = Path(self.default_modules_path) / self.component_name + test_path = Path(self.default_tests_path) / self.component_name + elif self.component_type == "subworkflows": + component_path = Path(self.default_subworkflows_path) / self.component_name + test_path = Path(self.default_subworkflows_tests_path) / self.component_name + + if not (self.dir / component_path).is_dir(): raise UserWarning( - f"Cannot find directory '{module_path}'. Should be TOOL/SUBTOOL or TOOL. Are you running the tests inside the nf-core/modules main directory?" + f"Cannot find directory '{component_path}'. Should be {'TOOL/SUBTOOL or TOOL' if self.component_type == 'modules' else 'SUBWORKFLOW'}. Are you running the tests inside the modules repository root directory?" ) if not (self.dir / test_path).is_dir(): raise UserWarning( - f"Cannot find directory '{test_path}'. Should be TOOL/SUBTOOL or TOOL. " - "Are you running the tests inside the nf-core/modules main directory? " + f"Cannot find directory '{test_path}'. Should be {'TOOL/SUBTOOL or TOOL' if self.component_type == 'modules' else 'SUBWORKFLOW'}. " + "Are you running the tests inside the modules repository root directory? " "Do you have tests for the specified module?" 
) @@ -171,15 +174,18 @@ def _check_profile(self): ) def _run_pytests(self): - """Given a module name, run tests.""" + """Given a module/subworkflow name, run tests.""" # Print nice divider line console = rich.console.Console() - console.rule(self.module_name, style="black") + console.rule(self.component_name, style="black") # Set pytest arguments - command_args = ["--tag", f"{self.module_name}", "--symlink", "--keep-workflow-wd", "--git-aware"] + tag = self.component_name + if self.component_type == "subworkflows": + tag = "subworkflows/" + tag + command_args = ["--tag", f"{tag}", "--symlink", "--keep-workflow-wd", "--git-aware"] command_args += self.pytest_args # Run pytest - log.info(f"Running pytest for module '{self.module_name}'") + log.info(f"Running pytest for {self.component_type[:-1]} '{self.component_name}'") sys.exit(pytest.main(command_args)) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py new file mode 100644 index 0000000000..9a0565296e --- /dev/null +++ b/nf_core/components/components_utils.py @@ -0,0 +1,148 @@ +import logging +import os +import re +from pathlib import Path + +import questionary +import rich.prompt + +import nf_core.utils + +log = logging.getLogger(__name__) + + +def get_repo_info(directory, use_prompt=True): + """ + Determine whether this is a pipeline repository or a clone of + nf-core/modules + """ + # Verify that the pipeline dir exists + if directory is None or not Path(directory).is_dir(): + raise UserWarning(f"Could not find directory: {directory}") + + # Try to find the root directory + base_dir = nf_core.utils.determine_base_dir(directory) + + # Figure out the repository type from the .nf-core.yml config file if we can + config_fn, tools_config = nf_core.utils.load_tools_config(base_dir) + repo_type = tools_config.get("repository_type", None) + + # If not set, prompt the user + if not repo_type and use_prompt: + log.warning("'repository_type' not defined in %s", config_fn.name) + repo_type = questionary.select( + "Is this repository an nf-core pipeline or a fork of nf-core/modules?", + choices=[ + {"name": "Pipeline", "value": "pipeline"}, + {"name": "nf-core/modules", "value": "modules"}, + ], + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + + # Save the choice in the config file + log.info(f"To avoid this prompt in the future, add the 'repository_type' key to your {config_fn.name} file.") + if rich.prompt.Confirm.ask("[bold][blue]?[/] Would you like me to add this config now?", default=True): + with open(config_fn, "a+") as fh: + fh.write(f"repository_type: {repo_type}\n") + log.info(f"Config added to '{config_fn.name}'") + + # Not set and not allowed to ask + elif not repo_type: + raise UserWarning("Repository type could not be established") + + # Check if it's a valid answer + if not repo_type in ["pipeline", "modules"]: + raise UserWarning(f"Invalid repository type: '{repo_type}'") + + # Check for org if modules repo + org = None + if repo_type == "pipeline": + org = "" + elif repo_type == "modules": + org = tools_config.get("org_path", None) + if org is None: + log.warning("Organisation path not defined in %s [key: org_path]", config_fn.name) + org = questionary.text( + "What is the organisation path under which modules and subworkflows are stored?", + default="nf-core", + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + log.info("To avoid this prompt in the future, add the 'org_path' key to a root '%s' file.", config_fn.name) + if 
rich.prompt.Confirm.ask("[bold][blue]?[/] Would you like me to add this config now?", default=True): + with open(config_fn, "a+") as fh: + fh.write(f"org_path: {org}\n") + log.info(f"Config added to '{config_fn.name}'") + + if not org: + raise UserWarning("Organisation path could not be established") + + # It was set on the command line, return what we were given + return [base_dir, repo_type, org] + + +def prompt_component_version_sha(component_name, component_type, modules_repo, installed_sha=None): + """ + Creates an interactive questionary prompt for selecting the module/subworkflow version + Args: + component_name (str): Module/subworkflow name, + component_type (str): "modules" or "subworkflows", + modules_repo (ModulesRepo): Modules repo the module/subworkflow originate in + installed_sha (str): Optional extra argument to highlight the current installed version + + Returns: + git_sha (str): The selected version of the module/subworkflow + """ + older_commits_choice = questionary.Choice( + title=[("fg:ansiyellow", "older commits"), ("class:choice-default", "")], value="" + ) + git_sha = "" + page_nbr = 1 + + all_commits = modules_repo.get_component_git_log(component_name, component_type) + next_page_commits = [next(all_commits, None) for _ in range(10)] + next_page_commits = [commit for commit in next_page_commits if commit is not None] + + while git_sha == "": + commits = next_page_commits + next_page_commits = [next(all_commits, None) for _ in range(10)] + next_page_commits = [commit for commit in next_page_commits if commit is not None] + if all(commit is None for commit in next_page_commits): + next_page_commits = None + + choices = [] + for title, sha in map(lambda commit: (commit["trunc_message"], commit["git_sha"]), commits): + display_color = "fg:ansiblue" if sha != installed_sha else "fg:ansired" + message = f"{title} {sha}" + if installed_sha == sha: + message += " (installed version)" + commit_display = [(display_color, message), ("class:choice-default", "")] + choices.append(questionary.Choice(title=commit_display, value=sha)) + if next_page_commits is not None: + choices += [older_commits_choice] + git_sha = questionary.select( + f"Select '{component_name}' commit:", choices=choices, style=nf_core.utils.nfcore_question_style + ).unsafe_ask() + page_nbr += 1 + return git_sha + + +def get_components_to_install(subworkflow_dir): + """ + Parse the subworkflow test main.nf file to retrieve all imported modules and subworkflows. 
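+
+    For example (illustrative paths), a line such as
+
+        include { SAMTOOLS_SORT } from '../../../modules/nf-core/samtools/sort/main'
+
+    is recorded as the module 'samtools/sort', while an import from a '../'
+    path is recorded as a subworkflow.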
+    """
+    modules = []
+    subworkflows = []
+    regex = re.compile(
+        r"include(?: *{ *)([a-zA-Z\_0-9]*)(?: *as *)?(?:[a-zA-Z\_0-9]*)?(?: *})(?: *from *)(?:'|\")(.*)(?:'|\")"
+    )
+    with open(Path(subworkflow_dir, "main.nf"), "r") as fh:
+        for line in fh:
+            match = regex.match(line)
+            if match and len(match.groups()) == 2:
+                name, link = match.groups()
+                if link.startswith("../../../"):
+                    name_split = name.lower().split("_")
+                    modules.append("/".join(name_split))
+                elif link.startswith("../"):
+                    subworkflows.append(name.lower())
+    return modules, subworkflows
diff --git a/nf_core/components/create.py b/nf_core/components/create.py
new file mode 100644
index 0000000000..e626de4aaa
--- /dev/null
+++ b/nf_core/components/create.py
@@ -0,0 +1,435 @@
+"""
+The ComponentCreate class handles generating of module and subworkflow templates
+"""
+
+from __future__ import print_function
+
+import glob
+import json
+import logging
+import os
+import re
+import subprocess
+
+import jinja2
+import questionary
+import rich
+import yaml
+from packaging.version import parse as parse_version
+
+import nf_core
+import nf_core.utils
+from nf_core.components.components_command import ComponentCommand
+
+log = logging.getLogger(__name__)
+
+
+class ComponentCreate(ComponentCommand):
+    def __init__(
+        self,
+        component_type,
+        directory=".",
+        component="",
+        author=None,
+        process_label=None,
+        has_meta=None,
+        force=False,
+        conda_name=None,
+        conda_version=None,
+        empty_template=False,
+    ):
+        super().__init__(component_type, directory)
+        self.directory = directory
+        self.component = component
+        self.author = author
+        self.process_label = process_label
+        self.has_meta = has_meta
+        self.force_overwrite = force
+        self.subtool = None
+        self.tool_conda_name = conda_name
+        self.tool_conda_version = conda_version
+        self.tool_licence = ""
+        self.tool_description = ""
+        self.tool_doc_url = ""
+        self.tool_dev_url = ""
+        self.bioconda = None
+        self.singularity_container = None
+        self.docker_container = None
+        self.file_paths = {}
+        self.not_empty_template = not empty_template
+
+    def create(self):
+        """
+        Create a new DSL2 module or subworkflow from the nf-core template.
+
+        A module should be named just <tool> or <tool/subtool>
+        e.g. fastqc or samtools/sort, respectively.
+
+        The subworkflow should be named as the main file type it operates on and a short description of the task performed
+        e.g. bam_sort or bam_sort_samtools, respectively.
+
+        If <directory> is a pipeline, this function creates a file called:
+        '<directory>/modules/local/tool.nf'
+        OR
+        '<directory>/modules/local/tool_subtool.nf'
+        OR for subworkflows
+        '<directory>/subworkflows/local/subworkflow_name.nf'
+
+        If <directory> is a clone of nf-core/modules, it creates or modifies the following files:
+
+        For modules:
+
+        modules/modules/nf-core/tool/subtool/
+            * main.nf
+            * meta.yml
+        modules/tests/modules/nf-core/tool/subtool/
+            * main.nf
+            * test.yml
+            * nextflow.config
+        tests/config/pytest_modules.yml
+
+        The function will attempt to automatically find a Bioconda package called <tool>
+        and matching Docker / Singularity images from BioContainers.
+ + For subworkflows: + subworkflows/nf-core/subworkflow_name/ + * main.nf + * meta.yml + tests/subworkflows/nf-core/subworkflow_name/ + * main.nf + * test.yml + * nextflow.config + tests/config/pytest_modules.yml + + """ + + if self.component_type == "modules": + # Check modules directory structure + self.check_modules_structure() + + # Check whether the given directory is a nf-core pipeline or a clone of nf-core/modules + log.info(f"Repository type: [blue]{self.repo_type}") + if self.directory != ".": + log.info(f"Base directory: '{self.directory}'") + + log.info( + "[yellow]Press enter to use default values [cyan bold](shown in brackets)[/] [yellow]or type your own responses. " + "ctrl+click [link=https://youtu.be/dQw4w9WgXcQ]underlined text[/link] to open links." + ) + + # Collect component info via prompt if empty or invalid + self._collect_name_prompt() + + # Determine the component name + self.component_name = self.component + self.component_dir = self.component + + if self.subtool: + self.component_name = f"{self.component}/{self.subtool}" + self.component_dir = os.path.join(self.component, self.subtool) + + self.component_name_underscore = self.component_name.replace("/", "_") + + # Check existence of directories early for fast-fail + self.file_paths = self._get_component_dirs() + + if self.component_type == "modules": + # Try to find a bioconda package for 'component' + self._get_bioconda_tool() + + # Prompt for GitHub username + self._get_username() + + if self.component_type == "modules": + self._get_module_structure_components() + + # Create component template with jinja2 + self._render_template() + + if self.repo_type == "modules": + # Add entry to pytest_modules.yml + try: + with open(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"), "r") as fh: + pytest_modules_yml = yaml.safe_load(fh) + if self.subtool: + pytest_modules_yml[self.component_name] = [ + f"modules/{self.org}/{self.component}/{self.subtool}/**", + f"tests/modules/{self.org}/{self.component}/{self.subtool}/**", + ] + else: + pytest_modules_yml[ + ("" if self.component_type == "modules" else self.component_type + "/") + self.component_name + ] = [ + f"{self.component_type}/{self.org}/{self.component}/**", + f"tests/{self.component_type}/{self.org}/{self.component}/**", + ] + pytest_modules_yml = dict(sorted(pytest_modules_yml.items())) + with open(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"), "w") as fh: + yaml.dump(pytest_modules_yml, fh, sort_keys=True, Dumper=nf_core.utils.custom_yaml_dumper()) + except FileNotFoundError: + raise UserWarning("Could not open 'tests/config/pytest_modules.yml' file!") + + new_files = list(self.file_paths.values()) + if self.repo_type == "modules": + new_files.append(os.path.join(self.directory, "tests", "config", "pytest_modules.yml")) + log.info("Created / edited following files:\n " + "\n ".join(new_files)) + + def _get_bioconda_tool(self): + """ + Try to find a bioconda package for 'tool' + """ + while True: + try: + if self.tool_conda_name: + anaconda_response = nf_core.utils.anaconda_package(self.tool_conda_name, ["bioconda"]) + else: + anaconda_response = nf_core.utils.anaconda_package(self.component, ["bioconda"]) + + if not self.tool_conda_version: + version = anaconda_response.get("latest_version") + if not version: + version = str(max([parse_version(v) for v in anaconda_response["versions"]])) + else: + version = self.tool_conda_version + + self.tool_licence = nf_core.utils.parse_anaconda_licence(anaconda_response, version) 
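+                # The description and URL fields are read straight from the
+                # Anaconda API response, defaulting to empty strings when absent.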
+ self.tool_description = anaconda_response.get("summary", "") + self.tool_doc_url = anaconda_response.get("doc_url", "") + self.tool_dev_url = anaconda_response.get("dev_url", "") + if self.tool_conda_name: + self.bioconda = "bioconda::" + self.tool_conda_name + "=" + version + else: + self.bioconda = "bioconda::" + self.component + "=" + version + log.info(f"Using Bioconda package: '{self.bioconda}'") + break + except (ValueError, LookupError) as e: + log.warning( + f"Could not find Conda dependency using the Anaconda API: '{self.tool_conda_name if self.tool_conda_name else self.component}'" + ) + if rich.prompt.Confirm.ask("[violet]Do you want to enter a different Bioconda package name?"): + self.tool_conda_name = rich.prompt.Prompt.ask("[violet]Name of Bioconda package").strip() + continue + else: + log.warning( + f"{e}\nBuilding module without tool software and meta, you will need to enter this information manually." + ) + break + + # Try to get the container tag (only if bioconda package was found) + if self.bioconda: + try: + if self.tool_conda_name: + self.docker_container, self.singularity_container = nf_core.utils.get_biocontainer_tag( + self.tool_conda_name, version + ) + else: + self.docker_container, self.singularity_container = nf_core.utils.get_biocontainer_tag( + self.component, version + ) + log.info(f"Using Docker container: '{self.docker_container}'") + log.info(f"Using Singularity container: '{self.singularity_container}'") + except (ValueError, LookupError) as e: + log.info(f"Could not find a Docker/Singularity container ({e})") + + def _get_module_structure_components(self): + process_label_defaults = ["process_single", "process_low", "process_medium", "process_high", "process_long"] + if self.process_label is None: + log.info( + "Provide an appropriate resource label for the process, taken from the " + "[link=https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config#L29]nf-core pipeline template[/link].\n" + "For example: {}".format(", ".join(process_label_defaults)) + ) + while self.process_label is None: + self.process_label = questionary.autocomplete( + "Process resource label:", + choices=process_label_defaults, + style=nf_core.utils.nfcore_question_style, + default="process_single", + ).unsafe_ask() + + if self.has_meta is None: + log.info( + "Where applicable all sample-specific information e.g. 'id', 'single_end', 'read_group' " + "MUST be provided as an input via a Groovy Map called 'meta'. " + "This information may [italic]not[/] be required in some instances, for example " + "[link=https://github.com/nf-core/modules/blob/master/modules/nf-core/bwa/index/main.nf]indexing reference genome files[/link]." + ) + while self.has_meta is None: + self.has_meta = rich.prompt.Confirm.ask( + "[violet]Will the module require a meta map of sample information?", default=True + ) + + def _render_template(self): + """ + Create new module/subworkflow files with Jinja2. 
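+
+        Each entry of 'self.file_paths' maps a template file to its destination
+        path; the instance attributes (plus 'nf_core_version') supply the Jinja
+        context, and file permissions are mirrored from the template files.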
+ """ + object_attrs = vars(self) + # Run jinja2 for each file in the template folder + env = jinja2.Environment( + loader=jinja2.PackageLoader("nf_core", f"{self.component_type[:-1]}-template"), keep_trailing_newline=True + ) + for template_fn, dest_fn in self.file_paths.items(): + log.debug(f"Rendering template file: '{template_fn}'") + j_template = env.get_template(template_fn) + object_attrs["nf_core_version"] = nf_core.__version__ + rendered_output = j_template.render(object_attrs) + + # Write output to the target file + os.makedirs(os.path.dirname(dest_fn), exist_ok=True) + with open(dest_fn, "w") as fh: + log.debug(f"Writing output to: '{dest_fn}'") + fh.write(rendered_output) + + # Mirror file permissions + template_stat = os.stat( + os.path.join(os.path.dirname(nf_core.__file__), f"{self.component_type[:-1]}-template", template_fn) + ) + os.chmod(dest_fn, template_stat.st_mode) + + def _collect_name_prompt(self): + """ + Collect module/subworkflow info via prompt if empty or invalid + """ + # Collect module info via prompt if empty or invalid + self.subtool = None + if self.component_type == "modules": + pattern = r"[^a-z\d/]" + elif self.component_type == "subworkflows": + pattern = r"[^a-z\d_/]" + if self.component is None: + self.component = "" + while self.component == "" or re.search(pattern, self.component) or self.component.count("/") > 0: + # Check + auto-fix for invalid chacters + if re.search(pattern, self.component): + if self.component_type == "modules": + log.warning("Tool/subtool name must be lower-case letters only, with no punctuation") + elif self.component_type == "subworkflows": + log.warning("Subworkflow name must be lower-case letters only, with no punctuation") + name_clean = re.sub(r"[^a-z\d/]", "", self.component.lower()) + if rich.prompt.Confirm.ask(f"[violet]Change '{self.component}' to '{name_clean}'?"): + self.component = name_clean + else: + self.component = "" + + if self.component_type == "modules": + # Split into tool and subtool + if self.component.count("/") > 1: + log.warning("Tool/subtool can have maximum one '/' character") + self.component = "" + elif self.component.count("/") == 1: + self.component, self.subtool = self.component.split("/") + else: + self.subtool = None # Reset edge case: entered '/subtool' as name and gone round loop again + + # Prompt for new entry if we reset + if self.component == "": + if self.component_type == "modules": + self.component = rich.prompt.Prompt.ask("[violet]Name of tool/subtool").strip() + elif self.component_type == "subworkflows": + self.component = rich.prompt.Prompt.ask("[violet]Name of subworkflow").strip() + + def _get_component_dirs(self): + """Given a directory and a tool/subtool or subworkflow, set the file paths and check if they already exist + + Returns dict: keys are relative paths to template files, vals are target paths. + """ + file_paths = {} + if self.repo_type == "pipeline": + local_component_dir = os.path.join(self.directory, self.component_type, "local") + # Check whether component file already exists + component_file = os.path.join(local_component_dir, f"{self.component_name}.nf") + if os.path.exists(component_file) and not self.force_overwrite: + raise UserWarning( + f"{self.component_type[:-1].title()} file exists already: '{component_file}'. 
Use '--force' to overwrite"
+                )
+
+            if self.component_type == "modules":
+                # If a subtool, check if there is a module called the base tool name already
+                if self.subtool and os.path.exists(os.path.join(local_component_dir, f"{self.component}.nf")):
+                    raise UserWarning(
+                        f"Module '{self.component}' exists already, cannot make subtool '{self.component_name}'"
+                    )
+
+                # If no subtool, check that there isn't already a tool/subtool
+                tool_glob = glob.glob(f"{local_component_dir}/{self.component}_*.nf")
+                if not self.subtool and tool_glob:
+                    raise UserWarning(
+                        f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{self.component_name}'"
+                    )
+
+            # Set file paths
+            file_paths[os.path.join(self.component_type, "main.nf")] = component_file
+
+        if self.repo_type == "modules":
+            software_dir = os.path.join(self.directory, self.component_type, self.org, self.component_dir)
+            test_dir = os.path.join(self.directory, "tests", self.component_type, self.org, self.component_dir)
+
+            # Check if module/subworkflow directories exist already
+            if os.path.exists(software_dir) and not self.force_overwrite:
+                raise UserWarning(
+                    f"{self.component_type[:-1]} directory exists: '{software_dir}'. Use '--force' to overwrite"
+                )
+            if os.path.exists(test_dir) and not self.force_overwrite:
+                raise UserWarning(
+                    f"{self.component_type[:-1]} test directory exists: '{test_dir}'. Use '--force' to overwrite"
+                )
+
+            if self.component_type == "modules":
+                # If a subtool, check if there is a module called the base tool name already
+                parent_tool_main_nf = os.path.join(
+                    self.directory, self.component_type, self.org, self.component, "main.nf"
+                )
+                parent_tool_test_nf = os.path.join(
+                    self.directory, "tests", self.component_type, self.org, self.component, "main.nf"
+                )
+                if self.subtool and os.path.exists(parent_tool_main_nf):
+                    raise UserWarning(
+                        f"Module '{parent_tool_main_nf}' exists already, cannot make subtool '{self.component_name}'"
+                    )
+                if self.subtool and os.path.exists(parent_tool_test_nf):
+                    raise UserWarning(
+                        f"Module '{parent_tool_test_nf}' exists already, cannot make subtool '{self.component_name}'"
+                    )
+
+                # If no subtool, check that there isn't already a tool/subtool
+                tool_glob = glob.glob(
+                    f"{os.path.join(self.directory, self.component_type, self.org, self.component)}/*/main.nf"
+                )
+                if not self.subtool and tool_glob:
+                    raise UserWarning(
+                        f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{self.component_name}'"
+                    )
+
+            # Set file paths
+            # For modules - can be tool/ or tool/subtool/ so can't do in template directory structure
+            file_paths[os.path.join(self.component_type, "main.nf")] = os.path.join(software_dir, "main.nf")
+            file_paths[os.path.join(self.component_type, "meta.yml")] = os.path.join(software_dir, "meta.yml")
+            file_paths[os.path.join("tests", "main.nf")] = os.path.join(test_dir, "main.nf")
+            file_paths[os.path.join("tests", "test.yml")] = os.path.join(test_dir, "test.yml")
+            file_paths[os.path.join("tests", "nextflow.config")] = os.path.join(test_dir, "nextflow.config")
+
+        return file_paths
+
+    def _get_username(self):
+        """
+        Prompt for GitHub username
+        """
+        # Try to guess the current user if `gh` is installed
+        author_default = None
+        try:
+            with open(os.devnull, "w") as devnull:
+                gh_auth_user = json.loads(subprocess.check_output(["gh", "api", "/user"], stderr=devnull))
+            author_default = f"@{gh_auth_user['login']}"
+        except Exception as e:
+            log.debug(f"Could not find GitHub username using 'gh' cli command: [red]{e}")
+
+        # Regex to validate GitHub 
username: https://github.com/shinnn/github-username-regex + github_username_regex = re.compile(r"^@[a-zA-Z\d](?:[a-zA-Z\d]|-(?=[a-zA-Z\d])){0,38}$") + while self.author is None or not github_username_regex.match(self.author): + if self.author is not None and not github_username_regex.match(self.author): + log.warning("Does not look like a valid GitHub username (must start with an '@')!") + self.author = rich.prompt.Prompt.ask( + f"[violet]GitHub Username:[/]{' (@author)' if author_default is None else ''}", + default=author_default, + ) diff --git a/nf_core/components/info.py b/nf_core/components/info.py new file mode 100644 index 0000000000..e4d8038b87 --- /dev/null +++ b/nf_core/components/info.py @@ -0,0 +1,333 @@ +import logging +import os +from pathlib import Path + +import questionary +import yaml +from rich import box +from rich.console import Group +from rich.markdown import Markdown +from rich.panel import Panel +from rich.syntax import Syntax +from rich.table import Table +from rich.text import Text + +import nf_core.utils +from nf_core.components.components_command import ComponentCommand +from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_repo import NF_CORE_MODULES_REMOTE + +log = logging.getLogger(__name__) + + +class ComponentInfo(ComponentCommand): + """ + Class to print information of a module/subworkflow. + + Attributes + ---------- + meta : YAML object + stores the information from meta.yml file + local_path : str + path of the local modules/subworkflows + remote_location : str + remote repository URL + local : bool + indicates if the module/subworkflow is locally installed or not + repo_type : str + repository type. Can be either 'pipeline' or 'modules' + modules_json : ModulesJson object + contains 'modules.json' file information from a pipeline + component_name : str + name of the module/subworkflow to get information from + + Methods + ------- + init_mod_name(component) + Makes sure that we have a modules/subworkflows name + get_component_info() + Given the name of a module/subworkflow, parse meta.yml and print usage help + get_local_yaml() + Attempt to get the meta.yml file from a locally installed module/subworkflow + get_remote_yaml() + Attempt to get the meta.yml file from a remote repo + generate_component_info_help() + Take the parsed meta.yml and generate rich help + """ + + def __init__( + self, + component_type, + pipeline_dir, + component_name, + remote_url=None, + branch=None, + no_pull=False, + ): + super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) + self.meta = None + self.local_path = None + self.remote_location = None + self.local = None + + if self.repo_type == "pipeline": + # Check modules directory structure + if self.component_type == "modules": + self.check_modules_structure() + # Check modules.json up to date + self.modules_json = ModulesJson(self.dir) + self.modules_json.check_up_to_date() + else: + self.modules_json = None + self.component = self.init_mod_name(component_name) + + def _configure_repo_and_paths(self, nf_dir_req=False): + """ + Override the default with nf_dir_req set to False to allow + info to be run from anywhere and still return remote info + """ + return super()._configure_repo_and_paths(nf_dir_req) + + def init_mod_name(self, component): + """ + Makes sure that we have a module/subworkflow name before proceeding. 
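+
+    If no name is supplied, the user is first asked whether the component is
+    installed locally, and is then offered an autocomplete list of the
+    available candidates.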
+ + Args: + module: str: Module name to check + """ + if component is None: + self.local = questionary.confirm( + f"Is the {self.component_type[:-1]} locally installed?", style=nf_core.utils.nfcore_question_style + ).unsafe_ask() + if self.local: + if self.repo_type == "modules": + components = self.get_components_clone_modules() + else: + components = self.modules_json.get_all_components(self.component_type).get( + self.modules_repo.remote_url, {} + ) + components = [ + component if directory == self.modules_repo.repo_path else f"{directory}/{component}" + for directory, component in components + ] + if not components: + raise UserWarning( + f"No {self.component_type[:-1]} installed from '{self.modules_repo.remote_url}'" + ) + else: + components = self.modules_repo.get_avail_components(self.component_type) + components.sort() + component = questionary.autocomplete( + f"Please select a {self.component_type[:-1]}", + choices=components, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + while component not in components: + log.info(f"'{component}' is not a valid {self.component_type[:-1]} name") + component = questionary.autocomplete( + f"Please select a new {self.component_type[:-1]}", + choices=components, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + else: + if self.repo_type == "pipeline": + # check if the module is locally installed + local_paths = self.modules_json.get_all_components(self.component_type).get( + self.modules_repo.remote_url, {} + ) + for directory, comp in local_paths: + if comp == component: + component_base_path = Path(self.dir, self.component_type) + self.local_path = Path(component_base_path, directory, component) + break + if self.local_path: + self.local = True + + return component + + def get_component_info(self): + """Given the name of a module/subworkflow, parse meta.yml and print usage help.""" + + # Running with a local install, try to find the local meta + if self.local: + self.meta = self.get_local_yaml() + + # Either failed locally or in remote mode + if not self.meta: + self.meta = self.get_remote_yaml() + + # Could not find the meta + if self.meta is False: + raise UserWarning(f"Could not find {self.component_type[:-1]} '{self.component}'") + + return self.generate_component_info_help() + + def get_local_yaml(self): + """Attempt to get the meta.yml file from a locally installed module/subworkflow. 
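+
+        In a pipeline, the install directory is resolved via 'modules.json';
+        in a clone of the modules repository, the '<component_type>/<org>/'
+        directory is searched directly.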
+
+        Returns:
+            dict or bool: Parsed meta.yml found, False otherwise
+        """
+
+        if self.repo_type == "pipeline":
+            # Try to find and load the meta.yml file
+            component_base_path = Path(self.dir, self.component_type)
+            # Check that we have any modules/subworkflows installed from this repo
+            components = self.modules_json.get_all_components(self.component_type).get(self.modules_repo.remote_url)
+            if components is None:
+                raise LookupError(f"No {self.component_type[:-1]} installed from {self.modules_repo.remote_url}")
+            component_names = [component for _, component in components]
+
+            if self.component in component_names:
+                install_dir = [dir for dir, module in components if module == self.component][0]
+                comp_dir = Path(component_base_path, install_dir, self.component)
+                meta_fn = Path(comp_dir, "meta.yml")
+                if meta_fn.exists():
+                    log.debug(f"Found local file: {meta_fn}")
+                    with open(meta_fn, "r") as fh:
+                        self.local_path = comp_dir
+                        return yaml.safe_load(fh)
+
+            log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally")
+        else:
+            component_base_path = Path(self.dir, self.component_type, self.org)
+            if self.component in os.listdir(component_base_path):
+                comp_dir = Path(component_base_path, self.component)
+                meta_fn = Path(comp_dir, "meta.yml")
+                if meta_fn.exists():
+                    log.debug(f"Found local file: {meta_fn}")
+                    with open(meta_fn, "r") as fh:
+                        self.local_path = comp_dir
+                        return yaml.safe_load(fh)
+            log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally")
+
+        return None
+
+    def get_remote_yaml(self):
+        """Attempt to get the meta.yml file from a remote repo.
+
+        Returns:
+            dict or bool: Parsed meta.yml found, False otherwise
+        """
+        # Check if our requested module/subworkflow is there
+        if self.component not in self.modules_repo.get_avail_components(self.component_type):
+            return False
+
+        file_contents = self.modules_repo.get_meta_yml(self.component_type, self.component)
+        if file_contents is None:
+            return False
+        self.remote_location = self.modules_repo.remote_url
+        return yaml.safe_load(file_contents)
+
+    def generate_component_info_help(self):
+        """Take the parsed meta.yml and generate rich help.
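+
+        A minimal 'meta' mapping (illustrative values) that exercises each
+        panel rendered below:
+
+            description: Sort SAM/BAM/CRAM files
+            tools:
+              - samtools:
+                  homepage: https://www.htslib.org
+            input:
+              - bam:
+                  type: file
+                  description: Input BAM file
+                  pattern: "*.bam"
+            output:
+              - sorted_bam:
+                  type: file
+                  description: Sorted BAM file
+                  pattern: "*.bam"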
+ + Returns: + rich renderable + """ + + renderables = [] + + # Intro panel + intro_text = Text() + if self.local_path: + intro_text.append(Text.from_markup(f"Location: [blue]{self.local_path}\n")) + elif self.remote_location: + intro_text.append( + Text.from_markup( + ":globe_with_meridians: Repository: " + f"{ '[link={self.remote_location}]' if self.remote_location.startswith('http') else ''}" + f"{self.remote_location}" + f"{'[/link]' if self.remote_location.startswith('http') else '' }" + "\n" + ) + ) + + if self.meta.get("tools"): + tools_strings = [] + for tool in self.meta["tools"]: + for tool_name, tool_meta in tool.items(): + if "homepage" in tool_meta: + tools_strings.append(f"[link={tool_meta['homepage']}]{tool_name}[/link]") + else: + tools_strings.append(f"{tool_name}") + intro_text.append(Text.from_markup(f":wrench: Tools: {', '.join(tools_strings)}\n", style="dim")) + + if self.meta.get("description"): + intro_text.append(Text.from_markup(f":book: Description: {self.meta['description']}", style="dim")) + + renderables.append( + Panel( + intro_text, + title=f"[bold]{self.component_type[:-1].title()}: [green]{self.component}\n", + title_align="left", + ) + ) + + # Inputs + if self.meta.get("input"): + inputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) + inputs_table.add_column(":inbox_tray: Inputs") + inputs_table.add_column("Description") + inputs_table.add_column("Pattern", justify="right", style="green") + for input in self.meta["input"]: + for key, info in input.items(): + inputs_table.add_row( + f"[orange1 on black] {key} [/][dim i] ({info['type']})", + Markdown(info["description"] if info["description"] else ""), + info.get("pattern", ""), + ) + + renderables.append(inputs_table) + + # Outputs + if self.meta.get("output"): + outputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) + outputs_table.add_column(":outbox_tray: Outputs") + outputs_table.add_column("Description") + outputs_table.add_column("Pattern", justify="right", style="green") + for output in self.meta["output"]: + for key, info in output.items(): + outputs_table.add_row( + f"[orange1 on black] {key} [/][dim i] ({info['type']})", + Markdown(info["description"] if info["description"] else ""), + info.get("pattern", ""), + ) + + renderables.append(outputs_table) + + # Installation command + if self.remote_location and not self.local: + cmd_base = f"nf-core {self.component_type}" + if self.remote_location != NF_CORE_MODULES_REMOTE: + cmd_base = f"nf-core {self.component_type} --git-remote {self.remote_location}" + renderables.append( + Text.from_markup(f"\n :computer: Installation command: [magenta]{cmd_base} install {self.component}\n") + ) + + # Print include statement + if self.local_path: + install_folder = Path(self.dir, self.component_type, self.modules_repo.repo_path) + component_name = "_".join(self.component.upper().split("/")) + renderables.append( + Text.from_markup(f"\n [blue]Use the following statement to include this {self.component_type[:-1]}:") + ) + renderables.append( + Syntax( + f"include {{ {component_name} }} from '../{Path(install_folder, self.component).relative_to(self.dir)}/main'", + "groovy", + theme="ansi_dark", + padding=1, + ) + ) + if self.component_type == "subworkflows": + subworkflow_config = Path(install_folder, self.component, "nextflow.config").relative_to(self.dir) + if os.path.isfile(subworkflow_config): + renderables.append( + Text.from_markup("\n [blue]Add the following config statement to use this 
subworkflow:") + ) + renderables.append( + Syntax(f"includeConfig '{subworkflow_config}'", "groovy", theme="ansi_dark", padding=1) + ) + + return Group(*renderables) diff --git a/nf_core/components/install.py b/nf_core/components/install.py new file mode 100644 index 0000000000..850cc9d60f --- /dev/null +++ b/nf_core/components/install.py @@ -0,0 +1,266 @@ +import logging +import os +from pathlib import Path + +import questionary +from rich.console import Console +from rich.syntax import Syntax + +import nf_core.modules.modules_utils +import nf_core.utils +from nf_core.components.components_command import ComponentCommand +from nf_core.components.components_utils import ( + get_components_to_install, + prompt_component_version_sha, +) +from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME + +log = logging.getLogger(__name__) + + +class ComponentInstall(ComponentCommand): + def __init__( + self, + pipeline_dir, + component_type, + force=False, + prompt=False, + sha=None, + remote_url=None, + branch=None, + no_pull=False, + installed_by=False, + ): + super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) + self.force = force + self.prompt = prompt + self.sha = sha + if installed_by: + self.installed_by = installed_by + else: + self.installed_by = self.component_type + + def install(self, component, silent=False): + if self.repo_type == "modules": + log.error(f"You cannot install a {component} in a clone of nf-core/modules") + return False + # Check whether pipelines is valid + if not self.has_valid_directory(): + return False + + if self.component_type == "modules": + # Check modules directory structure + self.check_modules_structure() + + # Verify that 'modules.json' is consistent with the installed modules and subworkflows + modules_json = ModulesJson(self.dir) + if not silent: + modules_json.check_up_to_date() + + # Verify SHA + if not self.modules_repo.verify_sha(self.prompt, self.sha): + return False + + # Check and verify component name + component = self.collect_and_verify_name(component, self.modules_repo) + if not component: + return False + + # Get current version + current_version = modules_json.get_component_version( + self.component_type, component, self.modules_repo.remote_url, self.modules_repo.repo_path + ) + + # Set the install folder based on the repository name + install_folder = Path(self.dir, self.component_type, self.modules_repo.repo_path) + + # Compute the component directory + component_dir = Path(install_folder, component) + + # Check that the component is not already installed + component_not_installed = self.check_component_installed( + component, current_version, component_dir, self.modules_repo, self.force, self.prompt, silent + ) + if not component_not_installed: + log.debug( + f"{self.component_type[:-1].title()} is already installed and force is not set.\nAdding the new installation source {self.installed_by} for {self.component_type[:-1]} {component} to 'modules.json' without installing the {self.component_type}." 
+ ) + modules_json.load() + modules_json.update(self.component_type, self.modules_repo, component, current_version, self.installed_by) + return False + + version = self.get_version(component, self.sha, self.prompt, current_version, self.modules_repo) + if not version: + return False + + # Remove component if force is set and component is installed + install_track = None + if self.force: + log.debug(f"Removing installed version of '{self.modules_repo.repo_path}/{component}'") + self.clear_component_dir(component, component_dir) + install_track = self.clean_modules_json(component, self.modules_repo, modules_json) + if not silent: + log.info(f"{'Rei' if self.force else 'I'}nstalling '{component}'") + log.debug( + f"Installing {self.component_type} '{component}' at modules hash {version} from {self.modules_repo.remote_url}" + ) + + # Download component files + if not self.install_component_files(component, version, self.modules_repo, install_folder): + return False + + # Update module.json with newly installed subworkflow + modules_json.load() + modules_json.update( + self.component_type, self.modules_repo, component, version, self.installed_by, install_track + ) + + if self.component_type == "subworkflows": + # Install included modules and subworkflows + self.install_included_components(component_dir) + + if not silent: + # Print include statement + component_name = "_".join(component.upper().split("/")) + log.info(f"Use the following statement to include this {self.component_type[:-1]}:") + Console().print( + Syntax( + f"include {{ {component_name} }} from '../{Path(install_folder, component).relative_to(self.dir)}/main'", + "groovy", + theme="ansi_dark", + padding=1, + ) + ) + if self.component_type == "subworkflows": + subworkflow_config = Path(install_folder, component, "nextflow.config").relative_to(self.dir) + if os.path.isfile(subworkflow_config): + log.info("Add the following config statement to use this subworkflow:") + Console().print( + Syntax(f"includeConfig '{subworkflow_config}'", "groovy", theme="ansi_dark", padding=1) + ) + return True + + def install_included_components(self, subworkflow_dir): + """ + Install included modules and subworkflows + """ + modules_to_install, subworkflows_to_install = get_components_to_install(subworkflow_dir) + for s_install in subworkflows_to_install: + original_installed = self.installed_by + self.installed_by = Path(subworkflow_dir).parts[-1] + self.install(s_install, silent=True) + self.installed_by = original_installed + for m_install in modules_to_install: + original_component_type = self.component_type + self.component_type = "modules" + original_installed = self.installed_by + self.installed_by = Path(subworkflow_dir).parts[-1] + self.install(m_install, silent=True) + self.component_type = original_component_type + self.installed_by = original_installed + + def collect_and_verify_name(self, component, modules_repo): + """ + Collect component name. + Check that the supplied name is an available module/subworkflow. 
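+
+        Illustrative usage (a sketch; assumes an interactive session and that
+        'fastqc' exists in the configured remote):
+
+            installer = ComponentInstall(".", "modules")
+            name = installer.collect_and_verify_name("fastqc", installer.modules_repo)
+            # returns "fastqc" if available, or False if the name is unknown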
+ """ + if component is None: + component = questionary.autocomplete( + f"{'Tool' if self.component_type == 'modules' else 'Subworkflow'} name:", + choices=sorted(modules_repo.get_avail_components(self.component_type, commit=self.sha)), + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + + # Check that the supplied name is an available module/subworkflow + if component and component not in modules_repo.get_avail_components(self.component_type, commit=self.sha): + log.error( + f"{self.component_type[:-1].title()} '{component}' not found in list of available {self.component_type}." + ) + log.info(f"Use the command 'nf-core {self.component_type} list' to view available software") + return False + + if not modules_repo.component_exists(component, self.component_type, commit=self.sha): + warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote '{modules_repo.remote_url}' ({modules_repo.branch})" + log.warning(warn_msg) + return False + + return component + + def check_component_installed(self, component, current_version, component_dir, modules_repo, force, prompt, silent): + """ + Check that the module/subworkflow is not already installed. + + Return: + True: if the component is not installed + False: if the component is installed + """ + if (current_version is not None and os.path.exists(component_dir)) and not force: + # make sure included components are also installed + if self.component_type == "subworkflows": + self.install_included_components(component_dir) + if not silent: + log.info(f"{self.component_type[:-1].title()} '{component}' is already installed.") + + if prompt: + message = ( + "?" if self.component_type == "modules" else " of this subworkflow and all it's imported modules?" + ) + force = questionary.confirm( + f"{self.component_type[:-1].title()} {component} is already installed. \nDo you want to force the reinstallation{message}", + style=nf_core.utils.nfcore_question_style, + default=False, + ).unsafe_ask() + + if not force: + if not silent: + repo_flag = ( + "" if modules_repo.repo_path == NF_CORE_MODULES_NAME else f"-g {modules_repo.remote_url} " + ) + branch_flag = "" if modules_repo.branch == "master" else f"-b {modules_repo.branch} " + + log.info( + f"To update '{component}' run 'nf-core {self.component_type} {repo_flag}{branch_flag}update {component}'. To force reinstallation use '--force'." + ) + return False + + return True + + def get_version(self, component, sha, prompt, current_version, modules_repo): + """ + Get the version to install + """ + if sha: + version = sha + elif prompt: + try: + version = prompt_component_version_sha( + component, + self.component_type, + installed_sha=current_version, + modules_repo=modules_repo, + ) + except SystemError as e: + log.error(e) + return False + else: + # Fetch the latest commit for the module + version = modules_repo.get_latest_component_version(component, self.component_type) + return version + + def clean_modules_json(self, component, modules_repo, modules_json): + """ + Remove installed version of module/subworkflow from modules.json + """ + for repo_url, repo_content in modules_json.modules_json["repos"].items(): + for dir, dir_components in repo_content[self.component_type].items(): + for name, component_values in dir_components.items(): + if name == component and dir == modules_repo.repo_path: + repo_to_remove = repo_url + log.debug( + f"Removing {self.component_type[:-1]} '{modules_repo.repo_path}/{component}' from repo '{repo_to_remove}' from modules.json." 
+                        )
+                        modules_json.remove_entry(
+                            self.component_type, component, repo_to_remove, modules_repo.repo_path
+                        )
+                        return component_values["installed_by"]
diff --git a/nf_core/components/list.py b/nf_core/components/list.py
new file mode 100644
index 0000000000..1b12615dec
--- /dev/null
+++ b/nf_core/components/list.py
@@ -0,0 +1,140 @@
+import json
+import logging
+
+import rich
+
+from nf_core.components.components_command import ComponentCommand
+from nf_core.modules.modules_json import ModulesJson
+from nf_core.modules.modules_repo import ModulesRepo
+
+log = logging.getLogger(__name__)
+
+
+class ComponentList(ComponentCommand):
+    def __init__(self, component_type, pipeline_dir, remote=True, remote_url=None, branch=None, no_pull=False):
+        super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull)
+        self.remote = remote
+
+    def list_components(self, keywords=None, print_json=False):
+        """
+        Get available module/subworkflow names from the GitHub tree for the repo
+        and print as a list to stdout.
+        """
+        keywords = keywords or []
+
+        # Check modules directory structure
+        # self.check_component_structure(self.component_type)
+
+        # Initialise rich table
+        table = rich.table.Table()
+        table.add_column(f"{self.component_type[:-1].capitalize()} Name")
+        components = []
+
+        def pattern_msg(keywords):
+            if len(keywords) == 0:
+                return ""
+            if len(keywords) == 1:
+                return f" matching pattern '{keywords[0]}'"
+            else:
+                quoted_keywords = (f"'{key}'" for key in keywords)
+                return f" matching patterns {', '.join(quoted_keywords)}"
+
+        # No pipeline given - show all remote
+        if self.remote:
+            # Filter the modules/subworkflows by keywords
+            components = [
+                comp
+                for comp in self.modules_repo.get_avail_components(self.component_type)
+                if all(k in comp for k in keywords)
+            ]
+
+            # Nothing found
+            if len(components) == 0:
+                log.info(
+                    f"No available {self.component_type} found in {self.modules_repo.remote_url} ({self.modules_repo.branch})"
+                    f"{pattern_msg(keywords)}"
+                )
+                return ""
+
+            for comp in sorted(components):
+                table.add_row(comp)
+
+        # We have a pipeline - list what's installed
+        else:
+            # Check that we are in a pipeline directory
+
+            try:
+                if self.repo_type != "pipeline":
+                    raise UserWarning(
+                        f"The command 'nf-core {self.component_type} list local' must be run from a pipeline directory.",
+                    )
+            except UserWarning as e:
+                log.error(e)
+                return ""
+            # Check whether pipeline is valid
+            try:
+                self.has_valid_directory()
+            except UserWarning as e:
+                log.error(e)
+                return ""
+
+            # Verify that 'modules.json' is consistent with the installed modules
+            modules_json = ModulesJson(self.dir)
+            modules_json.check_up_to_date()
+
+            # Filter by keywords
+            repos_with_comps = {
+                repo_url: [comp for comp in components if all(k in comp[1] for k in keywords)]
+                for repo_url, components in modules_json.get_all_components(self.component_type).items()
+            }
+
+            # Nothing found
+            if sum(map(len, repos_with_comps.values())) == 0:
+                log.info(f"No nf-core {self.component_type} found in '{self.dir}'{pattern_msg(keywords)}")
+                return ""
+
+            table.add_column("Repository")
+            table.add_column("Version SHA")
+            table.add_column("Message")
+            table.add_column("Date")
+
+            # Load 'modules.json'
+            modules_json = modules_json.modules_json
+
+            for repo_url, component_with_dir in sorted(repos_with_comps.items()):
+                repo_entry = modules_json["repos"].get(repo_url, {})
+                for install_dir, component in sorted(component_with_dir):
+                    repo_modules = repo_entry.get(self.component_type)
+                    component_entry =
repo_modules.get(install_dir).get(component) + + if component_entry: + version_sha = component_entry["git_sha"] + try: + # pass repo_name to get info on modules even outside nf-core/modules + message, date = ModulesRepo( + remote_url=repo_url, + branch=component_entry["branch"], + ).get_commit_info(version_sha) + except LookupError as e: + log.warning(e) + date = "[red]Not Available" + message = "[red]Not Available" + else: + log.warning( + f"Commit SHA for {self.component_type[:-1]} '{install_dir}/{self.component_type}' is missing from 'modules.json'" + ) + version_sha = "[red]Not Available" + date = "[red]Not Available" + message = "[red]Not Available" + table.add_row(component, repo_url, version_sha, message, date) + + if print_json: + return json.dumps(components, sort_keys=True, indent=4) + + if self.remote: + log.info( + f"{self.component_type.capitalize()} available from {self.modules_repo.remote_url} ({self.modules_repo.branch})" + f"{pattern_msg(keywords)}:\n" + ) + else: + log.info(f"{self.component_type.capitalize()} installed in '{self.dir}'{pattern_msg(keywords)}:\n") + return table diff --git a/nf_core/components/remove.py b/nf_core/components/remove.py new file mode 100644 index 0000000000..99df757992 --- /dev/null +++ b/nf_core/components/remove.py @@ -0,0 +1,172 @@ +import logging +from pathlib import Path + +import questionary +from rich.console import Console, Group +from rich.panel import Panel +from rich.syntax import Syntax + +import nf_core.utils +from nf_core.components.components_command import ComponentCommand +from nf_core.modules.modules_json import ModulesJson + +from .install import ComponentInstall + +log = logging.getLogger(__name__) + + +class ComponentRemove(ComponentCommand): + def __init__(self, component_type, pipeline_dir, remote_url=None, branch=None, no_pull=False): + super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) + + def remove(self, component, removed_by=None, removed_components=None, force=False): + """ + Remove an already installed module/subworkflow + This command only works for modules/subworkflows that are installed from 'nf-core/modules' + + Args: + component (str): Name of the component to remove + removed_by (str): Name of the component that is removing the current component + (a subworkflow name if the component is a dependency or "modules" or "subworkflows" if it is not a dependency) + removed_components (list[str]): list of components that have been removed during a recursive remove of subworkflows + force (bool): Force removal of component, even if there is still an include statement in a workflow file + + Returns: + bool: True if any item has been removed, False if not + """ + if self.repo_type == "modules": + log.error(f"You cannot remove a {self.component_type[:-1]} in a clone of nf-core/modules") + return False + + # Check modules directory structure + if self.component_type == "modules": + self.check_modules_structure() + + # Check whether pipeline is valid and with a modules.json file + self.has_valid_directory() + self.has_modules_file() + + repo_path = self.modules_repo.repo_path + if component is None: + component = questionary.autocomplete( + f"{self.component_type[:-1]} name:", + choices=self.components_from_repo(repo_path), + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + + if removed_components is None: + removed_components = [] + + # Get the module/subworkflow directory + component_dir = Path(self.dir, self.component_type, repo_path, component) + + # Load the modules.json file + 
modules_json = ModulesJson(self.dir) + modules_json.load() + + # Verify that the module/subworkflow is actually installed + if not component_dir.exists(): + log.error(f"Installation directory '{component_dir}' does not exist.") + + if modules_json.module_present(component, self.modules_repo.remote_url, repo_path): + log.error(f"Found entry for '{component}' in 'modules.json'. Removing...") + modules_json.remove_entry(self.component_type, component, self.modules_repo.remote_url, repo_path) + return False + + # remove all dependent components based on installed_by entry + # Remove entry from modules.json + removed = False + removed_components = [] + # Remove component from modules.json + removed_component = modules_json.remove_entry( + self.component_type, + component, + self.modules_repo.remote_url, + repo_path, + removed_by=removed_by, + ) + removed_component_dir = Path(self.component_type, repo_path, component) + if removed_component: + # check if the module/subworkflow has been manually included in the pipeline + include_stmts = self.check_if_in_include_stmts(str(removed_component_dir)) + if include_stmts: + # print the include statements + log.warning( + f"The {self.component_type[:-1]} '{component}' is still included in the following workflow file{nf_core.utils.plural_s(include_stmts)}:" + ) + console = Console() + for file, stmts in include_stmts.items(): + renderables = [] + for stmt in stmts: + renderables.append( + Syntax( + stmt["line"], + "groovy", + theme="ansi_dark", + line_numbers=True, + start_line=stmt["line_number"], + ) + ) + console.print( + Panel( + Group(*renderables), + title=f"{file}", + style="white", + title_align="center", + padding=1, + ) + ) + # ask the user if they still want to remove the component, add it back otherwise + if not force: + if not questionary.confirm( + f"Do you still want to remove the {self.component_type[:-1]} '{component}'?", + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask(): + # add the component back to modules.json + if not ComponentInstall(self.dir, self.component_type, force=True).install( + component, silent=True + ): + log.warning( + f"Could not install the {self.component_type[:-1]} '{component}', please install it manually with 'nf-core {self.component_type} install {component}'." 
+                            )
+                        removed_components.append(component)
+                        return removed
+        # Remove the component files of all entries removed from modules.json
+        removed = (
+            True if self.clear_component_dir(component, Path(self.dir, removed_component_dir)) or removed else False
+        )
+        removed_components.append(component)
+
+        if removed:
+            if self.component_type == "subworkflows":
+                removed_by = component
+            dependent_components = modules_json.get_dependent_components(
+                self.component_type, component, self.modules_repo.remote_url, repo_path, {}
+            )
+            for component_name, component_type in dependent_components.items():
+                if component_name in removed_components:
+                    continue
+                original_component_type = self.component_type
+                self.component_type = component_type
+                dependency_removed = self.remove(
+                    component_name, removed_by=removed_by, removed_components=removed_components
+                )
+                self.component_type = original_component_type
+                # remember removed dependencies
+                if dependency_removed:
+                    removed_components.append(component_name.replace("/", "_"))
+            # print removed dependencies
+            if removed_components:
+                log.info(f"Removed files for '{component}' and its dependencies '{', '.join(removed_components)}'.")
+            else:
+                log.info(f"Removed files for '{component}'.")
+        else:
+            installed_by = modules_json.get_installed_by_entries(self.component_type, component)
+            if installed_by == [self.component_type]:
+                log.error(
+                    f"Did not remove '{component}', because it was also manually installed. Only updated 'installed_by' entry in modules.json."
+                )
+            log.info(
+                f"""Did not remove {self.component_type[:-1]} '{component}', because it was also installed by {', '.join(f"'{d}'" for d in installed_by)}. Only updated the 'installed_by' entry in modules.json."""
+            )
+        return removed
diff --git a/nf_core/components/update.py b/nf_core/components/update.py
new file mode 100644
index 0000000000..5f8a2129c5
--- /dev/null
+++ b/nf_core/components/update.py
@@ -0,0 +1,934 @@
+import logging
+import os
+import shutil
+import tempfile
+from pathlib import Path
+
+import questionary
+
+import nf_core.modules.modules_utils
+import nf_core.utils
+from nf_core.components.components_command import ComponentCommand
+from nf_core.components.components_utils import (
+    get_components_to_install,
+    prompt_component_version_sha,
+)
+from nf_core.components.install import ComponentInstall
+from nf_core.components.remove import ComponentRemove
+from nf_core.modules.modules_differ import ModulesDiffer
+from nf_core.modules.modules_json import ModulesJson
+from nf_core.modules.modules_repo import ModulesRepo
+from nf_core.utils import plural_es, plural_s, plural_y
+
+log = logging.getLogger(__name__)
+
+
+class ComponentUpdate(ComponentCommand):
+    def __init__(
+        self,
+        pipeline_dir,
+        component_type,
+        force=False,
+        prompt=False,
+        sha=None,
+        update_all=False,
+        show_diff=None,
+        save_diff_fn=None,
+        update_deps=False,
+        remote_url=None,
+        branch=None,
+        no_pull=False,
+    ):
+        super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull)
+        self.force = force
+        self.prompt = prompt
+        self.sha = sha
+        self.update_all = update_all
+        self.show_diff = show_diff
+        self.save_diff_fn = save_diff_fn
+        self.update_deps = update_deps
+        self.component = None
+        self.update_config = None
+        self.modules_json = ModulesJson(self.dir)
+        self.branch = branch
+
+    def _parameter_checks(self):
+        """Checks the compatibility of the supplied parameters.
+
+        Raises:
+            UserWarning: if any checks fail.
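+
+        A sketch of one failure mode (values are hypothetical):
+
+            update = ComponentUpdate(".", "modules", show_diff=True, save_diff_fn="changes.diff")
+            update._parameter_checks()  # raises UserWarning: preview and diff file are mutually exclusive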
+ """ + + if self.save_diff_fn and self.show_diff: + raise UserWarning("Either `--preview` or `--save_diff` can be specified, not both.") + + if self.update_all and self.component: + raise UserWarning(f"Either a {self.component_type[:-1]} or the '--all' flag can be specified, not both.") + + if self.repo_type == "modules": + raise UserWarning( + f"{self.component_type.title()} can not be updated in clones of the nf-core/modules repository." + ) + + if self.prompt and self.sha is not None: + raise UserWarning("Cannot use '--sha' and '--prompt' at the same time.") + + if not self.has_valid_directory(): + raise UserWarning("The command was not run in a valid pipeline directory.") + + def update(self, component=None, silent=False, updated=None, check_diff_exist=True): + """Updates a specified module/subworkflow or all modules/subworkflows in a pipeline. + + If updating a subworkflow: updates all modules used in that subworkflow. + If updating a module: updates all subworkflows that use the module. + + Args: + component (str): The name of the module/subworkflow to update. + + Returns: + (bool): True if the update was successful, False otherwise. + """ + self.component = component + if updated is None: + updated = [] + + _, tool_config = nf_core.utils.load_tools_config(self.dir) + self.update_config = tool_config.get("update", {}) + + self._parameter_checks() + + # Check modules directory structure + self.check_modules_structure() + + # Verify that 'modules.json' is consistent with the installed modules + if not silent: + self.modules_json.check_up_to_date() + + if not self.update_all and component is None: + choices = [f"All {self.component_type}", f"Named {self.component_type[:-1]}"] + self.update_all = ( + questionary.select( + f"Update all {self.component_type} or a single named {self.component_type[:-1]}?", + choices=choices, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + == f"All {self.component_type}" + ) + + # Verify that the provided SHA exists in the repo + if self.sha is not None and not self.modules_repo.sha_exists_on_branch(self.sha): + log.error(f"Commit SHA '{self.sha}' doesn't exist in '{self.modules_repo.remote_url}'") + return False + + # Get the list of modules/subworkflows to update, and their version information + components_info = ( + self.get_all_components_info() if self.update_all else [self.get_single_component_info(component)] + ) + + # Save the current state of the modules.json + old_modules_json = self.modules_json.get_modules_json() + + # Ask if we should show the diffs (unless a filename was already given on the command line) + if not self.save_diff_fn and self.show_diff is None: + diff_type = questionary.select( + "Do you want to view diffs of the proposed changes?", + choices=[ + {"name": "No previews, just update everything", "value": 0}, + {"name": "Preview diff in terminal, choose whether to update files", "value": 1}, + {"name": "Just write diffs to a patch file", "value": 2}, + ], + default={"name": "No previews, just update everything", "value": 0}, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + + self.show_diff = diff_type == 1 + self.save_diff_fn = diff_type == 2 + + if self.save_diff_fn: # True or a string + self.setup_diff_file(check_diff_exist) + + # Loop through all components to be updated + # and do the requested action on them + exit_value = True + all_patches_successful = True + for modules_repo, component, sha, patch_relpath in components_info: + if component is None: + # The entry from .nf-core.yml is set to 
false, skip update of this component + continue + component_fullname = str(Path(self.component_type, modules_repo.repo_path, component)) + # Are we updating the files in place or not? + dry_run = self.show_diff or self.save_diff_fn + + current_version = self.modules_json.get_component_version( + self.component_type, component, modules_repo.remote_url, modules_repo.repo_path + ) + + # Set the temporary installation folder + install_tmp_dir = Path(tempfile.mkdtemp()) + component_install_dir = install_tmp_dir / component + + # Compute the component directory + component_dir = os.path.join(self.dir, self.component_type, modules_repo.repo_path, component) + + if sha is not None: + version = sha + elif self.prompt: + version = prompt_component_version_sha( + component, self.component_type, modules_repo=modules_repo, installed_sha=current_version + ) + else: + version = modules_repo.get_latest_component_version(component, self.component_type) + + if current_version is not None and not self.force: + if current_version == version: + if self.sha or self.prompt: + log.info(f"'{component_fullname}' is already installed at {version}") + else: + log.info(f"'{component_fullname}' is already up to date") + continue + + # Download component files + if not self.install_component_files(component, version, modules_repo, install_tmp_dir): + exit_value = False + continue + + if patch_relpath is not None: + patch_successful = self.try_apply_patch( + component, + modules_repo.repo_path, + patch_relpath, + component_dir, + component_install_dir, + write_file=False, + ) + if patch_successful: + log.info(f"{self.component_type[:-1].title()} '{component_fullname}' patched successfully") + else: + log.warning( + f"Failed to patch {self.component_type[:-1]} '{component_fullname}'. Will proceed with unpatched files." 
+ ) + all_patches_successful &= patch_successful + + if dry_run: + if patch_relpath is not None: + if patch_successful: + log.info("Current installation is compared against patched version in remote.") + else: + log.warning("Current installation is compared against unpatched version in remote.") + # Compute the diffs for the component + if self.save_diff_fn: + log.info( + f"Writing diff file for {self.component_type[:-1]} '{component_fullname}' to '{self.save_diff_fn}'" + ) + try: + ModulesDiffer.write_diff_file( + self.save_diff_fn, + component, + modules_repo.repo_path, + component_dir, + component_install_dir, + current_version, + version, + dsp_from_dir=component_dir, + dsp_to_dir=component_dir, + ) + updated.append(component) + except UserWarning as e: + if str(e) != "Module is unchanged": + raise + else: + updated.append(component) + recursive_update = True + modules_to_update, subworkflows_to_update = self.get_components_to_update(component) + if not silent and len(modules_to_update + subworkflows_to_update) > 0: + log.warning( + f"All modules and subworkflows linked to the updated {self.component_type[:-1]} will be added to the same diff file.\n" + "It is advised to keep all your modules and subworkflows up to date.\n" + "It is not guaranteed that a subworkflow will continue working as expected if all modules/subworkflows used in it are not up to date.\n" + ) + if self.update_deps: + recursive_update = True + else: + recursive_update = questionary.confirm( + "Would you like to continue adding all modules and subworkflows differences?", + default=True, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + if recursive_update and len(modules_to_update + subworkflows_to_update) > 0: + # Write all the differences of linked components to a diff file + self.update_linked_components( + modules_to_update, subworkflows_to_update, updated, check_diff_exist=False + ) + self.manage_changes_in_linked_components(component, modules_to_update, subworkflows_to_update) + + elif self.show_diff: + ModulesDiffer.print_diff( + component, + modules_repo.repo_path, + component_dir, + component_install_dir, + current_version, + version, + dsp_from_dir=component_dir, + dsp_to_dir=component_dir, + ) + + # Ask the user if they want to install the component + dry_run = not questionary.confirm( + f"Update {self.component_type[:-1]} '{component}'?", + default=False, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + + if not dry_run: + # Clear the component directory and move the installed files there + self.move_files_from_tmp_dir(component, install_tmp_dir, modules_repo.repo_path, version) + # Update modules.json with newly installed component + self.modules_json.update(self.component_type, modules_repo, component, version, installed_by=None) + updated.append(component) + recursive_update = True + modules_to_update, subworkflows_to_update = self.get_components_to_update(component) + if not silent and not self.update_all and len(modules_to_update + subworkflows_to_update) > 0: + log.warning( + f"All modules and subworkflows linked to the updated {self.component_type[:-1]} will be {'asked for update' if self.show_diff else 'automatically updated'}.\n" + "It is advised to keep all your modules and subworkflows up to date.\n" + "It is not guaranteed that a subworkflow will continue working as expected if all modules/subworkflows used in it are not up to date.\n" + ) + if self.update_deps: + recursive_update = True + else: + recursive_update = questionary.confirm( + "Would you like to 
continue updating all modules and subworkflows?", + default=True, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + if recursive_update and len(modules_to_update + subworkflows_to_update) > 0: + # Update linked components + self.update_linked_components(modules_to_update, subworkflows_to_update, updated) + self.manage_changes_in_linked_components(component, modules_to_update, subworkflows_to_update) + else: + # Don't save to a file, just iteratively update the variable + self.modules_json.update( + self.component_type, + modules_repo, + component, + version, + installed_by=None, + write_file=False, + ) + + if self.save_diff_fn: + # Write the modules.json diff to the file + ModulesDiffer.append_modules_json_diff( + self.save_diff_fn, + old_modules_json, + self.modules_json.get_modules_json(), + Path(self.dir, "modules.json"), + ) + if exit_value and not silent: + log.info( + f"[bold magenta italic] TIP! [/] If you are happy with the changes in '{self.save_diff_fn}', you " + "can apply them by running the command :point_right:" + f" [bold magenta italic]git apply {self.save_diff_fn} [/]" + ) + elif not all_patches_successful and not silent: + log.info(f"Updates complete. Please apply failed patch{plural_es(components_info)} manually.") + elif not silent: + log.info("Updates complete :sparkles:") + + return exit_value + + def get_single_component_info(self, component): + """Collects the modules repository, version and sha for a component. + + Information about the component version in the '.nf-core.yml' overrides + the '--sha' option + + Args: + component (str): The name of the module/subworkflow to get info for. + + Returns: + (ModulesRepo, str, str): The modules repo containing the component, + the component name, and the component version. + + Raises: + LookupError: If the component is not found either in the pipeline or the modules repo. + UserWarning: If the '.nf-core.yml' entry is not valid. + """ + # Check if there are any modules/subworkflows installed from the repo + repo_url = self.modules_repo.remote_url + components = self.modules_json.get_all_components(self.component_type).get(repo_url) + if components is None: + raise LookupError(f"No {self.component_type} installed from '{repo_url}'") + + choices = [ + component if directory == self.modules_repo.repo_path else f"{directory}/{component}" + for directory, component in components + ] + + if component is None: + component = questionary.autocomplete( + f"{self.component_type[:-1].title()} name:", + choices=sorted(choices), + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + + # Get component installation directory + try: + install_dir = [dir for dir, m in components if component == m][0] + except IndexError: + raise UserWarning(f"{self.component_type[:-1].title()} '{component}' not found in 'modules.json'.") + + # Check if component is installed before trying to update + if component not in choices: + raise LookupError( + f"{self.component_type[:-1].title()} '{component}' is not installed in pipeline and could therefore not be updated" + ) + + # Check that the supplied name is an available module/subworkflow + if component and component not in self.modules_repo.get_avail_components(self.component_type, commit=self.sha): + raise LookupError( + f"{self.component_type[:-1].title()} '{component}' not found in list of available {self.component_type}." 
+ f"Use the command 'nf-core {self.component_type} list remote' to view available software" + ) + + sha = self.sha + config_entry = None + if any( + [ + entry.count("/") == 1 + and (entry.endswith("modules") or entry.endswith("subworkflows")) + and not (entry.endswith(".git") or entry.endswith(".git/")) + for entry in self.update_config.keys() + ] + ): + raise UserWarning( + "Your '.nf-core.yml' file format is outdated. " + "The format should be of the form:\n" + "update:\n :\n :\n :" + ) + if isinstance(self.update_config.get(self.modules_repo.remote_url, {}), str): + # If the repo entry is a string, it's the sha to update to + config_entry = self.update_config.get(self.modules_repo.remote_url, {}) + elif component in self.update_config.get(self.modules_repo.remote_url, {}).get(install_dir, {}): + # If the component to update is in .nf-core.yml config file + config_entry = self.update_config[self.modules_repo.remote_url][install_dir].get(component) + if config_entry is not None and config_entry is not True: + if config_entry is False: + log.warn( + f"{self.component_type[:-1].title()}'s update entry in '.nf-core.yml' for '{component}' is set to False" + ) + return (self.modules_repo, None, None, None) + if not isinstance(config_entry, str): + raise UserWarning( + f"{self.component_type[:-1].title()}'s update entry in '.nf-core.yml' for '{component}' is of wrong type" + ) + + sha = config_entry + if self.sha is not None: + log.warning( + f"Found entry in '.nf-core.yml' for {self.component_type[:-1]} '{component}' " + "which will override version specified with '--sha'" + ) + else: + log.info(f"Found entry in '.nf-core.yml' for {self.component_type[:-1]} '{component}'") + log.info(f"Updating component to ({sha})") + + # Check if the update branch is the same as the installation branch + current_branch = self.modules_json.get_component_branch( + self.component_type, component, self.modules_repo.remote_url, install_dir + ) + new_branch = self.modules_repo.branch + if current_branch != new_branch: + log.warning( + f"You are trying to update the '{Path(install_dir, component)}' {self.component_type[:-1]} from " + f"the '{new_branch}' branch. This {self.component_type[:-1]} was installed from the '{current_branch}'" + ) + switch = questionary.confirm(f"Do you want to update using the '{current_branch}' instead?").unsafe_ask() + if switch: + # Change the branch + self.modules_repo.setup_branch(current_branch) + + # If there is a patch file, get its filename + patch_fn = self.modules_json.get_patch_fn(component, self.modules_repo.remote_url, install_dir) + + return (self.modules_repo, component, sha, patch_fn) + + def get_all_components_info(self, branch=None): + """Collects the modules repository, version and sha for all modules/subworkflows. + + Information about the module/subworkflow version in the '.nf-core.yml' overrides the '--sha' option. + + Returns: + [(ModulesRepo, str, str)]: A list of tuples containing a ModulesRepo object, + the component name, and the component version. + """ + if branch is not None: + use_branch = questionary.confirm( + f"'--branch' was specified. 
Should this branch be used to update all {self.component_type}?", + default=False, + ) + if not use_branch: + branch = None + skipped_repos = [] + skipped_components = [] + overridden_repos = [] + overridden_components = [] + components_info = {} + # Loop through all the modules/subworkflows in the pipeline + # and check if they have an entry in the '.nf-core.yml' file + for repo_name, components in self.modules_json.get_all_components(self.component_type).items(): + if repo_name not in self.update_config or self.update_config[repo_name] is True: + # There aren't restrictions for the repository in .nf-core.yml file + components_info[repo_name] = {} + for component_dir, component in components: + try: + components_info[repo_name][component_dir].append( + ( + component, + self.sha, + self.modules_json.get_component_branch( + self.component_type, component, repo_name, component_dir + ), + ) + ) + except KeyError: + components_info[repo_name][component_dir] = [ + ( + component, + self.sha, + self.modules_json.get_component_branch( + self.component_type, component, repo_name, component_dir + ), + ) + ] + elif isinstance(self.update_config[repo_name], dict): + # If it is a dict, then there are entries for individual components or component directories + for component_dir in set([dir for dir, _ in components]): + if isinstance(self.update_config[repo_name][component_dir], str): + # If a string is given it is the commit SHA to which we should update to + custom_sha = self.update_config[repo_name][component_dir] + components_info[repo_name] = {} + for dir, component in components: + if component_dir == dir: + try: + components_info[repo_name][component_dir].append( + ( + component, + custom_sha, + self.modules_json.get_component_branch( + self.component_type, component, repo_name, component_dir + ), + ) + ) + except KeyError: + components_info[repo_name][component_dir] = [ + ( + component, + custom_sha, + self.modules_json.get_component_branch( + self.component_type, component, repo_name, component_dir + ), + ) + ] + if self.sha is not None: + overridden_repos.append(repo_name) + elif self.update_config[repo_name][component_dir] is False: + for dir, component in components: + if dir == component_dir: + skipped_components.append(f"{component_dir}/{components}") + elif isinstance(self.update_config[repo_name][component_dir], dict): + # If it's a dict, there are entries for individual components + dir_config = self.update_config[repo_name][component_dir] + components_info[repo_name] = {} + for component_dir, component in components: + if component not in dir_config or dir_config[component] is True: + try: + components_info[repo_name][component_dir].append( + ( + component, + self.sha, + self.modules_json.get_component_branch( + self.component_type, component, repo_name, component_dir + ), + ) + ) + except KeyError: + components_info[repo_name][component_dir] = [ + ( + component, + self.sha, + self.modules_json.get_component_branch( + self.component_type, component, repo_name, component_dir + ), + ) + ] + elif isinstance(dir_config[component], str): + # If a string is given it is the commit SHA to which we should update to + custom_sha = dir_config[component] + try: + components_info[repo_name][component_dir].append( + ( + component, + custom_sha, + self.modules_json.get_component_branch( + self.component_type, component, repo_name, component_dir + ), + ) + ) + except KeyError: + components_info[repo_name][component_dir] = [ + ( + component, + custom_sha, + self.modules_json.get_component_branch( + 
self.component_type, component, repo_name, component_dir + ), + ) + ] + if self.sha is not None: + overridden_components.append(component) + elif dir_config[component] is False: + # Otherwise the entry must be 'False' and we should ignore the component + skipped_components.append(f"{component_dir}/{component}") + else: + raise UserWarning( + f"{self.component_type[:-1].title()} '{component}' in '{component_dir}' has an invalid entry in '.nf-core.yml'" + ) + elif isinstance(self.update_config[repo_name], str): + # If a string is given it is the commit SHA to which we should update to + custom_sha = self.update_config[repo_name] + components_info[repo_name] = {} + for component_dir, component in components: + try: + components_info[repo_name][component_dir].append( + ( + component, + custom_sha, + self.modules_json.get_component_branch( + self.component_type, component, repo_name, component_dir + ), + ) + ) + except KeyError: + components_info[repo_name][component_dir] = [ + ( + component, + custom_sha, + self.modules_json.get_component_branch( + self.component_type, component, repo_name, component_dir + ), + ) + ] + if self.sha is not None: + overridden_repos.append(repo_name) + elif self.update_config[repo_name] is False: + skipped_repos.append(repo_name) + else: + raise UserWarning(f"Repo '{repo_name}' has an invalid entry in '.nf-core.yml'") + + if skipped_repos: + skipped_str = "', '".join(skipped_repos) + log.info(f"Skipping {self.component_type} in repositor{plural_y(skipped_repos)}: '{skipped_str}'") + + if skipped_components: + skipped_str = "', '".join(skipped_components) + log.info(f"Skipping {self.component_type[:-1]}{plural_s(skipped_components)}: '{skipped_str}'") + + if overridden_repos: + overridden_str = "', '".join(overridden_repos) + log.info( + f"Overriding '--sha' flag for {self.component_type} in repositor{plural_y(overridden_repos)} " + f"with '.nf-core.yml' entry: '{overridden_str}'" + ) + + if overridden_components: + overridden_str = "', '".join(overridden_components) + log.info( + f"Overriding '--sha' flag for {self.component_type[:-1]}{plural_s(overridden_components)} with " + f"'.nf-core.yml' entry: '{overridden_str}'" + ) + # Loop through components_info and create on ModulesRepo object per remote and branch + repos_and_branches = {} + for repo_name, repo_content in components_info.items(): + for component_dir, comps in repo_content.items(): + for comp, sha, comp_branch in comps: + if branch is not None: + comp_branch = branch + if (repo_name, comp_branch) not in repos_and_branches: + repos_and_branches[(repo_name, comp_branch)] = [] + repos_and_branches[(repo_name, comp_branch)].append((comp, sha)) + + # Create ModulesRepo objects + repo_objs_comps = [] + for (repo_url, branch), comps_shas in repos_and_branches.items(): + try: + modules_repo = ModulesRepo(remote_url=repo_url, branch=branch) + except LookupError as e: + log.warning(e) + log.info(f"Skipping {self.component_type} in '{repo_url}'") + else: + repo_objs_comps.append((modules_repo, comps_shas)) + + # Flatten the list + components_info = [(repo, comp, sha) for repo, comps_shas in repo_objs_comps for comp, sha in comps_shas] + + # Verify that that all components and shas exist in their respective ModulesRepo, + # don't try to update those that don't + i = 0 + while i < len(components_info): + repo, component, sha = components_info[i] + if not repo.component_exists(component, self.component_type): + log.warning( + f"{self.component_type[:-1].title()} '{component}' does not exist in '{repo.remote_url}'. 
Skipping..." + ) + components_info.pop(i) + elif sha is not None and not repo.sha_exists_on_branch(sha): + log.warning( + f"Git sha '{sha}' does not exists on the '{repo.branch}' of '{repo.remote_url}'. Skipping {self.component_type[:-1]} '{component}'" + ) + components_info.pop(i) + else: + i += 1 + + # Add patch filenames to the components that have them + components_info = [ + (repo, comp, sha, self.modules_json.get_patch_fn(comp, repo.remote_url, repo.repo_path)) + for repo, comp, sha in components_info + ] + + return components_info + + def setup_diff_file(self, check_diff_exist=True): + """Sets up the diff file. + + If the save diff option was chosen interactively, the user is asked to supply a name for the diff file. + + Then creates the file for saving the diff. + """ + if self.save_diff_fn is True: + # From questionary - no filename yet + self.save_diff_fn = questionary.path( + "Enter the filename: ", style=nf_core.utils.nfcore_question_style + ).unsafe_ask() + + self.save_diff_fn = Path(self.save_diff_fn) + + if not check_diff_exist: + # This guarantees that the file exists after calling the function + self.save_diff_fn.touch() + return + # Check if filename already exists (questionary or cli) + while self.save_diff_fn.exists(): + if questionary.confirm(f"'{self.save_diff_fn}' exists. Remove file?").unsafe_ask(): + os.remove(self.save_diff_fn) + break + self.save_diff_fn = questionary.path( + "Enter a new filename: ", + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + self.save_diff_fn = Path(self.save_diff_fn) + + # This guarantees that the file exists after calling the function + self.save_diff_fn.touch() + + def move_files_from_tmp_dir(self, component, install_folder, repo_path, new_version): + """Move the files from the temporary to the installation directory. + + Args: + component (str): The module/subworkflow name. + install_folder [str]: The path to the temporary installation directory. + repo_path (str): The name of the directory where modules/subworkflows are installed + new_version (str): The version of the module/subworkflow that was installed. 
+ """ + temp_component_dir = os.path.join(install_folder, component) + files = os.listdir(temp_component_dir) + pipeline_path = os.path.join(self.dir, self.component_type, repo_path, component) + + log.debug(f"Removing old version of {self.component_type[:-1]} '{component}'") + self.clear_component_dir(component, pipeline_path) + + os.makedirs(pipeline_path) + for file in files: + path = os.path.join(temp_component_dir, file) + if os.path.exists(path): + shutil.move(path, os.path.join(pipeline_path, file)) + + log.info(f"Updating '{repo_path}/{component}'") + log.debug(f"Updating {self.component_type[:-1]} '{component}' to {new_version} from {repo_path}") + + def try_apply_patch( + self, component, repo_path, patch_relpath, component_dir, component_install_dir, write_file=True + ): + """ + Try applying a patch file to the new module/subworkflow files + + + Args: + component (str): The name of the module/subworkflow + repo_path (str): The name of the repository where the module/subworkflow resides + patch_relpath (Path | str): The path to patch file in the pipeline + component_dir (Path | str): The module/subworkflow directory in the pipeline + component_install_dir (Path | str): The directory where the new component + file have been installed + + Returns: + (bool): Whether the patch application was successful + """ + component_fullname = str(Path(repo_path, component)) + log.info(f"Found patch for {self.component_type[:-1]} '{component_fullname}'. Trying to apply it to new files") + + patch_path = Path(self.dir / patch_relpath) + component_relpath = Path(self.component_type, repo_path, component) + + # Check that paths in patch file are updated + self.check_patch_paths(patch_path, component) + + # Copy the installed files to a new temporary directory to save them for later use + temp_dir = Path(tempfile.mkdtemp()) + temp_component_dir = temp_dir / component + shutil.copytree(component_install_dir, temp_component_dir) + + try: + new_files = ModulesDiffer.try_apply_patch(component, repo_path, patch_path, temp_component_dir) + except LookupError: + # Patch failed. Save the patch file by moving to the install dir + shutil.move(patch_path, Path(component_install_dir, patch_path.relative_to(component_dir))) + log.warning( + f"Failed to apply patch for {self.component_type[:-1]} '{component_fullname}'. You will have to apply the patch manually" + ) + return False + + # Write the patched files to a temporary directory + log.debug("Writing patched files") + for file, new_content in new_files.items(): + fn = temp_component_dir / file + with open(fn, "w") as fh: + fh.writelines(new_content) + + # Create the new patch file + log.debug("Regenerating patch file") + ModulesDiffer.write_diff_file( + Path(temp_component_dir, patch_path.relative_to(component_dir)), + component, + repo_path, + component_install_dir, + temp_component_dir, + file_action="w", + for_git=False, + dsp_from_dir=component_relpath, + dsp_to_dir=component_relpath, + ) + + # Move the patched files to the install dir + log.debug("Overwriting installed files installed files with patched files") + shutil.rmtree(component_install_dir) + shutil.copytree(temp_component_dir, component_install_dir) + + # Add the patch file to the modules.json file + self.modules_json.add_patch_entry( + component, self.modules_repo.remote_url, repo_path, patch_relpath, write_file=write_file + ) + + return True + + def get_components_to_update(self, component): + """ + Get all modules and subworkflows linked to the updated component. 
+ + Returns: + (list,list): A tuple of lists with the modules and subworkflows to update + """ + mods_json = self.modules_json.get_modules_json() + modules_to_update = [] + subworkflows_to_update = [] + installed_by = self.modules_json.get_installed_by_entries(self.component_type, component) + + if self.component_type == "modules": + # All subworkflow names in the installed_by section of a module are subworkflows using this module + # We need to update them too + subworkflows_to_update = [subworkflow for subworkflow in installed_by if subworkflow != self.component_type] + elif self.component_type == "subworkflows": + for repo, repo_content in mods_json["repos"].items(): + for component_type, dir_content in repo_content.items(): + for dir, components in dir_content.items(): + for comp, comp_content in components.items(): + # If the updated subworkflow name appears in the installed_by section of the checked component + # The checked component is used by the updated subworkflow + # We need to update it too + if component in comp_content["installed_by"]: + if component_type == "modules": + modules_to_update.append(comp) + elif component_type == "subworkflows": + subworkflows_to_update.append(comp) + + return modules_to_update, subworkflows_to_update + + def update_linked_components(self, modules_to_update, subworkflows_to_update, updated=None, check_diff_exist=True): + """ + Update modules and subworkflows linked to the component being updated. + """ + for s_update in subworkflows_to_update: + if s_update in updated: + continue + original_component_type, original_update_all = self._change_component_type("subworkflows") + self.update(s_update, silent=True, updated=updated, check_diff_exist=check_diff_exist) + self._reset_component_type(original_component_type, original_update_all) + + for m_update in modules_to_update: + if m_update in updated: + continue + original_component_type, original_update_all = self._change_component_type("modules") + try: + self.update(m_update, silent=True, updated=updated, check_diff_exist=check_diff_exist) + except LookupError as e: + # If the module to be updated is not available, check if there has been a name change + if "not found in list of available" in str(e): + # Skip update, we check for name changes with manage_changes_in_linked_components + pass + else: + raise + finally: + self._reset_component_type(original_component_type, original_update_all) + + def manage_changes_in_linked_components(self, component, modules_to_update, subworkflows_to_update): + """Check for linked components added or removed in the new subworkflow version""" + if self.component_type == "subworkflows": + subworkflow_directory = Path(self.dir, self.component_type, self.modules_repo.repo_path, component) + included_modules, included_subworkflows = get_components_to_install(subworkflow_directory) + # If a module/subworkflow has been removed from the subworkflow + for module in modules_to_update: + if module not in included_modules: + log.info(f"Removing module '{module}' which is not included in '{component}' anymore.") + remove_module_object = ComponentRemove("modules", self.dir) + remove_module_object.remove(module, removed_by=component) + for subworkflow in subworkflows_to_update: + if subworkflow not in included_subworkflows: + log.info(f"Removing subworkflow '{subworkflow}' which is not included in '{component}' anymore.") + remove_subworkflow_object = ComponentRemove("subworkflows", self.dir) + remove_subworkflow_object.remove(subworkflow, removed_by=component) + # If a new 
module/subworkflow is included in the subworkflow and wasn't included before
+            for module in included_modules:
+                if module not in modules_to_update:
+                    log.info(f"Installing newly included module '{module}' for '{component}'")
+                    install_module_object = ComponentInstall(self.dir, "modules", installed_by=component)
+                    install_module_object.install(module, silent=True)
+            for subworkflow in included_subworkflows:
+                if subworkflow not in subworkflows_to_update:
+                    log.info(f"Installing newly included subworkflow '{subworkflow}' for '{component}'")
+                    install_subworkflow_object = ComponentInstall(self.dir, "subworkflows", installed_by=component)
+                    install_subworkflow_object.install(subworkflow, silent=True)
+
+    def _change_component_type(self, new_component_type):
+        original_component_type = self.component_type
+        self.component_type = new_component_type
+        self.modules_json.pipeline_components = None
+        # also reset update_all in case it's set
+        original_update_all = self.update_all
+        self.update_all = False
+        return original_component_type, original_update_all
+
+    def _reset_component_type(self, original_component_type, original_update_all):
+        self.component_type = original_component_type
+        self.modules_json.pipeline_components = None
+        self.update_all = original_update_all
diff --git a/nf_core/create.py b/nf_core/create.py
index 8e58306cd8..74c9df1b87 100644
--- a/nf_core/create.py
+++ b/nf_core/create.py
@@ -1,19 +1,17 @@
-#!/usr/bin/env python
 """Creates a nf-core pipeline matching the current
 organization's specification based on a template.
 """
 import configparser
-import imghdr
 import logging
 import os
-import pathlib
 import random
 import re
 import shutil
-import subprocess
 import sys
 import time
+from pathlib import Path
+
+import filetype
 import git
 import jinja2
 import questionary
@@ -23,11 +21,12 @@
 import nf_core
 import nf_core.schema
 import nf_core.utils
+from nf_core.lint_utils import run_prettier_on_file

 log = logging.getLogger(__name__)


-class PipelineCreate(object):
+class PipelineCreate:
     """Creates a nf-core pipeline a la carte from the nf-core best-practice template.

     Args:
@@ -39,6 +38,9 @@
         force (bool): Overwrites a given workflow directory with the same name. Defaults to False. May the force be with you.
         outdir (str): Path to the local output directory.
+        template_yaml_path (str): Path to template.yml file for pipeline creation settings.
+        plain (bool): If true the Git repository will be initialized plain.
+        default_branch (str): Specifies the --initial-branch name.
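+
+        A minimal sketch of programmatic use (values are hypothetical):
+
+            create = PipelineCreate(
+                name="testpipeline",
+                description="A pipeline for testing",
+                author="Jane Doe",
+            )
+            create.init_pipeline()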
""" def __init__( @@ -52,12 +54,17 @@ def __init__( outdir=None, template_yaml_path=None, plain=False, + default_branch=None, ): self.template_params, skip_paths_keys = self.create_param_dict( name, description, author, version, template_yaml_path, plain ) skippable_paths = { + "github": [ + ".github/", + ".gitignore", + ], "ci": [".github/workflows/"], "igenomes": ["conf/igenomes.config"], "branded": [ @@ -74,11 +81,14 @@ def __init__( self.name = self.template_params["name"] # Set fields used by the class methods - self.no_git = no_git + self.no_git = ( + no_git if self.template_params["github"] else True + ) # Set to True if template was configured without github hosting + self.default_branch = default_branch self.force = force if outdir is None: outdir = os.path.join(os.getcwd(), self.template_params["name_noslash"]) - self.outdir = outdir + self.outdir = Path(outdir) def create_param_dict(self, name, description, author, version, template_yaml_path, plain): """Creates a dictionary of parameters for the new pipeline. @@ -110,6 +120,7 @@ def create_param_dict(self, name, description, author, version, template_yaml_pa # Define the different template areas, and what actions to take for each # if they are skipped template_areas = { + "github": {"name": "GitHub hosting", "file": True, "content": False}, "ci": {"name": "GitHub CI", "file": True, "content": False}, "github_badges": {"name": "GitHub badges", "file": False, "content": True}, "igenomes": {"name": "iGenomes config", "file": True, "content": True}, @@ -139,6 +150,9 @@ def create_param_dict(self, name, description, author, version, template_yaml_pa param_dict[t_area] = False else: param_dict[t_area] = True + # If github is selected, exclude also github_badges + if not param_dict["github"]: + param_dict["github_badges"] = False # Set the last parameters based on the ones provided param_dict["short_name"] = ( @@ -152,6 +166,18 @@ def create_param_dict(self, name, description, author, version, template_yaml_pa param_dict["logo_dark"] = f"{param_dict['name_noslash']}_logo_dark.png" param_dict["version"] = version + _, config_yml = nf_core.utils.load_tools_config() + if ( + "lint" in config_yml + and "nextflow_config" in config_yml["lint"] + and "manifest.name" in config_yml["lint"]["nextflow_config"] + ): + return param_dict, skip_paths + if param_dict["prefix"] == "nf-core": + # Check that the pipeline name matches the requirements + if not re.match(r"^[a-z]+$", param_dict["short_name"]): + raise UserWarning("[red]Invalid workflow name: must be lowercase without punctuation.") + return param_dict, skip_paths def customize_template(self, template_areas): @@ -216,9 +242,10 @@ def init_pipeline(self): if self.template_params["branded"]: log.info( "[green bold]!!!!!! 
IMPORTANT !!!!!!\n\n" - + "[green not bold]If you are interested in adding your pipeline to the nf-core community,\n" - + "PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE!\n\n" - + "[default]Please read: [link=https://nf-co.re/developers/adding_pipelines#join-the-community]https://nf-co.re/developers/adding_pipelines#join-the-community[/link]" + "[green not bold]If you are interested in adding your pipeline to the nf-core community,\n" + "PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE!\n\n" + "[default]Please read: [link=https://nf-co.re/developers/adding_pipelines#join-the-community]" + "https://nf-co.re/developers/adding_pipelines#join-the-community[/link]" ) def render_template(self): @@ -226,7 +253,7 @@ def render_template(self): log.info(f"Creating new nf-core pipeline: '{self.name}'") # Check if the output directory exists - if os.path.exists(self.outdir): + if self.outdir.exists(): if self.force: log.warning(f"Output directory '{self.outdir}' exists - continuing as --force specified") else: @@ -245,17 +272,17 @@ def render_template(self): object_attrs["nf_core_version"] = nf_core.__version__ # Can't use glob.glob() as need recursive hidden dotfiles - https://stackoverflow.com/a/58126417/713980 - template_files = list(pathlib.Path(template_dir).glob("**/*")) - template_files += list(pathlib.Path(template_dir).glob("*")) + template_files = list(Path(template_dir).glob("**/*")) + template_files += list(Path(template_dir).glob("*")) ignore_strs = [".pyc", "__pycache__", ".pyo", ".pyd", ".DS_Store", ".egg"] + short_name = self.template_params["short_name"] rename_files = { - "workflows/pipeline.nf": f"workflows/{self.template_params['short_name']}.nf", - "lib/WorkflowPipeline.groovy": f"lib/Workflow{self.template_params['short_name'][0].upper()}{self.template_params['short_name'][1:]}.groovy", + "workflows/pipeline.nf": f"workflows/{short_name}.nf", + "lib/WorkflowPipeline.groovy": f"lib/Workflow{short_name[0].upper()}{short_name[1:]}.groovy", } # Set the paths to skip according to customization for template_fn_path_obj in template_files: - template_fn_path = str(template_fn_path_obj) # Skip files that are in the self.skip_paths list @@ -271,9 +298,9 @@ def render_template(self): # Set up vars and directories template_fn = os.path.relpath(template_fn_path, template_dir) - output_path = os.path.join(self.outdir, template_fn) + output_path = self.outdir / template_fn if template_fn in rename_files: - output_path = os.path.join(self.outdir, rename_files[template_fn]) + output_path = self.outdir / rename_files[template_fn] os.makedirs(os.path.dirname(output_path), exist_ok=True) try: @@ -313,9 +340,10 @@ def render_template(self): # Make a logo and save it, if it is a nf-core pipeline self.make_pipeline_logo() else: - # Remove field mentioning nf-core docs - # in the github bug report template - self.remove_nf_core_in_bug_report_template() + if self.template_params["github"]: + # Remove field mentioning nf-core docs + # in the github bug report template + self.remove_nf_core_in_bug_report_template() # Update the .nf-core.yml with linting configurations self.fix_linting() @@ -324,7 +352,7 @@ def update_nextflow_schema(self): """ Removes unused parameters from the nextflow schema. 
""" - schema_path = os.path.join(self.outdir, "nextflow_schema.json") + schema_path = self.outdir / "nextflow_schema.json" schema = nf_core.schema.PipelineSchema() schema.schema_filename = schema_path @@ -333,22 +361,14 @@ def update_nextflow_schema(self): schema.get_wf_params() schema.remove_schema_notfound_configs() schema.save_schema(suppress_logging=True) - - # The schema is not guaranteed to follow Prettier standards - # so we run prettier on the schema file - try: - subprocess.run( - ["prettier", "--write", schema_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=False - ) - except FileNotFoundError: - log.warning("Prettier not found. Please install it and run it on the pipeline to fix linting issues.") + run_prettier_on_file(schema_path) def remove_nf_core_in_bug_report_template(self): """ Remove the field mentioning nf-core documentation in the github bug report template """ - bug_report_path = os.path.join(self.outdir, ".github", "ISSUE_TEMPLATE", "bug_report.yml") + bug_report_path = self.outdir / ".github" / "ISSUE_TEMPLATE" / "bug_report.yml" with open(bug_report_path, "r") as fh: contents = yaml.load(fh, Loader=yaml.FullLoader) @@ -359,17 +379,7 @@ def remove_nf_core_in_bug_report_template(self): with open(bug_report_path, "w") as fh: yaml.dump(contents, fh, default_flow_style=False, sort_keys=False) - # The dumped yaml file will not follow prettier formatting rules - # so we run prettier on the file - try: - subprocess.run( - ["prettier", "--write", bug_report_path], - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - check=False, - ) - except FileNotFoundError: - log.warning("Prettier not found. Please install it and run it on the pipeline to fix linting issues.") + run_prettier_on_file(bug_report_path) def fix_linting(self): """ @@ -395,6 +405,15 @@ def fix_linting(self): "multiqc_config": ["report_comment"], } + # Add GitHub hosting specific configurations + if not self.template_params["github"]: + lint_config["files_exist"].extend( + [ + ".github/ISSUE_TEMPLATE/bug_report.yml", + ] + ) + lint_config["files_unchanged"] = [".github/ISSUE_TEMPLATE/bug_report.yml"] + # Add CI specific configurations if not self.template_params["ci"]: lint_config["files_exist"].extend( @@ -419,28 +438,16 @@ def fix_linting(self): ) # Add github badges specific configurations - if not self.template_params["github_badges"]: + if not self.template_params["github_badges"] or not self.template_params["github"]: lint_config["readme"] = ["nextflow_badge"] # Add the lint content to the preexisting nf-core config - nf_core_yml = nf_core.utils.load_tools_config(self.outdir) + config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) nf_core_yml["lint"] = lint_config - with open(os.path.join(self.outdir, ".nf-core.yml"), "w") as fh: + with open(self.outdir / config_fn, "w") as fh: yaml.dump(nf_core_yml, fh, default_flow_style=False, sort_keys=False) - # The dumped yaml file will not follow prettier formatting rules - # so we run prettier on the file - try: - subprocess.run( - ["prettier", "--write", os.path.join(self.outdir, ".nf-core.yml")], - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - check=False, - ) - except FileNotFoundError: - log.warning( - "Prettier is not installed. Please install it and run it on the pipeline to fix linting issues." 
- ) + run_prettier_on_file(os.path.join(self.outdir, config_fn)) def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" logo_url = f"https://nf-co.re/logo/{self.template_params['short_name']}?theme=light" log.debug(f"Fetching logo from {logo_url}") - email_logo_path = f"{self.outdir}/assets/{self.template_params['name_noslash']}_logo_light.png" + email_logo_path = self.outdir / "assets" / f"{self.template_params['name_noslash']}_logo_light.png" self.download_pipeline_logo(f"{logo_url}&w=400", email_logo_path) for theme in ["dark", "light"]: readme_logo_url = f"{logo_url}?w=600&theme={theme}" - readme_logo_path = f"{self.outdir}/docs/images/{self.template_params['name_noslash']}_logo_{theme}.png" + readme_logo_path = ( + self.outdir / "docs" / "images" / f"{self.template_params['name_noslash']}_logo_{theme}.png" + ) self.download_pipeline_logo(readme_logo_url, readme_logo_path) def download_pipeline_logo(self, url, img_fn): @@ -488,7 +497,7 @@ def download_pipeline_logo(self, url, img_fn): with open(img_fn, "wb") as fh: fh.write(r.content) # Check that the file looks valid - image_type = imghdr.what(img_fn) + image_type = filetype.guess(img_fn).extension if image_type != "png": log.error(f"Logo from the website didn't look like an image: '{image_type}'") continue @@ -502,24 +511,27 @@ def git_init_pipeline(self): Raises: UserWarning: if Git default branch is set to 'dev' or 'TEMPLATE'. """ - # Check that the default branch is not dev + default_branch = self.default_branch try: - default_branch = git.config.GitConfigParser().get_value("init", "defaultBranch") + default_branch = default_branch or git.config.GitConfigParser().get_value("init", "defaultBranch") except configparser.Error: - default_branch = None log.debug("Could not read init.defaultBranch") - if default_branch == "dev" or default_branch == "TEMPLATE": + if default_branch in ["dev", "TEMPLATE"]: raise UserWarning( - f"Your Git defaultBranch is set to '{default_branch}', which is incompatible with nf-core.\n" - "This can be modified with the command [white on grey23] git config --global init.defaultBranch [/]\n" - "Pipeline git repository is not initialised." + f"Your Git defaultBranch '{default_branch}' is incompatible with nf-core.\n" + "'dev' and 'TEMPLATE' cannot be used as the default branch name.\n" + "Set the default branch name with " + "[white on grey23] git config --global init.defaultBranch [/]\n" + "Or set the default_branch parameter in this class.\n" + "Pipeline git repository will not be initialised."
) - # Initialise pipeline + log.info("Initialising pipeline git repository") repo = git.Repo.init(self.outdir) repo.git.add(A=True) repo.index.commit(f"initial template build from nf-core/tools, version {nf_core.__version__}") - # Add TEMPLATE branch to git repository + if default_branch: + repo.active_branch.rename(default_branch) repo.git.branch("TEMPLATE") repo.git.branch("dev") log.info( diff --git a/nf_core/download.py b/nf_core/download.py index e9a193b2a0..9d430e3352 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """Downloads a nf-core pipeline to the local file system.""" from __future__ import print_function @@ -12,6 +11,7 @@ import subprocess import sys import tarfile +import textwrap from zipfile import ZipFile import questionary @@ -64,7 +64,7 @@ def get_renderables(self): yield self.make_tasks_table([task]) -class DownloadWorkflow(object): +class DownloadWorkflow: """Downloads a nf-core workflow from GitHub to the local file system. Can also download its Singularity container image if required. @@ -453,7 +453,8 @@ def find_container_images(self): for subdir, _, files in os.walk(os.path.join(self.outdir, "workflow", "modules")): for file in files: if file.endswith(".nf"): - with open(os.path.join(subdir, file), "r") as fh: + file_path = os.path.join(subdir, file) + with open(file_path, "r") as fh: # Look for any lines with `container = "xxx"` this_container = None contents = fh.read() @@ -478,7 +479,9 @@ def find_container_images(self): # Don't recognise this, throw a warning else: - log.error(f"[red]Cannot parse container string, skipping: [green]'{file}'") + log.error( + f"[red]Cannot parse container string in '{file_path}':\n\n{textwrap.indent(match, ' ')}\n\n:warning: Skipping this singularity image.." + ) if this_container: containers_raw.append(this_container) @@ -503,7 +506,6 @@ def get_singularity_images(self): containers_download = [] containers_pull = [] for container in self.containers: - # Fetch the output and cached filenames for this container out_path, cache_path = self.singularity_image_filenames(container) diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index 7fbecc5e02..fa435c3612 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -2,6 +2,19 @@ FROM gitpod/workspace-base USER root +# Install util tools. 
+RUN apt-get update --quiet && \ + apt-get install --quiet --yes \ + apt-transport-https \ + apt-utils \ + sudo \ + git \ + less \ + wget \ + curl \ + tree \ + graphviz + # Install Conda RUN wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh && \ bash Miniconda3-latest-Linux-x86_64.sh -b -p /opt/conda && \ @@ -18,21 +31,20 @@ RUN chown -R gitpod:gitpod /opt/conda /usr/src/nf_core # Change user to gitpod USER gitpod - # Install nextflow, nf-core, Mamba, and pytest-workflow -RUN conda update -n base -c defaults conda && \ - conda config --add channels defaults && \ +RUN conda config --add channels defaults && \ conda config --add channels bioconda && \ conda config --add channels conda-forge && \ - conda install \ - openjdk=11.0.15 \ - nextflow=22.04.0 \ - pytest-workflow=1.6.0 \ - mamba=0.24.0 \ - pip=22.1.2 \ - black=22.6.0 \ - -n base && \ - conda clean --all -f -y + conda config --set channel_priority strict && \ + conda install --quiet --yes --name base mamba && \ + mamba install --quiet --yes --name base \ + nextflow \ + nf-core \ + nf-test \ + black \ + prettier \ + pytest-workflow && \ + mamba clean --all -f -y # Install nf-core RUN python -m pip install . diff --git a/nf_core/launch.py b/nf_core/launch.py index d57d9f112f..648c8775f8 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Launch a pipeline, interactively collecting params """ from __future__ import print_function @@ -18,11 +17,12 @@ import nf_core.schema import nf_core.utils +from nf_core.lint_utils import dump_json_with_prettier log = logging.getLogger(__name__) -class Launch(object): +class Launch: """Class to hold config option to launch a pipeline""" def __init__( @@ -98,7 +98,6 @@ def __init__( self.cli_launch = True def launch_pipeline(self): - # Prompt for pipeline if not supplied and no web launch ID if self.pipeline is None and self.web_id is None: launch_type = questionary.select( @@ -120,12 +119,19 @@ def launch_pipeline(self): # Check if the output file exists already if os.path.exists(self.params_out): - log.warning(f"Parameter output file already exists! {os.path.relpath(self.params_out)}") + # if params_in has the same name as params_out, don't ask to overwrite + if self.params_in and os.path.abspath(self.params_in) == os.path.abspath(self.params_out): + log.warning( + f"The parameter input file has the same name as the output file! {os.path.relpath(self.params_out)} will be overwritten." + ) + else: + log.warning(f"Parameter output file already exists! {os.path.relpath(self.params_out)}") if Confirm.ask("[yellow]Do you want to overwrite this file?"): - os.remove(self.params_out) - log.info(f"Deleted {self.params_out}\n") + if not (self.params_in and os.path.abspath(self.params_in) == os.path.abspath(self.params_out)): + os.remove(self.params_out) + log.info(f"Deleted {self.params_out}\n") else: - log.info("Exiting. Use --params-out to specify a custom filename.") + log.info("Exiting. 
Use --params-out to specify a custom output filename.") return False log.info( @@ -454,7 +460,6 @@ def prompt_group(self, group_id, group_obj): answers = {} error_msgs = [] while not while_break: - if len(error_msgs) == 0: self.print_param_header(group_id, group_obj, True) @@ -691,12 +696,9 @@ def build_command(self): # Pipeline parameters if len(self.schema_obj.input_params) > 0: - # Write the user selection to a file and run nextflow with that if self.use_params_file: - with open(self.params_out, "w") as fp: - json.dump(self.schema_obj.input_params, fp, indent=4) - fp.write("\n") + dump_json_with_prettier(self.params_out, self.schema_obj.input_params) self.nextflow_cmd += f' -params-file "{os.path.relpath(self.params_out)}"' # Call nextflow with a list of command line flags @@ -716,6 +718,6 @@ def launch_workflow(self): """Launch nextflow if required""" log.info(f"[bold underline]Nextflow command:[/]\n[magenta]{self.nextflow_cmd}\n\n") - if Confirm.ask("Do you want to run this command now? "): + if Confirm.ask("Do you want to run this command now? ", default=True): log.info("Launching workflow! :rocket:") subprocess.call(self.nextflow_cmd, shell=True) diff --git a/nf_core/licences.py b/nf_core/licences.py index 2e65462a1d..d686a56178 100644 --- a/nf_core/licences.py +++ b/nf_core/licences.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """Lists software licences for a given workflow.""" from __future__ import print_function @@ -17,7 +16,7 @@ log = logging.getLogger(__name__) -class WorkflowLicences(object): +class WorkflowLicences: """A nf-core workflow licenses collection. Tries to retrieve the license information from all dependencies diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py index 7e61de3ac1..e014a933ea 100644 --- a/nf_core/lint/__init__.py +++ b/nf_core/lint/__init__.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """Linting policy for nf-core pipeline projects. Tests Nextflow-based pipelines to check that they adhere to @@ -8,7 +7,7 @@ import datetime import json import logging -import re +import os import git import rich @@ -38,6 +37,7 @@ def run_linting( show_passed=False, fail_ignored=False, fail_warned=False, + sort_by="test", md_fn=None, json_fn=None, hide_progress=False, @@ -122,9 +122,9 @@ def run_linting( # Print the results lint_obj._print_results(show_passed) - module_lint_obj._print_results(show_passed) + module_lint_obj._print_results(show_passed, sort_by=sort_by) nf_core.lint_utils.print_joint_summary(lint_obj, module_lint_obj) - nf_core.lint_utils.print_fixes(lint_obj, module_lint_obj) + nf_core.lint_utils.print_fixes(lint_obj) # Save results to Markdown file if md_fn is not None: @@ -172,6 +172,7 @@ class PipelineLint(nf_core.utils.Pipeline): from .files_unchanged import files_unchanged from .merge_markers import merge_markers from .modules_json import modules_json + from .modules_structure import modules_structure from .multiqc_config import multiqc_config from .nextflow_config import nextflow_config from .pipeline_name_conventions import pipeline_name_conventions @@ -227,6 +228,7 @@ def _get_all_lint_tests(release_mode): "merge_markers", "modules_json", "multiqc_config", + "modules_structure", ] + (["version_consistency"] if release_mode else []) def _load(self): @@ -245,7 +247,7 @@ def _load_lint_config(self): Add parsed config to the `self.lint_config` class attribute. 
""" - tools_config = nf_core.utils.load_tools_config(self.wf_path) + _, tools_config = nf_core.utils.load_tools_config(self.wf_path) self.lint_config = tools_config.get("lint", {}) # Check if we have any keys that don't match lint test names @@ -311,7 +313,7 @@ def _lint_pipeline(self): rich.progress.BarColumn(bar_width=None), "[magenta]{task.completed} of {task.total}[reset] » [bold yellow]{task.fields[test_name]}", transient=True, - disable=self.hide_progress, + disable=self.hide_progress or os.environ.get("HIDE_PROGRESS", None) is not None, ) with self.progress_bar: lint_progress = self.progress_bar.add_task( @@ -432,7 +434,6 @@ def format_result(test_results): ) def _print_summary(self): - # Summary table summary_colour = "red" if len(self.failed) > 0 else "green" table = Table(box=rich.box.ROUNDED, style=summary_colour) diff --git a/nf_core/lint/actions_awsfulltest.py b/nf_core/lint/actions_awsfulltest.py index e021ebd384..e8e1c951b1 100644 --- a/nf_core/lint/actions_awsfulltest.py +++ b/nf_core/lint/actions_awsfulltest.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import os import yaml diff --git a/nf_core/lint/actions_awstest.py b/nf_core/lint/actions_awstest.py index 4f27cbd765..ccdf0abf6a 100644 --- a/nf_core/lint/actions_awstest.py +++ b/nf_core/lint/actions_awstest.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import os import yaml diff --git a/nf_core/lint/actions_ci.py b/nf_core/lint/actions_ci.py index 4d6b0e6dfe..9aa18135d9 100644 --- a/nf_core/lint/actions_ci.py +++ b/nf_core/lint/actions_ci.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import os import re @@ -40,35 +38,6 @@ def actions_ci(self): .. note:: These ``matrix`` variables run the test workflow twice, varying the ``nxf_ver`` variable each time. This is used in the ``nextflow run`` commands to test the pipeline with both the latest available version of the pipeline (``''``) and the stated minimum required version. - - * The `Docker` container for the pipeline must use the correct pipeline version number: - - * Development pipelines: - - .. code-block:: bash - - docker tag nfcore/:dev nfcore/:dev - - * Released pipelines: - - .. code-block:: bash - - docker tag nfcore/:dev nfcore/: - - * Complete example for a released pipeline called *nf-core/example* with version number ``1.0.0``: - - .. code-block:: yaml - :emphasize-lines: 3,8,9 - - - name: Build new docker image - if: env.GIT_DIFF - run: docker build --no-cache . -t nfcore/example:1.0.0 - - - name: Pull docker image - if: ${{ !env.GIT_DIFF }} - run: | - docker pull nfcore/example:dev - docker tag nfcore/example:dev nfcore/example:1.0.0 """ passed = [] failed = [] @@ -103,44 +72,6 @@ def actions_ci(self): else: passed.append("'.github/workflows/ci.yml' is triggered on expected events") - # Check that we're pulling the right docker image and tagging it properly - if self.nf_config.get("process.container", ""): - docker_notag = re.sub(r":(?:[\.\d]+|dev)$", "", self.nf_config.get("process.container", "").strip("\"'")) - docker_withtag = self.nf_config.get("process.container", "").strip("\"'") - - # docker build - docker_build_cmd = f"docker build --no-cache . -t {docker_withtag}" - try: - steps = ciwf["jobs"]["test"]["steps"] - if not any(docker_build_cmd in step["run"] for step in steps if "run" in step.keys()): - raise AssertionError() - except (AssertionError, KeyError, TypeError): - failed.append(f"CI is not building the correct docker image. 
Should be: `{docker_build_cmd}`") - else: - passed.append(f"CI is building the correct docker image: `{docker_build_cmd}`") - - # docker pull - docker_pull_cmd = f"docker pull {docker_notag}:dev" - try: - steps = ciwf["jobs"]["test"]["steps"] - if not any(docker_pull_cmd in step["run"] for step in steps if "run" in step.keys()): - raise AssertionError() - except (AssertionError, KeyError, TypeError): - failed.append(f"CI is not pulling the correct docker image. Should be: `{docker_pull_cmd}`") - else: - passed.append(f"CI is pulling the correct docker image: {docker_pull_cmd}") - - # docker tag - docker_tag_cmd = f"docker tag {docker_notag}:dev {docker_withtag}" - try: - steps = ciwf["jobs"]["test"]["steps"] - if not any(docker_tag_cmd in step["run"] for step in steps if "run" in step.keys()): - raise AssertionError() - except (AssertionError, KeyError, TypeError): - failed.append(f"CI is not tagging docker image correctly. Should be: `{docker_tag_cmd}`") - else: - passed.append(f"CI is tagging docker image correctly: {docker_tag_cmd}") - # Check that we are testing the minimum nextflow version try: nxf_ver = ciwf["jobs"]["test"]["strategy"]["matrix"]["NXF_VER"] diff --git a/nf_core/lint/actions_schema_validation.py b/nf_core/lint/actions_schema_validation.py index 7ded008cfc..9d49b84c6b 100644 --- a/nf_core/lint/actions_schema_validation.py +++ b/nf_core/lint/actions_schema_validation.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import glob import logging import os diff --git a/nf_core/lint/files_exist.py b/nf_core/lint/files_exist.py index a65f28f11d..eb8c04916a 100644 --- a/nf_core/lint/files_exist.py +++ b/nf_core/lint/files_exist.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import logging import os @@ -73,8 +71,9 @@ def files_exist(self): .github/workflows/awstest.yml .github/workflows/awsfulltest.yml lib/WorkflowPIPELINE.groovy + pyproject.toml - Files that *must not* be present: + Files that *must not* be present, due to being renamed or removed in the template: .. code-block:: bash @@ -89,6 +88,9 @@ def files_exist(self): docs/images/nf-core-PIPELINE_logo.png .markdownlint.yml .yamllint.yml + lib/Checks.groovy + lib/Completion.groovy + lib/Workflow.groovy Files that *should not* be present: @@ -174,6 +176,7 @@ def files_exist(self): [os.path.join(".github", "workflows", "awsfulltest.yml")], [os.path.join("lib", f"Workflow{short_name[0].upper()}{short_name[1:]}.groovy")], ["modules.json"], + ["pyproject.toml"], ] # List of strings. Fails / warns if any of the strings exist. @@ -189,6 +192,9 @@ def files_exist(self): os.path.join("docs", "images", f"nf-core-{short_name}_logo.png"), ".markdownlint.yml", ".yamllint.yml", + os.path.join("lib", "Checks.groovy"), + os.path.join("lib", "Completion.groovy"), + os.path.join("lib", "Workflow.groovy"), ] files_warn_ifexists = [".travis.yml"] diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py index 7c82d9961b..c0be64d0d7 100644 --- a/nf_core/lint/files_unchanged.py +++ b/nf_core/lint/files_unchanged.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import filecmp import logging import os @@ -50,6 +48,7 @@ def files_unchanged(self): .gitignore .prettierignore + pyproject.toml .. tip:: You can configure the ``nf-core lint`` tests to ignore any of these checks by setting the ``files_unchanged`` key as follows in your ``.nf-core.yml`` config file. 
For example: @@ -110,7 +109,7 @@ def files_unchanged(self): [os.path.join("lib", "NfcoreTemplate.groovy")], ] files_partial = [ - [".gitignore", ".prettierignore"], + [".gitignore", ".prettierignore", "pyproject.toml"], ] # Only show error messages from pipeline creation @@ -148,7 +147,6 @@ def _tf(file_path): # Files that must be completely unchanged from template for files in files_exact: - # Ignore if file specified in linting config ignore_files = self.lint_config.get("files_unchanged", []) if any([f in ignore_files for f in files]): @@ -178,7 +176,6 @@ def _tf(file_path): # Files that can be added to, but that must contain the template contents for files in files_partial: - # Ignore if file specified in linting config ignore_files = self.lint_config.get("files_unchanged", []) if any([f in ignore_files for f in files]): diff --git a/nf_core/lint/merge_markers.py b/nf_core/lint/merge_markers.py index 75fbf931bf..f33a5095d8 100644 --- a/nf_core/lint/merge_markers.py +++ b/nf_core/lint/merge_markers.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import fnmatch import io import logging diff --git a/nf_core/lint/modules_json.py b/nf_core/lint/modules_json.py index 8b3e00b945..dd0a59d558 100644 --- a/nf_core/lint/modules_json.py +++ b/nf_core/lint/modules_json.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - from pathlib import Path from nf_core.modules.modules_json import ModulesJson @@ -28,24 +26,25 @@ def modules_json(self): for repo in modules_json_dict["repos"].keys(): # Check if the modules.json has been updated to keep the - if "modules" not in modules_json_dict["repos"][repo] or "git_url" not in modules_json_dict["repos"][repo]: + if "modules" not in modules_json_dict["repos"][repo] or not repo.startswith("http"): failed.append( "Your `modules.json` file is outdated. " - "Please remove it and reinstall it by running any module command." + "It will be automatically generated by running any module command." ) continue - for module, module_entry in modules_json_dict["repos"][repo]["modules"].items(): - if not Path(modules_dir, repo, module).exists(): - failed.append( - f"Entry for `{Path(repo, module)}` found in `modules.json` but module is not installed in " - "pipeline." - ) - all_modules_passed = False - if module_entry.get("branch") is None: - failed.append(f"Entry for `{Path(repo, module)}` is missing branch information.") - if module_entry.get("git_sha") is None: - failed.append(f"Entry for `{Path(repo, module)}` is missing version information.") + for dir in modules_json_dict["repos"][repo]["modules"].keys(): + for module, module_entry in modules_json_dict["repos"][repo]["modules"][dir].items(): + if not Path(modules_dir, dir, module).exists(): + failed.append( + f"Entry for `{Path(modules_dir, dir, module)}` found in `modules.json` but module is not installed in " + "pipeline." 
+ ) + all_modules_passed = False + if module_entry.get("branch") is None: + failed.append(f"Entry for `{Path(modules_dir, dir, module)}` is missing branch information.") + if module_entry.get("git_sha") is None: + failed.append(f"Entry for `{Path(modules_dir, dir, module)}` is missing version information.") if all_modules_passed: passed.append("Only installed modules found in `modules.json`") else: diff --git a/nf_core/lint/modules_structure.py b/nf_core/lint/modules_structure.py new file mode 100644 index 0000000000..f0e13e0346 --- /dev/null +++ b/nf_core/lint/modules_structure.py @@ -0,0 +1,31 @@ +import logging +import os +from pathlib import Path + +log = logging.getLogger(__name__) + + +def modules_structure(self): + """ + Check that the structure of the modules directory in a pipeline is the correct one: + modules/nf-core/TOOL/SUBTOOL + + Prior to nf-core/tools release 2.6 the directory structure had an additional level of nesting: + modules/nf-core/modules/TOOL/SUBTOOL + """ + wrong_location_modules = [] + for directory, _, files in os.walk(Path(self.wf_path, "modules")): + if "main.nf" in files: + module_path = Path(directory).relative_to(Path(self.wf_path, "modules")) + parts = module_path.parts + # Flag modules that sit below an extra 'modules' level (the pre-2.6 layout) + if parts[1] == "modules": + wrong_location_modules.append(module_path) + # If there are modules installed in the wrong location + failed = [] + passed = [] + if len(wrong_location_modules) > 0: + failed = ["modules directory structure is outdated. Should be 'modules/nf-core/TOOL/SUBTOOL'"] + else: + passed = ["modules directory structure is correct 'modules/nf-core/TOOL/SUBTOOL'"] + return {"passed": passed, "warned": [], "failed": failed, "ignored": []} diff --git a/nf_core/lint/multiqc_config.py b/nf_core/lint/multiqc_config.py index 37580a1f11..3378efce5f 100644 --- a/nf_core/lint/multiqc_config.py +++ b/nf_core/lint/multiqc_config.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import os import yaml diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index 635e33cfb5..79bce3e7f1 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import logging import os import re @@ -143,12 +141,12 @@ def nextflow_config(self): ] # Old depreciated vars - fail if present config_fail_ifdefined = [ - "params.version", "params.nf_required_version", "params.container", "params.singleEnd", "params.igenomesIgnore", "params.name", + "params.enable_conda", ] # Remove field that should be ignored according to the linting config diff --git a/nf_core/lint/pipeline_name_conventions.py b/nf_core/lint/pipeline_name_conventions.py index e1ecad0be2..7fb6ffca0f 100644 --- a/nf_core/lint/pipeline_name_conventions.py +++ b/nf_core/lint/pipeline_name_conventions.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - def pipeline_name_conventions(self): """Checks that the pipeline name adheres to nf-core conventions.
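The `modules_structure` test added above reduces to a single path rule: walk the pipeline's `modules/` tree and, for every directory holding a `main.nf`, flag it when the second path component is another `modules` level (the pre-2.6 nesting). A minimal standalone sketch of that rule, outside the lint framework and assuming a hypothetical pipeline directory `nf-core-testpipeline/`:

```python
# Minimal sketch of the modules_structure path check.
# "nf-core-testpipeline" is a hypothetical example directory, not part of this PR.
import os
from pathlib import Path

modules_root = Path("nf-core-testpipeline", "modules")

outdated = []
for directory, _, files in os.walk(modules_root):
    if "main.nf" in files:
        parts = Path(directory).relative_to(modules_root).parts
        # Pre-2.6 layout carries an extra level: modules/nf-core/modules/TOOL/SUBTOOL
        if len(parts) > 1 and parts[1] == "modules":
            outdated.append("/".join(parts))

if outdated:
    print("Outdated module paths, expected modules/nf-core/TOOL/SUBTOOL:")
    for path in outdated:
        print(f"  modules/{path}")
else:
    print("modules directory structure is correct")
```

The real test wraps the same outcome in the `{"passed": ..., "warned": ..., "failed": ..., "ignored": ...}` dict that the lint framework expects, as shown in the new file above.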
diff --git a/nf_core/lint/pipeline_todos.py b/nf_core/lint/pipeline_todos.py index 91a7cf6307..890e227fa1 100644 --- a/nf_core/lint/pipeline_todos.py +++ b/nf_core/lint/pipeline_todos.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import fnmatch import io import logging diff --git a/nf_core/lint/readme.py b/nf_core/lint/readme.py index 99def0a204..daf92f7932 100644 --- a/nf_core/lint/readme.py +++ b/nf_core/lint/readme.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import os import re @@ -42,7 +40,7 @@ def readme(self): if "nextflow_badge" not in ignore_configs: # Check that there is a readme badge showing the minimum required version of Nextflow - # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg)](https://www.nextflow.io/) + # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A522.10.1-23aa62.svg)](https://www.nextflow.io/) # and that it has the correct version nf_badge_re = r"\[!\[Nextflow\]\(https://img\.shields\.io/badge/nextflow%20DSL2-!?(?:%E2%89%A5|%3E%3D)([\d\.]+)-23aa62\.svg\)\]\(https://www\.nextflow\.io/\)" match = re.search(nf_badge_re, content) @@ -65,7 +63,7 @@ def readme(self): warned.append("README did not have a Nextflow minimum version badge.") # Check that the minimum version mentioned in the quick start section is consistent - # Looking for: "1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.10.3`)" + # Looking for: "1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=22.10.1`)" nf_version_re = r"1\.\s*Install\s*\[`Nextflow`\]\(https://www.nextflow.io/docs/latest/getstarted.html#installation\)\s*\(`>=(\d*\.\d*\.\d*)`\)" match = re.search(nf_version_re, content) if match: diff --git a/nf_core/lint/schema_description.py b/nf_core/lint/schema_description.py index 3a670e5f70..ca22f266ab 100644 --- a/nf_core/lint/schema_description.py +++ b/nf_core/lint/schema_description.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import nf_core.schema diff --git a/nf_core/lint/schema_lint.py b/nf_core/lint/schema_lint.py index f7c1b11048..178063d5dd 100644 --- a/nf_core/lint/schema_lint.py +++ b/nf_core/lint/schema_lint.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import logging import nf_core.schema diff --git a/nf_core/lint/schema_params.py b/nf_core/lint/schema_params.py index 6b32535738..9280fe4703 100644 --- a/nf_core/lint/schema_params.py +++ b/nf_core/lint/schema_params.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import nf_core.schema diff --git a/nf_core/lint/template_strings.py b/nf_core/lint/template_strings.py index 17886cec3f..fb1f0f32e5 100644 --- a/nf_core/lint/template_strings.py +++ b/nf_core/lint/template_strings.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import io import mimetypes import re @@ -26,7 +24,6 @@ def template_strings(self): # Loop through files, searching for string num_matches = 0 for fn in self.files: - # Skip binary files binary_ftypes = ["image", "application/java-archive"] (ftype, encoding) = mimetypes.guess_type(fn) diff --git a/nf_core/lint/version_consistency.py b/nf_core/lint/version_consistency.py index 89a8751af6..fa5b50de01 100644 --- a/nf_core/lint/version_consistency.py +++ b/nf_core/lint/version_consistency.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import os diff --git a/nf_core/lint_utils.py b/nf_core/lint_utils.py index ffb3bdf7b3..c2fd75d375 100644 --- a/nf_core/lint_utils.py +++ b/nf_core/lint_utils.py @@ -1,4 +1,7 @@ +import json import logging +import subprocess +from pathlib import Path import rich 
from rich.console import Console @@ -33,17 +36,61 @@ def print_joint_summary(lint_obj, module_lint_obj): console.print(table) -def print_fixes(lint_obj, module_lint_obj): +def print_fixes(lint_obj): """Prints available and applied fixes""" - if len(lint_obj.could_fix): - fix_cmd = "nf-core lint {} --fix {}".format( - "" if lint_obj.wf_path == "." else f"--dir {lint_obj.wf_path}", " --fix ".join(lint_obj.could_fix) - ) + if lint_obj.could_fix: + fix_flags = "".join([f" --fix {fix}" for fix in lint_obj.could_fix]) + wf_dir = "" if lint_obj.wf_path == "." else f"--dir {lint_obj.wf_path}" + fix_cmd = f"nf-core lint {wf_dir} {fix_flags}" console.print( - f"\nTip: Some of these linting errors can automatically be resolved with the following command:\n\n[blue] {fix_cmd}\n" + "\nTip: Some of these linting errors can automatically be resolved with the following command:\n\n" + f"[blue] {fix_cmd}\n" ) if len(lint_obj.fix): console.print( - "Automatic fixes applied. Please check with 'git diff' and revert any changes you do not want with 'git checkout '." + "Automatic fixes applied. " + "Please check with 'git diff' and revert any changes you do not want with 'git checkout '." ) + + +def run_prettier_on_file(file): + """Run the pre-commit hook prettier on a file. + + Args: + file (Path | str): A file identifier as a string or pathlib.Path. + + Warns: + If Prettier is not installed, a warning is logged. + """ + + nf_core_pre_commit_config = Path(nf_core.__file__).parent / ".pre-commit-prettier-config.yaml" + try: + subprocess.run( + ["pre-commit", "run", "--config", nf_core_pre_commit_config, "prettier", "--files", file], + capture_output=True, + check=True, + ) + except subprocess.CalledProcessError as e: + if ": SyntaxError: " in e.stdout.decode(): + log.critical(f"Can't format {file} because it has a syntax error.\n{e.stdout.decode()}") + elif "files were modified by this hook" in e.stdout.decode(): + all_lines = [line for line in e.stdout.decode().split("\n")] + files = "\n".join(all_lines[3:]) + log.debug(f"The following files were modified by prettier:\n {files}") + elif e.stderr.decode(): + log.warning( + "There was an error running the prettier pre-commit hook.\n" + f"STDOUT: {e.stdout.decode()}\nSTDERR: {e.stderr.decode()}" + ) + + +def dump_json_with_prettier(file_name, file_content): + """Dump a JSON file and run prettier on it. + Args: + file_name (Path | str): A file identifier as a string or pathlib.Path. + file_content (dict): Content to dump into the JSON file + """ + with open(file_name, "w") as fh: + json.dump(file_content, fh, indent=4) + run_prettier_on_file(file_name) diff --git a/nf_core/list.py b/nf_core/list.py index 62c925ee47..77a9ac3919 100644 --- a/nf_core/list.py +++ b/nf_core/list.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """Lists available nf-core pipelines and versions.""" from __future__ import print_function @@ -74,7 +73,7 @@ def get_local_wf(workflow, revision=None): return local_wf.local_path -class Workflows(object): +class Workflows: """Workflow container class. Is used to collect local and remote nf-core pipelines. Pipelines @@ -273,7 +272,7 @@ def print_json(self): ) -class RemoteWorkflow(object): +class RemoteWorkflow: """A information container for a remote workflow. 
Args: @@ -309,7 +308,7 @@ def __init__(self, data): ) -class LocalWorkflow(object): +class LocalWorkflow: """Class to handle local workflows pulled by nextflow""" def __init__(self, name): @@ -329,7 +328,6 @@ def get_local_nf_workflow_details(self): """Get full details about a local cached workflow""" if self.local_path is None: - # Try to guess the local cache directory if len(os.environ.get("NXF_ASSETS", "")) > 0: nf_wfdir = os.path.join(os.environ.get("NXF_ASSETS"), self.full_name) diff --git a/nf_core/module-template/modules/main.nf b/nf_core/module-template/modules/main.nf index 8a61ea32d1..895ad8f68c 100644 --- a/nf_core/module-template/modules/main.nf +++ b/nf_core/module-template/modules/main.nf @@ -1,5 +1,6 @@ +{%- if not_empty_template -%} // TODO nf-core: If in doubt look at other nf-core/modules to see how we are doing things! :) -// https://github.com/nf-core/modules/tree/master/modules +// https://github.com/nf-core/modules/tree/master/modules/nf-core/ // You can also ask for help via your pull request or on the #modules channel on the nf-core Slack workspace: // https://nf-co.re/join // TODO nf-core: A module file SHOULD only define input and output files as command-line parameters. @@ -14,33 +15,46 @@ // bwa mem | samtools view -B -T ref.fasta // TODO nf-core: Optional inputs are not currently supported by Nextflow. However, using an empty // list (`[]`) instead of a file can be used to work around this issue. +{%- endif %} -process {{ tool_name_underscore|upper }} { +process {{ component_name_underscore|upper }} { tag {{ '"$meta.id"' if has_meta else "'$bam'" }} label '{{ process_label }}' + {% if not_empty_template -%} // TODO nf-core: List required Conda package(s). // Software MUST be pinned to channel (i.e. "bioconda"), version (i.e. "1.10"). // For Conda, the build (i.e. "h9402c20_2") must be EXCLUDED to support installation on different operating systems. // TODO nf-core: See section in main README for further information regarding finding and adding container addresses to the section below. - conda (params.enable_conda ? "{{ bioconda if bioconda else 'YOUR-TOOL-HERE' }}" : null) + {% endif -%} + conda "{{ bioconda if bioconda else 'YOUR-TOOL-HERE' }}" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? '{{ singularity_container if singularity_container else 'https://depot.galaxyproject.org/singularity/YOUR-TOOL-HERE' }}': '{{ docker_container if docker_container else 'quay.io/biocontainers/YOUR-TOOL-HERE' }}' }" input: + {% if not_empty_template -%} // TODO nf-core: Where applicable all sample-specific information e.g. "id", "single_end", "read_group" // MUST be provided as an input via a Groovy Map called "meta". // This information may not be required in some instances e.g. indexing reference genome files: - // https://github.com/nf-core/modules/blob/master/modules/bwa/index/main.nf + // https://github.com/nf-core/modules/blob/master/modules/nf-core/bwa/index/main.nf // TODO nf-core: Where applicable please provide/convert compressed files as input/output // e.g. "*.fastq.gz" and NOT "*.fastq", "*.bam" and NOT "*.sam" etc. 
{{ 'tuple val(meta), path(bam)' if has_meta else 'path bam' }} + {%- else -%} + {{ 'tuple val(meta), path(input)' if has_meta else 'path input' }} + {%- endif %} output: + {% if not_empty_template -%} // TODO nf-core: Named file extensions MUST be emitted for ALL output channels {{ 'tuple val(meta), path("*.bam")' if has_meta else 'path "*.bam"' }}, emit: bam + {%- else -%} + {{ 'tuple val(meta), path("*")' if has_meta else 'path "*"' }}, emit: output + {%- endif %} + {% if not_empty_template -%} // TODO nf-core: List additional required output channels/values here + {%- endif %} path "versions.yml" , emit: versions when: @@ -51,16 +65,19 @@ process {{ tool_name_underscore|upper }} { {% if has_meta -%} def prefix = task.ext.prefix ?: "${meta.id}" {%- endif %} + {% if not_empty_template -%} // TODO nf-core: Where possible, a command MUST be provided to obtain the version number of the software e.g. 1.10 // If the software is unable to output a version number on the command-line then it can be manually specified - // e.g. https://github.com/nf-core/modules/blob/master/modules/homer/annotatepeaks/main.nf + // e.g. https://github.com/nf-core/modules/blob/master/modules/nf-core/homer/annotatepeaks/main.nf // Each software used MUST provide the software name and version number in the YAML version file (versions.yml) // TODO nf-core: It MUST be possible to pass additional parameters to the tool as a command-line string via the "task.ext.args" directive // TODO nf-core: If the tool supports multi-threading then you MUST provide the appropriate parameter // using the Nextflow "task" variable e.g. "--threads $task.cpus" // TODO nf-core: Please replace the example samtools command below with your module's command // TODO nf-core: Please indent the command appropriately (4 spaces!!) to help with readability ;) + {%- endif %} """ + {% if not_empty_template -%} samtools \\ sort \\ $args \\ @@ -70,6 +87,7 @@ process {{ tool_name_underscore|upper }} { -T $prefix \\ {%- endif %} $bam + {%- endif %} cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/nf_core/module-template/modules/meta.yml b/nf_core/module-template/modules/meta.yml index 23c8b06da8..9b42bb3bdf 100644 --- a/nf_core/module-template/modules/meta.yml +++ b/nf_core/module-template/modules/meta.yml @@ -1,11 +1,15 @@ -name: "{{ tool_name_underscore }}" +name: "{{ component_name_underscore }}" +{% if not_empty_template -%} ## TODO nf-core: Add a description of the module and list keywords +{% endif -%} description: write your description here keywords: - sort tools: - - "{{ tool }}": + - "{{ component }}": + {% if not_empty_template -%} ## TODO nf-core: Add a description and other details for the software below + {% endif -%} description: "{{ tool_description }}" homepage: "{{ tool_doc_url }}" documentation: "{{ tool_doc_url }}" @@ -13,7 +17,9 @@ tools: doi: "" licence: "{{ tool_licence }}" +{% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as input +{% endif -%} input: #{% if has_meta %} Only when we have meta - meta: @@ -21,14 +27,18 @@ input: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - # {% endif %} + {% endif %} + {% if not_empty_template -%} ## TODO nf-core: Delete / customise this example input - - bam: - type: file - description: BAM/CRAM/SAM file - pattern: "*.{bam,cram,sam}" + {%- endif %} + - {{ 'bam:' if not_empty_template else "input:" }} + type: file + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} +{% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as output +{% endif -%} output: #{% if has_meta -%} Only when we have meta - meta: @@ -36,16 +46,18 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - # {% endif %} + {% endif %} - versions: type: file description: File containing software versions pattern: "versions.yml" + {% if not_empty_template -%} ## TODO nf-core: Delete / customise this example output - - bam: - type: file - description: Sorted BAM/CRAM/SAM file - pattern: "*.{bam,cram,sam}" + {%- endif %} + - {{ 'bam:' if not_empty_template else "output:" }} + type: file + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} authors: - "{{ author }}" diff --git a/nf_core/module-template/tests/main.nf b/nf_core/module-template/tests/main.nf index a7be132962..fcb7195fe4 100644 --- a/nf_core/module-template/tests/main.nf +++ b/nf_core/module-template/tests/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { {{ tool_name_underscore|upper }} } from '../../../{{ "../" if subtool else "" }}modules/{{ tool_dir }}/main.nf' +include { {{ component_name_underscore|upper }} } from '../../../../{{ "../" if subtool else "" }}modules/{{ org }}/{{ component_dir }}/main.nf' -workflow test_{{ tool_name_underscore }} { +workflow test_{{ component_name_underscore }} { {% if has_meta %} input = [ [ id:'test', single_end:false ], // meta map @@ -14,5 +14,5 @@ workflow test_{{ tool_name_underscore }} { input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) {%- endif %} - {{ tool_name_underscore|upper }} ( input ) + {{ component_name_underscore|upper }} ( input ) } diff --git a/nf_core/module-template/tests/test.yml b/nf_core/module-template/tests/test.yml index b0eb645846..4f38dec298 100644 --- a/nf_core/module-template/tests/test.yml +++ b/nf_core/module-template/tests/test.yml @@ -1,14 +1,18 @@ +{%- if not_empty_template -%} ## TODO nf-core: Please run the following command to build this file: # nf-core modules create-test-yml {{ tool }}{%- if subtool %}/{{ subtool }}{%- endif %} -- name: "{{ tool }}{{ ' '+subtool if subtool else '' }}" - command: nextflow run ./tests/modules/{{ tool_dir }} -entry test_{{ tool_name_underscore }} -c ./tests/config/nextflow.config -c ./tests/modules/{{ tool_dir }}/nextflow.config +{% endif -%} +- name: "{{ component }}{{ ' '+subtool if subtool else '' }}" + command: nextflow run ./tests/modules/{{ org }}/{{ component_dir }} -entry test_{{ component_name_underscore }} -c ./tests/config/nextflow.config -c ./tests/modules/{{ org }}/{{ component_dir }}/nextflow.config tags: - - "{{ tool }}" - # {%- if subtool %} - - "{{ tool }}/{{ subtool }}" - # {%- endif %} + - "{{ component }}{% if subtool -%}" + - "{{ component }}/{{ subtool }}{%- endif %}" files: - - path: "output/{{ tool }}/test.bam" + {% if not_empty_template -%} + - path: "output/{{ component }}/test.bam" md5sum: 
e667c7caad0bc4b7ac383fd023c654fc - - path: output/{{ tool }}/versions.yml + - path: "output/{{ component }}/versions.yml" md5sum: a01fe51bc4c6a3a6226fbf77b2c7cf3b + {% else -%} + - path: "" + {%- endif %} diff --git a/nf_core/modules/__init__.py b/nf_core/modules/__init__.py index ad3306ceef..060b39124b 100644 --- a/nf_core/modules/__init__.py +++ b/nf_core/modules/__init__.py @@ -4,10 +4,10 @@ from .install import ModuleInstall from .lint import ModuleLint from .list import ModuleList -from .module_test import ModulesTest -from .module_utils import ModuleException from .modules_json import ModulesJson from .modules_repo import ModulesRepo +from .modules_test import ModulesTest +from .modules_utils import ModuleException from .mulled import MulledImageNameGenerator from .patch import ModulePatch from .remove import ModuleRemove diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index a30f93eda6..6d61d4c750 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -7,6 +7,7 @@ from __future__ import print_function import logging +import os import re import questionary @@ -15,19 +16,18 @@ from rich.markdown import Markdown from rich.table import Table -import nf_core.modules.module_utils +import nf_core.modules.modules_utils import nf_core.utils +from nf_core.components.components_command import ComponentCommand from nf_core.utils import plural_s as _s from nf_core.utils import rich_force_colors -from .modules_command import ModuleCommand - log = logging.getLogger(__name__) -class ModuleVersionBumper(ModuleCommand): +class ModuleVersionBumper(ComponentCommand): def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False): - super().__init__(pipeline_dir, remote_url, branch, no_pull) + super().__init__("modules", pipeline_dir, remote_url, branch, no_pull) self.up_to_date = None self.updated = None @@ -54,18 +54,20 @@ def bump_versions(self, module=None, all_modules=False, show_uptodate=False): self.ignored = [] self.show_up_to_date = show_uptodate + # Check modules directory structure + self.check_modules_structure() + # Verify that this is not a pipeline - self.dir, repo_type = nf_core.modules.module_utils.get_repo_type(self.dir) - if not repo_type == "modules": - raise nf_core.modules.module_utils.ModuleException( + if not self.repo_type == "modules": + raise nf_core.modules.modules_utils.ModuleException( "This command only works on the nf-core/modules repository, not on pipelines!" ) # Get list of all modules - _, nfcore_modules = nf_core.modules.module_utils.get_installed_modules(self.dir) + _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.dir) # Load the .nf-core.yml config - self.tools_config = nf_core.utils.load_tools_config(self.dir) + _, self.tools_config = nf_core.utils.load_tools_config(self.dir) # Prompt for module or all if module is None and not all_modules: @@ -88,18 +90,19 @@ def bump_versions(self, module=None, all_modules=False, show_uptodate=False): if module: self.show_up_to_date = True if all_modules: - raise nf_core.modules.module_utils.ModuleException( + raise nf_core.modules.modules_utils.ModuleException( "You cannot specify a tool and request all tools to be bumped." 
) nfcore_modules = [m for m in nfcore_modules if m.module_name == module] if len(nfcore_modules) == 0: - raise nf_core.modules.module_utils.ModuleException(f"Could not find the specified module: '{module}'") + raise nf_core.modules.modules_utils.ModuleException(f"Could not find the specified module: '{module}'") progress_bar = rich.progress.Progress( "[bold blue]{task.description}", rich.progress.BarColumn(bar_width=None), "[magenta]{task.completed} of {task.total}[reset] » [bold yellow]{task.fields[test_name]}", transient=True, + disable=os.environ.get("HIDE_PROGRESS", None) is not None, ) with progress_bar: bump_progress = progress_bar.add_task( @@ -184,7 +187,6 @@ def bump_module_version(self, module): found_match = False newcontent = [] for line in content.splitlines(): - # Match the pattern matches_pattern = re.findall(rf"^.*{pattern[0]}.*$", line) if matches_pattern: diff --git a/nf_core/modules/create.py b/nf_core/modules/create.py index 9ea80cc7c5..b5368130ce 100644 --- a/nf_core/modules/create.py +++ b/nf_core/modules/create.py @@ -1,362 +1,32 @@ -#!/usr/bin/env python -""" -The ModuleCreate class handles generating of module templates -""" - -from __future__ import print_function - -import glob -import json import logging -import os -import re -import subprocess - -import jinja2 -import questionary -import rich -import yaml -from packaging.version import parse as parse_version -import nf_core -import nf_core.modules.module_utils -import nf_core.utils +from nf_core.components.create import ComponentCreate log = logging.getLogger(__name__) -class ModuleCreate(object): +class ModuleCreate(ComponentCreate): def __init__( self, - directory=".", - tool="", + pipeline_dir, + component="", author=None, process_label=None, has_meta=None, force=False, conda_name=None, conda_version=None, - repo_type=None, + empty_template=False, ): - self.directory = directory - self.tool = tool - self.author = author - self.process_label = process_label - self.has_meta = has_meta - self.force_overwrite = force - self.subtool = None - self.tool_conda_name = conda_name - self.tool_conda_version = conda_version - self.tool_licence = None - self.repo_type = repo_type - self.tool_licence = "" - self.tool_description = "" - self.tool_doc_url = "" - self.tool_dev_url = "" - self.bioconda = None - self.singularity_container = None - self.docker_container = None - self.file_paths = {} - - def create(self): - """ - Create a new DSL2 module from the nf-core template. - - Tool should be named just or - e.g fastqc or samtools/sort, respectively. - - If is a pipeline, this function creates a file called: - '/modules/local/tool.nf' - OR - '/modules/local/tool_subtool.nf' - - If is a clone of nf-core/modules, it creates or modifies the following files: - - modules/modules/tool/subtool/ - * main.nf - * meta.yml - modules/tests/modules/tool/subtool/ - * main.nf - * test.yml - * nextflow.config - tests/config/pytest_modules.yml - - The function will attempt to automatically find a Bioconda package called - and matching Docker / Singularity images from BioContainers. 
- """ - - # Check whether the given directory is a nf-core pipeline or a clone of nf-core/modules - try: - self.directory, self.repo_type = nf_core.modules.module_utils.get_repo_type(self.directory, self.repo_type) - except LookupError as e: - raise UserWarning(e) - log.info(f"Repository type: [blue]{self.repo_type}") - if self.directory != ".": - log.info(f"Base directory: '{self.directory}'") - - log.info( - "[yellow]Press enter to use default values [cyan bold](shown in brackets)[/] [yellow]or type your own responses. " - "ctrl+click [link=https://youtu.be/dQw4w9WgXcQ]underlined text[/link] to open links." + super().__init__( + "modules", + pipeline_dir, + component, + author, + process_label, + has_meta, + force, + conda_name, + conda_version, + empty_template, ) - - # Collect module info via prompt if empty or invalid - if self.tool is None: - self.tool = "" - while self.tool == "" or re.search(r"[^a-z\d/]", self.tool) or self.tool.count("/") > 0: - - # Check + auto-fix for invalid chacters - if re.search(r"[^a-z\d/]", self.tool): - log.warning("Tool/subtool name must be lower-case letters only, with no punctuation") - tool_clean = re.sub(r"[^a-z\d/]", "", self.tool.lower()) - if rich.prompt.Confirm.ask(f"[violet]Change '{self.tool}' to '{tool_clean}'?"): - self.tool = tool_clean - else: - self.tool = "" - - # Split into tool and subtool - if self.tool.count("/") > 1: - log.warning("Tool/subtool can have maximum one '/' character") - self.tool = "" - elif self.tool.count("/") == 1: - self.tool, self.subtool = self.tool.split("/") - else: - self.subtool = None # Reset edge case: entered '/subtool' as name and gone round loop again - - # Prompt for new entry if we reset - if self.tool == "": - self.tool = rich.prompt.Prompt.ask("[violet]Name of tool/subtool").strip() - - # Determine the tool name - self.tool_name = self.tool - self.tool_dir = self.tool - - if self.subtool: - self.tool_name = f"{self.tool}/{self.subtool}" - self.tool_dir = os.path.join(self.tool, self.subtool) - - self.tool_name_underscore = self.tool_name.replace("/", "_") - - # Check existance of directories early for fast-fail - self.file_paths = self.get_module_dirs() - - # Try to find a bioconda package for 'tool' - while True: - try: - if self.tool_conda_name: - anaconda_response = nf_core.utils.anaconda_package(self.tool_conda_name, ["bioconda"]) - else: - anaconda_response = nf_core.utils.anaconda_package(self.tool, ["bioconda"]) - - if not self.tool_conda_version: - version = anaconda_response.get("latest_version") - if not version: - version = str(max([parse_version(v) for v in anaconda_response["versions"]])) - else: - version = self.tool_conda_version - - self.tool_licence = nf_core.utils.parse_anaconda_licence(anaconda_response, version) - self.tool_description = anaconda_response.get("summary", "") - self.tool_doc_url = anaconda_response.get("doc_url", "") - self.tool_dev_url = anaconda_response.get("dev_url", "") - if self.tool_conda_name: - self.bioconda = "bioconda::" + self.tool_conda_name + "=" + version - else: - self.bioconda = "bioconda::" + self.tool + "=" + version - log.info(f"Using Bioconda package: '{self.bioconda}'") - break - except (ValueError, LookupError) as e: - log.warning( - f"Could not find Conda dependency using the Anaconda API: '{self.tool_conda_name if self.tool_conda_name else self.tool}'" - ) - if rich.prompt.Confirm.ask("[violet]Do you want to enter a different Bioconda package name?"): - self.tool_conda_name = rich.prompt.Prompt.ask("[violet]Name of Bioconda package").strip() - 
-        # Try to get the container tag (only if bioconda package was found)
-        if self.bioconda:
-            try:
-                if self.tool_conda_name:
-                    self.docker_container, self.singularity_container = nf_core.utils.get_biocontainer_tag(
-                        self.tool_conda_name, version
-                    )
-                else:
-                    self.docker_container, self.singularity_container = nf_core.utils.get_biocontainer_tag(
-                        self.tool, version
-                    )
-                log.info(f"Using Docker container: '{self.docker_container}'")
-                log.info(f"Using Singularity container: '{self.singularity_container}'")
-            except (ValueError, LookupError) as e:
-                log.info(f"Could not find a Docker/Singularity container ({e})")
-
-        # Prompt for GitHub username
-        # Try to guess the current user if `gh` is installed
-        author_default = None
-        try:
-            with open(os.devnull, "w") as devnull:
-                gh_auth_user = json.loads(subprocess.check_output(["gh", "api", "/user"], stderr=devnull))
-            author_default = f"@{gh_auth_user['login']}"
-        except Exception as e:
-            log.debug(f"Could not find GitHub username using 'gh' cli command: [red]{e}")
-
-        # Regex to validate GitHub username: https://github.com/shinnn/github-username-regex
-        github_username_regex = re.compile(r"^@[a-zA-Z\d](?:[a-zA-Z\d]|-(?=[a-zA-Z\d])){0,38}$")
-        while self.author is None or not github_username_regex.match(self.author):
-            if self.author is not None and not github_username_regex.match(self.author):
-                log.warning("Does not look like a valid GitHub username (must start with an '@')!")
-            self.author = rich.prompt.Prompt.ask(
-                f"[violet]GitHub Username:[/]{' (@author)' if author_default is None else ''}",
-                default=author_default,
-            )
-
-        process_label_defaults = ["process_low", "process_medium", "process_high", "process_long"]
-        if self.process_label is None:
-            log.info(
-                "Provide an appropriate resource label for the process, taken from the "
-                "[link=https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config#L29]nf-core pipeline template[/link].\n"
-                "For example: {}".format(", ".join(process_label_defaults))
-            )
-        while self.process_label is None:
-            self.process_label = questionary.autocomplete(
-                "Process resource label:",
-                choices=process_label_defaults,
-                style=nf_core.utils.nfcore_question_style,
-                default="process_low",
-            ).unsafe_ask()
-
-        if self.has_meta is None:
-            log.info(
-                "Where applicable all sample-specific information e.g. 'id', 'single_end', 'read_group' "
-                "MUST be provided as an input via a Groovy Map called 'meta'. "
-                "This information may [italic]not[/] be required in some instances, for example "
-                "[link=https://github.com/nf-core/modules/blob/master/modules/bwa/index/main.nf]indexing reference genome files[/link]."
-            )
-        while self.has_meta is None:
-            self.has_meta = rich.prompt.Confirm.ask(
-                "[violet]Will the module require a meta map of sample information?", default=True
-            )
-
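The username prompt above validates input with the pattern from shinnn/github-username-regex, anchored with a leading `@`. A quick self-contained check of how that regex behaves (the candidate names are made up):

```python
# The same GitHub username pattern used in the prompt loop above.
import re

github_username_regex = re.compile(r"^@[a-zA-Z\d](?:[a-zA-Z\d]|-(?=[a-zA-Z\d])){0,38}$")

for candidate in ["@octocat", "@-leading-hyphen", "missing-at-sign"]:
    print(candidate, bool(github_username_regex.match(candidate)))
# @octocat True, @-leading-hyphen False, missing-at-sign False
```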
-        # Create module template with Jinja2
-        self.render_template()
-
-        if self.repo_type == "modules":
-            # Add entry to pytest_modules.yml
-            try:
-                with open(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"), "r") as fh:
-                    pytest_modules_yml = yaml.safe_load(fh)
-                if self.subtool:
-                    pytest_modules_yml[self.tool_name] = [
-                        f"modules/{self.tool}/{self.subtool}/**",
-                        f"tests/modules/{self.tool}/{self.subtool}/**",
-                    ]
-                else:
-                    pytest_modules_yml[self.tool_name] = [
-                        f"modules/{self.tool}/**",
-                        f"tests/modules/{self.tool}/**",
-                    ]
-                pytest_modules_yml = dict(sorted(pytest_modules_yml.items()))
-                with open(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"), "w") as fh:
-                    yaml.dump(pytest_modules_yml, fh, sort_keys=True, Dumper=nf_core.utils.custom_yaml_dumper())
-            except FileNotFoundError as e:
-                raise UserWarning("Could not open 'tests/config/pytest_modules.yml' file!")
-
-        new_files = list(self.file_paths.values())
-        if self.repo_type == "modules":
-            new_files.append(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"))
-        log.info("Created / edited following files:\n " + "\n ".join(new_files))
-
-    def render_template(self):
-        """
-        Create new module files with Jinja2.
-        """
-        # Run jinja2 for each file in the template folder
-        env = jinja2.Environment(loader=jinja2.PackageLoader("nf_core", "module-template"), keep_trailing_newline=True)
-        for template_fn, dest_fn in self.file_paths.items():
-            log.debug(f"Rendering template file: '{template_fn}'")
-            j_template = env.get_template(template_fn)
-            object_attrs = vars(self)
-            object_attrs["nf_core_version"] = nf_core.__version__
-            rendered_output = j_template.render(object_attrs)
-
-            # Write output to the target file
-            os.makedirs(os.path.dirname(dest_fn), exist_ok=True)
-            with open(dest_fn, "w") as fh:
-                log.debug(f"Writing output to: '{dest_fn}'")
-                fh.write(rendered_output)
-
-            # Mirror file permissions
-            template_stat = os.stat(os.path.join(os.path.dirname(nf_core.__file__), "module-template", template_fn))
-            os.chmod(dest_fn, template_stat.st_mode)
-
-    def get_module_dirs(self):
-        """Given a directory and a tool/subtool, set the file paths and check if they already exist
-
-        Returns dict: keys are relative paths to template files, vals are target paths.
-        """
-
-        file_paths = {}
-
-        if self.repo_type == "pipeline":
-            local_modules_dir = os.path.join(self.directory, "modules", "local")
-
-            # Check whether module file already exists
-            module_file = os.path.join(local_modules_dir, f"{self.tool_name}.nf")
-            if os.path.exists(module_file) and not self.force_overwrite:
-                raise UserWarning(f"Module file exists already: '{module_file}'.
Use '--force' to overwrite") - - # If a subtool, check if there is a module called the base tool name already - if self.subtool and os.path.exists(os.path.join(local_modules_dir, f"{self.tool}.nf")): - raise UserWarning(f"Module '{self.tool}' exists already, cannot make subtool '{self.tool_name}'") - - # If no subtool, check that there isn't already a tool/subtool - tool_glob = glob.glob(f"{local_modules_dir}/{self.tool}_*.nf") - if not self.subtool and tool_glob: - raise UserWarning( - f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{self.tool_name}'" - ) - - # Set file paths - file_paths[os.path.join("modules", "main.nf")] = module_file - - if self.repo_type == "modules": - software_dir = os.path.join(self.directory, "modules", self.tool_dir) - test_dir = os.path.join(self.directory, "tests", "modules", self.tool_dir) - - # Check if module directories exist already - if os.path.exists(software_dir) and not self.force_overwrite: - raise UserWarning(f"Module directory exists: '{software_dir}'. Use '--force' to overwrite") - - if os.path.exists(test_dir) and not self.force_overwrite: - raise UserWarning(f"Module test directory exists: '{test_dir}'. Use '--force' to overwrite") - - # If a subtool, check if there is a module called the base tool name already - parent_tool_main_nf = os.path.join(self.directory, "modules", self.tool, "main.nf") - parent_tool_test_nf = os.path.join(self.directory, "tests", "modules", self.tool, "main.nf") - if self.subtool and os.path.exists(parent_tool_main_nf): - raise UserWarning( - f"Module '{parent_tool_main_nf}' exists already, cannot make subtool '{self.tool_name}'" - ) - if self.subtool and os.path.exists(parent_tool_test_nf): - raise UserWarning( - f"Module '{parent_tool_test_nf}' exists already, cannot make subtool '{self.tool_name}'" - ) - - # If no subtool, check that there isn't already a tool/subtool - tool_glob = glob.glob(f"{os.path.join(self.directory, 'modules', self.tool)}/*/main.nf") - if not self.subtool and tool_glob: - raise UserWarning( - f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{self.tool_name}'" - ) - - # Set file paths - can be tool/ or tool/subtool/ so can't do in template directory structure - file_paths[os.path.join("modules", "main.nf")] = os.path.join(software_dir, "main.nf") - file_paths[os.path.join("modules", "meta.yml")] = os.path.join(software_dir, "meta.yml") - file_paths[os.path.join("tests", "main.nf")] = os.path.join(test_dir, "main.nf") - file_paths[os.path.join("tests", "test.yml")] = os.path.join(test_dir, "test.yml") - file_paths[os.path.join("tests", "nextflow.config")] = os.path.join(test_dir, "nextflow.config") - - return file_paths diff --git a/nf_core/modules/info.py b/nf_core/modules/info.py index 37e0b4db43..265bbb2678 100644 --- a/nf_core/modules/info.py +++ b/nf_core/modules/info.py @@ -1,271 +1,17 @@ import logging -import os -import questionary -import yaml -from rich import box -from rich.console import Group -from rich.markdown import Markdown -from rich.panel import Panel -from rich.table import Table -from rich.text import Text - -import nf_core.utils -from nf_core.modules.modules_json import ModulesJson - -from .module_utils import get_repo_type -from .modules_command import ModuleCommand -from .modules_repo import NF_CORE_MODULES_REMOTE +from nf_core.components.info import ComponentInfo log = logging.getLogger(__name__) -class ModuleInfo(ModuleCommand): - """ - Class to print information of a module. 
- - Attributes - ---------- - meta : YAML object - stores the information from meta.yml file - local_path : str - path of the local modules - remote_location : str - remote repository URL - local : bool - indicates if the module is locally installed or not - repo_type : str - repository type. Can be either 'pipeline' or 'modules' - modules_json : ModulesJson object - contains 'modules.json' file information from a pipeline - module : str - name of the tool to get information from - - Methods - ------- - init_mod_name(module) - Makes sure that we have a module name - get_module_info() - Given the name of a module, parse meta.yml and print usage help - get_local_yaml() - Attempt to get the meta.yml file from a locally installed module - get_remote_yaml() - Attempt to get the meta.yml file from a remote repo - generate_module_info_help() - Take the parsed meta.yml and generate rich help - """ - - def __init__(self, pipeline_dir, tool, remote_url, branch, no_pull): - super().__init__(pipeline_dir, remote_url, branch, no_pull) - self.meta = None - self.local_path = None - self.remote_location = None - self.local = None - - # Quietly check if this is a pipeline or not - if pipeline_dir: - try: - pipeline_dir, repo_type = get_repo_type(pipeline_dir, use_prompt=False) - log.debug(f"Found {repo_type} repo: {pipeline_dir}") - except UserWarning as e: - log.debug(f"Only showing remote info: {e}") - pipeline_dir = None - - if self.repo_type == "pipeline": - self.modules_json = ModulesJson(self.dir) - self.modules_json.check_up_to_date() - else: - self.modules_json = None - self.module = self.init_mod_name(tool) - - def init_mod_name(self, module): - """ - Makes sure that we have a module name before proceeding. - - Args: - module: str: Module name to check - """ - if module is None: - self.local = questionary.confirm( - "Is the module locally installed?", style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - if self.local: - if self.repo_type == "modules": - modules = self.get_modules_clone_modules() - else: - modules = self.modules_json.get_all_modules().get(self.modules_repo.fullname) - if modules is None: - raise UserWarning(f"No modules installed from '{self.modules_repo.remote_url}'") - else: - modules = self.modules_repo.get_avail_modules() - module = questionary.autocomplete( - "Please select a module", choices=modules, style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - while module not in modules: - log.info(f"'{module}' is not a valid module name") - module = questionary.autocomplete( - "Please select a new module", choices=modules, style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - - return module - - def get_module_info(self): - """Given the name of a module, parse meta.yml and print usage help.""" - - # Running with a local install, try to find the local meta - if self.local: - self.meta = self.get_local_yaml() - - # Either failed locally or in remote mode - if not self.meta: - self.meta = self.get_remote_yaml() - - # Could not find the meta - if self.meta is False: - raise UserWarning(f"Could not find module '{self.module}'") - - return self.generate_module_info_help() - - def get_local_yaml(self): - """Attempt to get the meta.yml file from a locally installed module. 
- - Returns: - dict or bool: Parsed meta.yml found, False otherwise - """ - - if self.repo_type == "pipeline": - # Try to find and load the meta.yml file - repo_name = self.modules_repo.fullname - module_base_path = os.path.join(self.dir, "modules", repo_name) - # Check that we have any modules installed from this repo - modules = self.modules_json.get_all_modules().get(repo_name) - if modules is None: - raise LookupError(f"No modules installed from {self.modules_repo.remote_url}") - - if self.module in modules: - mod_dir = os.path.join(module_base_path, self.module) - meta_fn = os.path.join(mod_dir, "meta.yml") - if os.path.exists(meta_fn): - log.debug(f"Found local file: {meta_fn}") - with open(meta_fn, "r") as fh: - self.local_path = mod_dir - return yaml.safe_load(fh) - - log.debug(f"Module '{self.module}' meta.yml not found locally") - else: - module_base_path = os.path.join(self.dir, "modules") - if self.module in os.listdir(module_base_path): - mod_dir = os.path.join(module_base_path, self.module) - meta_fn = os.path.join(mod_dir, "meta.yml") - if os.path.exists(meta_fn): - log.debug(f"Found local file: {meta_fn}") - with open(meta_fn, "r") as fh: - self.local_path = mod_dir - return yaml.safe_load(fh) - log.debug(f"Module '{self.module}' meta.yml not found locally") - - return None - - def get_remote_yaml(self): - """Attempt to get the meta.yml file from a remote repo. - - Returns: - dict or bool: Parsed meta.yml found, False otherwise - """ - # Check if our requested module is there - if self.module not in self.modules_repo.get_avail_modules(): - return False - - file_contents = self.modules_repo.get_meta_yml(self.module) - if file_contents is None: - return False - self.remote_location = self.modules_repo.remote_url - return yaml.safe_load(file_contents) - - def generate_module_info_help(self): - """Take the parsed meta.yml and generate rich help. 
- - Returns: - rich renderable - """ - - renderables = [] - - # Intro panel - intro_text = Text() - if self.local_path: - intro_text.append(Text.from_markup(f"Location: [blue]{self.local_path}\n")) - elif self.remote_location: - intro_text.append( - Text.from_markup( - ":globe_with_meridians: Repository: " - f"{ '[link={self.remote_location}]' if self.remote_location.startswith('http') else ''}" - f"{self.remote_location}" - f"{'[/link]' if self.remote_location.startswith('http') else '' }" - "\n" - ) - ) - - if self.meta.get("tools"): - tools_strings = [] - for tool in self.meta["tools"]: - for tool_name, tool_meta in tool.items(): - if "homepage" in tool_meta: - tools_strings.append(f"[link={tool_meta['homepage']}]{tool_name}[/link]") - else: - tools_strings.append(f"{tool_name}") - intro_text.append(Text.from_markup(f":wrench: Tools: {', '.join(tools_strings)}\n", style="dim")) - - if self.meta.get("description"): - intro_text.append(Text.from_markup(f":book: Description: {self.meta['description']}", style="dim")) - - renderables.append( - Panel( - intro_text, - title=f"[bold]Module: [green]{self.module}\n", - title_align="left", - ) - ) - - # Inputs - if self.meta.get("input"): - inputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) - inputs_table.add_column(":inbox_tray: Inputs") - inputs_table.add_column("Description") - inputs_table.add_column("Pattern", justify="right", style="green") - for input in self.meta["input"]: - for key, info in input.items(): - inputs_table.add_row( - f"[orange1 on black] {key} [/][dim i] ({info['type']})", - Markdown(info["description"] if info["description"] else ""), - info.get("pattern", ""), - ) - - renderables.append(inputs_table) - - # Outputs - if self.meta.get("output"): - outputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) - outputs_table.add_column(":outbox_tray: Outputs") - outputs_table.add_column("Description") - outputs_table.add_column("Pattern", justify="right", style="green") - for output in self.meta["output"]: - for key, info in output.items(): - outputs_table.add_row( - f"[orange1 on black] {key} [/][dim i] ({info['type']})", - Markdown(info["description"] if info["description"] else ""), - info.get("pattern", ""), - ) - - renderables.append(outputs_table) - - # Installation command - if self.remote_location: - cmd_base = "nf-core modules" - if self.remote_location != NF_CORE_MODULES_REMOTE: - cmd_base = f"nf-core modules --git-remote {self.remote_location}" - renderables.append( - Text.from_markup(f"\n :computer: Installation command: [magenta]{cmd_base} install {self.module}\n") - ) - - return Group(*renderables) +class ModuleInfo(ComponentInfo): + def __init__( + self, + pipeline_dir, + component_name, + remote_url=None, + branch=None, + no_pull=False, + ): + super().__init__("modules", pipeline_dir, component_name, remote_url, branch, no_pull) diff --git a/nf_core/modules/install.py b/nf_core/modules/install.py index 926d8e93a6..e1755cee98 100644 --- a/nf_core/modules/install.py +++ b/nf_core/modules/install.py @@ -1,19 +1,7 @@ -import logging -import os +from nf_core.components.install import ComponentInstall -import questionary -import nf_core.modules.module_utils -import nf_core.utils -from nf_core.modules.modules_json import ModulesJson - -from .modules_command import ModuleCommand -from .modules_repo import NF_CORE_MODULES_NAME - -log = logging.getLogger(__name__) - - -class ModuleInstall(ModuleCommand): +class ModuleInstall(ComponentInstall): def 
__init__(
     self,
     pipeline_dir,
@@ -23,107 +11,16 @@ def __init__(
     remote_url=None,
     branch=None,
     no_pull=False,
+    installed_by=False,
 ):
-        super().__init__(pipeline_dir, remote_url, branch, no_pull)
-        self.force = force
-        self.prompt = prompt
-        self.sha = sha
-
-    def install(self, module):
-        if self.repo_type == "modules":
-            log.error("You cannot install a module in a clone of nf-core/modules")
-            return False
-        # Check whether pipeline is valid
-        if not self.has_valid_directory():
-            return False
-
-        # Verify that 'modules.json' is consistent with the installed modules
-        modules_json = ModulesJson(self.dir)
-        modules_json.check_up_to_date()
-
-        if self.prompt and self.sha is not None:
-            log.error("Cannot use '--sha' and '--prompt' at the same time!")
-            return False
-
-        # Verify that the provided SHA exists in the repo
-        if self.sha:
-            if not self.modules_repo.sha_exists_on_branch(self.sha):
-                log.error(f"Commit SHA '{self.sha}' doesn't exist in '{self.modules_repo.fullname}'")
-                return False
-
-        if module is None:
-            module = questionary.autocomplete(
-                "Tool name:",
-                choices=self.modules_repo.get_avail_modules(),
-                style=nf_core.utils.nfcore_question_style,
-            ).unsafe_ask()
-
-        # Check that the supplied name is an available module
-        if module and module not in self.modules_repo.get_avail_modules():
-            log.error(f"Module '{module}' not found in list of available modules.")
-            log.info("Use the command 'nf-core modules list' to view available software")
-            return False
-
-        if not self.modules_repo.module_exists(module):
-            warn_msg = (
-                f"Module '{module}' not found in remote '{self.modules_repo.remote_url}' ({self.modules_repo.branch})"
-            )
-            log.warning(warn_msg)
-            return False
-
-        current_version = modules_json.get_module_version(module, self.modules_repo.fullname)
-
-        # Set the install folder based on the repository name
-        install_folder = os.path.join(self.dir, "modules", self.modules_repo.fullname)
-
-        # Compute the module directory
-        module_dir = os.path.join(install_folder, module)
-
-        # Check that the module is not already installed
-        if (current_version is not None and os.path.exists(module_dir)) and not self.force:
-
-            log.error("Module is already installed.")
-            repo_flag = (
-                "" if self.modules_repo.fullname == NF_CORE_MODULES_NAME else f"-g {self.modules_repo.remote_url} "
-            )
-            branch_flag = "" if self.modules_repo.branch == "master" else f"-b {self.modules_repo.branch} "
-
-            log.info(
-                f"To update '{module}' run 'nf-core modules {repo_flag}{branch_flag}update {module}'. To force reinstallation use '--force'"
-            )
-            return False
-
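For context on the update hint assembled just above: the `-g` and `-b` flags are only included when the module comes from a non-default remote or branch. A small sketch of that logic with made-up values (the GitLab remote and `fastqc` are illustrative, not taken from a real pipeline):

```python
# Sketch of the repo/branch flag assembly used in the removed install() above.
remote_url = "https://gitlab.com/nf-core/modules-test.git"  # example remote
branch = "main"  # example branch

repo_flag = "" if remote_url == "https://github.com/nf-core/modules.git" else f"-g {remote_url} "
branch_flag = "" if branch == "master" else f"-b {branch} "

print(f"To update 'fastqc' run 'nf-core modules {repo_flag}{branch_flag}update fastqc'.")
```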
-        if self.sha:
-            version = self.sha
-        elif self.prompt:
-            try:
-                version = nf_core.modules.module_utils.prompt_module_version_sha(
-                    module,
-                    installed_sha=current_version,
-                    modules_repo=self.modules_repo,
-                )
-            except SystemError as e:
-                log.error(e)
-                return False
-        else:
-            # Fetch the latest commit for the module
-            version = self.modules_repo.get_latest_module_version(module)
-
-        if self.force:
-            log.info(f"Removing installed version of '{self.modules_repo.fullname}/{module}'")
-            self.clear_module_dir(module, module_dir)
-
-        log.info(f"{'Rei' if self.force else 'I'}nstalling '{module}'")
-        log.debug(f"Installing module '{module}' at modules hash {version} from {self.modules_repo.remote_url}")
-
-        # Download module files
-        if not self.install_module_files(module, version, self.modules_repo, install_folder):
-            return False
-
-        # Print include statement
-        module_name = "_".join(module.upper().split("/"))
-        log.info(f"Include statement: include {{ {module_name} }} from '.{os.path.join(install_folder, module)}/main'")
-
-        # Update modules.json with newly installed module
-        modules_json.update(self.modules_repo, module, version)
-        return True
+        super().__init__(
+            pipeline_dir,
+            "modules",
+            force=force,
+            prompt=prompt,
+            sha=sha,
+            remote_url=remote_url,
+            branch=branch,
+            no_pull=no_pull,
+            installed_by=installed_by,
+        )
diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py
index 32bfa8ce95..0f4ece6c49 100644
--- a/nf_core/modules/lint/__init__.py
+++ b/nf_core/modules/lint/__init__.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
 """
 Code for linting modules in the nf-core/modules repository and
 in nf-core pipelines
@@ -19,12 +18,11 @@ from rich.markdown import Markdown
 from rich.table import Table

-import nf_core.modules.module_utils
+import nf_core.modules.modules_utils
 import nf_core.utils
+from nf_core.components.components_command import ComponentCommand
 from nf_core.lint_utils import console
-from nf_core.modules.modules_command import ModuleCommand
 from nf_core.modules.modules_json import ModulesJson
-from nf_core.modules.modules_repo import ModulesRepo
 from nf_core.modules.nfcore_module import NFCoreModule
 from nf_core.utils import plural_s as _s

@@ -37,7 +35,7 @@ class ModuleLintException(Exception):
     pass

-class LintResult(object):
+class LintResult:
     """An object to hold the results of a lint test"""

     def __init__(self, mod, lint_test, message, file_path):
@@ -48,7 +46,7 @@ def __init__(self, mod, lint_test, message, file_path):
         self.module_name = mod.module_name

-class ModuleLint(ModuleCommand):
+class ModuleLint(ComponentCommand):
     """
     An object for linting modules either in a clone of the 'nf-core/modules'
     repository or in any nf-core pipeline directory
@@ -73,45 +71,52 @@ def __init__(
         no_pull=False,
         hide_progress=False,
     ):
-        self.dir = dir
-        try:
-            self.dir, self.repo_type = nf_core.modules.module_utils.get_repo_type(self.dir)
-        except LookupError as e:
-            raise UserWarning(e)
+        super().__init__(
+            "modules",
+            dir=dir,
+            remote_url=remote_url,
+            branch=branch,
+            no_pull=no_pull,
+            hide_progress=hide_progress,
+        )
         self.fail_warned = fail_warned
         self.passed = []
         self.warned = []
         self.failed = []
-        self.hide_progress = hide_progress
-        self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress)
         self.lint_tests = self.get_all_lint_tests(self.repo_type == "pipeline")

         if self.repo_type == "pipeline":
             modules_json = ModulesJson(self.dir)
             modules_json.check_up_to_date()
-
all_pipeline_modules = modules_json.get_all_modules() - if self.modules_repo.fullname in all_pipeline_modules: - module_dir = Path(self.dir, "modules", self.modules_repo.fullname) - self.all_remote_modules = [ - NFCoreModule(m, self.modules_repo.fullname, module_dir / m, self.repo_type, Path(self.dir)) - for m in all_pipeline_modules[self.modules_repo.fullname] - ] - if not self.all_remote_modules: - raise LookupError(f"No modules from {self.modules_repo.remote_url} installed in pipeline.") - local_module_dir = Path(self.dir, "modules", "local") + self.all_remote_modules = [] + for repo_url, components in modules_json.get_all_components(self.component_type).items(): + for org, comp in components: + self.all_remote_modules.append( + NFCoreModule( + comp, + repo_url, + Path(self.dir, self.component_type, org, comp), + self.repo_type, + Path(self.dir), + ) + ) + if not self.all_remote_modules: + raise LookupError(f"No modules from {self.modules_repo.remote_url} installed in pipeline.") + local_module_dir = Path(self.dir, "modules", "local") + self.all_local_modules = [] + if local_module_dir.exists(): self.all_local_modules = [ - NFCoreModule(m, None, local_module_dir / m, self.repo_type, Path(self.dir), nf_core_module=False) - for m in self.get_local_modules() + NFCoreModule( + m, None, Path(local_module_dir, m), self.repo_type, Path(self.dir), remote_module=False + ) + for m in self.get_local_components() ] - - else: - raise LookupError(f"No modules from {self.modules_repo.remote_url} installed in pipeline.") else: - module_dir = Path(self.dir, "modules") + module_dir = Path(self.dir, self.default_modules_path) self.all_remote_modules = [ NFCoreModule(m, None, module_dir / m, self.repo_type, Path(self.dir)) - for m in self.get_modules_clone_modules() + for m in self.get_components_clone_modules() ] self.all_local_modules = [] if not self.all_remote_modules: @@ -140,9 +145,9 @@ def lint( module=None, key=(), all_modules=False, - hide_progress=False, print_results=True, show_passed=False, + sort_by="test", local=False, fix_version=False, ): @@ -150,7 +155,7 @@ def lint( Lint all or one specific module First gets a list of all local modules (in modules/local/process) and all modules - installed from nf-core (in modules/nf-core/modules) + installed from nf-core (in modules/nf-core) For all nf-core modules, the correct file structure is assured and important file content is verified. If directory subject to linting is a clone of 'nf-core/modules', @@ -227,7 +232,7 @@ def lint( self.lint_modules(remote_modules, local=False, fix_version=fix_version) if print_results: - self._print_results(show_passed=show_passed) + self._print_results(show_passed=show_passed, sort_by=sort_by) self.print_summary() def set_up_pipeline_files(self): @@ -272,7 +277,7 @@ def lint_modules(self, modules, local=False, fix_version=False): "[magenta]{task.completed} of {task.total}[reset] » [bold yellow]{task.fields[test_name]}", transient=True, console=console, - disable=self.hide_progress, + disable=self.hide_progress or os.environ.get("HIDE_PROGRESS", None) is not None, ) with progress_bar: lint_progress = progress_bar.add_task( @@ -329,7 +334,7 @@ def lint_module(self, mod, progress_bar, local=False, fix_version=False): self.failed += [LintResult(mod, *m) for m in mod.failed] - def _print_results(self, show_passed=False): + def _print_results(self, show_passed=False, sort_by="test"): """Print linting results to the command line. 
Uses the ``rich`` library to print a set of formatted tables to the command line @@ -338,10 +343,14 @@ def _print_results(self, show_passed=False): log.debug("Printing final results") + sort_order = ["lint_test", "module_name", "message"] + if sort_by == "module": + sort_order = ["module_name", "lint_test", "message"] + # Sort the results - self.passed.sort(key=operator.attrgetter("message", "module_name")) - self.warned.sort(key=operator.attrgetter("message", "module_name")) - self.failed.sort(key=operator.attrgetter("message", "module_name")) + self.passed.sort(key=operator.attrgetter(*sort_order)) + self.warned.sort(key=operator.attrgetter(*sort_order)) + self.failed.sort(key=operator.attrgetter(*sort_order)) # Find maximum module name length max_mod_name_len = 40 diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 4b5327020f..d44fe90f1e 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Lint the main.nf file of a module """ @@ -7,11 +6,12 @@ import re import sqlite3 from pathlib import Path +from urllib.parse import urlparse, urlunparse import requests import nf_core -import nf_core.modules.module_utils +import nf_core.modules.modules_utils from nf_core.modules.modules_differ import ModulesDiffer log = logging.getLogger(__name__) @@ -46,7 +46,7 @@ def main_nf(module_lint_object, module, fix_version, progress_bar): if module.is_patched: lines = ModulesDiffer.try_apply_patch( module.module_name, - module_lint_object.modules_repo.fullname, + module_lint_object.modules_repo.repo_path, module.patch_path, Path(module.module_dir).relative_to(module.base_dir), reverse=True, @@ -100,18 +100,18 @@ def main_nf(module_lint_object, module, fix_version, progress_bar): continue # Perform state-specific linting checks - if state == "process" and not _is_empty(module, l): + if state == "process" and not _is_empty(l): process_lines.append(l) - if state == "input" and not _is_empty(module, l): + if state == "input" and not _is_empty(l): inputs.extend(_parse_input(module, l)) - if state == "output" and not _is_empty(module, l): + if state == "output" and not _is_empty(l): outputs += _parse_output(module, l) outputs = list(set(outputs)) # remove duplicate 'meta's - if state == "when" and not _is_empty(module, l): + if state == "when" and not _is_empty(l): when_lines.append(l) - if state == "script" and not _is_empty(module, l): + if state == "script" and not _is_empty(l): script_lines.append(l) - if state == "shell" and not _is_empty(module, l): + if state == "shell" and not _is_empty(l): shell_lines.append(l) # Check that we have required sections @@ -235,7 +235,7 @@ def check_process_section(self, lines, fix_version, progress_bar): self.failed.append(("process_capitals", "Process name is not in capital letters", self.main_nf)) # Check that process labels are correct - correct_process_labels = ["process_low", "process_medium", "process_high", "process_long"] + correct_process_labels = ["process_single", "process_low", "process_medium", "process_high", "process_long"] process_label = [l for l in lines if l.lstrip().startswith("label")] if len(process_label) > 0: try: @@ -255,30 +255,81 @@ def check_process_section(self, lines, fix_version, progress_bar): self.passed.append(("process_standard_label", "Correct process label", self.main_nf)) else: self.warned.append(("process_standard_label", "Process label unspecified", self.main_nf)) - for l in lines: + for i, l in enumerate(lines): + url = None if 
_container_type(l) == "bioconda": bioconda_packages = [b for b in l.split() if "bioconda::" in b] l = l.strip(" '\"") if _container_type(l) == "singularity": # e.g. "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' :" -> v1.2.0_cv1 # e.g. "https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' :" -> 0.11.9--0 - match = re.search(r"(?:/)?(?:biocontainers_)?(?::)?([A-Za-z\d\-_.]+?)(?:\.img)?['\"]", l) + match = re.search(r"(?:/)?(?:biocontainers_)?(?::)?([A-Za-z\d\-_.]+?)(?:\.img)?'", l) if match is not None: singularity_tag = match.group(1) self.passed.append(("singularity_tag", f"Found singularity tag: {singularity_tag}", self.main_nf)) else: self.failed.append(("singularity_tag", "Unable to parse singularity tag", self.main_nf)) singularity_tag = None + url = urlparse(l.split("'")[0]) + # lint double quotes + if l.count('"') > 2: + self.failed.append( + ( + "container_links", + "Too many double quotes found when specifying singularity container", + self.main_nf, + ) + ) if _container_type(l) == "docker": # e.g. "quay.io/biocontainers/krona:2.7.1--pl526_5' }" -> 2.7.1--pl526_5 # e.g. "biocontainers/biocontainers:v1.2.0_cv1' }" -> v1.2.0_cv1 - match = re.search(r"(?:[/])?(?::)?([A-Za-z\d\-_.]+)['\"]", l) + match = re.search(r"(?:[/])?(?::)?([A-Za-z\d\-_.]+)'", l) if match is not None: docker_tag = match.group(1) self.passed.append(("docker_tag", f"Found docker tag: {docker_tag}", self.main_nf)) else: self.failed.append(("docker_tag", "Unable to parse docker tag", self.main_nf)) docker_tag = None + url = urlparse(l.split("'")[0]) + # lint double quotes + if l.count('"') > 2: + self.failed.append( + ("container_links", "Too many double quotes found when specifying docker container", self.main_nf) + ) + # lint double quotes + if l.startswith("container"): + if l.count('"') > 2: + self.failed.append( + ("container_links", "Too many double quotes found when specifying containers", self.main_nf) + ) + # lint more than one container in the same line + if ("https://containers" in l or "https://depot" in l) and ("biocontainers/" in l or "quay.io/" in l): + self.warned.append( + ( + "container_links", + "Docker and Singularity containers specified in the same line. 
Only first one checked.", + self.main_nf, + ) + ) + # Try to connect to container URLs + if url is None: + continue + try: + response = requests.head( + "https://" + urlunparse(url) if not url.scheme == "https" else urlunparse(url), + stream=True, + allow_redirects=True, + ) + log.debug( + f"Connected to URL: {'https://' + urlunparse(url) if not url.scheme == 'https' else urlunparse(url)}, " + f"status_code: {response.status_code}" + ) + except (requests.exceptions.RequestException, sqlite3.InterfaceError) as e: + log.debug(f"Unable to connect to url '{urlunparse(url)}' due to error: {e}") + self.failed.append(("container_links", "Unable to connect to container URL", self.main_nf)) + continue + if response.status_code != 200: + self.failed.append(("container_links", "Unable to connect to container URL", self.main_nf)) # Check that all bioconda packages have build numbers # Also check for newer versions @@ -391,7 +442,7 @@ def _parse_output(self, line): return output -def _is_empty(self, line): +def _is_empty(line): """Check whether a line is empty or a comment""" empty = False if line.strip().startswith("//"): @@ -422,7 +473,7 @@ def _fix_module_version(self, current_version, latest_version, singularity_tag, build_type = _container_type(l) if build_type == "bioconda": new_lines.append(re.sub(rf"{current_version}", f"{latest_version}", line)) - elif build_type == "singularity" or build_type == "docker": + elif build_type in ("singularity", "docker"): # Check that the new url is valid new_url = re.search( "(?:['\"])(.+)(?:['\"])", re.sub(rf"{singularity_tag}", f"{latest_version}--{build}", line) @@ -432,7 +483,8 @@ def _fix_module_version(self, current_version, latest_version, singularity_tag, "https://" + new_url if not new_url.startswith("https://") else new_url, stream=True ) log.debug( - f"Connected to URL: {'https://' + new_url if not new_url.startswith('https://') else new_url}, status_code: {response_new_container.status_code}" + f"Connected to URL: {'https://' + new_url if not new_url.startswith('https://') else new_url}, " + f"status_code: {response_new_container.status_code}" ) except (requests.exceptions.RequestException, sqlite3.InterfaceError) as e: log.debug(f"Unable to connect to url '{new_url}' due to error: {e}") diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 146796fde2..d6ec296999 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - from pathlib import Path import yaml @@ -28,7 +26,7 @@ def meta_yml(module_lint_object, module): if module.is_patched: lines = ModulesDiffer.try_apply_patch( module.module_name, - module_lint_object.modules_repo.fullname, + module_lint_object.modules_repo.repo_path, module.patch_path, Path(module.module_dir).relative_to(module.base_dir), reverse=True, diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py index 74e5df064e..61b416e5f7 100644 --- a/nf_core/modules/lint/module_changes.py +++ b/nf_core/modules/lint/module_changes.py @@ -5,6 +5,7 @@ import tempfile from pathlib import Path +import nf_core.modules.modules_repo from nf_core.modules.modules_differ import ModulesDiffer @@ -29,7 +30,7 @@ def module_changes(module_lint_object, module): shutil.copytree(module.module_dir, tempdir) try: new_lines = ModulesDiffer.try_apply_patch( - module.module_name, module_lint_object.modules_repo.fullname, module.patch_path, tempdir, reverse=True + module.module_name, 
module_lint_object.modules_repo.repo_path, module.patch_path, tempdir, reverse=True ) for file, lines in new_lines.items(): with open(tempdir / file, "w") as fh: @@ -39,10 +40,12 @@ def module_changes(module_lint_object, module): return else: tempdir = module.module_dir + module.branch = module_lint_object.modules_json.get_component_branch( + "modules", module.module_name, module.repo_url, module.org + ) + modules_repo = nf_core.modules.modules_repo.ModulesRepo(remote_url=module.repo_url, branch=module.branch) - for f, same in module_lint_object.modules_repo.module_files_identical( - module.module_name, tempdir, module.git_sha - ).items(): + for f, same in modules_repo.module_files_identical(module.module_name, tempdir, module.git_sha).items(): if same: module.passed.append( ( diff --git a/nf_core/modules/lint/module_deprecations.py b/nf_core/modules/lint/module_deprecations.py index f7e8761c75..8ab5b68c2e 100644 --- a/nf_core/modules/lint/module_deprecations.py +++ b/nf_core/modules/lint/module_deprecations.py @@ -1,11 +1,10 @@ -#!/usr/bin/env python import logging import os log = logging.getLogger(__name__) -def module_deprecations(module_lint_object, module): +def module_deprecations(_, module): """ Check that the modules are up to the latest nf-core standard """ diff --git a/nf_core/modules/lint/module_patch.py b/nf_core/modules/lint/module_patch.py index e6656136d1..6d91b44816 100644 --- a/nf_core/modules/lint/module_patch.py +++ b/nf_core/modules/lint/module_patch.py @@ -163,7 +163,7 @@ def patch_reversible(module_lint_object, module, patch_path): try: ModulesDiffer.try_apply_patch( module.module_name, - module_lint_object.modules_repo.fullname, + module_lint_object.modules_repo.repo_path, patch_path, Path(module.module_dir).relative_to(module.base_dir), reverse=True, diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index b0c9fa0ee2..0b76acb944 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -9,7 +9,7 @@ log = logging.getLogger(__name__) -def module_tests(module_lint_object, module): +def module_tests(_, module): """ Lint the tests of a module in ``nf-core/modules`` diff --git a/nf_core/modules/lint/module_todos.py b/nf_core/modules/lint/module_todos.py index 90af44987e..b48725cd9e 100644 --- a/nf_core/modules/lint/module_todos.py +++ b/nf_core/modules/lint/module_todos.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python import logging from nf_core.lint.pipeline_todos import pipeline_todos @@ -6,7 +5,7 @@ log = logging.getLogger(__name__) -def module_todos(module_lint_object, module): +def module_todos(_, module): """ Look for TODO statements in the module files diff --git a/nf_core/modules/lint/module_version.py b/nf_core/modules/lint/module_version.py index 2312560365..1cf142e8eb 100644 --- a/nf_core/modules/lint/module_version.py +++ b/nf_core/modules/lint/module_version.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Verify that a module has a correct entry in the modules.json file """ @@ -7,8 +6,8 @@ from pathlib import Path import nf_core -import nf_core.modules.module_utils import nf_core.modules.modules_repo +import nf_core.modules.modules_utils log = logging.getLogger(__name__) @@ -23,11 +22,8 @@ def module_version(module_lint_object, module): """ modules_json_path = Path(module_lint_object.dir, "modules.json") - # Verify that a git_sha exists in the `modules.json` file for this module - version = module_lint_object.modules_json.get_module_version( - module.module_name, 
module_lint_object.modules_repo.fullname
-    )
+    version = module_lint_object.modules_json.get_module_version(module.module_name, module.repo_url, module.org)
     if version is None:
         module.failed.append(("git_sha", "No git_sha entry in `modules.json`", modules_json_path))
         return
@@ -37,8 +33,12 @@ def module_version(module_lint_object, module):
     # Check whether a new version is available
     try:
-        modules_repo = nf_core.modules.modules_repo.ModulesRepo()
-        module_git_log = modules_repo.get_module_git_log(module.module_name)
+        module.branch = module_lint_object.modules_json.get_component_branch(
+            "modules", module.module_name, module.repo_url, module.org
+        )
+        modules_repo = nf_core.modules.modules_repo.ModulesRepo(remote_url=module.repo_url, branch=module.branch)
+
+        module_git_log = modules_repo.get_component_git_log(module.module_name, "modules")
         if version == next(module_git_log)["git_sha"]:
             module.passed.append(("module_version", "Module is the latest version", module.module_dir))
         else:
diff --git a/nf_core/modules/list.py b/nf_core/modules/list.py
index f063f21151..c7dc943f9e 100644
--- a/nf_core/modules/list.py
+++ b/nf_core/modules/list.py
@@ -1,125 +1,10 @@
-import json
 import logging

-import rich
-
-from .modules_command import ModuleCommand
-from .modules_json import ModulesJson
-from .modules_repo import ModulesRepo
+from nf_core.components.list import ComponentList

 log = logging.getLogger(__name__)

-class ModuleList(ModuleCommand):
+class ModuleList(ComponentList):
     def __init__(self, pipeline_dir, remote=True, remote_url=None, branch=None, no_pull=False):
-        super().__init__(pipeline_dir, remote_url, branch, no_pull)
-        self.remote = remote
-
-    def list_modules(self, keywords=None, print_json=False):
-        """
-        Get available module names from GitHub tree for repo
-        and print as list to stdout
-        """
-
-        # Initialise rich table
-        table = rich.table.Table()
-        table.add_column("Module Name")
-        modules = []
-
-        if keywords is None:
-            keywords = []
-
-        def pattern_msg(keywords):
-            if len(keywords) == 0:
-                return ""
-            if len(keywords) == 1:
-                return f" matching pattern '{keywords[0]}'"
-            else:
-                quoted_keywords = (f"'{key}'" for key in keywords)
-                return f" matching patterns {', '.join(quoted_keywords)}"
-
-        # No pipeline given - show all remote
-        if self.remote:
-
-            # Filter the modules by keywords
-            modules = [mod for mod in self.modules_repo.get_avail_modules() if all(k in mod for k in keywords)]
-
-            # Nothing found
-            if len(modules) == 0:
-                log.info(
-                    f"No available modules found in {self.modules_repo.fullname} ({self.modules_repo.branch})"
-                    f"{pattern_msg(keywords)}"
-                )
-                return ""
-
-            for mod in sorted(modules):
-                table.add_row(mod)
-
-        # We have a pipeline - list what's installed
-        else:
-            # Check whether pipeline is valid
-            try:
-                self.has_valid_directory()
-            except UserWarning as e:
-                log.error(e)
-                return ""
-
-            # Verify that 'modules.json' is consistent with the installed modules
-            modules_json = ModulesJson(self.dir)
-            modules_json.check_up_to_date()
-
-            # Filter by keywords
-            repos_with_mods = {
-                repo_name: [mod for mod in modules if all(k in mod for k in keywords)]
-                for repo_name, modules in modules_json.get_all_modules().items()
-            }
-
-            # Nothing found
-            if sum(map(len, repos_with_mods)) == 0:
-                log.info(f"No nf-core modules found in '{self.dir}'{pattern_msg(keywords)}")
-                return ""
-
-            table.add_column("Repository")
-            table.add_column("Version SHA")
-            table.add_column("Message")
-            table.add_column("Date")
-
-            # Load 'modules.json'
-            modules_json = modules_json.modules_json
-
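The removed `list_modules` above boils down to a keyword filter plus a `rich` table. A minimal standalone sketch of the remote-listing branch (module names and the keyword are invented for illustration):

```python
# Minimal sketch of the remote listing logic: filter by keywords, then
# render a one-column rich table. Module names here are examples only.
import rich.table
from rich.console import Console

available = ["fastqc", "multiqc", "samtools/sort"]
keywords = ["qc"]

table = rich.table.Table()
table.add_column("Module Name")
for mod in sorted(m for m in available if all(k in m for k in keywords)):
    table.add_row(mod)

Console().print(table)  # lists fastqc and multiqc
```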
-            for repo_name, modules in sorted(repos_with_mods.items()):
-                repo_entry = modules_json["repos"].get(repo_name, {})
-                for module in sorted(modules):
-                    repo_modules = repo_entry.get("modules")
-                    module_entry = repo_modules.get(module)
-
-                    if module_entry:
-                        version_sha = module_entry["git_sha"]
-                        try:
-                            # pass repo_name to get info on modules even outside nf-core/modules
-                            message, date = ModulesRepo(
-                                remote_url=repo_entry["git_url"],
-                                branch=module_entry["branch"],
-                            ).get_commit_info(version_sha)
-                        except LookupError as e:
-                            log.warning(e)
-                            date = "[red]Not Available"
-                            message = "[red]Not Available"
-                    else:
-                        log.warning(f"Commit SHA for module '{repo_name}/{module}' is missing from 'modules.json'")
-                        version_sha = "[red]Not Available"
-                        date = "[red]Not Available"
-                        message = "[red]Not Available"
-                    table.add_row(module, repo_name, version_sha, message, date)
-
-        if print_json:
-            return json.dumps(modules, sort_keys=True, indent=4)
-
-        if self.remote:
-            log.info(
-                f"Modules available from {self.modules_repo.fullname} ({self.modules_repo.branch})"
-                f"{pattern_msg(keywords)}:\n"
-            )
-        else:
-            log.info(f"Modules installed in '{self.dir}'{pattern_msg(keywords)}:\n")
-        return table
+        super().__init__("modules", pipeline_dir, remote, remote_url, branch, no_pull)
diff --git a/nf_core/modules/module_utils.py b/nf_core/modules/module_utils.py
deleted file mode 100644
index 144f7ce3d4..0000000000
--- a/nf_core/modules/module_utils.py
+++ /dev/null
@@ -1,203 +0,0 @@
-import logging
-import os
-import urllib
-from pathlib import Path
-
-import questionary
-import rich
-
-import nf_core.utils
-
-from .nfcore_module import NFCoreModule
-
-log = logging.getLogger(__name__)
-
-
-class ModuleException(Exception):
-    """Exception raised when there was an error with module commands"""
-
-    pass
-
-
-def path_from_remote(remote_url):
-    """
-    Extracts the path from the remote URL
-    See https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-clone.html#URLS for the possible URL patterns
-    """
-    # Check whether we have a https or ssh url
-    if remote_url.startswith("https"):
-        path = urllib.parse.urlparse(remote_url)
-        path = path.path
-        # Remove the initial '/'
-        path = path[1:]
-        path = os.path.splitext(path)[0]
-    else:
-        # Remove the initial `git@`
-        path = remote_url.split("@")
-        path = path[-1] if len(path) > 1 else path[0]
-        path = urllib.parse.urlparse(path)
-        path = path.path
-        path = os.path.splitext(path)[0]
-    return path
-
-
-def get_installed_modules(dir, repo_type="modules"):
-    """
-    Make a list of all modules installed in this repository
-
-    Returns a tuple of two lists, one for local modules
-    and one for nf-core modules. The local modules are represented
-    as direct filepaths to the module '.nf' file.
-    Nf-core modules are returned as file paths to the module directories.
-    In case the module contains several tools, one path to each tool directory
-    is returned.
- - returns (local_modules, nfcore_modules) - """ - # initialize lists - local_modules = [] - nfcore_modules = [] - local_modules_dir = None - nfcore_modules_dir = os.path.join(dir, "modules", "nf-core", "modules") - - # Get local modules - if repo_type == "pipeline": - local_modules_dir = os.path.join(dir, "modules", "local", "process") - - # Filter local modules - if os.path.exists(local_modules_dir): - local_modules = os.listdir(local_modules_dir) - local_modules = sorted([x for x in local_modules if x.endswith(".nf")]) - - # nf-core/modules - if repo_type == "modules": - nfcore_modules_dir = os.path.join(dir, "modules") - - # Get nf-core modules - if os.path.exists(nfcore_modules_dir): - for m in sorted([m for m in os.listdir(nfcore_modules_dir) if not m == "lib"]): - if not os.path.isdir(os.path.join(nfcore_modules_dir, m)): - raise ModuleException( - f"File found in '{nfcore_modules_dir}': '{m}'! This directory should only contain module directories." - ) - m_content = os.listdir(os.path.join(nfcore_modules_dir, m)) - # Not a module, but contains sub-modules - if not "main.nf" in m_content: - for tool in m_content: - nfcore_modules.append(os.path.join(m, tool)) - else: - nfcore_modules.append(m) - - # Make full (relative) file paths and create NFCoreModule objects - local_modules = [os.path.join(local_modules_dir, m) for m in local_modules] - nfcore_modules = [ - NFCoreModule(m, "nf-core/modules", Path(nfcore_modules_dir, m), repo_type=repo_type, base_dir=Path(dir)) - for m in nfcore_modules - ] - - return local_modules, nfcore_modules - - -def get_repo_type(dir, repo_type=None, use_prompt=True): - """ - Determine whether this is a pipeline repository or a clone of - nf-core/modules - """ - # Verify that the pipeline dir exists - if dir is None or not os.path.exists(dir): - raise UserWarning(f"Could not find directory: {dir}") - - # Try to find the root directory - base_dir = os.path.abspath(dir) - config_path_yml = os.path.join(base_dir, ".nf-core.yml") - config_path_yaml = os.path.join(base_dir, ".nf-core.yaml") - while ( - not os.path.exists(config_path_yml) - and not os.path.exists(config_path_yaml) - and base_dir != os.path.dirname(base_dir) - ): - base_dir = os.path.dirname(base_dir) - config_path_yml = os.path.join(base_dir, ".nf-core.yml") - config_path_yaml = os.path.join(base_dir, ".nf-core.yaml") - # Reset dir if we found the config file (will be an absolute path) - if os.path.exists(config_path_yml) or os.path.exists(config_path_yaml): - dir = base_dir - - # Figure out the repository type from the .nf-core.yml config file if we can - tools_config = nf_core.utils.load_tools_config(dir) - repo_type = tools_config.get("repository_type", None) - - # If not set, prompt the user - if not repo_type and use_prompt: - log.warning("Can't find a '.nf-core.yml' file that defines 'repository_type'") - repo_type = questionary.select( - "Is this repository an nf-core pipeline or a fork of nf-core/modules?", - choices=[ - {"name": "Pipeline", "value": "pipeline"}, - {"name": "nf-core/modules", "value": "modules"}, - ], - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - - # Save the choice in the config file - log.info("To avoid this prompt in the future, add the 'repository_type' key to a root '.nf-core.yml' file.") - if rich.prompt.Confirm.ask("[bold][blue]?[/] Would you like me to add this config now?", default=True): - with open(os.path.join(dir, ".nf-core.yml"), "a+") as fh: - fh.write(f"repository_type: {repo_type}\n") - log.info("Config added to '.nf-core.yml'") - - # 
Not set and not allowed to ask
-    elif not repo_type:
-        raise UserWarning("Repository type could not be established")
-
-    # Check if it's a valid answer
-    if not repo_type in ["pipeline", "modules"]:
-        raise UserWarning(f"Invalid repository type: '{repo_type}'")
-
-    # It was set on the command line, return what we were given
-    return [dir, repo_type]
-
-
-def prompt_module_version_sha(module, modules_repo, installed_sha=None):
-    """
-    Creates an interactive questionary prompt for selecting the module version
-    Args:
-        module (str): Module name
-        modules_repo (ModulesRepo): Modules repo the module originates in
-        installed_sha (str): Optional extra argument to highlight the current installed version
-
-    Returns:
-        git_sha (str): The selected version of the module
-    """
-    older_commits_choice = questionary.Choice(
-        title=[("fg:ansiyellow", "older commits"), ("class:choice-default", "")], value=""
-    )
-    git_sha = ""
-    page_nbr = 1
-
-    all_commits = modules_repo.get_module_git_log(module)
-    next_page_commits = [next(all_commits, None) for _ in range(10)]
-    next_page_commits = [commit for commit in next_page_commits if commit is not None]
-
-    while git_sha == "":
-        commits = next_page_commits
-        next_page_commits = [next(all_commits, None) for _ in range(10)]
-        next_page_commits = [commit for commit in next_page_commits if commit is not None]
-        if all(commit is None for commit in next_page_commits):
-            next_page_commits = None
-
-        choices = []
-        for title, sha in map(lambda commit: (commit["trunc_message"], commit["git_sha"]), commits):
-            display_color = "fg:ansiblue" if sha != installed_sha else "fg:ansired"
-            message = f"{title} {sha}"
-            if installed_sha == sha:
-                message += " (installed version)"
-            commit_display = [(display_color, message), ("class:choice-default", "")]
-            choices.append(questionary.Choice(title=commit_display, value=sha))
-        if next_page_commits is not None:
-            choices += [older_commits_choice]
-        git_sha = questionary.select(
-            f"Select '{module}' commit:", choices=choices, style=nf_core.utils.nfcore_question_style
-        ).unsafe_ask()
-        page_nbr += 1
-    return git_sha
diff --git a/nf_core/modules/modules_command.py b/nf_core/modules/modules_command.py
deleted file mode 100644
index dbfd1ff6dd..0000000000
--- a/nf_core/modules/modules_command.py
+++ /dev/null
@@ -1,146 +0,0 @@
-import logging
-import os
-import shutil
-from pathlib import Path
-
-import yaml
-
-import nf_core.modules.module_utils
-import nf_core.utils
-
-from .modules_json import ModulesJson
-from .modules_repo import ModulesRepo
-
-log = logging.getLogger(__name__)
-
-
-class ModuleCommand:
-    """
-    Base class for the 'nf-core modules' commands
-    """
-
-    def __init__(self, dir, remote_url=None, branch=None, no_pull=False):
-        """
-        Initialise the ModuleCommand object
-        """
-        self.modules_repo = ModulesRepo(remote_url, branch, no_pull)
-        self.dir = dir
-        try:
-            if self.dir:
-                self.dir, self.repo_type = nf_core.modules.module_utils.get_repo_type(self.dir)
-            else:
-                self.repo_type = None
-        except LookupError as e:
-            raise UserWarning(e)
-
-    def get_modules_clone_modules(self):
-        """
-        Get the modules available in a clone of nf-core/modules
-        """
-        module_base_path = Path(self.dir, "modules")
-        return [
-            str(Path(dir).relative_to(module_base_path))
-            for dir, _, files in os.walk(module_base_path)
-            if "main.nf" in files
-        ]
-
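`get_modules_clone_modules` above treats any directory containing a `main.nf` as a module. The same walk in isolation (the `modules/` path is an assumed example location):

```python
# Sketch: collect every directory under modules/ that holds a main.nf,
# expressed relative to the base path ("modules" is an example path).
import os
from pathlib import Path

module_base_path = Path("modules")
modules = [
    str(Path(dirpath).relative_to(module_base_path))
    for dirpath, _, files in os.walk(module_base_path)
    if "main.nf" in files
]
print(modules)  # e.g. ['fastqc', 'samtools/sort']
```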
-    def get_local_modules(self):
-        """
-        Get the local modules in a pipeline
-        """
-        local_module_dir = Path(self.dir, "modules", "local")
-        return [str(path.relative_to(local_module_dir)) for path in local_module_dir.iterdir() if path.suffix == ".nf"]
-
-    def has_valid_directory(self):
-        """Check that we were given a pipeline or clone of nf-core/modules"""
-        if self.repo_type == "modules":
-            return True
-        if self.dir is None or not os.path.exists(self.dir):
-            log.error(f"Could not find pipeline: {self.dir}")
-            return False
-        main_nf = os.path.join(self.dir, "main.nf")
-        nf_config = os.path.join(self.dir, "nextflow.config")
-        if not os.path.exists(main_nf) and not os.path.exists(nf_config):
-            raise UserWarning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.dir}'")
-        return True
-
-    def has_modules_file(self):
-        """Checks whether a modules.json file has been created and creates one if it is missing"""
-        modules_json_path = os.path.join(self.dir, "modules.json")
-        if not os.path.exists(modules_json_path):
-            log.info("Creating missing 'modules.json' file.")
-            ModulesJson(self.dir).create()
-
-    def clear_module_dir(self, module_name, module_dir):
-        """Removes all files in the module directory"""
-        try:
-            shutil.rmtree(module_dir)
-            # Try cleaning up empty parent if tool/subtool and tool/ is empty
-            if module_name.count("/") > 0:
-                parent_dir = os.path.dirname(module_dir)
-                try:
-                    os.rmdir(parent_dir)
-                except OSError:
-                    log.debug(f"Parent directory not empty: '{parent_dir}'")
-                else:
-                    log.debug(f"Deleted orphan tool directory: '{parent_dir}'")
-            log.debug(f"Successfully removed {module_name} module")
-            return True
-        except OSError as e:
-            log.error(f"Could not remove module: {e}")
-            return False
-
-    def modules_from_repo(self, repo_name):
-        """
-        Gets the modules installed from a certain repository
-
-        Args:
-            repo_name (str): The name of the repository
-
-        Returns:
-            [str]: The names of the modules
-        """
-        repo_dir = Path(self.dir, "modules", repo_name)
-        if not repo_dir.exists():
-            raise LookupError(f"Nothing installed from {repo_name} in pipeline")
-
-        return [
-            str(Path(dir_path).relative_to(repo_dir)) for dir_path, _, files in os.walk(repo_dir) if "main.nf" in files
-        ]
-
-    def install_module_files(self, module_name, module_version, modules_repo, install_dir):
-        """
-        Installs a module into the given directory
-
-        Args:
-            module_name (str): The name of the module
-            module_version (str): Git SHA for the version of the module to be installed
-            modules_repo (ModulesRepo): A correctly configured ModulesRepo object
-            install_dir (str): The path to where the module should be installed (should be the 'modules/' dir of the pipeline)
-
-        Returns:
-            (bool): Whether the operation was successful or not
-        """
-        return modules_repo.install_module(module_name, install_dir, module_version)
-
-    def load_lint_config(self):
-        """Parse a pipeline lint config file.
-
-        Look for a file called either `.nf-core-lint.yml` or
-        `.nf-core-lint.yaml` in the pipeline root directory and parse it.
-        (`.yml` takes precedence).
-
-        Add parsed config to the `self.lint_config` class attribute.
- """ - config_fn = os.path.join(self.dir, ".nf-core-lint.yml") - - # Pick up the file if it's .yaml instead of .yml - if not os.path.isfile(config_fn): - config_fn = os.path.join(self.dir, ".nf-core-lint.yaml") - - # Load the YAML - try: - with open(config_fn, "r") as fh: - self.lint_config = yaml.safe_load(fh) - except FileNotFoundError: - log.debug(f"No lint config file found: {config_fn}") diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index cc919c722a..efce3868e5 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -124,7 +124,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d def write_diff_file( diff_path, module, - repo_name, + repo_path, from_dir, to_dir, current_version=None, @@ -140,7 +140,7 @@ def write_diff_file( Args: diff_path (str | Path): The path to the file that should be appended module (str): The module name - repo_name (str): The name of the repo where the module resides + repo_path (str): The name of the repo where the module resides from_dir (str | Path): The directory containing the old module files to_dir (str | Path): The directory containing the new module files diffs (dict[str, (ModulesDiffer.DiffEnum, str)]): A dictionary containing @@ -167,12 +167,12 @@ def write_diff_file( with open(diff_path, file_action) as fh: if current_version is not None and new_version is not None: fh.write( - f"Changes in module '{Path(repo_name, module)}' between" + f"Changes in module '{Path(repo_path, module)}' between" f" ({current_version}) and" f" ({new_version})\n" ) else: - fh.write(f"Changes in module '{Path(repo_name, module)}'\n") + fh.write(f"Changes in module '{Path(repo_path, module)}'\n") for _, (diff_status, diff) in diffs.items(): if diff_status != ModulesDiffer.DiffEnum.UNCHANGED: @@ -219,14 +219,14 @@ def append_modules_json_diff(diff_path, old_modules_json, new_modules_json, modu @staticmethod def print_diff( - module, repo_name, from_dir, to_dir, current_version=None, new_version=None, dsp_from_dir=None, dsp_to_dir=None + module, repo_path, from_dir, to_dir, current_version=None, new_version=None, dsp_from_dir=None, dsp_to_dir=None ): """ Prints the diffs between two module versions to the terminal Args: module (str): The module name - repo_name (str): The name of the repo where the module resides + repo_path (str): The name of the repo where the module resides from_dir (str | Path): The directory containing the old module files to_dir (str | Path): The directory containing the new module files module_dir (str): The path to the current installation of the module @@ -246,10 +246,10 @@ def print_diff( console = Console(force_terminal=nf_core.utils.rich_force_colors()) if current_version is not None and new_version is not None: log.info( - f"Changes in module '{Path(repo_name, module)}' between" f" ({current_version}) and" f" ({new_version})" + f"Changes in module '{Path(repo_path, module)}' between" f" ({current_version}) and" f" ({new_version})" ) else: - log.info(f"Changes in module '{Path(repo_name, module)}'") + log.info(f"Changes in module '{Path(repo_path, module)}'") for file, (diff_status, diff) in diffs.items(): if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: @@ -423,13 +423,13 @@ def try_apply_single_patch(file_lines, patch, reverse=False): return patched_new_lines @staticmethod - def try_apply_patch(module, repo_name, patch_path, module_dir, reverse=False): + def try_apply_patch(module, repo_path, patch_path, module_dir, reverse=False): """ Try 
applying a full patch file to a module Args: module (str): Name of the module - repo_name (str): Name of the repository where the module resides + repo_path (str): Name of the repository where the module resides patch_path (str): The absolute path to the patch file to be applied module_dir (Path): The directory containing the module @@ -440,7 +440,7 @@ def try_apply_patch(module, repo_name, patch_path, module_dir, reverse=False): Raises: LookupError: If the patch application fails in a file """ - module_relpath = Path("modules", repo_name, module) + module_relpath = Path("modules", repo_path, module) patches = ModulesDiffer.per_file_patch(patch_path) new_files = {} for file, patch in patches.items(): diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 005baa84a5..431ad1d657 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -11,9 +11,14 @@ import questionary from git.exc import GitCommandError -import nf_core.modules.module_utils -import nf_core.modules.modules_repo import nf_core.utils +from nf_core.components.components_utils import get_components_to_install +from nf_core.lint_utils import dump_json_with_prettier +from nf_core.modules.modules_repo import ( + NF_CORE_MODULES_NAME, + NF_CORE_MODULES_REMOTE, + ModulesRepo, +) from .modules_differ import ModulesDiffer @@ -34,12 +39,24 @@ def __init__(self, pipeline_dir): """ self.dir = pipeline_dir self.modules_dir = Path(self.dir, "modules") + self.subworkflows_dir = Path(self.dir, "subworkflows") + self.modules_json_path = Path(self.dir, "modules.json") self.modules_json = None self.pipeline_modules = None + self.pipeline_subworkflows = None + self.pipeline_components = None + + def __str__(self): + if self.modules_json is None: + self.load() + return json.dumps(self.modules_json, indent=4) + + def __repr__(self): + return self.__str__() def create(self): """ - Creates the modules.json file from the modules installed in the pipeline directory + Creates the modules.json file from the modules and subworkflows installed in the pipeline directory Raises: UserWarning: If the creation fails @@ -47,49 +64,73 @@ def create(self): pipeline_config = nf_core.utils.fetch_wf_config(self.dir) pipeline_name = pipeline_config.get("manifest.name", "") pipeline_url = pipeline_config.get("manifest.homePage", "") - modules_json = {"name": pipeline_name.strip("'"), "homePage": pipeline_url.strip("'"), "repos": {}} - modules_dir = Path(self.dir, "modules") + new_modules_json = {"name": pipeline_name.strip("'"), "homePage": pipeline_url.strip("'"), "repos": {}} - if not modules_dir.exists(): + if not self.modules_dir.exists(): raise UserWarning("Can't find a ./modules directory. 
Is this a DSL2 pipeline?") - repos, _ = self.get_pipeline_module_repositories(modules_dir) + # Get repositories + repos, _ = self.get_pipeline_module_repositories("modules", self.modules_dir) + + # Get all module/subworkflow names in the repos + repo_module_names = self.get_component_names_from_repo(repos, self.modules_dir) + repo_subworkflow_names = self.get_component_names_from_repo(repos, self.subworkflows_dir) + + # Add module/subworkflow info + for repo_url, module_names, install_dir in sorted(repo_module_names): + new_modules_json["repos"][repo_url] = {} + new_modules_json["repos"][repo_url]["modules"] = {} + new_modules_json["repos"][repo_url]["modules"][install_dir] = {} + new_modules_json["repos"][repo_url]["modules"][install_dir] = self.determine_branches_and_shas( + "modules", install_dir, repo_url, module_names + ) + for repo_url, subworkflow_names, install_dir in sorted(repo_subworkflow_names): + if repo_url not in new_modules_json["repos"]: # Don't overwrite the repo if it was already added by modules + new_modules_json["repos"][repo_url] = {} + new_modules_json["repos"][repo_url]["subworkflows"] = {} + new_modules_json["repos"][repo_url]["subworkflows"][install_dir] = {} + new_modules_json["repos"][repo_url]["subworkflows"][install_dir] = self.determine_branches_and_shas( + "subworkflows", install_dir, repo_url, subworkflow_names + ) + + # write the modules.json file and assign it to the object + self.modules_json = new_modules_json + self.dump() + + def get_component_names_from_repo(self, repos, directory): + """ + Get component names from repositories in a pipeline. + + Args: + repos (list): list of repository urls + directory (str): modules directory or subworkflows directory - # Get all module names in the repos - repo_module_names = [ - ( - repo_name, + Returns: + [(str),[(str),(str)]]: list of tuples with repository url, component names and install directory + """ + names = [] + for repo_url in repos: + modules_repo = ModulesRepo(repo_url) + components = ( + repo_url, [ - str(Path(dir_name).relative_to(modules_dir / repo_name)) - for dir_name, _, file_names in os.walk(modules_dir / repo_name) + str(Path(component_name).relative_to(directory / modules_repo.repo_path)) + for component_name, _, file_names in os.walk(directory / modules_repo.repo_path) if "main.nf" in file_names ], - repo_remote, - ) - for repo_name, repo_remote in repos.items() - ] - - for repo_name, module_names, remote_url in sorted(repo_module_names): - modules_json["repos"][repo_name] = {} - modules_json["repos"][repo_name]["git_url"] = remote_url - modules_json["repos"][repo_name]["modules"] = {} - modules_json["repos"][repo_name]["modules"] = self.determine_module_branches_and_shas( - repo_name, remote_url, module_names + modules_repo.repo_path, ) - # write the modules.json file and assign it to the object - modules_json_path = Path(self.dir, "modules.json") - with open(modules_json_path, "w") as fh: - json.dump(modules_json, fh, indent=4) - fh.write("\n") - self.modules_json = modules_json + names.append(components) + return names - def get_pipeline_module_repositories(self, modules_dir, repos=None): + def get_pipeline_module_repositories(self, component_type, directory, repos=None): """ - Finds all module repositories in the modules directory. - Ignores the local modules. + Finds all module repositories in the modules and subworkflows directory. + Ignores the local modules/subworkflows. 
Args: - modules_dir (Path): base directory for the module files + component_type (str): modules or subworkflows + directory (Path): base directory for the module files Returns repos ([ (str, str, str) ]), renamed_dirs (dict[Path, Path]): List of tuples of repo name, repo @@ -98,24 +139,22 @@ def get_pipeline_module_repositories(self, modules_dir, repos=None): """ if repos is None: repos = {} - # Check if there are any nf-core modules installed - if (modules_dir / nf_core.modules.modules_repo.NF_CORE_MODULES_NAME).exists(): - repos[ - nf_core.modules.modules_repo.NF_CORE_MODULES_NAME - ] = nf_core.modules.modules_repo.NF_CORE_MODULES_REMOTE + if (directory / NF_CORE_MODULES_NAME).exists() and NF_CORE_MODULES_REMOTE not in repos.keys(): + repos[NF_CORE_MODULES_REMOTE] = {} # The function might rename some directories, keep track of them renamed_dirs = {} # Check if there are any untracked repositories - dirs_not_covered = self.dir_tree_uncovered(modules_dir, [Path(name) for name in repos]) + dirs_not_covered = self.dir_tree_uncovered(directory, [Path(ModulesRepo(url).repo_path) for url in repos]) if len(dirs_not_covered) > 0: - log.info("Found custom module repositories when creating 'modules.json'") + log.info(f"Found custom {component_type[:-1]} repositories when creating 'modules.json'") # Loop until all directories in the base directory are covered by a remote while len(dirs_not_covered) > 0: log.info( - "The following director{s} in the modules directory are untracked: '{l}'".format( + "The following director{s} in the {t} directory are untracked: '{l}'".format( s="ies" if len(dirs_not_covered) > 0 else "y", - l="', '".join(str(dir.relative_to(modules_dir)) for dir in dirs_not_covered), + t=component_type, + l="', '".join(str(dir.relative_to(directory)) for dir in dirs_not_covered), ) ) nrepo_remote = questionary.text( @@ -133,8 +172,8 @@ def get_pipeline_module_repositories(self, modules_dir, repos=None): ).unsafe_ask() # Verify that there is a directory corresponding the remote - nrepo_name = nf_core.modules.module_utils.path_from_remote(nrepo_remote) - if not (modules_dir / nrepo_name).exists(): + nrepo_name = ModulesRepo(nrepo_remote).repo_path + if not (directory / nrepo_name).exists(): log.info( "The provided remote does not seem to correspond to a local directory. " "The directory structure should be the same as in the remote." 
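For reference, `create()` ultimately writes the nested repos structure shown below, keyed by remote URL rather than by repo name. This is an illustrative sketch; the pipeline name and the component, branch, and SHA values are placeholders, not real entries:

```python
# Shape of the modules.json produced by create() (values are placeholders)
modules_json = {
    "name": "nf-core/testpipeline",  # from manifest.name
    "homePage": "https://nf-co.re/testpipeline",  # from manifest.homePage
    "repos": {
        "https://github.com/nf-core/modules.git": {
            "modules": {
                "nf-core": {  # the install_dir, i.e. the org path
                    "fastqc": {
                        "branch": "master",
                        "git_sha": "<commit sha>",  # placeholder
                        "installed_by": ["modules"],
                    },
                },
            },
        },
    },
}
```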
@@ -144,26 +183,33 @@ def get_pipeline_module_repositories(self, modules_dir, repos=None): style=nf_core.utils.nfcore_question_style, ).unsafe_ask() if dir_name: - old_path = modules_dir / dir_name - new_path = modules_dir / nrepo_name + old_path = directory / dir_name + new_path = directory / nrepo_name old_path.rename(new_path) renamed_dirs[old_path] = new_path else: continue - repos[nrepo_name] = (nrepo_remote, "modules") - dirs_not_covered = self.dir_tree_uncovered(modules_dir, [Path(name) for name in repos]) + if nrepo_remote not in repos: + repos[nrepo_remote] = {} + if component_type not in repos[nrepo_remote]: + repos[nrepo_remote][component_type] = {} + repos[nrepo_remote][component_type][nrepo_name] = {} + dirs_not_covered = self.dir_tree_uncovered( + directory, [Path(name) for url in repos for name in repos[url][component_type]] + ) + return repos, renamed_dirs - def dir_tree_uncovered(self, modules_dir, repos): + def dir_tree_uncovered(self, components_directory, repos): """ - Does a BFS of the modules directory to look for directories that + Does a BFS of the modules/subworkflows directory to look for directories that are not tracked by a remote. The 'repos' argument contains the directories that are currently covered by remote, and it and its subdirectories are therefore ignore. Args: - module_dir (Path): Base path of modules in pipeline + components_directory (Path): Base path of modules or subworkflows in pipeline repos ([ Path ]): List of repos that are covered by a remote Returns: @@ -171,14 +217,14 @@ def dir_tree_uncovered(self, modules_dir, repos): """ # Initialise the FIFO queue. Note that we assume the directory to be correctly # configured, i.e. no files etc. - fifo = [subdir for subdir in modules_dir.iterdir() if subdir.stem != "local"] + fifo = [subdir for subdir in components_directory.iterdir() if subdir.stem != "local"] depth = 1 dirs_not_covered = [] while len(fifo) > 0: temp_queue = [] repos_at_level = {Path(*repo.parts[:depth]): len(repo.parts) for repo in repos} for directory in fifo: - rel_dir = directory.relative_to(modules_dir) + rel_dir = directory.relative_to(components_directory) if rel_dir in repos_at_level.keys(): # Go the next depth if this directory is not one of the repos if depth < repos_at_level[rel_dir]: @@ -190,56 +236,70 @@ def dir_tree_uncovered(self, modules_dir, repos): depth += 1 return dirs_not_covered - def determine_module_branches_and_shas(self, repo_name, remote_url, modules): + def determine_branches_and_shas(self, component_type, install_dir, remote_url, components): """ - Determines what branch and commit sha each module in the pipeline belong to + Determines what branch and commit sha each module/subworkflow in the pipeline belongs to - Assumes all modules are installed from the default branch. If it fails to find the - module in the default branch, it prompts the user with the available branches + Assumes all modules/subworkflows are installed from the default branch.
If it fails to find the + module/subworkflow in the default branch, it prompts the user with the available branches Args: - repo_name (str): The name of the module repository + install_dir (str): The name of the directory inside modules or subworkflows where components are installed remote_url (str): The url to the remote repository - modules ([str]): List of names of installed modules from the repository + components ([str]): List of names of installed modules/subworkflows from the repository Returns: - (dict[str, dict[str, str]]): The module.json entries for the modules + (dict[str, dict[str, str]]): The module.json entries for the modules/subworkflows from the repository """ - default_modules_repo = nf_core.modules.modules_repo.ModulesRepo(remote_url=remote_url) - repo_path = self.modules_dir / repo_name + default_modules_repo = ModulesRepo(remote_url=remote_url) + if component_type == "modules": + repo_path = self.modules_dir / install_dir + elif component_type == "subworkflows": + repo_path = self.subworkflows_dir / install_dir # Get the branches present in the repository, as well as the default branch - available_branches = nf_core.modules.modules_repo.ModulesRepo.get_remote_branches(remote_url) + available_branches = ModulesRepo.get_remote_branches(remote_url) sb_local = [] - dead_modules = [] + dead_components = [] repo_entry = {} - for module in sorted(modules): + for component in sorted(components): modules_repo = default_modules_repo - module_path = repo_path / module + component_path = repo_path / component correct_commit_sha = None tried_branches = {default_modules_repo.branch} found_sha = False while True: - # If the module is patched - patch_file = module_path / f"{module}.diff" + # If the module/subworkflow is patched + patch_file = component_path / f"{component}.diff" if patch_file.is_file(): - temp_module_dir = self.try_apply_patch_reverse(module, repo_name, patch_file, module_path) - correct_commit_sha = self.find_correct_commit_sha(module, temp_module_dir, modules_repo) + temp_module_dir = self.try_apply_patch_reverse(component, install_dir, patch_file, component_path) + correct_commit_sha = self.find_correct_commit_sha( + component_type, component, temp_module_dir, modules_repo + ) else: - correct_commit_sha = self.find_correct_commit_sha(module, module_path, modules_repo) + correct_commit_sha = self.find_correct_commit_sha( + component_type, component, component_path, modules_repo + ) + if correct_commit_sha is None: + # Check in the old path + correct_commit_sha = self.find_correct_commit_sha( + component_type, component, repo_path / component_type / component, modules_repo + ) if correct_commit_sha is None: - log.info(f"Was unable to find matching module files in the {modules_repo.branch} branch.") - choices = [{"name": "No", "value": None}] + [ + log.info( + f"Was unable to find matching {component_type[:-1]} files in the {modules_repo.branch} branch." + ) + choices = [{"name": "No", "value": False}] + [ {"name": branch, "value": branch} for branch in (available_branches - tried_branches) ] branch = questionary.select( - "Was the modules installed from a different branch in the remote?", + f"Was the {component_type[:-1]} '{component}' installed from a different branch in the remote?\nSelect 'No' for a local {component_type[:-1]}", choices=choices, style=nf_core.utils.nfcore_question_style, ).unsafe_ask() - if branch is None: + if not branch: action = questionary.select( - f"Module is untracked '{module}'. 
Please select what action to take", + f"{component_type[:-1].title()} is untracked '{component}'. Please select what action to take", choices=[ {"name": "Move the directory to 'local'", "value": 0}, {"name": "Remove the files", "value": 1}, @@ -247,117 +307,154 @@ def determine_module_branches_and_shas(self, repo_name, remote_url, modules): style=nf_core.utils.nfcore_question_style, ).unsafe_ask() if action == 0: - sb_local.append(module) + sb_local.append(component) else: - dead_modules.append(module) + dead_components.append(component) break # Create a new modules repo with the selected branch, and retry find the sha - modules_repo = nf_core.modules.modules_repo.ModulesRepo( - remote_url=remote_url, branch=branch, no_pull=True, hide_progress=True - ) + modules_repo = ModulesRepo(remote_url=remote_url, branch=branch, no_pull=True, hide_progress=True) else: found_sha = True break if found_sha: - repo_entry[module] = {"branch": modules_repo.branch, "git_sha": correct_commit_sha} + repo_entry[component] = { + "branch": modules_repo.branch, + "git_sha": correct_commit_sha, + "installed_by": [component_type], + } - # Clean up the modules we were unable to find the sha for - for module in sb_local: - log.debug(f"Moving module '{Path(repo_name, module)}' to 'local' directory") - self.move_module_to_local(module, repo_name) + # Clean up the modules/subworkflows we were unable to find the sha for + for component in sb_local: + log.debug(f"Moving {component_type[:-1]} '{Path(install_dir, component)}' to 'local' directory") + self.move_component_to_local(component_type, component, install_dir) - for module in dead_modules: - log.debug(f"Removing module {Path(repo_name, module)}'") - shutil.rmtree(repo_path / module) + for component in dead_components: + log.debug(f"Removing {component_type[:-1]} {Path(install_dir, component)}'") + shutil.rmtree(repo_path / component) return repo_entry - def find_correct_commit_sha(self, module_name, module_path, modules_repo): + def find_correct_commit_sha(self, component_type, component_name, component_path, modules_repo): """ Returns the SHA for the latest commit where the local files are identical to the remote files Args: - module_name (str): Name of module - module_path (str): Path to module in local repo - module_repo (str): Remote repo for module + component_type (str): modules or subworkflows + component_name (str): Name of module/subowrkflow + component_path (str): Path to module/subworkflow in local repo + modules_repo (str): Remote repo for module/subworkflow Returns: commit_sha (str): The latest commit SHA where local files are identical to remote files, or None if no commit is found """ - # Find the correct commit SHA for the local module files. - # We iterate over the commit history for the module until we find + # Find the correct commit SHA for the local module/subworkflow files. 
+ # We iterate over the commit history for the module/subworkflow until we find # a revision that matches the file contents - commit_shas = (commit["git_sha"] for commit in modules_repo.get_module_git_log(module_name, depth=1000)) + commit_shas = ( + commit["git_sha"] + for commit in modules_repo.get_component_git_log(component_name, component_type, depth=1000) + ) for commit_sha in commit_shas: - if all(modules_repo.module_files_identical(module_name, module_path, commit_sha).values()): + if all(modules_repo.module_files_identical(component_name, component_path, commit_sha).values()): return commit_sha return None - def move_module_to_local(self, module, repo_name): + def move_component_to_local(self, component_type, component, repo_name): """ - Move a module to the 'local' directory + Move a module/subworkflow to the 'local' directory Args: - module (str): The name of the modules + component (str): The name of the module/subworkflow repo_name (str): The name of the repository the module resides in """ - current_path = self.modules_dir / repo_name / module - local_modules_dir = self.modules_dir / "local" - if not local_modules_dir.exists(): - local_modules_dir.mkdir() - - to_name = module + if component_type == "modules": + directory = self.modules_dir + elif component_type == "subworkflows": + directory = self.subworkflows_dir + current_path = directory / repo_name / component + local_dir = directory / "local" + if not local_dir.exists(): + local_dir.mkdir() + + to_name = component # Check if there is already a subdirectory with the name - while (local_modules_dir / to_name).exists(): + while (local_dir / to_name).exists(): # Add a time suffix to the path to make it unique # (do it again and again if it didn't work out...) to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}" - shutil.move(current_path, local_modules_dir / to_name) + shutil.move(current_path, local_dir / to_name) - def unsynced_modules(self): + def unsynced_components(self): """ - Compute the difference between the modules in the directory and the - modules in the 'modules.json' file. This is done by looking at all + Compute the difference between the modules/subworkflows in the directory and the + modules/subworkflows in the 'modules.json' file. 
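The SHA search in `find_correct_commit_sha()` above reduces to a linear scan of the remote commit log. A self-contained sketch, where `git_log` and `files_identical` stand in for the `ModulesRepo` methods used by the real code:

```python
def first_matching_sha(git_log, files_identical):
    """Return the newest commit SHA whose files match the local copy."""
    for entry in git_log:  # entries shaped like {"git_sha": "..."}
        sha = entry["git_sha"]
        if all(files_identical(sha).values()):
            return sha
    return None


# e.g. returns "bbb", because only that commit matches the local files:
first_matching_sha(
    [{"git_sha": "aaa"}, {"git_sha": "bbb"}],
    lambda sha: {"main.nf": sha == "bbb"},
)
```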
This is done by looking at all directories containing a 'main.nf' file Returns: (untrack_dirs ([ Path ]), missing_installation (dict)): Directories that are not tracked - by the modules.json file, and modules in the modules.json where + by the modules.json file, and modules/subworkflows in the modules.json where the installation directory is missing """ + # Add all modules from modules.json to missing_installation missing_installation = copy.deepcopy(self.modules_json["repos"]) - dirs = [ + # Obtain the path of all installed modules + module_dirs = [ Path(dir_name).relative_to(self.modules_dir) for dir_name, _, file_names in os.walk(self.modules_dir) if "main.nf" in file_names and not str(Path(dir_name).relative_to(self.modules_dir)).startswith("local") ] + untracked_dirs_modules, missing_installation = self.parse_dirs(module_dirs, missing_installation, "modules") + + # Obtain the path of all installed subworkflows + subworkflow_dirs = [ + Path(dir_name).relative_to(self.subworkflows_dir) + for dir_name, _, file_names in os.walk(self.subworkflows_dir) + if "main.nf" in file_names + and not str(Path(dir_name).relative_to(self.subworkflows_dir)).startswith("local") + ] + untracked_dirs_subworkflows, missing_installation = self.parse_dirs( + subworkflow_dirs, missing_installation, "subworkflows" + ) + + return untracked_dirs_modules, untracked_dirs_subworkflows, missing_installation + + def parse_dirs(self, dirs, missing_installation, component_type): untracked_dirs = [] - for dir in dirs: - # Check if the modules directory exists - module_repo_name = None + for dir_ in dirs: + # Check if the module/subworkflow directory exists in modules.json + install_dir = dir_.parts[0] + component = str(Path(*dir_.parts[1:])) + component_in_file = False + git_url = None for repo in missing_installation: - if str(dir).startswith(repo + os.sep): - module_repo_name = repo - break - if module_repo_name is not None: - # If it does, check if the module is in the 'modules.json' file - module = str(dir.relative_to(module_repo_name)) - module_repo = missing_installation[module_repo_name] - - if module not in module_repo.get("modules", {}): - untracked_dirs.append(dir) - else: - # Check if the entry has a git sha and branch before removing - modules = module_repo["modules"] - if "git_sha" not in modules[module] or "branch" not in modules[module]: - self.determine_module_branches_and_shas( - module, module_repo["git_url"], module_repo["base_path"], [module] - ) - module_repo["modules"].pop(module) - if len(module_repo["modules"]) == 0: - missing_installation.pop(module_repo_name) + if component_type in missing_installation[repo]: + for dir_name in missing_installation[repo][component_type]: + if component in missing_installation[repo][component_type][dir_name]: + component_in_file = True + git_url = repo + break + if not component_in_file: + # If it is not, add it to the list of missing components + untracked_dirs.append(component) else: - # If it is not, add it to the list of missing modules - untracked_dirs.append(dir) + # If it does, remove the component from missing_installation + module_repo = missing_installation[git_url] + # Check if the entry has a git sha and branch before removing + components_dict = module_repo[component_type][install_dir] + if "git_sha" not in components_dict[component] or "branch" not in components_dict[component]: + self.determine_branches_and_shas( + component_type, install_dir, git_url, [component] + ) + # Remove the module/subworkflow from
modules/subworkflows without installation + module_repo[component_type][install_dir].pop(component) + if len(module_repo[component_type][install_dir]) == 0: + # If no modules/subworkflows with missing installation left, remove the install_dir from missing_installation + missing_installation[git_url][component_type].pop(install_dir) + if len(module_repo[component_type]) == 0: + # If no modules/subworkflows with missing installation left, remove the component_type from missing_installation + missing_installation[git_url].pop(component_type) + if len(module_repo) == 0: + # If no modules/subworkflows with missing installation left, remove the git_url from missing_installation + missing_installation.pop(git_url) return untracked_dirs, missing_installation @@ -368,28 +465,36 @@ def has_git_url_and_modules(self): Returns: (bool): True if they are found for all repos, False otherwise """ - for repo_entry in self.modules_json.get("repos", {}).values(): - if "git_url" not in repo_entry or "modules" not in repo_entry: - log.warning(f"modules.json entry {repo_entry} does not have a git_url or modules entry") + for repo_url, repo_entry in self.modules_json.get("repos", {}).items(): + if "modules" not in repo_entry: + if "subworkflows" in repo_entry: + continue + log.warning(f"modules.json entry {repo_entry} does not have a modules entry") return False elif ( - not isinstance(repo_entry["git_url"], str) - or repo_entry["git_url"] == "" + not isinstance(repo_url, str) + or repo_url == "" + or not ( + repo_url.startswith("http") + or repo_url.startswith("ftp") + or repo_url.startswith("ssh") + or repo_url.startswith("git") + ) or not isinstance(repo_entry["modules"], dict) or repo_entry["modules"] == {} ): - log.warning(f"modules.json entry {repo_entry} has non-string or empty entries for git_url or modules") + log.debug(f"modules.json entry {repo_entry} has non-string or empty entries for git_url or modules.") return False return True - def reinstall_repo(self, repo_name, remote_url, module_entries): + def reinstall_repo(self, install_dir, remote_url, module_entries): """ Reinstall modules from a repository Args: - repo_name (str): The name of the repository + install_dir (str): The name of directory where modules are installed remote_url (str): The git url of the remote repository - modules ([ dict[str, dict[str, str]] ]): Module entries with + module_entries ([ dict[str, dict[str, str]] ]): Module entries with branch and git sha info Returns: @@ -409,114 +514,96 @@ def reinstall_repo(self, repo_name, remote_url, module_entries): for branch, modules in branches_and_mods.items(): try: - modules_repo = nf_core.modules.modules_repo.ModulesRepo(remote_url=remote_url, branch=branch) + modules_repo = ModulesRepo(remote_url=remote_url, branch=branch) except LookupError as e: log.error(e) failed_to_install.extend(modules) for module, sha in modules: - if not modules_repo.install_module(module, (self.modules_dir / repo_name), sha): - log.warning(f"Could not install module '{Path(repo_name, module)}' - removing from modules.json") + if not modules_repo.install_component(module, self.modules_dir / install_dir, sha, "modules"): + log.warning( + f"Could not install module '{Path(self.modules_dir, install_dir, module)}' - removing from modules.json" + ) failed_to_install.append(module) return failed_to_install def check_up_to_date(self): """ - Checks whether the modules installed in the directory + Checks whether the modules and subworkflows installed in the directory are consistent with the entries in the 
'modules.json' file and vice versa. - If a module has an entry in the 'modules.json' file but is missing in the directory, - we first try to reinstall the module from the remote and if that fails we remove the entry + If a module/subworkflow has an entry in the 'modules.json' file but is missing in the directory, + we first try to reinstall the module/subworkflow from the remote and if that fails we remove the entry in 'modules.json'. - If a module is installed but the entry in 'modules.json' is missing we iterate through + If a module/subworkflow is installed but the entry in 'modules.json' is missing we iterate through the commit log in the remote to try to determine the SHA. + + Check that we have the "installed_by" value in 'modules.json', otherwise add it. + Assume that the modules/subworkflows were installed by an nf-core command (don't track installed by subworkflows). """ try: self.load() if not self.has_git_url_and_modules(): raise UserWarning + # check that all "installed_by" entries are lists and not strings + # [these strings come from an older dev version, so this check can probably be removed in a future release] + for _, repo_entry in self.modules_json.get("repos", {}).items(): + for component_type in ["modules", "subworkflows"]: + if component_type in repo_entry: + for install_dir, install_dir_entry in repo_entry[component_type].items(): + for _, component in install_dir_entry.items(): + if "installed_by" in component and isinstance(component["installed_by"], str): + log.debug(f"Updating {component} in modules.json") + component["installed_by"] = [component["installed_by"]] except UserWarning: - log.info("The 'modules.json' file is not up to date. Recreating the 'module.json' file.") + log.info("The 'modules.json' file is not up to date. Recreating the 'modules.json' file.") self.create() - missing_from_modules_json, missing_installation = self.unsynced_modules() + # Get unsynced components + ( + modules_missing_from_modules_json, + subworkflows_missing_from_modules_json, + missing_installation, + ) = self.unsynced_components() - # If there are any modules left in 'modules.json' after all installed are removed, + # If there are any modules/subworkflows left in 'modules.json' after all installed are removed, # we try to reinstall them if len(missing_installation) > 0: - missing_but_in_mod_json = [ - f"'{repo}/{module}'" - for repo, contents in missing_installation.items() - for module in contents["modules"] - ] - log.info( - f"Reinstalling modules found in 'modules.json' but missing from directory: {', '.join(missing_but_in_mod_json)}" - ) - - remove_from_mod_json = {} - for repo, contents in missing_installation.items(): - module_entries = contents["modules"] - remote_url = contents["git_url"] - remove_from_mod_json[repo] = self.reinstall_repo(repo, remote_url, module_entries) - - # If the reinstall fails, we remove those entries in 'modules.json' - if sum(map(len, remove_from_mod_json.values())) > 0: - uninstallable_mods = [ - f"'{repo}/{module}'" for repo, modules in remove_from_mod_json.items() for module in modules - ] - if len(uninstallable_mods) == 1: - log.info(f"Was unable to reinstall {uninstallable_mods[0]}. Removing 'modules.json' entry") - else: - log.info( - f"Was unable to reinstall some modules. 
Removing 'modules.json' entries: {', '.join(uninstallable_mods)}" - ) - - for repo, module_entries in remove_from_mod_json.items(): - for module in module_entries: - self.modules_json["repos"][repo]["modules"].pop(module) - if len(self.modules_json["repos"][repo]["modules"]) == 0: - self.modules_json["repos"].pop(repo) - - # If some modules didn't have an entry in the 'modules.json' file + if "subworkflows" in [ + c_type for _, repo_content in missing_installation.items() for c_type in repo_content.keys() + ]: + self.resolve_missing_installation(missing_installation, "subworkflows") + if "modules" in [ + c_type for _, repo_content in missing_installation.items() for c_type in repo_content.keys() + ]: + self.resolve_missing_installation(missing_installation, "modules") + + # If some modules/subworkflows didn't have an entry in the 'modules.json' file # we try to determine the SHA from the commit log of the remote - if len(missing_from_modules_json) > 0: - format_missing = [f"'{dir}'" for dir in missing_from_modules_json] - if len(format_missing) == 1: - log.info(f"Recomputing commit SHA for module {format_missing[0]} which was missing from 'modules.json'") - else: - log.info( - f"Recomputing commit SHAs for modules which were missing from 'modules.json': {', '.join(format_missing)}" - ) - - # Get the remotes we are missing - tracked_repos = { - repo_name: (repo_entry["git_url"]) for repo_name, repo_entry in self.modules_json["repos"].items() - } - repos, _ = self.get_pipeline_module_repositories(self.modules_dir, tracked_repos) - - modules_with_repos = ( - (repo_name, str(dir.relative_to(repo_name))) - for dir in missing_from_modules_json - for repo_name in repos - if nf_core.utils.is_relative_to(dir, repo_name) - ) - - repos_with_modules = {} - for repo_name, module in modules_with_repos: - if repo_name not in repos_with_modules: - repos_with_modules[repo_name] = [] - repos_with_modules[repo_name].append(module) - - for repo_name, modules in repos_with_modules.items(): - remote_url = repos[repo_name] - repo_entry = self.determine_module_branches_and_shas(repo_name, remote_url, modules) - if repo_name in self.modules_json["repos"]: - self.modules_json["repos"][repo_name]["modules"].update(repo_entry) - else: - self.modules_json["repos"][repo_name] = { - "git_url": remote_url, - "modules": repo_entry, - } + if len(modules_missing_from_modules_json) > 0: + self.resolve_missing_from_modules_json(modules_missing_from_modules_json, "modules") + if len(subworkflows_missing_from_modules_json) > 0: + self.resolve_missing_from_modules_json(subworkflows_missing_from_modules_json, "subworkflows") + + # If the "installed_by" value is not present for modules/subworkflows, add it. 
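Taken together, this makes `check_up_to_date()` a single self-healing entry point for the file. A hypothetical usage sketch (the pipeline path is illustrative):

```python
from nf_core.modules.modules_json import ModulesJson

mj = ModulesJson("/path/to/pipeline")  # illustrative path
mj.check_up_to_date()  # reinstalls missing components, prunes dead entries
print(mj.get_all_components("modules"))
```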
+ for repo, repo_content in self.modules_json["repos"].items(): + for component_type, dir_content in repo_content.items(): + for install_dir, installed_components in dir_content.items(): + for component, component_features in installed_components.items(): + if "installed_by" not in component_features: + self.modules_json["repos"][repo][component_type][install_dir][component]["installed_by"] = [ + component_type + ] + + # Recreate "installed_by" entry + original_pipeline_components = self.pipeline_components + self.pipeline_components = None + subworkflows_dict = self.get_all_components("subworkflows") + if subworkflows_dict: + for repo, subworkflows in subworkflows_dict.items(): + for org, subworkflow in subworkflows: + self.recreate_dependencies(repo, org, subworkflow) + self.pipeline_components = original_pipeline_components self.dump() @@ -529,97 +616,157 @@ def load(self): Raises: UserWarning: If the modules.json file is not found """ - modules_json_path = os.path.join(self.dir, "modules.json") try: - with open(modules_json_path, "r") as fh: - self.modules_json = json.load(fh) + with open(self.modules_json_path, "r") as fh: + try: + self.modules_json = json.load(fh) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{self.modules_json_path}' due to error {e}") + except FileNotFoundError: raise UserWarning("File 'modules.json' is missing") - def update(self, modules_repo, module_name, module_version, write_file=True): + def update( + self, + component_type, + modules_repo, + component_name, + component_version, + installed_by, + installed_by_log=None, + write_file=True, + ): """ - Updates the 'module.json' file with new module info + Updates the 'modules.json' file with new module/subworkflow info Args: - modules_repo (ModulesRepo): A ModulesRepo object configured for the new module - module_name (str): Name of new module - module_version (str): git SHA for the new module entry + component_type (str): modules or subworkflows + modules_repo (ModulesRepo): A ModulesRepo object configured for the new module/subworkflow + component_name (str): Name of new module/subworkflow + component_version (str): git SHA for the new module/subworkflow entry + installed_by (str): Name of the command or subworkflow that installed the component + installed_by_log (list): previous tracing of installed_by that needs to be added to 'modules.json' + write_file (bool): whether to write the updated modules.json to a file.
+ + Returns: + bool: True if the module/subworkflow was successfully added to the 'modules.json' file """ + if installed_by_log is None: + installed_by_log = [] + if self.modules_json is None: self.load() - repo_name = modules_repo.fullname + repo_name = modules_repo.repo_path remote_url = modules_repo.remote_url branch = modules_repo.branch - if repo_name not in self.modules_json["repos"]: - self.modules_json["repos"][repo_name] = {"modules": {}, "git_url": remote_url} - repo_modules_entry = self.modules_json["repos"][repo_name]["modules"] - if module_name not in repo_modules_entry: - repo_modules_entry[module_name] = {} - repo_modules_entry[module_name]["git_sha"] = module_version - repo_modules_entry[module_name]["branch"] = branch + if remote_url not in self.modules_json["repos"]: + self.modules_json["repos"][remote_url] = {component_type: {repo_name: {}}} + if component_type not in self.modules_json["repos"][remote_url]: + self.modules_json["repos"][remote_url][component_type] = {repo_name: {}} + repo_component_entry = self.modules_json["repos"][remote_url][component_type][repo_name] + if component_name not in repo_component_entry: + repo_component_entry[component_name] = {} + repo_component_entry[component_name]["git_sha"] = component_version + repo_component_entry[component_name]["branch"] = branch + try: + if installed_by not in repo_component_entry[component_name]["installed_by"] and installed_by is not None: + repo_component_entry[component_name]["installed_by"].append(installed_by) + except KeyError: + repo_component_entry[component_name]["installed_by"] = [installed_by] + finally: + new_installed_by = repo_component_entry[component_name]["installed_by"] + list(installed_by_log) + repo_component_entry[component_name]["installed_by"] = [*set(new_installed_by)] # Sort the 'modules.json' repo entries self.modules_json["repos"] = nf_core.utils.sort_dictionary(self.modules_json["repos"]) if write_file: self.dump() + return True - def remove_entry(self, module_name, repo_name): + def remove_entry(self, component_type, name, repo_url, install_dir, removed_by=None): """ Removes an entry from the 'modules.json' file. 
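`remove_entry()` effectively reference-counts a component through its 'installed_by' list: the entry is only deleted once nothing is recorded as having installed it. The core idea in isolation (a sketch with illustrative names, not the shipped code):

```python
def release(entry, removed_by):
    """Drop one installer reference; True means the entry can be deleted."""
    installers = entry.setdefault("installed_by", [])
    if removed_by in installers:
        installers.remove(removed_by)
    return len(installers) == 0


entry = {"installed_by": ["modules", "bam_sort_stats_samtools"]}
release(entry, "modules")  # False - still needed by the subworkflow
release(entry, "bam_sort_stats_samtools")  # True - safe to remove
```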
Args: - module_name (str): Name of the module to be removed - repo_name (str): Name of the repository containing the module + component_type (str): Type of component [modules, subworkflows] + name (str): Name of the component to be removed + repo_url (str): URL of the repository containing the component + install_dir (str): Name of the directory where components are installed + removed_by (str): Name of the component that wants to remove the component Returns: - (bool): True if the removal was successful, False otherwise + (bool): return True if the component was removed, False if it was not found or is still depended on """ + + if removed_by is None or removed_by == name: + removed_by = component_type if not self.modules_json: return False - if repo_name in self.modules_json.get("repos", {}): - repo_entry = self.modules_json["repos"][repo_name] - if module_name in repo_entry.get("modules", {}): - repo_entry["modules"].pop(module_name) + if repo_url in self.modules_json.get("repos", {}): + repo_entry = self.modules_json["repos"][repo_url] + if name in repo_entry[component_type].get(install_dir, {}): + if removed_by in repo_entry[component_type][install_dir][name]["installed_by"]: + self.modules_json["repos"][repo_url][component_type][install_dir][name]["installed_by"].remove( + removed_by + ) + # clean up empty entries + if len(repo_entry[component_type][install_dir][name]["installed_by"]) == 0: + self.modules_json["repos"][repo_url][component_type][install_dir].pop(name) + if len(repo_entry[component_type][install_dir]) == 0: + self.modules_json["repos"][repo_url].pop(component_type) + if len(repo_entry) == 0: + self.modules_json["repos"].pop(repo_url) + # write the updated modules.json file + self.dump() + return True + self.dump() + return False else: - log.warning(f"Module '{repo_name}/{module_name}' is missing from 'modules.json' file.") + log.warning( + f"{component_type[:-1].title()} '{install_dir}/{name}' is missing from 'modules.json' file." 
+ ) return False - if len(repo_entry["modules"]) == 0: - self.modules_json["repos"].pop(repo_name) + else: - log.warning(f"Module '{repo_name}/{module_name}' is missing from 'modules.json' file.") + log.warning(f"{component_type[:-1].title()} '{install_dir}/{name}' is missing from 'modules.json' file.") return False - self.dump() - return True + return False - def add_patch_entry(self, module_name, repo_name, patch_filename, write_file=True): + def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, write_file=True): """ Adds (or replaces) the patch entry for a module """ if self.modules_json is None: self.load() - if repo_name not in self.modules_json["repos"]: - raise LookupError(f"Repo '{repo_name}' not present in 'modules.json'") - if module_name not in self.modules_json["repos"][repo_name]["modules"]: - raise LookupError(f"Module '{repo_name}/{module_name}' not present in 'modules.json'") - self.modules_json["repos"][repo_name]["modules"][module_name]["patch"] = str(patch_filename) + if repo_url not in self.modules_json["repos"]: + raise LookupError(f"Repo '{repo_url}' not present in 'modules.json'") + if module_name not in self.modules_json["repos"][repo_url]["modules"][install_dir]: + raise LookupError(f"Module '{install_dir}/{module_name}' not present in 'modules.json'") + self.modules_json["repos"][repo_url]["modules"][install_dir][module_name]["patch"] = str(patch_filename) if write_file: self.dump() - def get_patch_fn(self, module_name, repo_name): + def get_patch_fn(self, module_name, repo_url, install_dir): """ Get the patch filename of a module Args: module_name (str): The name of the module - repo_name (str): The name of the repository containing the module + repo_url (str): The URL of the repository containing the module + install_dir (str): The name of the directory where modules are installed Returns: (str): The patch filename for the module, None if not present """ if self.modules_json is None: self.load() - path = self.modules_json["repos"].get(repo_name, {}).get("modules").get(module_name, {}).get("patch") + path = ( + self.modules_json["repos"] + .get(repo_url, {}) + .get("modules", {}) + .get(install_dir, {}) + .get(module_name, {}) + .get("patch") + ) return Path(path) if path is not None else None def try_apply_patch_reverse(self, module, repo_name, patch_relpath, module_dir): @@ -670,18 +817,21 @@ def repo_present(self, repo_name): self.load() return repo_name in self.modules_json.get("repos", {}) - def module_present(self, module_name, repo_name): + def module_present(self, module_name, repo_url, install_dir): """ Checks if a module is present in the modules.json file Args: module_name (str): Name of the module - repo_name (str): Name of the repository + repo_url (str): URL of the repository + install_dir (str): Name of the directory where modules are installed Returns: (bool): Whether the module is present in the 'modules.json' file """ if self.modules_json is None: self.load() - return module_name in self.modules_json.get("repos", {}).get(repo_name, {}).get("modules", {}) + return module_name in self.modules_json.get("repos", {}).get(repo_url, {}).get("modules", {}).get( + install_dir, {} + ) def get_modules_json(self): """ @@ -694,13 +844,37 @@ self.load() return copy.deepcopy(self.modules_json) - def get_module_version(self, module_name, repo_name): + def get_component_version(self, component_type, component_name, repo_url, install_dir): + """ + Returns the version of a module or subworkflow + + Args: + component_name
(str): Name of the module/subworkflow + repo_url (str): URL of the repository + install_dir (str): Name of the directory where modules/subworkflows are installed + + Returns: + (str): The git SHA of the module/subworkflow if it exists, None otherwise + """ + if self.modules_json is None: + self.load() + return ( + self.modules_json.get("repos", {}) + .get(repo_url, {}) + .get(component_type, {}) + .get(install_dir, {}) + .get(component_name, {}) + .get("git_sha", None) + ) + + def get_module_version(self, module_name, repo_url, install_dir): """ Returns the version of a module Args: module_name (str): Name of the module - repo_name (str): Name of the repository + repo_url (str): URL of the repository + install_dir (str): Name of the directory where modules are installed Returns: (str): The git SHA of the module if it exists, None otherwise @@ -709,59 +883,140 @@ self.load() return ( self.modules_json.get("repos", {}) - .get(repo_name, {}) + .get(repo_url, {}) .get("modules", {}) + .get(install_dir, {}) .get(module_name, {}) .get("git_sha", None) ) - def get_git_url(self, repo_name): + def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir): """ - Returns the git url of a repo + Returns the version of a subworkflow Args: - repo_name (str): Name of the repository + subworkflow_name (str): Name of the subworkflow + repo_url (str): URL of the repository + install_dir (str): Name of the directory where subworkflows are installed Returns: - (str): The git url of the repository if it exists, None otherwise + (str): The git SHA of the subworkflow if it exists, None otherwise """ if self.modules_json is None: self.load() - return self.modules_json.get("repos", {}).get(repo_name, {}).get("git_url", None) + return ( + self.modules_json.get("repos", {}) + .get(repo_url, {}) + .get("subworkflows", {}) + .get(install_dir, {}) + .get(subworkflow_name, {}) + .get("git_sha", None) + ) - def get_all_modules(self): + def get_all_components(self, component_type): """ - Retrieves all pipeline modules that are reported in the modules.json + Retrieves all pipeline modules/subworkflows that are reported in the modules.json Returns: - (dict[str, [str]]): Dictionary indexed with the repo names, with a - list of modules as values + (dict[str, [(str, str)]]): Dictionary indexed with the repo urls, with a + list of tuples (component_dir, components) as values """ if self.modules_json is None: self.load() - if self.pipeline_modules is None: - self.pipeline_modules = {} + if self.pipeline_components is None: + self.pipeline_components = {} for repo, repo_entry in self.modules_json.get("repos", {}).items(): - if "modules" in repo_entry: - self.pipeline_modules[repo] = list(repo_entry["modules"]) + if component_type in repo_entry: + for dir, components in repo_entry[component_type].items(): + self.pipeline_components[repo] = [(dir, m) for m in components] + + return self.pipeline_components + + def get_dependent_components( + self, + component_type, + name, + repo_url, + install_dir, + dependent_components, + ): + """ + Retrieves all pipeline modules/subworkflows that are reported in the modules.json + as being installed by the given component - return self.pipeline_modules + Args: + component_type (str): Type of component [modules, subworkflows] + name (str): Name of the component to find dependencies for + repo_url (str): URL of the repository containing the components + install_dir (str): Name of the directory where components are installed - def
get_module_branch(self, module, repo_name): + Returns: + (dict[str: str,]): Dictionary indexed with the component names, with component_type as value """ - Gets the branch from which the module was installed + + if self.modules_json is None: + self.load() + component_types = ["modules"] if component_type == "modules" else ["modules", "subworkflows"] + # Find all components that have an entry of install by of a given component, recursively call this function for subworkflows + for type in component_types: + try: + components = self.modules_json["repos"][repo_url][type][install_dir].items() + except KeyError as e: + # This exception will raise when there are only modules installed + log.debug(f"Trying to retrieve all {type}. There aren't {type} installed. Failed with error {e}") + continue + for component_name, component_entry in components: + if name in component_entry["installed_by"]: + dependent_components[component_name] = type + + return dependent_components + + def get_installed_by_entries(self, component_type, name): + """ + Retrieves all entries of installed_by for a given component + + Args: + component_type (str): Type of component [modules, subworkflows] + name (str): Name of the component to find dependencies for + + Returns: + (list): The list of installed_by entries + + """ + if self.modules_json is None: + self.load() + installed_by_entries = {} + for repo_url, repo_entry in self.modules_json.get("repos", {}).items(): + if component_type in repo_entry: + for install_dir, components in repo_entry[component_type].items(): + if name in components: + installed_by_entries = components[name]["installed_by"] + break + + return installed_by_entries + + def get_component_branch(self, component_type, component, repo_url, install_dir): + """ + Gets the branch from which the module/subworkflow was installed Returns: (str): The branch name Raises: - LookupError: If their is no branch entry in the `modules.json` + LookupError: If there is no branch entry in the `modules.json` """ if self.modules_json is None: self.load() - branch = self.modules_json["repos"].get(repo_name, {}).get("modules", {}).get(module, {}).get("branch") + branch = ( + self.modules_json["repos"] + .get(repo_url, {}) + .get(component_type, {}) + .get(install_dir, {}) + .get(component, {}) + .get("branch") + ) if branch is None: raise LookupError( - f"Could not find branch information for module '{Path(repo_name, module)}'." + f"Could not find branch information for component '{Path(install_dir, component)}'." 
f"Please remove the 'modules.json' and rerun the command to recreate it" ) return branch @@ -772,15 +1027,141 @@ def dump(self): """ # Sort the modules.json self.modules_json["repos"] = nf_core.utils.sort_dictionary(self.modules_json["repos"]) - modules_json_path = os.path.join(self.dir, "modules.json") - with open(modules_json_path, "w") as fh: - json.dump(self.modules_json, fh, indent=4) - fh.write("\n") + dump_json_with_prettier(self.modules_json_path, self.modules_json) + + def resolve_missing_installation(self, missing_installation, component_type): + missing_but_in_mod_json = [ + f"'{component_type}/{install_dir}/{component}'" + for repo_url, contents in missing_installation.items() + for install_dir, dir_contents in contents[component_type].items() + for component in dir_contents + ] + log.info( + f"Reinstalling {component_type} found in 'modules.json' but missing from directory: {', '.join(missing_but_in_mod_json)}" + ) - def __str__(self): - if self.modules_json is None: - self.load() - return json.dumps(self.modules_json, indent=4) + remove_from_mod_json = {} + for repo_url, contents in missing_installation.items(): + for install_dir, component_entries in contents[component_type].items(): + remove_from_mod_json[(repo_url, install_dir)] = self.reinstall_repo( + install_dir, repo_url, component_entries + ) - def __repr__(self): - return self.__str__() + # If the reinstall fails, we remove those entries in 'modules.json' + if sum(map(len, remove_from_mod_json.values())) > 0: + uninstallable_components = [ + f"'{install_dir}/{component}'" + for (repo_url, install_dir), components in remove_from_mod_json.items() + for component in components + ] + if len(uninstallable_components) == 1: + log.info(f"Was unable to reinstall {uninstallable_components[0]}. Removing 'modules.json' entry") + else: + log.info( + f"Was unable to reinstall some {component_type}. 
Removing 'modules.json' entries: {', '.join(uninstallable_components)}" + ) + + for (repo_url, install_dir), component_entries in remove_from_mod_json.items(): + for component in component_entries: + self.modules_json["repos"][repo_url][component_type][install_dir].pop(component) + if len(self.modules_json["repos"][repo_url][component_type][install_dir]) == 0: + self.modules_json["repos"].pop(repo_url) + + def resolve_missing_from_modules_json(self, missing_from_modules_json, component_type): + format_missing = [f"'{dir}'" for dir in missing_from_modules_json] + if len(format_missing) == 1: + log.info( + f"Recomputing commit SHA for {component_type[:-1]} {format_missing[0]} which was missing from 'modules.json'" + ) + else: + log.info( + f"Recomputing commit SHAs for {component_type} which were missing from 'modules.json': {', '.join(format_missing)}" + ) + + # Get the remotes we are missing + tracked_repos = {repo_url: (repo_entry) for repo_url, repo_entry in self.modules_json["repos"].items()} + repos, _ = self.get_pipeline_module_repositories(component_type, self.modules_dir, tracked_repos) + + # Get tuples of components that miss installation and their install directory + def components_with_repos(): + for dir in missing_from_modules_json: + for repo_url in repos: + modules_repo = ModulesRepo(repo_url) + paths_in_directory = [] + repo_url_path = Path( + self.modules_dir, + modules_repo.repo_path, + ) + for dir_name, _, _ in os.walk(repo_url_path): + if component_type == "modules": + if len(Path(dir).parts) > 1: # The module name is TOOL/SUBTOOL + paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) + pass + paths_in_directory.append(Path(dir_name).parts[-1]) + if dir in paths_in_directory: + yield (modules_repo.repo_path, dir) + + # Add all components into a dictionary with install directories + repos_with_components = {} + for install_dir, component in components_with_repos(): + if install_dir not in repos_with_components: + repos_with_components[install_dir] = [] + repos_with_components[install_dir].append(component) + + for install_dir, components in repos_with_components.items(): + remote_url = [ + url + for url, content in repos.items() + for comp_type, install_directories in content.items() + if install_dir in install_directories + ][0] + repo_entry = self.determine_branches_and_shas(component_type, install_dir, remote_url, components) + try: + self.modules_json["repos"][remote_url][component_type][install_dir].update(repo_entry) + except KeyError: + try: + self.modules_json["repos"][remote_url][component_type].update({install_dir: repo_entry}) + except KeyError: + try: + self.modules_json["repos"][remote_url].update( + { + component_type: { + install_dir: repo_entry, + } + } + ) + except KeyError: + self.modules_json["repos"].update( + { + remote_url: { + component_type: { + install_dir: repo_entry, + } + } + } + ) + + def recreate_dependencies(self, repo, org, subworkflow): + """ + Try to recreate the installed_by entries for subworkflows. 
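The nested try/except KeyError cascade in `resolve_missing_from_modules_json()` above simply creates whichever levels of the repos tree are missing before merging in the new entry. An equivalent, more compact sketch using `dict.setdefault` (not the shipped code):

```python
def upsert_repo_entry(modules_json, remote_url, component_type, install_dir, repo_entry):
    """Create any missing levels of the repos tree, then merge in repo_entry."""
    modules_json["repos"].setdefault(remote_url, {}).setdefault(
        component_type, {}
    ).setdefault(install_dir, {}).update(repo_entry)


mj = {"repos": {}}
upsert_repo_entry(mj, "https://github.com/nf-core/modules.git", "modules", "nf-core", {"fastqc": {}})
```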
+ Remove self installation entry from dependencies, assuming that the modules.json has been freshly created, + i.e., no module or subworkflow has been installed by the user in the meantime + """ + + sw_path = Path(self.subworkflows_dir, org, subworkflow) + dep_mods, dep_subwfs = get_components_to_install(sw_path) + + for dep_mod in dep_mods: + installed_by = self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] + if installed_by == ["modules"]: + self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] = [] + if subworkflow not in installed_by: + self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"].append(subworkflow) + + for dep_subwf in dep_subwfs: + installed_by = self.modules_json["repos"][repo]["subworkflows"][org][dep_subwf]["installed_by"] + if installed_by == ["subworkflows"]: + self.modules_json["repos"][repo]["subworkflows"][org][dep_subwf]["installed_by"] = [] + if subworkflow not in installed_by: + self.modules_json["repos"][repo]["subworkflows"][org][dep_subwf]["installed_by"].append(subworkflow) + self.recreate_dependencies(repo, org, dep_subwf) diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 1bb2770f33..5f77148867 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -5,17 +5,18 @@ from pathlib import Path import git +import rich import rich.progress -from git.exc import GitCommandError +from git.exc import GitCommandError, InvalidGitRepositoryError -import nf_core.modules.module_utils import nf_core.modules.modules_json -from nf_core.utils import NFCORE_DIR +import nf_core.modules.modules_utils +from nf_core.utils import NFCORE_DIR, load_tools_config log = logging.getLogger(__name__) # Constants for the nf-core/modules repo used throughout the module files -NF_CORE_MODULES_NAME = "nf-core/modules" +NF_CORE_MODULES_NAME = "nf-core" NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" NF_CORE_MODULES_DEFAULT_BRANCH = "master" @@ -57,7 +58,7 @@ def update(self, op_code, cur_count, max_count=None, message=""): ) -class ModulesRepo(object): +class ModulesRepo: """ An object to store details about the repository being used for modules. @@ -125,19 +126,46 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa self.remote_url = remote_url - self.fullname = nf_core.modules.module_utils.path_from_remote(self.remote_url) + self.fullname = nf_core.modules.modules_utils.repo_full_name_from_remote(self.remote_url) self.setup_local_repo(remote_url, branch, hide_progress) + config_fn, repo_config = load_tools_config(self.local_repo_dir) + try: + self.repo_path = repo_config["org_path"] + except KeyError: + raise UserWarning(f"'org_path' key not present in {config_fn.name}") + # Verify that the repo seems to be correctly configured - if self.fullname != NF_CORE_MODULES_NAME or self.branch: + if self.repo_path != NF_CORE_MODULES_NAME or self.branch: self.verify_branch() # Convenience variable - self.modules_dir = os.path.join(self.local_repo_dir, "modules") + self.modules_dir = os.path.join(self.local_repo_dir, "modules", self.repo_path) + self.subworkflows_dir = os.path.join(self.local_repo_dir, "subworkflows", self.repo_path) self.avail_module_names = None + def verify_sha(self, prompt, sha): + """ + Verify that 'sha' and 'prompt' arguments are not provided together. + Verify that the provided SHA exists in the repo. 
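A hypothetical call pattern for the new `verify_sha()` guard (the URL is the nf-core default; the SHA is a placeholder):

```python
from nf_core.modules.modules_repo import ModulesRepo

repo = ModulesRepo(remote_url="https://github.com/nf-core/modules.git")
if not repo.verify_sha(prompt=False, sha="0123abc"):  # placeholder SHA
    raise SystemExit("Cannot combine --sha with --prompt, or SHA not found on the branch")
```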
+ + Arguments: + prompt (bool): prompt asking for SHA + sha (str): provided sha + """ + if prompt and sha is not None: + log.error("Cannot use '--sha' and '--prompt' at the same time!") + return False + + if sha: + if not self.sha_exists_on_branch(sha): + log.error(f"Commit SHA '{sha}' doesn't exist in '{self.remote_url}'") + return False + + return True + def setup_local_repo(self, remote, branch, hide_progress=True): """ Sets up the local git repository. If the repository has been cloned previously, it @@ -150,55 +178,64 @@ def setup_local_repo(self, remote, branch, hide_progress=True): Sets self.repo """ self.local_repo_dir = os.path.join(NFCORE_DIR, self.fullname) - if not os.path.exists(self.local_repo_dir): - try: - pbar = rich.progress.Progress( - "[bold blue]{task.description}", - rich.progress.BarColumn(bar_width=None), - "[bold yellow]{task.fields[state]}", - transient=True, - disable=hide_progress, - ) - with pbar: - self.repo = git.Repo.clone_from( - remote, - self.local_repo_dir, - progress=RemoteProgressbar(pbar, self.fullname, self.remote_url, "Cloning"), + try: + if not os.path.exists(self.local_repo_dir): + try: + pbar = rich.progress.Progress( + "[bold blue]{task.description}", + rich.progress.BarColumn(bar_width=None), + "[bold yellow]{task.fields[state]}", + transient=True, + disable=hide_progress or os.environ.get("HIDE_PROGRESS", None) is not None, ) - ModulesRepo.update_local_repo_status(self.fullname, True) - except GitCommandError: - raise LookupError(f"Failed to clone from the remote: `{remote}`") - # Verify that the requested branch exists by checking it out - self.setup_branch(branch) - else: - self.repo = git.Repo(self.local_repo_dir) - - if ModulesRepo.no_pull_global: - ModulesRepo.update_local_repo_status(self.fullname, True) - # If the repo is already cloned, fetch the latest changes from the remote - if not ModulesRepo.local_repo_synced(self.fullname): - pbar = rich.progress.Progress( - "[bold blue]{task.description}", - rich.progress.BarColumn(bar_width=None), - "[bold yellow]{task.fields[state]}", - transient=True, - disable=hide_progress, - ) - with pbar: - self.repo.remotes.origin.fetch( - progress=RemoteProgressbar(pbar, self.fullname, self.remote_url, "Pulling") + with pbar: + self.repo = git.Repo.clone_from( + remote, + self.local_repo_dir, + progress=RemoteProgressbar(pbar, self.fullname, self.remote_url, "Cloning"), + ) + ModulesRepo.update_local_repo_status(self.fullname, True) + except GitCommandError: + raise LookupError(f"Failed to clone from the remote: `{remote}`") + # Verify that the requested branch exists by checking it out + self.setup_branch(branch) + else: + self.repo = git.Repo(self.local_repo_dir) + + if ModulesRepo.no_pull_global: + ModulesRepo.update_local_repo_status(self.fullname, True) + # If the repo is already cloned, fetch the latest changes from the remote + if not ModulesRepo.local_repo_synced(self.fullname): + pbar = rich.progress.Progress( + "[bold blue]{task.description}", + rich.progress.BarColumn(bar_width=None), + "[bold yellow]{task.fields[state]}", + transient=True, + disable=hide_progress or os.environ.get("HIDE_PROGRESS", None) is not None, ) - ModulesRepo.update_local_repo_status(self.fullname, True) - - # Before verifying the branch, fetch the changes - # Verify that the requested branch exists by checking it out - self.setup_branch(branch) - - # Now merge the changes - tracking_branch = self.repo.active_branch.tracking_branch() - if tracking_branch is None: - raise LookupError(f"There is no remote tracking 
branch '{self.branch}' in '{self.remote_url}'") - self.repo.git.merge(tracking_branch.name) + with pbar: + self.repo.remotes.origin.fetch( + progress=RemoteProgressbar(pbar, self.fullname, self.remote_url, "Pulling") + ) + ModulesRepo.update_local_repo_status(self.fullname, True) + + # Before verifying the branch, fetch the changes + # Verify that the requested branch exists by checking it out + self.setup_branch(branch) + + # Now merge the changes + tracking_branch = self.repo.active_branch.tracking_branch() + if tracking_branch is None: + raise LookupError(f"There is no remote tracking branch '{self.branch}' in '{self.remote_url}'") + self.repo.git.merge(tracking_branch.name) + except (GitCommandError, InvalidGitRepositoryError) as e: + log.error(f"[red]Could not set up local cache of modules repository:[/]\n{e}\n") + if rich.prompt.Confirm.ask(f"[violet]Delete local cache '{self.local_repo_dir}' and try again?"): + log.info(f"Removing '{self.local_repo_dir}'") + shutil.rmtree(self.local_repo_dir) + self.setup_local_repo(remote, branch, hide_progress) + else: + raise LookupError("Exiting due to error with local modules git repo") def setup_branch(self, branch): """ @@ -210,7 +247,7 @@ def setup_branch(self, branch): """ if branch is None: # Don't bother fetching default branch if we're using nf-core - if self.fullname == NF_CORE_MODULES_NAME: + if self.remote_url == NF_CORE_MODULES_REMOTE: self.branch = "master" else: self.branch = self.get_default_branch() @@ -235,7 +272,7 @@ def branch_exists(self): try: self.checkout_branch() except GitCommandError: - raise LookupError(f"Branch '{self.branch}' not found in '{self.fullname}'") + raise LookupError(f"Branch '{self.branch}' not found in '{self.remote_url}'") def verify_branch(self): """ @@ -243,7 +280,7 @@ def verify_branch(self): """ dir_names = os.listdir(self.local_repo_dir) if "modules" not in dir_names: - err_str = f"Repository '{self.fullname}' ({self.branch}) does not contain the 'modules/' directory" + err_str = f"Repository '{self.remote_url}' ({self.branch}) does not contain the 'modules/' directory" if "software" in dir_names: err_str += ( ".\nAs of nf-core/tools version 2.0, the 'software/' directory should be renamed to 'modules/'" @@ -265,38 +302,41 @@ def checkout(self, commit): """ self.repo.git.checkout(commit) - def module_exists(self, module_name, checkout=True): + def component_exists(self, component_name, component_type, checkout=True, commit=None): """ - Check if a module exists in the branch of the repo + Check if a module/subworkflow exists in the branch of the repo Args: - module_name (str): The name of the module + component_name (str): The name of the module/subworkflow Returns: - (bool): Whether the module exists in this branch of the repository + (bool): Whether the module/subworkflow exists in this branch of the repository """ - return module_name in self.get_avail_modules(checkout=checkout) + return component_name in self.get_avail_components(component_type, checkout=checkout, commit=commit) - def get_module_dir(self, module_name): + def get_component_dir(self, component_name, component_type): """ - Returns the file path of a module directory in the repo. + Returns the file path of a module/subworkflow directory in the repo. Does not verify that the path exists. 
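As a point of reference for the path handling below, a standalone sketch of how a component directory is assembled; the cache location and names are illustrative, not the values nf-core/tools uses:

```python
# Standalone sketch (illustrative paths): components live under
# '<local_repo_dir>/<component_type>/<org_path>/<component_name>'.
import os

def component_dir(local_repo_dir, org_path, component_name, component_type):
    # component_type is either "modules" or "subworkflows"
    return os.path.join(local_repo_dir, component_type, org_path, component_name)

# e.g. a hypothetical local clone of nf-core/modules:
print(component_dir("/home/user/.nfcore/nf-core/modules", "nf-core", "fastqc", "modules"))
# -> /home/user/.nfcore/nf-core/modules/modules/nf-core/fastqc
```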
        Args:
-            module_name (str): The name of the module
+            component_name (str): The name of the module/subworkflow

        Returns:
-            module_path (str): The path of the module in the local copy of the repository
+            component_path (str): The path of the module/subworkflow in the local copy of the repository
        """
-        return os.path.join(self.modules_dir, module_name)
+        if component_type == "modules":
+            return os.path.join(self.modules_dir, component_name)
+        elif component_type == "subworkflows":
+            return os.path.join(self.subworkflows_dir, component_name)

-    def install_module(self, module_name, install_dir, commit):
+    def install_component(self, component_name, install_dir, commit, component_type):
        """
-        Install the module files into a pipeline at the given commit
+        Install the module/subworkflow files into a pipeline at the given commit

        Args:
-            module_name (str): The name of the module
-            install_dir (str): The path where the module should be installed
-            commit (str): The git SHA for the version of the module to be installed
+            component_name (str): The name of the module/subworkflow
+            install_dir (str): The path where the module/subworkflow should be installed
+            commit (str): The git SHA for the version of the module/subworkflow to be installed

        Returns:
            (bool): Whether the operation was successful or not
@@ -307,13 +347,15 @@ def install_module(self, module_name, install_dir, commit):
        except git.GitCommandError:
            return False

-        # Check if the module exists in the branch
-        if not self.module_exists(module_name, checkout=False):
-            log.error(f"The requested module does not exists in the '{self.branch}' of {self.fullname}'")
+        # Check if the module/subworkflow exists in the branch
+        if not self.component_exists(component_name, component_type, checkout=False):
+            log.error(
+                f"The requested {component_type[:-1]} does not exist in the branch '{self.branch}' of '{self.remote_url}'"
+            )
            return False

        # Copy the files from the repo to the install folder
-        shutil.copytree(self.get_module_dir(module_name), os.path.join(install_dir, module_name))
+        shutil.copytree(self.get_component_dir(component_name, component_type), Path(install_dir, component_name))

        # Switch back to the tip of the branch
        self.checkout_branch()
@@ -334,8 +376,8 @@ def module_files_identical(self, module_name, base_path, commit):
        else:
            self.checkout(commit)
        module_files = ["main.nf", "meta.yml"]
-        module_dir = self.get_module_dir(module_name)
        files_identical = {file: True for file in module_files}
+        module_dir = self.get_component_dir(module_name, "modules")
        for file in module_files:
            try:
                files_identical[file] = filecmp.cmp(os.path.join(module_dir, file), os.path.join(base_path, file))
@@ -345,33 +387,40 @@ def module_files_identical(self, module_name, base_path, commit):
            self.checkout_branch()
        return files_identical

-    def get_module_git_log(self, module_name, depth=None, since="2021-07-07T00:00:00Z"):
+    def get_component_git_log(self, component_name, component_type, depth=None):
        """
-        Fetches the commit history the of requested module since a given date. The default value is
+        Fetches the commit history of the requested module/subworkflow since a given date. The default value is
        not arbitrary - it is the last time the structure of the nf-core/modules repository
        had an update breaking backwards compatibility.
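A rough standalone illustration of the history lookup described in this docstring, using GitPython directly; the clone path and component path are placeholders:

```python
# Rough sketch with GitPython; the clone path and component path are
# placeholders, not values hard-coded in nf-core/tools.
import git

repo = git.Repo("/path/to/local/clone/of/nf-core/modules")
for commit in repo.iter_commits(max_count=5, paths="modules/nf-core/fastqc"):
    print(commit.hexsha[:10], commit.message.partition("\n")[0])
```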
Args: - module_name (str): Name of module + component_name (str): Name of module/subworkflow modules_repo (ModulesRepo): A ModulesRepo object configured for the repository in question - per_page (int): Number of commits per page returned by API - page_nbr (int): Page number of the retrieved commits - since (str): Only show commits later than this timestamp. - Time should be given in ISO-8601 format: YYYY-MM-DDTHH:MM:SSZ. Returns: ( dict ): Iterator of commit SHAs and associated (truncated) message """ self.checkout_branch() - module_path = os.path.join("modules", module_name) - commits = self.repo.iter_commits(max_count=depth, paths=module_path) - commits = ({"git_sha": commit.hexsha, "trunc_message": commit.message.partition("\n")[0]} for commit in commits) + component_path = os.path.join(component_type, self.repo_path, component_name) + commits_new = self.repo.iter_commits(max_count=depth, paths=component_path) + commits_new = [ + {"git_sha": commit.hexsha, "trunc_message": commit.message.partition("\n")[0]} for commit in commits_new + ] + commits_old = [] + if component_type == "modules": + # Grab commits also from previous modules structure + component_path = os.path.join("modules", component_name) + commits_old = self.repo.iter_commits(max_count=depth, paths=component_path) + commits_old = [ + {"git_sha": commit.hexsha, "trunc_message": commit.message.partition("\n")[0]} for commit in commits_old + ] + commits = iter(commits_new + commits_old) return commits - def get_latest_module_version(self, module_name): + def get_latest_component_version(self, component_name, component_type): """ Returns the latest commit in the repository """ - return list(self.get_module_git_log(module_name, depth=1))[0]["git_sha"] + return list(self.get_component_git_log(component_name, component_type, depth=1))[0]["git_sha"] def sha_exists_on_branch(self, sha): """ @@ -398,27 +447,34 @@ def get_commit_info(self, sha): date_obj = commit.committed_datetime date = str(date_obj.date()) return message, date - raise LookupError(f"Commit '{sha}' not found in the '{self.fullname}'") + raise LookupError(f"Commit '{sha}' not found in the '{self.remote_url}'") - def get_avail_modules(self, checkout=True): + def get_avail_components(self, component_type, checkout=True, commit=None): """ - Gets the names of the modules in the repository. They are detected by + Gets the names of the modules/subworkflows in the repository. 
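The discovery rule stated just below (a component is any directory that contains a `main.nf` file) can be sketched on its own:

```python
# Standalone sketch of the 'main.nf' discovery rule; nested tools come back
# as e.g. 'samtools/sort' because the relative path is kept.
import os

def avail_components(directory):
    return sorted(
        os.path.relpath(dirpath, start=directory)
        for dirpath, _, file_names in os.walk(directory)
        if "main.nf" in file_names
    )
```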
They are detected by checking which directories have a 'main.nf' file Returns: - ([ str ]): The module names + ([ str ]): The module/subworkflow names """ if checkout: self.checkout_branch() - # Module directories are characterized by having a 'main.nf' file - avail_module_names = [ - os.path.relpath(dirpath, start=self.modules_dir) - for dirpath, _, file_names in os.walk(self.modules_dir) + if commit is not None: + self.checkout(commit) + # Get directory + if component_type == "modules": + directory = self.modules_dir + elif component_type == "subworkflows": + directory = self.subworkflows_dir + # Module/Subworkflow directories are characterized by having a 'main.nf' file + avail_component_names = [ + os.path.relpath(dirpath, start=directory) + for dirpath, _, file_names in os.walk(directory) if "main.nf" in file_names ] - return avail_module_names + return avail_component_names - def get_meta_yml(self, module_name): + def get_meta_yml(self, component_type, module_name): """ Returns the contents of the 'meta.yml' file of a module @@ -429,8 +485,13 @@ def get_meta_yml(self, module_name): (str): The contents of the file in text format """ self.checkout_branch() - path = os.path.join(self.modules_dir, module_name, "meta.yml") - if not os.path.exists(path): + if component_type == "modules": + path = Path(self.modules_dir, module_name, "meta.yml") + elif component_type == "subworkflows": + path = Path(self.subworkflows_dir, module_name, "meta.yml") + else: + raise ValueError(f"Invalid component type: {component_type}") + if not path.exists(): return None with open(path) as fh: contents = fh.read() diff --git a/nf_core/modules/modules_test.py b/nf_core/modules/modules_test.py new file mode 100644 index 0000000000..d1f47dcff9 --- /dev/null +++ b/nf_core/modules/modules_test.py @@ -0,0 +1,30 @@ +""" +The ModulesTest class runs the tests locally +""" + +from nf_core.components.components_test import ComponentsTest + + +class ModulesTest(ComponentsTest): + """ + Class to run module pytests. 
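A hypothetical driver for this wrapper; note that the `run()` entry point is assumed to come from the `ComponentsTest` base class and is not shown in this diff:

```python
# Hypothetical usage; run() is assumed from ComponentsTest, not shown here.
from nf_core.modules.modules_test import ModulesTest

tester = ModulesTest(module_name="fastqc", no_prompts=True, pytest_args="-k fastqc")
tester.run()
```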
+ """ + + def __init__( + self, + module_name=None, + no_prompts=False, + pytest_args="", + remote_url=None, + branch=None, + no_pull=False, + ): + super().__init__( + component_type="modules", + component_name=module_name, + no_prompts=no_prompts, + pytest_args=pytest_args, + remote_url=remote_url, + branch=branch, + no_pull=no_pull, + ) diff --git a/nf_core/modules/modules_utils.py b/nf_core/modules/modules_utils.py new file mode 100644 index 0000000000..47826d3804 --- /dev/null +++ b/nf_core/modules/modules_utils.py @@ -0,0 +1,91 @@ +import logging +import os +import urllib +from pathlib import Path + +from .nfcore_module import NFCoreModule + +log = logging.getLogger(__name__) + + +class ModuleException(Exception): + """Exception raised when there was an error with module commands""" + + pass + + +def repo_full_name_from_remote(remote_url): + """ + Extracts the path from the remote URL + See https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-clone.html#URLS for the possible URL patterns + """ + # Check whether we have a https or ssh url + if remote_url.startswith("https"): + path = urllib.parse.urlparse(remote_url) + path = path.path + # Remove the intial '/' + path = path[1:] + # Remove extension + path = os.path.splitext(path)[0] + else: + # Remove the initial `git@`` + path = remote_url.split("@") + path = path[-1] if len(path) > 1 else path[0] + path = urllib.parse.urlparse(path) + path = path.path + # Remove extension + path = os.path.splitext(path)[0] + return path + + +def get_installed_modules(dir, repo_type="modules"): + """ + Make a list of all modules installed in this repository + + Returns a tuple of two lists, one for local modules + and one for nf-core modules. The local modules are represented + as direct filepaths to the module '.nf' file. + Nf-core module are returned as file paths to the module directories. + In case the module contains several tools, one path to each tool directory + is returned. + + returns (local_modules, nfcore_modules) + """ + # initialize lists + local_modules = [] + nfcore_modules = [] + local_modules_dir = None + nfcore_modules_dir = os.path.join(dir, "modules", "nf-core") + + # Get local modules + if repo_type == "pipeline": + local_modules_dir = os.path.join(dir, "modules", "local") + + # Filter local modules + if os.path.exists(local_modules_dir): + local_modules = os.listdir(local_modules_dir) + local_modules = sorted([x for x in local_modules if x.endswith(".nf")]) + + # Get nf-core modules + if os.path.exists(nfcore_modules_dir): + for m in sorted([m for m in os.listdir(nfcore_modules_dir) if not m == "lib"]): + if not os.path.isdir(os.path.join(nfcore_modules_dir, m)): + raise ModuleException( + f"File found in '{nfcore_modules_dir}': '{m}'! This directory should only contain module directories." 
+ ) + m_content = os.listdir(os.path.join(nfcore_modules_dir, m)) + # Not a module, but contains sub-modules + if not "main.nf" in m_content: + for tool in m_content: + nfcore_modules.append(os.path.join(m, tool)) + else: + nfcore_modules.append(m) + + # Make full (relative) file paths and create NFCoreModule objects + local_modules = [os.path.join(local_modules_dir, m) for m in local_modules] + nfcore_modules = [ + NFCoreModule(m, "nf-core/modules", Path(nfcore_modules_dir, m), repo_type=repo_type, base_dir=Path(dir)) + for m in nfcore_modules + ] + + return local_modules, nfcore_modules diff --git a/nf_core/modules/nfcore_module.py b/nf_core/modules/nfcore_module.py index 2654a4ebbb..431ef23381 100644 --- a/nf_core/modules/nfcore_module.py +++ b/nf_core/modules/nfcore_module.py @@ -4,13 +4,13 @@ from pathlib import Path -class NFCoreModule(object): +class NFCoreModule: """ A class to hold the information about a nf-core module Includes functionality for linting """ - def __init__(self, module_name, repo_name, module_dir, repo_type, base_dir, nf_core_module=True): + def __init__(self, module_name, repo_url, module_dir, repo_type, base_dir, remote_module=True): """ Initialize the object @@ -20,11 +20,11 @@ def __init__(self, module_name, repo_name, module_dir, repo_type, base_dir, nf_c whether the directory is a pipeline or clone of nf-core/modules. base_dir (Path): The absolute path to the pipeline base dir - nf_core_module (bool): Whether the module is to be treated as a + remote_module (bool): Whether the module is to be treated as a nf-core or local module """ self.module_name = module_name - self.repo_name = repo_name + self.repo_url = repo_url self.module_dir = module_dir self.repo_type = repo_type self.base_dir = base_dir @@ -36,14 +36,15 @@ def __init__(self, module_name, repo_name, module_dir, repo_type, base_dir, nf_c self.has_meta = False self.git_sha = None self.is_patched = False - self.is_patched = None - if nf_core_module: + if remote_module: # Initialize the important files self.main_nf = self.module_dir / "main.nf" self.meta_yml = self.module_dir / "meta.yml" - self.test_dir = Path(self.base_dir, "tests", "modules", self.module_name) + repo_dir = self.module_dir.parts[: self.module_dir.parts.index(self.module_name.split("/")[0])][-1] + self.org = repo_dir + self.test_dir = Path(self.base_dir, "tests", "modules", repo_dir, self.module_name) self.test_yml = self.test_dir / "test.yml" self.test_main_nf = self.test_dir / "main.nf" diff --git a/nf_core/modules/patch.py b/nf_core/modules/patch.py index b907256bcf..fa51640c06 100644 --- a/nf_core/modules/patch.py +++ b/nf_core/modules/patch.py @@ -7,17 +7,17 @@ import questionary import nf_core.utils +from nf_core.components.components_command import ComponentCommand -from .modules_command import ModuleCommand from .modules_differ import ModulesDiffer from .modules_json import ModulesJson log = logging.getLogger(__name__) -class ModulePatch(ModuleCommand): +class ModulePatch(ComponentCommand): def __init__(self, dir, remote_url=None, branch=None, no_pull=False): - super().__init__(dir, remote_url, branch, no_pull) + super().__init__("modules", dir, remote_url, branch, no_pull) self.modules_json = ModulesJson(dir) @@ -25,42 +25,57 @@ def param_check(self, module): if not self.has_valid_directory(): raise UserWarning() - if module is not None and module not in self.modules_json.get_all_modules().get(self.modules_repo.fullname, {}): - raise UserWarning(f"Module '{Path(self.modules_repo.fullname, module)}' does not exist in the 
pipeline") + modules = self.modules_json.get_all_components(self.component_type)[self.modules_repo.remote_url] + module_names = [module for _, module in modules] + + if module is not None and module not in module_names: + module_dir = [dir for dir, m in modules if m == module][0] + raise UserWarning(f"Module '{Path('modules', module_dir, module)}' does not exist in the pipeline") def patch(self, module=None): + # Check modules directory structure + self.check_modules_structure() + self.modules_json.check_up_to_date() self.param_check(module) + modules = self.modules_json.get_all_components(self.component_type)[self.modules_repo.remote_url] if module is None: + choices = [ + module if directory == self.modules_repo.repo_path else f"{directory}/{module}" + for directory, module in modules + ] module = questionary.autocomplete( "Tool:", - self.modules_json.get_all_modules()[self.modules_repo.fullname], + choices, style=nf_core.utils.nfcore_question_style, ).unsafe_ask() - module_fullname = str(Path(self.modules_repo.fullname, module)) + module_dir = [dir for dir, m in modules if m == module][0] + module_fullname = str(Path("modules", module_dir, module)) - # Verify that the module has an entry is the modules.json file - if not self.modules_json.module_present(module, self.modules_repo.fullname): + # Verify that the module has an entry in the modules.json file + if not self.modules_json.module_present(module, self.modules_repo.remote_url, module_dir): raise UserWarning( f"The '{module_fullname}' module does not have an entry in the 'modules.json' file. Cannot compute patch" ) - module_version = self.modules_json.get_module_version(module, self.modules_repo.fullname) + module_version = self.modules_json.get_module_version(module, self.modules_repo.remote_url, module_dir) if module_version is None: raise UserWarning( f"The '{module_fullname}' module does not have a valid version in the 'modules.json' file. Cannot compute patch" ) # Get the module branch and reset it in the ModulesRepo object - module_branch = self.modules_json.get_module_branch(module, self.modules_repo.fullname) + module_branch = self.modules_json.get_component_branch( + self.component_type, module, self.modules_repo.remote_url, module_dir + ) if module_branch != self.modules_repo.branch: self.modules_repo.setup_branch(module_branch) # Set the diff filename based on the module name patch_filename = f"{module.replace('/', '-')}.diff" - module_relpath = Path("modules", self.modules_repo.fullname, module) + module_relpath = Path("modules", module_dir, module) patch_relpath = Path(module_relpath, patch_filename) - module_dir = Path(self.dir, module_relpath) + module_current_dir = Path(self.dir, module_relpath) patch_path = Path(self.dir, patch_relpath) if patch_path.exists(): @@ -76,9 +91,9 @@ def patch(self, module=None): # Create a temporary directory for storing the unchanged version of the module install_dir = tempfile.mkdtemp() module_install_dir = Path(install_dir, module) - if not self.install_module_files(module, module_version, self.modules_repo, install_dir): + if not self.install_component_files(module, module_version, self.modules_repo, install_dir): raise UserWarning( - f"Failed to install files of module '{module_fullname}' from remote ({self.modules_repo.remote_url})." + f"Failed to install files of module '{module}' from remote ({self.modules_repo.remote_url})." 
) # Write the patch to a temporary location (otherwise it is printed to the screen later) @@ -87,27 +102,29 @@ def patch(self, module=None): ModulesDiffer.write_diff_file( patch_temp_path, module, - self.modules_repo.fullname, + self.modules_repo.repo_path, module_install_dir, - module_dir, + module_current_dir, for_git=False, dsp_from_dir=module_relpath, dsp_to_dir=module_relpath, ) + log.debug(f"Patch file wrote to a temporary directory {patch_temp_path}") except UserWarning: raise UserWarning(f"Module '{module_fullname}' is unchanged. No patch to compute") # Write changes to modules.json - self.modules_json.add_patch_entry(module, self.modules_repo.fullname, patch_relpath) + self.modules_json.add_patch_entry(module, self.modules_repo.remote_url, module_dir, patch_relpath) + log.debug(f"Wrote patch path for module {module} to modules.json") # Show the changes made to the module ModulesDiffer.print_diff( module, - self.modules_repo.fullname, + self.modules_repo.repo_path, module_install_dir, - module_dir, - dsp_from_dir=module_dir, - dsp_to_dir=module_dir, + module_current_dir, + dsp_from_dir=module_current_dir, + dsp_to_dir=module_current_dir, ) # Finally move the created patch file to its final location diff --git a/nf_core/modules/remove.py b/nf_core/modules/remove.py index 3a248bd647..33d9f62d62 100644 --- a/nf_core/modules/remove.py +++ b/nf_core/modules/remove.py @@ -1,58 +1,10 @@ import logging -from pathlib import Path -import questionary - -import nf_core.utils - -from .modules_command import ModuleCommand -from .modules_json import ModulesJson +from nf_core.components.remove import ComponentRemove log = logging.getLogger(__name__) -class ModuleRemove(ModuleCommand): - def remove(self, module): - """ - Remove an already installed module - This command only works for modules that are installed from 'nf-core/modules' - """ - if self.repo_type == "modules": - log.error("You cannot remove a module in a clone of nf-core/modules") - return False - - # Check whether pipeline is valid and with a modules.json file - self.has_valid_directory() - self.has_modules_file() - - repo_name = self.modules_repo.fullname - if module is None: - module = questionary.autocomplete( - "Tool name:", - choices=self.modules_from_repo(repo_name), - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - - # Get the module directory - module_dir = Path(self.dir, "modules", repo_name, module) - - # Load the modules.json file - modules_json = ModulesJson(self.dir) - modules_json.load() - - # Verify that the module is actually installed - if not module_dir.exists(): - log.error(f"Module directory does not exist: '{module_dir}'") - - if modules_json.module_present(module, repo_name): - log.error(f"Found entry for '{module}' in 'modules.json'. 
Removing...") - modules_json.remove_entry(module, repo_name) - return False - - log.info(f"Removing {module}") - - # Remove entry from modules.json - modules_json.remove_entry(module, repo_name) - - # Remove the module - return self.clear_module_dir(module_name=module, module_dir=module_dir) +class ModuleRemove(ComponentRemove): + def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False): + super().__init__("modules", pipeline_dir, remote_url=remote_url, branch=branch, no_pull=no_pull) diff --git a/nf_core/modules/test_yml_builder.py b/nf_core/modules/test_yml_builder.py index a47f7c352a..c9a6273a58 100644 --- a/nf_core/modules/test_yml_builder.py +++ b/nf_core/modules/test_yml_builder.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ The ModulesTestYmlBuilder class handles automatic generation of the modules test.yml file along with running the tests and creating md5 sums @@ -24,22 +23,30 @@ from rich.syntax import Syntax import nf_core.utils +from nf_core.components.components_command import ComponentCommand +from ..lint_utils import run_prettier_on_file from .modules_repo import ModulesRepo log = logging.getLogger(__name__) -class ModulesTestYmlBuilder(object): +class ModulesTestYmlBuilder(ComponentCommand): def __init__( self, module_name=None, + directory=".", run_tests=False, test_yml_output_path=None, force_overwrite=False, no_prompts=False, + remote_url=None, + branch=None, ): + super().__init__("modules", directory, remote_url, branch) self.module_name = module_name + self.remote_url = remote_url + self.branch = branch self.run_tests = run_tests self.test_yml_output_path = test_yml_output_path self.force_overwrite = force_overwrite @@ -54,7 +61,8 @@ def run(self): """Run build steps""" if not self.no_prompts: log.info( - "[yellow]Press enter to use default values [cyan bold](shown in brackets) [yellow]or type your own responses" + "[yellow]Press enter to use default values " + "[cyan bold](shown in brackets) [yellow]or type your own responses" ) self.check_inputs() self.scrape_workflow_entry_points() @@ -66,17 +74,18 @@ def run(self): def check_inputs(self): """Do more complex checks about supplied flags.""" + # Check modules directory structure + self.check_modules_structure() # Get the tool name if not specified if self.module_name is None: - modules_repo = ModulesRepo() self.module_name = questionary.autocomplete( "Tool name:", - choices=modules_repo.get_avail_modules(), + choices=self.components_from_repo(self.org), style=nf_core.utils.nfcore_question_style, ).unsafe_ask() - self.module_dir = os.path.join("modules", *self.module_name.split("/")) - self.module_test_main = os.path.join("tests", "modules", *self.module_name.split("/"), "main.nf") + self.module_dir = os.path.join(self.default_modules_path, *self.module_name.split("/")) + self.module_test_main = os.path.join(self.default_tests_path, *self.module_name.split("/"), "main.nf") # First, sanity check that the module directory exists if not os.path.isdir(self.module_dir): @@ -91,7 +100,7 @@ def check_inputs(self): # Get the output YAML file / check it does not already exist while self.test_yml_output_path is None: - default_val = f"tests/modules/{self.module_name}/test.yml" + default_val = f"tests/modules/{self.org}/{self.module_name}/test.yml" if self.no_prompts: self.test_yml_output_path = default_val else: @@ -166,7 +175,10 @@ def build_single_test(self, entry_point): while ep_test["command"] == "": # Don't think we need the last `-c` flag, but keeping to avoid having to update 100s modules. 
# See https://github.com/nf-core/tools/issues/1562 - default_val = f"nextflow run ./tests/modules/{self.module_name} -entry {entry_point} -c ./tests/config/nextflow.config -c ./tests/modules/{self.module_name}/nextflow.config" + default_val = ( + f"nextflow run ./tests/modules/{self.org}/{self.module_name} -entry {entry_point} " + f"-c ./tests/config/nextflow.config -c ./tests/modules/{self.org}/{self.module_name}/nextflow.config" + ) if self.no_prompts: ep_test["command"] = default_val else: @@ -188,7 +200,7 @@ def build_single_test(self, entry_point): ).strip() ep_test["tags"] = [t.strip() for t in prompt_tags.split(",")] - ep_test["files"] = self.get_md5_sums(entry_point, ep_test["command"]) + ep_test["files"] = self.get_md5_sums(ep_test["command"]) return ep_test @@ -227,9 +239,6 @@ def create_test_file_dict(self, results_dir, is_repeat=False): test_files = [] for root, _, files in os.walk(results_dir, followlinks=True): for filename in files: - # Check that the file is not versions.yml - if filename == "versions.yml": - continue file_path = os.path.join(root, filename) # add the key here so that it comes first in the dict test_file = {"path": file_path} @@ -240,8 +249,10 @@ def create_test_file_dict(self, results_dir, is_repeat=False): # Add the md5 anyway, linting should fail later and can be manually removed if needed. # Originally we skipped this if empty, but then it's too easy to miss the warning. # Equally, if a file is legitimately empty we don't want to prevent this from working. - file_md5 = self._md5(file_path) - test_file["md5sum"] = file_md5 + if filename != "versions.yml": + # Only add md5sum if the file is not versions.yml + file_md5 = self._md5(file_path) + test_file["md5sum"] = file_md5 # Switch out the results directory path with the expected 'output' directory test_file["path"] = file_path.replace(results_dir, "output") test_files.append(test_file) @@ -250,7 +261,7 @@ def create_test_file_dict(self, results_dir, is_repeat=False): return test_files - def get_md5_sums(self, entry_point, command, results_dir=None, results_dir_repeat=None): + def get_md5_sums(self, command, results_dir=None, results_dir_repeat=None): """ Recursively go through directories and subdirectories and generate tuples of (, ) @@ -282,9 +293,9 @@ def get_md5_sums(self, entry_point, command, results_dir=None, results_dir_repea for i in range(len(test_files)): if test_files[i].get("md5sum") and not test_files[i].get("md5sum") == test_files_repeat[i]["md5sum"]: test_files[i].pop("md5sum") - test_files[i][ - "contains" - ] = "[ # TODO nf-core: file md5sum was variable, please replace this text with a string found in the file instead ]" + test_files[i]["contains"] = [ + "# TODO nf-core: file md5sum was variable, please replace this text with a string found in the file instead " + ] if len(test_files) == 0: raise UserWarning(f"Could not find any test result files in '{results_dir}'") @@ -351,16 +362,19 @@ def print_test_yml(self): """ Generate the test yml file. 
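For orientation, the shape of a single entry this builder emits, written as the Python structure that gets dumped to YAML. All values are invented; note that a `versions.yml` output is listed without an `md5sum` key, per `create_test_file_dict` above:

```python
# Invented example of one test.yml entry; see create_test_file_dict above for
# why 'versions.yml' outputs carry no md5sum.
import yaml

test_entry = {
    "name": "fastqc test_fastqc",
    "command": (
        "nextflow run ./tests/modules/nf-core/fastqc -entry test_fastqc "
        "-c ./tests/config/nextflow.config -c ./tests/modules/nf-core/fastqc/nextflow.config"
    ),
    "tags": ["fastqc"],
    "files": [
        {"path": "output/fastqc/test.html", "md5sum": "d41d8cd98f00b204e9800998ecf8427e"},
        {"path": "output/fastqc/versions.yml"},
    ],
}
print(yaml.dump([test_entry], width=10000000))
```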
""" + with tempfile.NamedTemporaryFile(mode="w+") as fh: + yaml.dump(self.tests, fh, Dumper=nf_core.utils.custom_yaml_dumper(), width=10000000) + run_prettier_on_file(fh.name) + fh.seek(0) + prettified_yml = fh.read() if self.test_yml_output_path == "-": console = rich.console.Console() - yaml_str = yaml.dump(self.tests, Dumper=nf_core.utils.custom_yaml_dumper(), width=10000000) - console.print("\n", Syntax(yaml_str, "yaml"), "\n") - return - - try: - log.info(f"Writing to '{self.test_yml_output_path}'") - with open(self.test_yml_output_path, "w") as fh: - yaml.dump(self.tests, fh, Dumper=nf_core.utils.custom_yaml_dumper(), width=10000000) - except FileNotFoundError as e: - raise UserWarning(f"Could not create test.yml file: '{e}'") + console.print("\n", Syntax(prettified_yml, "yaml"), "\n") + else: + try: + log.info(f"Writing to '{self.test_yml_output_path}'") + with open(self.test_yml_output_path, "w") as fh: + fh.write(prettified_yml) + except FileNotFoundError as e: + raise UserWarning(f"Could not create test.yml file: '{e}'") diff --git a/nf_core/modules/update.py b/nf_core/modules/update.py index 4644c2398e..9d53bf2017 100644 --- a/nf_core/modules/update.py +++ b/nf_core/modules/update.py @@ -1,24 +1,7 @@ -import logging -import os -import shutil -import tempfile -from pathlib import Path +from nf_core.components.update import ComponentUpdate -import questionary -import nf_core.modules.module_utils -import nf_core.utils -from nf_core.utils import plural_es, plural_s, plural_y - -from .modules_command import ModuleCommand -from .modules_differ import ModulesDiffer -from .modules_json import ModulesJson -from .modules_repo import ModulesRepo - -log = logging.getLogger(__name__) - - -class ModuleUpdate(ModuleCommand): +class ModuleUpdate(ComponentUpdate): def __init__( self, pipeline_dir, @@ -28,558 +11,22 @@ def __init__( update_all=False, show_diff=None, save_diff_fn=None, + update_deps=False, remote_url=None, branch=None, no_pull=False, ): - super().__init__(pipeline_dir, remote_url, branch, no_pull) - self.force = force - self.prompt = prompt - self.sha = sha - self.update_all = update_all - self.show_diff = show_diff - self.save_diff_fn = save_diff_fn - self.module = None - self.update_config = None - self.modules_json = ModulesJson(self.dir) - self.branch = branch - - def _parameter_checks(self): - """Checks the compatibilty of the supplied parameters. - - Raises: - UserWarning: if any checks fail. - """ - - if self.save_diff_fn and self.show_diff: - raise UserWarning("Either `--preview` or `--save_diff` can be specified, not both.") - - if self.update_all and self.module: - raise UserWarning("Either a module or the '--all' flag can be specified, not both.") - - if self.repo_type == "modules": - raise UserWarning("Modules in clones of nf-core/modules can not be updated.") - - if self.prompt and self.sha is not None: - raise UserWarning("Cannot use '--sha' and '--prompt' at the same time.") - - if not self.has_valid_directory(): - raise UserWarning("The command was not run in a valid pipeline directory.") - - def update(self, module=None): - """Updates a specified module or all modules modules in a pipeline. - - Args: - module (str): The name of the module to update. - - Returns: - bool: True if the update was successful, False otherwise. 
- """ - self.module = module - - tool_config = nf_core.utils.load_tools_config(self.dir) - self.update_config = tool_config.get("update", {}) - - self._parameter_checks() - - # Verify that 'modules.json' is consistent with the installed modules - self.modules_json.check_up_to_date() - - if not self.update_all and module is None: - choices = ["All modules", "Named module"] - self.update_all = ( - questionary.select( - "Update all modules or a single named module?", - choices=choices, - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - == "All modules" - ) - - # Verify that the provided SHA exists in the repo - if self.sha is not None and not self.modules_repo.sha_exists_on_branch(self.sha): - log.error(f"Commit SHA '{self.sha}' doesn't exist in '{self.modules_repo.fullname}'") - return False - - # Get the list of modules to update, and their version information - modules_info = self.get_all_modules_info() if self.update_all else [self.get_single_module_info(module)] - - # Save the current state of the modules.json - old_modules_json = self.modules_json.get_modules_json() - - # Ask if we should show the diffs (unless a filename was already given on the command line) - if not self.save_diff_fn and self.show_diff is None: - diff_type = questionary.select( - "Do you want to view diffs of the proposed changes?", - choices=[ - {"name": "No previews, just update everything", "value": 0}, - {"name": "Preview diff in terminal, choose whether to update files", "value": 1}, - {"name": "Just write diffs to a patch file", "value": 2}, - ], - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - - self.show_diff = diff_type == 1 - self.save_diff_fn = diff_type == 2 - - if self.save_diff_fn: # True or a string - self.setup_diff_file() - - # Loop through all modules to be updated - # and do the requested action on them - exit_value = True - all_patches_successful = True - for modules_repo, module, sha, patch_relpath in modules_info: - module_fullname = str(Path(modules_repo.fullname, module)) - # Are we updating the files in place or not? - dry_run = self.show_diff or self.save_diff_fn - - current_version = self.modules_json.get_module_version(module, modules_repo.fullname) - - # Set the temporary installation folder - install_dir = Path(tempfile.mkdtemp()) - module_install_dir = install_dir / module - - # Compute the module directory - module_dir = os.path.join(self.dir, "modules", modules_repo.fullname, module) - - if sha is not None: - version = sha - elif self.prompt: - version = nf_core.modules.module_utils.prompt_module_version_sha( - module, modules_repo=modules_repo, installed_sha=current_version - ) - else: - version = modules_repo.get_latest_module_version(module) - - if current_version is not None and not self.force: - if current_version == version: - if self.sha or self.prompt: - log.info(f"'{module_fullname}' is already installed at {version}") - else: - log.info(f"'{module_fullname}' is already up to date") - continue - - # Download module files - if not self.install_module_files(module, version, modules_repo, install_dir): - exit_value = False - continue - - if patch_relpath is not None: - patch_successful = self.try_apply_patch( - module, modules_repo.fullname, patch_relpath, module_dir, module_install_dir - ) - if patch_successful: - log.info(f"Module '{module_fullname}' patched successfully") - else: - log.warning(f"Failed to patch module '{module_fullname}'. 
Will proceed with unpatched files.") - all_patches_successful &= patch_successful - - if dry_run: - if patch_relpath is not None: - if patch_successful: - log.info(f"Current installation is compared against patched version in remote") - else: - log.warning(f"Current installation is compared against unpatched version in remote") - # Compute the diffs for the module - if self.save_diff_fn: - log.info(f"Writing diff file for module '{module_fullname}' to '{self.save_diff_fn}'") - ModulesDiffer.write_diff_file( - self.save_diff_fn, - module, - modules_repo.fullname, - module_dir, - module_install_dir, - current_version, - version, - dsp_from_dir=module_dir, - dsp_to_dir=module_dir, - ) - - elif self.show_diff: - ModulesDiffer.print_diff( - module, - modules_repo.fullname, - module_dir, - module_install_dir, - current_version, - version, - dsp_from_dir=module_dir, - dsp_to_dir=module_dir, - ) - - # Ask the user if they want to install the module - dry_run = not questionary.confirm( - f"Update module '{module}'?", default=False, style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - - if not dry_run: - # Clear the module directory and move the installed files there - self.move_files_from_tmp_dir(module, module_dir, install_dir, modules_repo.fullname, version) - # Update modules.json with newly installed module - self.modules_json.update(modules_repo, module, version) - else: - # Don't save to a file, just iteratively update the variable - self.modules_json.update(modules_repo, module, version, write_file=False) - - if self.save_diff_fn: - # Write the modules.json diff to the file - ModulesDiffer.append_modules_json_diff( - self.save_diff_fn, - old_modules_json, - self.modules_json.get_modules_json(), - Path(self.dir, "modules.json"), - ) - if exit_value: - log.info( - f"[bold magenta italic] TIP! [/] If you are happy with the changes in '{self.save_diff_fn}', you " - "can apply them by running the command :point_right:" - f" [bold magenta italic]git apply {self.save_diff_fn} [/]" - ) - elif not all_patches_successful: - log.info(f"Updates complete. Please apply failed patch{plural_es(modules_info)} manually") - else: - log.info("Updates complete :sparkles:") - - return exit_value - - def get_single_module_info(self, module): - """Collects the module repository, version and sha for a module. - - Information about the module version in the '.nf-core.yml' overrides - the '--sha' option - - Args: - module_name (str): The name of the module to get info for. - - Returns: - (ModulesRepo, str, str): The modules repo containing the module, - the module name, and the module version. - - Raises: - LookupError: If the module is not found either in the pipeline or the modules repo. - UserWarning: If the '.nf-core.yml' entry is not valid. 
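The '.nf-core.yml' update entries interpreted here can be pictured as the parsed dict below. Values are hypothetical; in this pre-refactor code the top-level keys are repository names such as 'nf-core/modules':

```python
# Hypothetical parsed 'update' section of '.nf-core.yml'. Semantics per the
# checks in this function: False pins a module (never update), a string pins
# updates to that commit SHA (placeholder below), True or absence means update
# freely.
update_config = {
    "nf-core/modules": {
        "fastqc": False,
        "multiqc": "0123456789abcdef0123456789abcdef01234567",
        "samtools/sort": True,
    }
}
```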
- """ - # Check if there are any modules installed from the repo - repo_name = self.modules_repo.fullname - if repo_name not in self.modules_json.get_all_modules(): - raise LookupError(f"No modules installed from '{repo_name}'") - - if module is None: - module = questionary.autocomplete( - "Tool name:", - choices=self.modules_json.get_all_modules()[repo_name], - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - - # Check if module is installed before trying to update - if module not in self.modules_json.get_all_modules()[repo_name]: - raise LookupError(f"Module '{module}' is not installed in pipeline and could therefore not be updated") - - # Check that the supplied name is an available module - if module and module not in self.modules_repo.get_avail_modules(): - raise LookupError( - f"Module '{module}' not found in list of available modules." - f"Use the command 'nf-core modules list remote' to view available software" - ) - - sha = self.sha - if module in self.update_config.get(self.modules_repo.fullname, {}): - config_entry = self.update_config[self.modules_repo.fullname].get(module) - if config_entry is not None and config_entry is not True: - if config_entry is False: - raise UserWarning("Module's update entry in '.nf-core.yml' is set to False") - if not isinstance(config_entry, str): - raise UserWarning("Module's update entry in '.nf-core.yml' is of wrong type") - - sha = config_entry - if self.sha is not None: - log.warning( - f"Found entry in '.nf-core.yml' for module '{module}' " - "which will override version specified with '--sha'" - ) - else: - log.info(f"Found entry in '.nf-core.yml' for module '{module}'") - log.info(f"Updating module to ({sha})") - - # Check if the update branch is the same as the installation branch - current_branch = self.modules_json.get_module_branch(module, self.modules_repo.fullname) - new_branch = self.modules_repo.branch - if current_branch != new_branch: - log.warning( - f"You are trying to update the '{Path(self.modules_repo.fullname, module)}' module from " - f"the '{new_branch}' branch. This module was installed from the '{current_branch}'" - ) - switch = questionary.confirm(f"Do you want to update using the '{current_branch}' instead?").unsafe_ask() - if switch: - # Change the branch - self.modules_repo.setup_branch(current_branch) - - # If there is a patch file, get its filename - patch_fn = self.modules_json.get_patch_fn(module, self.modules_repo.fullname) - - return (self.modules_repo, module, sha, patch_fn) - - def get_all_modules_info(self, branch=None): - """Collects the module repository, version and sha for all modules. - - Information about the module version in the '.nf-core.yml' overrides the '--sha' option. - - Returns: - [(ModulesRepo, str, str)]: A list of tuples containing a ModulesRepo object, - the module name, and the module version. - """ - if branch is not None: - use_branch = questionary.confirm( - "'--branch' was specified. 
Should this branch be used to update all modules?", default=False - ) - if not use_branch: - branch = None - skipped_repos = [] - skipped_modules = [] - overridden_repos = [] - overridden_modules = [] - modules_info = {} - # Loop through all the modules in the pipeline - # and check if they have an entry in the '.nf-core.yml' file - for repo_name, modules in self.modules_json.get_all_modules().items(): - if repo_name not in self.update_config or self.update_config[repo_name] is True: - modules_info[repo_name] = [ - (module, self.sha, self.modules_json.get_module_branch(module, repo_name)) for module in modules - ] - elif isinstance(self.update_config[repo_name], dict): - # If it is a dict, then there are entries for individual modules - repo_config = self.update_config[repo_name] - modules_info[repo_name] = [] - for module in modules: - if module not in repo_config or repo_config[module] is True: - modules_info[repo_name].append( - (module, self.sha, self.modules_json.get_module_branch(module, repo_name)) - ) - elif isinstance(repo_config[module], str): - # If a string is given it is the commit SHA to which we should update to - custom_sha = repo_config[module] - modules_info[repo_name].append( - (module, custom_sha, self.modules_json.get_module_branch(module, repo_name)) - ) - if self.sha is not None: - overridden_modules.append(module) - elif repo_config[module] is False: - # Otherwise the entry must be 'False' and we should ignore the module - skipped_modules.append(f"{repo_name}/{module}") - else: - raise UserWarning(f"Module '{module}' in '{repo_name}' has an invalid entry in '.nf-core.yml'") - elif isinstance(self.update_config[repo_name], str): - # If a string is given it is the commit SHA to which we should update to - custom_sha = self.update_config[repo_name] - modules_info[repo_name] = [ - (module_name, custom_sha, self.modules_json.get_module_branch(module_name, repo_name)) - for module_name in modules - ] - if self.sha is not None: - overridden_repos.append(repo_name) - elif self.update_config[repo_name] is False: - skipped_repos.append(repo_name) - else: - raise UserWarning(f"Repo '{repo_name}' has an invalid entry in '.nf-core.yml'") - - if skipped_repos: - skipped_str = "', '".join(skipped_repos) - log.info(f"Skipping modules in repositor{plural_y(skipped_repos)}: '{skipped_str}'") - - if skipped_modules: - skipped_str = "', '".join(skipped_modules) - log.info(f"Skipping module{plural_s(skipped_modules)}: '{skipped_str}'") - - if overridden_repos: - overridden_str = "', '".join(overridden_repos) - log.info( - f"Overriding '--sha' flag for modules in repositor{plural_y(overridden_repos)} " - f"with '.nf-core.yml' entry: '{overridden_str}'" - ) - - if overridden_modules: - overridden_str = "', '".join(overridden_modules) - log.info( - f"Overriding '--sha' flag for module{plural_s(overridden_modules)} with " - f"'.nf-core.yml' entry: '{overridden_str}'" - ) - # Loop through modules_info and create on ModulesRepo object per remote and branch - repos_and_branches = {} - for repo_name, mods in modules_info.items(): - for mod, sha, mod_branch in mods: - if branch is not None: - mod_branch = branch - if (repo_name, mod_branch) not in repos_and_branches: - repos_and_branches[(repo_name, mod_branch)] = [] - repos_and_branches[(repo_name, mod_branch)].append((mod, sha)) - - # Get the git urls from the modules.json - modules_info = ( - ( - repo_name, - self.modules_json.get_git_url(repo_name), - branch, - mods_shas, - ) - for (repo_name, branch), mods_shas in repos_and_branches.items() 
- ) - - # Create ModulesRepo objects - repo_objs_mods = [] - for repo_name, repo_url, branch, mods_shas in modules_info: - try: - modules_repo = ModulesRepo(remote_url=repo_url, branch=branch) - except LookupError as e: - log.warning(e) - log.info(f"Skipping modules in '{repo_name}'") - else: - repo_objs_mods.append((modules_repo, mods_shas)) - - # Flatten the list - modules_info = [(repo, mod, sha) for repo, mods_shas in repo_objs_mods for mod, sha in mods_shas] - - # Verify that that all modules and shas exist in their respective ModulesRepo, - # don't try to update those that don't - i = 0 - while i < len(modules_info): - repo, module, sha = modules_info[i] - if not repo.module_exists(module): - log.warning(f"Module '{module}' does not exist in '{repo.fullname}'. Skipping...") - modules_info.pop(i) - elif sha is not None and not repo.sha_exists_on_branch(sha): - log.warning( - f"Git sha '{sha}' does not exists on the '{repo.branch}' of '{repo.fullname}'. Skipping module '{module}'" - ) - modules_info.pop(i) - else: - i += 1 - - # Add patch filenames to the modules that have them - modules_info = [ - (repo, mod, sha, self.modules_json.get_patch_fn(mod, repo.fullname)) for repo, mod, sha in modules_info - ] - - return modules_info - - def setup_diff_file(self): - """Sets up the diff file. - - If the save diff option was chosen interactively, the user is asked to supply a name for the diff file. - - Then creates the file for saving the diff. - """ - if self.save_diff_fn is True: - # From questionary - no filename yet - self.save_diff_fn = questionary.path( - "Enter the filename: ", style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - - self.save_diff_fn = Path(self.save_diff_fn) - - # Check if filename already exists (questionary or cli) - while self.save_diff_fn.exists(): - if questionary.confirm(f"'{self.save_diff_fn}' exists. Remove file?").unsafe_ask(): - os.remove(self.save_diff_fn) - break - self.save_diff_fn = questionary.path( - "Enter a new filename: ", - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - self.save_diff_fn = Path(self.save_diff_fn) - - # This guarantees that the file exists after calling the function - self.save_diff_fn.touch() - - def move_files_from_tmp_dir(self, module, module_dir, install_folder, repo_name, new_version): - """Move the files from the temporary to the installation directory. - - Args: - module (str): The module name. - module_dir (str): The path to the module directory. - install_folder [str]: The path to the temporary installation directory. - modules_repo (ModulesRepo): The ModulesRepo object from which the module was installed. - new_version (str): The version of the module that was installed. 
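Since the surrounding code writes and re-applies per-module patch files, a pure-stdlib sketch of such a diff may help; nf-core/tools itself uses its own `ModulesDiffer` class, and the file contents below are invented:

```python
# Pure-stdlib illustration of a module patch; invented file contents.
import difflib

old = ["process FASTQC {\n", "    cpus 2\n", "}\n"]
new = ["process FASTQC {\n", "    cpus 4\n", "}\n"]

patch = "".join(
    difflib.unified_diff(
        old,
        new,
        fromfile="modules/nf-core/fastqc/main.nf",
        tofile="modules/nf-core/fastqc/main.nf",
    )
)
print(patch)
```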
- """ - temp_module_dir = os.path.join(install_folder, module) - files = os.listdir(temp_module_dir) - - log.debug(f"Removing old version of module '{module}'") - self.clear_module_dir(module, module_dir) - - os.makedirs(module_dir) - for file in files: - path = os.path.join(temp_module_dir, file) - if os.path.exists(path): - shutil.move(path, os.path.join(module_dir, file)) - - log.info(f"Updating '{repo_name}/{module}'") - log.debug(f"Updating module '{module}' to {new_version} from {repo_name}") - - def try_apply_patch(self, module, repo_name, patch_relpath, module_dir, module_install_dir): - """ - Try applying a patch file to the new module files - - - Args: - module (str): The name of the module - repo_name (str): The name of the repository where the module resides - patch_relpath (Path | str): The path to patch file in the pipeline - module_dir (Path | str): The module directory in the pipeline - module_install_dir (Path | str): The directory where the new module - file have been installed - - Returns: - (bool): Whether the patch application was successful - """ - module_fullname = str(Path(repo_name, module)) - log.info(f"Found patch for module '{module_fullname}'. Trying to apply it to new files") - - patch_path = Path(self.dir / patch_relpath) - module_relpath = Path("modules", repo_name, module) - - # Copy the installed files to a new temporary directory to save them for later use - temp_dir = Path(tempfile.mkdtemp()) - temp_module_dir = temp_dir / module - shutil.copytree(module_install_dir, temp_module_dir) - - try: - new_files = ModulesDiffer.try_apply_patch(module, repo_name, patch_path, temp_module_dir) - except LookupError: - # Patch failed. Save the patch file by moving to the install dir - shutil.move(patch_path, Path(module_install_dir, patch_path.relative_to(module_dir))) - log.warning( - f"Failed to apply patch for module '{module_fullname}'. You will have to apply the patch manually" - ) - return False - - # Write the patched files to a temporary directory - log.debug("Writing patched files") - for file, new_content in new_files.items(): - fn = temp_module_dir / file - with open(fn, "w") as fh: - fh.writelines(new_content) - - # Create the new patch file - log.debug("Regenerating patch file") - ModulesDiffer.write_diff_file( - Path(temp_module_dir, patch_path.relative_to(module_dir)), - module, - repo_name, - module_install_dir, - temp_module_dir, - file_action="w", - for_git=False, - dsp_from_dir=module_relpath, - dsp_to_dir=module_relpath, + super().__init__( + pipeline_dir, + "modules", + force, + prompt, + sha, + update_all, + show_diff, + save_diff_fn, + update_deps, + remote_url, + branch, + no_pull, ) - - # Move the patched files to the install dir - log.debug("Overwriting installed files installed files with patched files") - shutil.rmtree(module_install_dir) - shutil.copytree(temp_module_dir, module_install_dir) - - # Add the patch file to the modules.json file - self.modules_json.add_patch_entry(module, repo_name, patch_relpath, write_file=True) - - return True diff --git a/nf_core/pipeline-template/.devcontainer/devcontainer.json b/nf_core/pipeline-template/.devcontainer/devcontainer.json new file mode 100644 index 0000000000..ea27a5843a --- /dev/null +++ b/nf_core/pipeline-template/.devcontainer/devcontainer.json @@ -0,0 +1,27 @@ +{ + "name": "nfcore", + "image": "nfcore/gitpod:latest", + "remoteUser": "gitpod", + + // Configure tool-specific properties. + "customizations": { + // Configure properties specific to VS Code. 
+ "vscode": { + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/opt/conda/bin/python", + "python.linting.enabled": true, + "python.linting.pylintEnabled": true, + "python.formatting.autopep8Path": "/opt/conda/bin/autopep8", + "python.formatting.yapfPath": "/opt/conda/bin/yapf", + "python.linting.flake8Path": "/opt/conda/bin/flake8", + "python.linting.pycodestylePath": "/opt/conda/bin/pycodestyle", + "python.linting.pydocstylePath": "/opt/conda/bin/pydocstyle", + "python.linting.pylintPath": "/opt/conda/bin/pylint" + }, + + // Add the IDs of extensions you want installed when the container is created. + "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"] + } + } +} diff --git a/nf_core/pipeline-template/.gitattributes b/nf_core/pipeline-template/.gitattributes index 050bb12035..7a2dabc293 100644 --- a/nf_core/pipeline-template/.gitattributes +++ b/nf_core/pipeline-template/.gitattributes @@ -1,3 +1,4 @@ *.config linguist-language=nextflow +*.nf.test linguist-language=nextflow modules/nf-core/** linguist-generated subworkflows/nf-core/** linguist-generated diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index b9720ac70b..9afdd2987b 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -109,3 +109,19 @@ If you are using a new feature from core Nextflow, you may bump the minimum requ ### Images and figures For overview images and other documents we follow the nf-core [style guidelines and examples](https://nf-co.re/developers/design_guidelines). + +## GitHub Codespaces + +This repo includes a devcontainer configuration which will create a GitHub Codespaces for Nextflow development! This is an online developer environment that runs in your browser, complete with VSCode and a terminal. + +To get started: + +- Open the repo in [Codespaces](https://github.com/{{ name }}/codespaces) +- Tools installed + - nf-core + - Nextflow + +Devcontainer specs: + +- [DevContainer config](.devcontainer/devcontainer.json) +- [Dockerfile](.devcontainer/Dockerfile) diff --git a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml index 209c6e2d9f..27866452f0 100644 --- a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml @@ -42,7 +42,7 @@ body: attributes: label: System information description: | - * Nextflow version _(eg. 21.10.3)_ + * Nextflow version _(eg. 22.10.1)_ * Hardware _(eg. HPC, Desktop, Cloud)_ * Executor _(eg. slurm, local, awsbatch)_ * Container engine: _(e.g. 
Docker, Singularity, Conda, Podman, Shifter or Charliecloud)_ diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml index c37914276e..4b96fa80ee 100644 --- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml +++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml @@ -28,3 +28,7 @@ jobs: "outdir": "s3://{% raw %}${{ secrets.AWS_S3_BUCKET }}{% endraw %}/{{ short_name }}/{% raw %}results-${{ github.sha }}{% endraw %}" } profiles: test_full,aws_tower + - uses: actions/upload-artifact@v3 + with: + name: Tower debug log file + path: tower_action_*.log diff --git a/nf_core/pipeline-template/.github/workflows/awstest.yml b/nf_core/pipeline-template/.github/workflows/awstest.yml index 209cabded1..0f261fcb42 100644 --- a/nf_core/pipeline-template/.github/workflows/awstest.yml +++ b/nf_core/pipeline-template/.github/workflows/awstest.yml @@ -23,3 +23,7 @@ jobs: "outdir": "s3://{% raw %}${{ secrets.AWS_S3_BUCKET }}{% endraw %}/{{ short_name }}/{% raw %}results-test-${{ github.sha }}{% endraw %}" } profiles: test,aws_tower + - uses: actions/upload-artifact@v3 + with: + name: Tower debug log file + path: tower_action_*.log diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 64cc12a26d..bf3dc36bc5 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -11,6 +11,10 @@ on: env: NXF_ANSI_LOG: false +concurrency: + group: "{% raw %}${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}{% endraw %}" + cancel-in-progress: true + jobs: test: name: Run pipeline with test data @@ -20,11 +24,11 @@ jobs: strategy: matrix: NXF_VER: - - "21.10.3" + - "22.10.1" - "latest-everything" steps: - name: Check out pipeline code - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 diff --git a/nf_core/pipeline-template/.github/workflows/fix-linting.yml b/nf_core/pipeline-template/.github/workflows/fix-linting.yml index 5ad82fe8f8..f3dc3e50fe 100644 --- a/nf_core/pipeline-template/.github/workflows/fix-linting.yml +++ b/nf_core/pipeline-template/.github/workflows/fix-linting.yml @@ -24,7 +24,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install Prettier run: npm install -g prettier @prettier/plugin-php @@ -34,9 +34,9 @@ jobs: id: prettier_status run: | if prettier --check ${GITHUB_WORKSPACE}; then - echo "::set-output name=result::pass" + echo "result=pass" >> $GITHUB_OUTPUT else - echo "::set-output name=result::fail" + echo "result=fail" >> $GITHUB_OUTPUT fi - name: Run 'prettier --write' diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index 9fb569ab0d..46249e9a79 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -4,6 +4,8 @@ name: nf-core linting # that the code meets the nf-core guidelines. 
{%- raw %} on: push: + branches: + - dev pull_request: release: types: [published] @@ -12,9 +14,9 @@ jobs: EditorConfig: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install editorconfig-checker run: npm install -g editorconfig-checker @@ -25,9 +27,9 @@ jobs: Prettier: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install Prettier run: npm install -g prettier @@ -38,7 +40,7 @@ jobs: PythonBlack: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Check code lints with Black uses: psf/black@stable @@ -69,12 +71,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "3.7" architecture: "x64" @@ -97,7 +99,7 @@ jobs: - name: Upload linting log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: linting-logs path: | diff --git a/nf_core/pipeline-template/.github/workflows/linting_comment.yml b/nf_core/pipeline-template/.github/workflows/linting_comment.yml index 5b91eedce0..09f8c423e5 100644 --- a/nf_core/pipeline-template/.github/workflows/linting_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/linting_comment.yml @@ -18,7 +18,7 @@ jobs: - name: Get PR number id: pr_number - run: echo "::set-output name=pr_number::$(cat linting-logs/PR_number.txt)" + run: echo "pr_number=$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT - name: Post PR comment uses: marocchino/sticky-pull-request-comment@v2 diff --git a/nf_core/pipeline-template/.pre-commit-config.yaml b/nf_core/pipeline-template/.pre-commit-config.yaml new file mode 100644 index 0000000000..0c31cdb99f --- /dev/null +++ b/nf_core/pipeline-template/.pre-commit-config.yaml @@ -0,0 +1,5 @@ +repos: + - repo: https://github.com/pre-commit/mirrors-prettier + rev: "v2.7.1" + hooks: + - id: prettier diff --git a/nf_core/pipeline-template/.prettierignore b/nf_core/pipeline-template/.prettierignore index d0e7ae5891..437d763d0c 100644 --- a/nf_core/pipeline-template/.prettierignore +++ b/nf_core/pipeline-template/.prettierignore @@ -1,4 +1,6 @@ email_template.html +adaptivecard.json +slackreport.json .nextflow* work/ data/ @@ -7,3 +9,4 @@ results/ testing/ testing* *.pyc +bin/ diff --git a/nf_core/pipeline-template/CITATION.cff b/nf_core/pipeline-template/CITATION.cff deleted file mode 100644 index 4533e2f28c..0000000000 --- a/nf_core/pipeline-template/CITATION.cff +++ /dev/null @@ -1,56 +0,0 @@ -cff-version: 1.2.0 -message: "If you use `nf-core tools` in your work, please cite the `nf-core` publication" -authors: - - family-names: Ewels - given-names: Philip - - family-names: Peltzer - given-names: Alexander - - family-names: Fillinger - given-names: Sven - - family-names: Patel - given-names: Harshil - - family-names: Alneberg - given-names: Johannes - - family-names: Wilm - given-names: Andreas - - family-names: Ulysse Garcia - given-names: Maxime - - family-names: Di Tommaso - given-names: Paolo - - family-names: Nahnsen - given-names: Sven -title: "The nf-core framework for community-curated bioinformatics pipelines." 
-version: 2.4.1 -doi: 10.1038/s41587-020-0439-x -date-released: 2022-05-16 -url: https://github.com/nf-core/tools -prefered-citation: - type: article - authors: - - family-names: Ewels - given-names: Philip - - family-names: Peltzer - given-names: Alexander - - family-names: Fillinger - given-names: Sven - - family-names: Patel - given-names: Harshil - - family-names: Alneberg - given-names: Johannes - - family-names: Wilm - given-names: Andreas - - family-names: Ulysse Garcia - given-names: Maxime - - family-names: Di Tommaso - given-names: Paolo - - family-names: Nahnsen - given-names: Sven - doi: 10.1038/s41587-020-0439-x - journal: nature biotechnology - start: 276 - end: 278 - title: "The nf-core framework for community-curated bioinformatics pipelines." - issue: 3 - volume: 38 - year: 2020 - url: https://dx.doi.org/10.1038/s41587-020-0439-x diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 02a32f1f6e..0845f6aca0 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -10,7 +10,7 @@ {%- if github_badges -%} [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A522.10.1-23aa62.svg)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) @@ -46,7 +46,7 @@ The results obtained from the full-sized test can be viewed on the [nf-core webs ## Quick Start -1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.10.3`) +1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=22.10.1`) 2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/) (you can follow [this tutorial](https://singularity-tutorial.github.io/01-installation/)), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(you can use [`Conda`](https://conda.io/miniconda.html) both to install Nextflow itself and also to manage software within pipelines. Please only use it within pipelines as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_. 
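The template README above now requires Nextflow `>=22.10.1`. For anyone scripting around pipeline launches, a pre-flight version gate can catch an outdated install early; the following is a hypothetical standalone sketch, not template code — it assumes `nextflow` is available on `PATH` and that the `nextflow -version` banner contains a `version X.Y.Z` string:

```python
import re
import subprocess

# Hypothetical helper, not part of the template: check the locally installed
# Nextflow against the new 22.10.1 minimum before launching a pipeline.
MIN_NEXTFLOW = (22, 10, 1)


def installed_nextflow_version() -> tuple:
    """Parse the version triple out of the `nextflow -version` banner."""
    banner = subprocess.run(["nextflow", "-version"], capture_output=True, text=True).stdout
    match = re.search(r"version\s+(\d+)\.(\d+)\.(\d+)", banner)
    if match is None:
        raise RuntimeError("could not parse `nextflow -version` output")
    return tuple(int(part) for part in match.groups())


if installed_nextflow_version() < MIN_NEXTFLOW:
    raise SystemExit("Nextflow >= 22.10.1 is required by this template")
```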
diff --git a/nf_core/pipeline-template/assets/adaptivecard.json b/nf_core/pipeline-template/assets/adaptivecard.json
new file mode 100644
index 0000000000..b818868d4b
--- /dev/null
+++ b/nf_core/pipeline-template/assets/adaptivecard.json
@@ -0,0 +1,67 @@
+{
+    "type": "message",
+    "attachments": [
+        {
+            "contentType": "application/vnd.microsoft.card.adaptive",
+            "contentUrl": null,
+            "content": {
+                "\$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
+                "msteams": {
+                    "width": "Full"
+                },
+                "type": "AdaptiveCard",
+                "version": "1.2",
+                "body": [
+                    {
+                        "type": "TextBlock",
+                        "size": "Large",
+                        "weight": "Bolder",
+                        "color": "<% if (success) { %>Good<% } else { %>Attention<%} %>",
+                        "text": "{{ name }} v${version} - ${runName}",
+                        "wrap": true
+                    },
+                    {
+                        "type": "TextBlock",
+                        "spacing": "None",
+                        "text": "Completed at ${dateComplete} (duration: ${duration})",
+                        "isSubtle": true,
+                        "wrap": true
+                    },
+                    {
+                        "type": "TextBlock",
+                        "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors. The full error message was: ${errorReport}.<% } %>",
+                        "wrap": true
+                    },
+                    {
+                        "type": "TextBlock",
+                        "text": "The command used to launch the workflow was as follows:",
+                        "wrap": true
+                    },
+                    {
+                        "type": "TextBlock",
+                        "text": "${commandLine}",
+                        "isSubtle": true,
+                        "wrap": true
+                    }
+                ],
+                "actions": [
+                    {
+                        "type": "Action.ShowCard",
+                        "title": "Pipeline Configuration",
+                        "card": {
+                            "type": "AdaptiveCard",
+                            "\$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
+                            "body": [
+                                {
+                                    "type": "FactSet",
+                                    "facts": [<% out << summary.collect{ k,v -> "{\"title\": \"$k\", \"value\" : \"$v\"}"}.join(",\n") %>
+                                    ]
+                                }
+                            ]
+                        }
+                    }
+                ]
+            }
+        }
+    ]
+}
diff --git a/nf_core/pipeline-template/assets/methods_description_template.yml b/nf_core/pipeline-template/assets/methods_description_template.yml
new file mode 100644
index 0000000000..b2dc0a99c1
--- /dev/null
+++ b/nf_core/pipeline-template/assets/methods_description_template.yml
@@ -0,0 +1,25 @@
+id: "{{ name_noslash }}-methods-description"
+description: "Suggested text and references to use when describing pipeline usage within the methods section of a publication."
+section_name: "{{ name }} Methods Description"
+section_href: "https://github.com/{{ name }}"
+plot_type: "html"
+## TODO nf-core: Update the HTML below to your preferred methods description, e.g. add publication citation for this pipeline
+## You can inject any metadata in the Nextflow '${workflow}' object
+data: |
+  <h4>Methods</h4>
+  <p>Data was processed using {{ name }} v${workflow.manifest.version} ${doi_text} of the nf-core collection of workflows (<a href="https://doi.org/10.1038/s41587-020-0439-x">Ewels <em>et al.</em>, 2020</a>).</p>
+  <p>The pipeline was executed with Nextflow v${workflow.nextflow.version} (<a href="https://doi.org/10.1038/nbt.3820">Di Tommaso <em>et al.</em>, 2017</a>) with the following command:</p>
+  <pre><code>${workflow.commandLine}</code></pre>
+  <h4>References</h4>
+  <ul>
+    <li>Di Tommaso, P., Chatzou, M., Floden, E. W., Barja, P. P., Palumbo, E., & Notredame, C. (2017). Nextflow enables reproducible computational workflows. Nature Biotechnology, 35(4), 316-319. <a href="https://doi.org/10.1038/nbt.3820">https://doi.org/10.1038/nbt.3820</a></li>
+    <li>Ewels, P. A., Peltzer, A., Fillinger, S., Patel, H., Alneberg, J., Wilm, A., Garcia, M. U., Di Tommaso, P., & Nahnsen, S. (2020). The nf-core framework for community-curated bioinformatics pipelines. Nature Biotechnology, 38(3), 276-278. <a href="https://doi.org/10.1038/s41587-020-0439-x">https://doi.org/10.1038/s41587-020-0439-x</a></li>
+  </ul>
+  <div class="alert alert-info">
+    <h5>Notes:</h5>
+    <ul>
+      ${nodoi_text}
+      <li>The command above does not include parameters contained in any configs or profiles that may have been used. Ensure the config file is also uploaded with your publication!</li>
+      <li>You should also cite all software used within this run. Check the "Software Versions" of this report to get version information.</li>
+    </ul>
+  </div>
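The `data:` block above is rendered at pipeline runtime by Groovy's `SimpleTemplateEngine` (see the `methodsDescriptionText()` helper added to `lib/WorkflowPipeline.groovy` later in this diff), which substitutes `${...}` placeholders such as `${workflow.manifest.version}` and `${doi_text}`. For intuition only, here is a rough Python analogue of that substitution step using `string.Template`; the flat placeholder names and values are illustrative stand-ins, not the nested `${workflow...}` accessors the template actually uses:

```python
from string import Template

# Illustrative stand-in for the Groovy SimpleTemplateEngine rendering step.
methods_text = Template(
    "Data was processed using $name v$version $doi_text of the nf-core "
    "collection of workflows (Ewels et al., 2020)."
)

print(
    methods_text.safe_substitute(
        name="nf-core/example",          # hypothetical pipeline name
        version="1.0.0",                 # hypothetical manifest version
        doi_text="(doi: 10.5281/zenodo.0000000)",  # placeholder DOI
    )
)
```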
diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml
index a9cc6cdb35..440b0b9a3a 100644
--- a/nf_core/pipeline-template/assets/multiqc_config.yml
+++ b/nf_core/pipeline-template/assets/multiqc_config.yml
@@ -3,9 +3,11 @@ report_comment: >
   analysis pipeline.{% if branded %} For information about how to interpret these results, please see the documentation.{% endif %}
 report_section_order:
-  software_versions:
+  "{{ name_noslash }}-methods-description":
     order: -1000
-  "{{ name.lower().replace('/', '-') }}-summary":
+  software_versions:
     order: -1001
+  "{{ name_noslash }}-summary":
+    order: -1002
 
 export_plots: true
diff --git a/nf_core/pipeline-template/assets/slackreport.json b/nf_core/pipeline-template/assets/slackreport.json
new file mode 100644
index 0000000000..043d02f275
--- /dev/null
+++ b/nf_core/pipeline-template/assets/slackreport.json
@@ -0,0 +1,34 @@
+{
+    "attachments": [
+        {
+            "fallback": "Plain-text summary of the attachment.",
+            "color": "<% if (success) { %>good<% } else { %>danger<%} %>",
+            "author_name": "{{ name }} v${version} - ${runName}",
+            "author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico",
+            "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>",
+            "fields": [
+                {
+                    "title": "Command used to launch the workflow",
+                    "value": "```${commandLine}```",
+                    "short": false
+                }
+                <%
+                if (!success) { %>
+                ,
+                {
+                    "title": "Full error message",
+                    "value": "```${errorReport}```",
+                    "short": false
+                },
+                {
+                    "title": "Pipeline configuration",
+                    "value": "<% out << summary.collect{ k,v -> k == "hook_url" ? "_${k}_: (_hidden_)" : ( ( v.class.toString().contains('Path') || ( v.class.toString().contains('String') && v.contains('/') ) ) ? "_${k}_: `${v}`" : (v.class.toString().contains('DateTime') ? ("_${k}_: " + v.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM))) : "_${k}_: ${v}") ) }.join(",\n") %>",
+                    "short": false
+                }
+                <% }
+                %>
+            ],
+            "footer": "Completed at <% out << dateComplete.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM)) %> (duration: ${duration})"
+        }
+    ]
+}
diff --git a/nf_core/pipeline-template/bin/check_samplesheet.py b/nf_core/pipeline-template/bin/check_samplesheet.py
index 9a8b896239..11b155723a 100755
--- a/nf_core/pipeline-template/bin/check_samplesheet.py
+++ b/nf_core/pipeline-template/bin/check_samplesheet.py
@@ -98,7 +98,9 @@ def _validate_pair(self, row):
         """Assert that read pairs have the same file extension.
Report pair status.""" if row[self._first_col] and row[self._second_col]: row[self._single_col] = False - if Path(row[self._first_col]).suffixes[-2:] != Path(row[self._second_col]).suffixes[-2:]: + first_col_suffix = Path(row[self._first_col]).suffixes[-2:] + second_col_suffix = Path(row[self._second_col]).suffixes[-2:] + if first_col_suffix != second_col_suffix: raise AssertionError("FASTQ pairs must have the same file extensions.") else: row[self._single_col] = True @@ -157,7 +159,7 @@ def sniff_format(handle): handle.seek(0) sniffer = csv.Sniffer() if not sniffer.has_header(peek): - logger.critical(f"The given sample sheet does not appear to contain a header.") + logger.critical("The given sample sheet does not appear to contain a header.") sys.exit(1) dialect = sniffer.sniff(peek) return dialect @@ -195,7 +197,8 @@ def check_samplesheet(file_in, file_out): reader = csv.DictReader(in_handle, dialect=sniff_format(in_handle)) # Validate the existence of the expected header columns. if not required_columns.issubset(reader.fieldnames): - logger.critical(f"The sample sheet **must** contain the column headers: {', '.join(required_columns)}.") + req_cols = ", ".join(required_columns) + logger.critical(f"The sample sheet **must** contain these column headers: {req_cols}.") sys.exit(1) # Validate each row. checker = RowChecker() diff --git a/nf_core/pipeline-template/conf/base.config b/nf_core/pipeline-template/conf/base.config index c5c691057d..f73c5afaa4 100644 --- a/nf_core/pipeline-template/conf/base.config +++ b/nf_core/pipeline-template/conf/base.config @@ -15,7 +15,7 @@ process { memory = { check_max( 6.GB * task.attempt, 'memory' ) } time = { check_max( 4.h * task.attempt, 'time' ) } - errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 'retry' : 'finish' } + errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } maxRetries = 1 maxErrors = '-1' diff --git a/nf_core/pipeline-template/conf/test_full.config b/nf_core/pipeline-template/conf/test_full.config index d92692fa94..46b165a910 100644 --- a/nf_core/pipeline-template/conf/test_full.config +++ b/nf_core/pipeline-template/conf/test_full.config @@ -10,6 +10,8 @@ ---------------------------------------------------------------------------------------- */ +cleanup = true + params { config_profile_name = 'Full test profile' config_profile_description = 'Full test dataset to check pipeline function' diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index aac1b9da5e..3a87c2f9de 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -75,6 +75,29 @@ work # Directory containing the nextflow working files # Other nextflow hidden files, eg. history of pipeline runs and old logs. ``` +If you wish to repeatedly use the same parameters for multiple runs, rather than specifying each flag in the command, you can specify these in a params file. + +Pipeline settings can be provided in a `yaml` or `json` file via `-params-file `. + +> ⚠️ Do not use `-c ` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args). 
+> The above pipeline run specified with a params file in yaml format:
+
+```bash
+nextflow run {{ name }} -profile docker -params-file params.yaml
+```
+
+with `params.yaml` containing:
+
+```yaml
+input: './samplesheet.csv'
+outdir: './results/'
+genome: 'GRCh37'
+<...>
+```
+
+You can also generate such `YAML`/`JSON` files via [nf-core/launch](https://nf-co.re/launch).
+
 ### Updating the pipeline
 
 When you run the above command, Nextflow automatically pulls the pipeline code from GitHub and stores it as a cached version. When running the pipeline after this, it will always use the cached version if available - even if the pipeline has been updated since. To make sure that you're running the latest version of the pipeline, make sure that you regularly update the cached version of the pipeline:
@@ -87,9 +110,13 @@ nextflow pull {{ name }}
 
 It is a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since.
 
-First, go to the [{{ name }} releases page](https://github.com/{{ name }}/releases) and find the latest version number - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`.
+First, go to the [{{ name }} releases page](https://github.com/{{ name }}/releases) and find the latest pipeline version - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. Of course, you can switch to another version by changing the number after the `-r` flag.
+
+This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. For example, at the bottom of the MultiQC reports.
+
+To further assist in reproducibility, you can share and re-use [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter.
 
-This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future.
+> 💡 If you wish to share such a params file (e.g. to upload as supplementary material for an academic publication), make sure NOT to include cluster-specific paths to files, nor institution-specific profiles.
 
 ## Core Nextflow arguments
 
@@ -99,7 +126,7 @@ This version number will be logged in reports when you run the pipeline, so that
 
 Use this parameter to choose a configuration profile. Profiles can give configuration presets for different compute environments.
 
-Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Conda) - see below. When using Biocontainers, most of these software packaging methods pull Docker containers from quay.io e.g [FastQC](https://quay.io/repository/biocontainers/fastqc) except for Singularity which directly downloads Singularity images via https hosted by the [Galaxy project](https://depot.galaxyproject.org/singularity/) and Conda which downloads and installs software locally from [Bioconda](https://bioconda.github.io/).
+Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Conda) - see below.
 
 > We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported.
 
@@ -111,8 +138,11 @@ The pipeline also dynamically loads configurations from [https://github.com/nf-c
 
 Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important! They are loaded in sequence, so later profiles can overwrite earlier profiles.
 
-If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended.
+If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended, since it can lead to different results on different machines depending on the compute environment.
 
+- `test`
+  - A profile with a complete configuration for automated testing
+  - Includes links to test data so needs no other parameters
 - `docker`
   - A generic configuration profile to be used with [Docker](https://docker.com/)
 - `singularity`
@@ -125,9 +155,6 @@ If `-profile` is not specified, the pipeline will run locally and expect all sof
   - A generic configuration profile to be used with [Charliecloud](https://hpc.github.io/charliecloud/)
 - `conda`
   - A generic configuration profile to be used with [Conda](https://conda.io/docs/). Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter or Charliecloud.
-- `test`
-  - A profile with a complete configuration for automated testing
-  - Includes links to test data so needs no other parameters
 
 ### `-resume`
 
@@ -176,8 +203,14 @@ Work dir:
 Tip: you can replicate the issue by changing to the process work dir and entering the command `bash .command.run`
 ```
 
+#### For beginners
+
+As a first step to bypass this error, you can try to increase the amount of CPUs, memory, and time for the whole pipeline via the parameters `--max_cpus`, `--max_memory`, and `--max_time`. Based on the error above, you have to increase the amount of memory. Therefore, you can go to the [parameter documentation of rnaseq](https://nf-co.re/rnaseq/3.9/parameters) and scroll down to the `show hidden parameter` button to get the default value for `--max_memory`, in this case 128GB. You can then try to run your pipeline again with `--max_memory 200GB -resume` to skip all processes that were already calculated successfully. If you cannot increase the resources for the complete pipeline, you can try to adapt the resources for a single process as mentioned below.
+
+#### Advanced option on process level
+
 To bypass this error you would need to find exactly which resources are set by the `STAR_ALIGN` process. The quickest way is to search for `process STAR_ALIGN` in the [nf-core/rnaseq Github repo](https://github.com/nf-core/rnaseq/search?q=process+STAR_ALIGN).
-We have standardised the structure of Nextflow DSL2 pipelines such that all module files will be present in the `modules/` directory and so, based on the search results, the file we want is `modules/nf-core/software/star/align/main.nf`.
+We have standardised the structure of Nextflow DSL2 pipelines such that all module files will be present in the `modules/` directory and so, based on the search results, the file we want is `modules/nf-core/star/align/main.nf`.
 If you click on the link to that file you will notice that there is a `label` directive at the top of the module that is set to [`label process_high`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/modules/nf-core/software/star/align/main.nf#L9).
 The [Nextflow `label`](https://www.nextflow.io/docs/latest/process.html#label) directive allows us to organise workflow processes in separate groups which can be referenced in a configuration file to select and configure subset of processes having similar computing requirements.
 The default values for the `process_high` label are set in the pipeline's [`base.config`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L33-L37) which in this case is defined as 72GB.
@@ -196,7 +229,7 @@ process {
 >
 > If you get a warning suggesting that the process selector isn't recognised check that the process name has been specified correctly.
 
-### Updating containers
+### Updating containers (advanced users)
 
 The [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html) implementation of this pipeline uses one container per process which makes it much easier to maintain and update software dependencies. If for some reason you need to use a different version of a particular tool with the pipeline then you just need to identify the `process` name and override the Nextflow `container` definition for that process using the `withName` declaration. For example, in the [nf-core/viralrecon](https://nf-co.re/viralrecon) pipeline a tool called [Pangolin](https://github.com/cov-lineages/pangolin) has been used during the COVID-19 pandemic to assign lineages to SARS-CoV-2 genome sequenced samples. Given that the lineage assignments change quite frequently it doesn't make sense to re-release the nf-core/viralrecon everytime a new version of Pangolin has been released. However, you can override the default container used by the pipeline by creating a custom config file and passing it as a command-line argument via `-c custom.config`.
@@ -246,6 +279,14 @@ See the main [Nextflow documentation](https://www.nextflow.io/docs/latest/config
 
 If you have any questions or issues please send us a message on [Slack](https://nf-co.re/join/slack) on the [`#configs` channel](https://nfcore.slack.com/channels/configs).
 
+## Azure Resource Requests
+
+To be used with the `azurebatch` profile by specifying `-profile azurebatch`.
+We recommend a default compute VM type of `Standard_D16_v3`, set via `params.vm_type`, but this can be changed if required.
+
+Note that the choice of VM size depends on your quota and the overall workload during the analysis.
+For a thorough list, please refer to the [Azure Sizes for virtual machines in Azure](https://docs.microsoft.com/en-us/azure/virtual-machines/sizes).
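The resource-bumping advice above interacts with the template's `check_max()` helper (used in `conf/base.config`, e.g. `check_max( 6.GB * task.attempt, 'memory' )`): each retry multiplies the base request by the attempt number, and `check_max()` caps the result at the `--max_memory`/`--max_cpus`/`--max_time` ceilings. A small Python sketch of that clamping behaviour, with illustrative numbers only (this is not template code):

```python
# Rough re-implementation of the memory clamping done by check_max();
# values below are examples, not defaults for any particular pipeline.
def check_max(requested_gb: float, max_memory_gb: float) -> float:
    """Cap an escalating resource request at the pipeline-wide maximum."""
    return min(requested_gb, max_memory_gb)


base_gb = 72.0  # e.g. a process_high memory request
for attempt in (1, 2):  # maxRetries = 1 allows at most one retry
    granted = check_max(base_gb * attempt, max_memory_gb=128.0)
    print(f"attempt {attempt}: requested {base_gb * attempt} GB, granted {granted} GB")
```

This is why raising `--max_memory` can unblock a failing process: without a higher ceiling, the doubled retry request is silently clamped back down.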
+ {% endif -%} ## Running in the background diff --git a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy index 2894a6dd23..2cb8b41388 100755 --- a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy +++ b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy @@ -32,6 +32,25 @@ class NfcoreTemplate { } } + // + // Generate version string + // + public static String version(workflow) { + String version_string = "" + + if (workflow.manifest.version) { + def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : '' + version_string += "${prefix_v}${workflow.manifest.version}" + } + + if (workflow.commitId) { + def git_shortsha = workflow.commitId.substring(0, 7) + version_string += "-g${git_shortsha}" + } + + return version_string + } + // // Construct and send completion email // @@ -61,7 +80,7 @@ class NfcoreTemplate { misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp def email_fields = [:] - email_fields['version'] = workflow.manifest.version + email_fields['version'] = NfcoreTemplate.version(workflow) email_fields['runName'] = workflow.runName email_fields['success'] = workflow.success email_fields['dateComplete'] = workflow.complete @@ -145,6 +164,64 @@ class NfcoreTemplate { output_tf.withWriter { w -> w << email_txt } } + // + // Construct and send a notification to a web server as JSON + // e.g. Microsoft Teams and Slack + // + public static void IM_notification(workflow, params, summary_params, projectDir, log) { + def hook_url = params.hook_url + + def summary = [:] + for (group in summary_params.keySet()) { + summary << summary_params[group] + } + + def misc_fields = [:] + misc_fields['start'] = workflow.start + misc_fields['complete'] = workflow.complete + misc_fields['scriptfile'] = workflow.scriptFile + misc_fields['scriptid'] = workflow.scriptId + if (workflow.repository) misc_fields['repository'] = workflow.repository + if (workflow.commitId) misc_fields['commitid'] = workflow.commitId + if (workflow.revision) misc_fields['revision'] = workflow.revision + misc_fields['nxf_version'] = workflow.nextflow.version + misc_fields['nxf_build'] = workflow.nextflow.build + misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp + + def msg_fields = [:] + msg_fields['version'] = NfcoreTemplate.version(workflow) + msg_fields['runName'] = workflow.runName + msg_fields['success'] = workflow.success + msg_fields['dateComplete'] = workflow.complete + msg_fields['duration'] = workflow.duration + msg_fields['exitStatus'] = workflow.exitStatus + msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None') + msg_fields['errorReport'] = (workflow.errorReport ?: 'None') + msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "") + msg_fields['projectDir'] = workflow.projectDir + msg_fields['summary'] = summary << misc_fields + + // Render the JSON template + def engine = new groovy.text.GStringTemplateEngine() + // Different JSON depending on the service provider + // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format + def json_path = hook_url.contains("hooks.slack.com") ? 
"slackreport.json" : "adaptivecard.json" + def hf = new File("$projectDir/assets/${json_path}") + def json_template = engine.createTemplate(hf).make(msg_fields) + def json_message = json_template.toString() + + // POST + def post = new URL(hook_url).openConnection(); + post.setRequestMethod("POST") + post.setDoOutput(true) + post.setRequestProperty("Content-Type", "application/json") + post.getOutputStream().write(json_message.getBytes("UTF-8")); + def postRC = post.getResponseCode(); + if (! postRC.equals(200)) { + log.warn(post.getErrorStream().getText()); + } + } + // // Print pipeline summary on completion // @@ -154,7 +231,7 @@ class NfcoreTemplate { if (workflow.stats.ignoredCount == 0) { log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" + log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" } } else { log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" @@ -242,6 +319,7 @@ class NfcoreTemplate { // public static String logo(workflow, monochrome_logs) { Map colors = logColours(monochrome_logs) + String workflow_version = NfcoreTemplate.version(workflow) String.format( """\n ${dashedLine(monochrome_logs)}{% if branded %} @@ -250,7 +328,7 @@ class NfcoreTemplate { ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} ${colors.green}`._,._,\'${colors.reset}{% endif %} - ${colors.purple} ${workflow.manifest.name} v${workflow.manifest.version}${colors.reset} + ${colors.purple} ${workflow.manifest.name} ${workflow_version}${colors.reset} ${dashedLine(monochrome_logs)} """.stripIndent() ) diff --git a/nf_core/pipeline-template/lib/Utils.groovy b/nf_core/pipeline-template/lib/Utils.groovy old mode 100755 new mode 100644 index 28567bd70d..8d030f4e84 --- a/nf_core/pipeline-template/lib/Utils.groovy +++ b/nf_core/pipeline-template/lib/Utils.groovy @@ -21,19 +21,26 @@ class Utils { } // Check that all channels are present - def required_channels = ['conda-forge', 'bioconda', 'defaults'] - def conda_check_failed = !required_channels.every { ch -> ch in channels } + // This channel list is ordered by required channel priority. 
+ def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults'] + def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean // Check that they are in the right order - conda_check_failed |= !(channels.indexOf('conda-forge') < channels.indexOf('bioconda')) - conda_check_failed |= !(channels.indexOf('bioconda') < channels.indexOf('defaults')) + def channel_priority_violation = false + def n = required_channels_in_order.size() + for (int i = 0; i < n - 1; i++) { + channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1])) + } - if (conda_check_failed) { + if (channels_missing | channel_priority_violation) { log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + " There is a problem with your Conda configuration!\n\n" + " You will need to set-up the conda-forge and bioconda channels correctly.\n" + - " Please refer to https://bioconda.github.io/user/install.html#set-up-channels\n" + - " NB: The order of the channels matters!\n" + + " Please refer to https://bioconda.github.io/\n" + + " The observed channel order is \n" + + " ${channels}\n" + + " but the following channel order is required:\n" + + " ${required_channels_in_order}\n" + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" } } diff --git a/nf_core/pipeline-template/lib/WorkflowMain.groovy b/nf_core/pipeline-template/lib/WorkflowMain.groovy index 2de013f2aa..8eab649681 100755 --- a/nf_core/pipeline-template/lib/WorkflowMain.groovy +++ b/nf_core/pipeline-template/lib/WorkflowMain.groovy @@ -22,12 +22,33 @@ class WorkflowMain { // Validate parameters and print summary to screen // public static void initialise(workflow, params, log) { + // Print help to screen if required + if (params.help) { + log.info help(workflow, params, log) + System.exit(0) + } + + // Print workflow version and exit on --version + if (params.version) { + String workflow_version = NfcoreTemplate.version(workflow) + log.info "${workflow.manifest.name} ${workflow_version}" + System.exit(0) + } + + // Validate workflow parameters via the JSON schema + if (params.validate_params) { + NfcoreSchema.validateParameters(workflow, params, log) + } + + // Print parameter summary log to screen + + log.info paramsSummaryLog(workflow, params, log) // Check that a -profile or Nextflow config has been provided to run the pipeline NfcoreTemplate.checkConfigProvided(workflow, log) // Check that conda channels are set-up correctly - if (params.enable_conda) { + if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { Utils.checkCondaChannels(log) } diff --git a/nf_core/pipeline-template/lib/WorkflowPipeline.groovy b/nf_core/pipeline-template/lib/WorkflowPipeline.groovy index ba9199e6fc..252f127d80 100755 --- a/nf_core/pipeline-template/lib/WorkflowPipeline.groovy +++ b/nf_core/pipeline-template/lib/WorkflowPipeline.groovy @@ -2,6 +2,8 @@ // This file holds several functions specific to the workflow/{{ short_name }}.nf in the {{ name }} pipeline // +import groovy.text.SimpleTemplateEngine + class Workflow{{ short_name[0]|upper }}{{ short_name[1:] }} { // @@ -45,6 +47,23 @@ class Workflow{{ short_name[0]|upper }}{{ short_name[1:] }} { return yaml_file_text } + public static String methodsDescriptionText(run_workflow, mqc_methods_yaml) { + // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file + def meta = [:] + 
meta.workflow = run_workflow.toMap()
+        meta["manifest_map"] = run_workflow.manifest.toMap()
+
+        meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : ""
+        meta["nodoi_text"] = meta.manifest_map.doi ? "" : "<li>If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used. </li>"
+
+        def methods_text = mqc_methods_yaml.text
+
+        def engine = new SimpleTemplateEngine()
+        def description_html = engine.createTemplate(methods_text).make(meta)
+
+        return description_html
+    }
+
     {%- if igenomes -%}
     //
     // Exit pipeline if incorrect --genome key provided
diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf
index 539bcf2bf8..74dddd590d 100644
--- a/nf_core/pipeline-template/main.nf
+++ b/nf_core/pipeline-template/main.nf
@@ -4,7 +4,7 @@
     {{ name }}
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     Github : https://github.com/{{ name }}
-{% if branded -%}
+{% if branded %}
     Website: https://nf-co.re/{{ short_name }}
     Slack  : https://nfcore.slack.com/channels/{{ short_name }}
 {% endif -%}
diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json
index 9c8d724aef..08116ecbac 100644
--- a/nf_core/pipeline-template/modules.json
+++ b/nf_core/pipeline-template/modules.json
@@ -2,20 +2,24 @@
     "name": "{{ name }}",
     "homePage": "https://github.com/{{ name }}",
     "repos": {
-        "nf-core/modules": {
-            "git_url": "https://github.com/nf-core/modules.git",
+        "https://github.com/nf-core/modules.git": {
             "modules": {
-                "custom/dumpsoftwareversions": {
-                    "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d",
-                    "branch": "master"
-                },
-                "fastqc": {
-                    "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d",
-                    "branch": "master"
-                },
-                "multiqc": {
-                    "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d",
-                    "branch": "master"
+                "nf-core": {
+                    "custom/dumpsoftwareversions": {
+                        "branch": "master",
+                        "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c",
+                        "installed_by": ["modules"]
+                    },
+                    "fastqc": {
+                        "branch": "master",
+                        "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c",
+                        "installed_by": ["modules"]
+                    },
+                    "multiqc": {
+                        "branch": "master",
+                        "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c",
+                        "installed_by": ["modules"]
+                    }
                 }
             }
         }
diff --git a/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf
similarity index 79%
rename from nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf
rename to nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf
index 327d510056..3df21765b9 100644
--- a/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf
+++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf
@@ -1,11 +1,11 @@
 process CUSTOM_DUMPSOFTWAREVERSIONS {
-    label 'process_low'
+    label 'process_single'
 
     // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container
-    conda (params.enable_conda ? "bioconda::multiqc=1.11" : null)
+    conda "bioconda::multiqc=1.13"
     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0' :
-        'quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0' }"
+        'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' :
+        'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }"
 
     input:
     path versions
diff --git a/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml
similarity index 100%
rename from nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml
rename to nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml
diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py
new file mode 100755
index 0000000000..e55b8d43a9
--- /dev/null
+++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+
+
+"""Provide functions to merge multiple versions.yml files."""
+
+
+import platform
+from textwrap import dedent
+
+import yaml
+
+
+def _make_versions_html(versions):
+    """Generate a tabular HTML output of all versions for MultiQC."""
+    html = [
+        dedent(
+            """\\
+            <style>
+            #nf-core-versions tbody:nth-child(even) {
+                background-color: #f2f2f2;
+            }
+            </style>
+            <table class="table" style="width:100%" id="nf-core-versions">
+                <thead>
+                    <tr>
+                        <th> Process Name </th>
+                        <th> Software </th>
+                        <th> Version </th>
+                    </tr>
+                </thead>
+            """
+        )
+    ]
+    for process, tmp_versions in sorted(versions.items()):
+        html.append("<tbody>")
+        for i, (tool, version) in enumerate(sorted(tmp_versions.items())):
+            html.append(
+                dedent(
+                    f"""\\
+                    <tr>
+                        <td><samp>{process if (i == 0) else ''}</samp></td>
+                        <td><samp>{tool}</samp></td>
+                        <td><samp>{version}</samp></td>
+                    </tr>
+                    """
+                )
+            )
+        html.append("</tbody>")
+    html.append("</table>")
+    return "\\n".join(html)
+
+
+def main():
+    """Load all version files and generate merged output."""
+    versions_this_module = {}
+    versions_this_module["${task.process}"] = {
+        "python": platform.python_version(),
+        "yaml": yaml.__version__,
+    }
+
+    with open("$versions") as f:
+        versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module
+
+    # aggregate versions by the module name (derived from fully-qualified process name)
+    versions_by_module = {}
+    for process, process_versions in versions_by_process.items():
+        module = process.split(":")[-1]
+        try:
+            if versions_by_module[module] != process_versions:
+                raise AssertionError(
+                    "We assume that software versions are the same between all modules. "
+                    "If you see this error-message it means you discovered an edge-case "
+                    "and should open an issue in nf-core/tools. "
+                )
+        except KeyError:
+            versions_by_module[module] = process_versions
+
+    versions_by_module["Workflow"] = {
+        "Nextflow": "$workflow.nextflow.version",
+        "$workflow.manifest.name": "$workflow.manifest.version",
+    }
+
+    versions_mqc = {
+        "id": "software_versions",
+        "section_name": "${workflow.manifest.name} Software Versions",
+        "section_href": "https://github.com/${workflow.manifest.name}",
+        "plot_type": "html",
+        "description": "are collected at run time from the software output.",
+        "data": _make_versions_html(versions_by_module),
+    }
+
+    with open("software_versions.yml", "w") as f:
+        yaml.dump(versions_by_module, f, default_flow_style=False)
+    with open("software_versions_mqc.yml", "w") as f:
+        yaml.dump(versions_mqc, f, default_flow_style=False)
+
+    with open("versions.yml", "w") as f:
+        yaml.dump(versions_this_module, f, default_flow_style=False)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf
new file mode 100644
index 0000000000..9ae5838158
--- /dev/null
+++ b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf
@@ -0,0 +1,51 @@
+process FASTQC {
+    tag "$meta.id"
+    label 'process_medium'
+
+    conda "bioconda::fastqc=0.11.9"
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' :
+        'quay.io/biocontainers/fastqc:0.11.9--0' }"
+
+    input:
+    tuple val(meta), path(reads)
+
+    output:
+    tuple val(meta), path("*.html"), emit: html
+    tuple val(meta), path("*.zip") , emit: zip
+    path  "versions.yml"           , emit: versions
+
+    when:
+    task.ext.when == null || task.ext.when
+
+    script:
+    def args = task.ext.args ?: ''
+    def prefix = task.ext.prefix ?: "${meta.id}"
+    // Make list of old name and new name pairs to use for renaming in the bash while loop
+    def old_new_pairs = reads instanceof Path || reads.size() == 1 ?
[[ reads, "${prefix}.${reads.extension}" ]] : reads.withIndex().collect { entry, index -> [ entry, "${prefix}_${index + 1}.${entry.extension}" ] }
+    def rename_to = old_new_pairs*.join(' ').join(' ')
+    def renamed_files = old_new_pairs.collect{ old_name, new_name -> new_name }.join(' ')
+    """
+    printf "%s %s\\n" $rename_to | while read old_name new_name; do
+        [ -f "\${new_name}" ] || ln -s \$old_name \$new_name
+    done
+    fastqc $args --threads $task.cpus $renamed_files
+
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" )
+    END_VERSIONS
+    """
+
+    stub:
+    def prefix = task.ext.prefix ?: "${meta.id}"
+    """
+    touch ${prefix}.html
+    touch ${prefix}.zip
+
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" )
+    END_VERSIONS
+    """
+}
diff --git a/nf_core/pipeline-template/modules/nf-core/modules/fastqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml
similarity index 100%
rename from nf_core/pipeline-template/modules/nf-core/modules/fastqc/meta.yml
rename to nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml
diff --git a/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py
deleted file mode 100644
index 787bdb7b1b..0000000000
--- a/nf_core/pipeline-template/modules/nf-core/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/env python
-
-import platform
-from textwrap import dedent
-
-import yaml
-
-
-def _make_versions_html(versions):
-    html = [
-        dedent(
-            """\\
-            <style>
-            #nf-core-versions tbody:nth-child(even) {
-                background-color: #f2f2f2;
-            }
-            </style>
-            <table class="table" style="width:100%" id="nf-core-versions">
-                <thead>
-                    <tr>
-                        <th> Process Name </th>
-                        <th> Software </th>
-                        <th> Version </th>
-                    </tr>
-                </thead>
-            """
-        )
-    ]
-    for process, tmp_versions in sorted(versions.items()):
-        html.append("<tbody>")
-        for i, (tool, version) in enumerate(sorted(tmp_versions.items())):
-            html.append(
-                dedent(
-                    f"""\\
-                    <tr>
-                        <td><samp>{process if (i == 0) else ''}</samp></td>
-                        <td><samp>{tool}</samp></td>
-                        <td><samp>{version}</samp></td>
-                    </tr>
-                    """
-                )
-            )
-        html.append("</tbody>")
-    html.append("</table>")
-    return "\\n".join(html)
-
-
-versions_this_module = {}
-versions_this_module["${task.process}"] = {
-    "python": platform.python_version(),
-    "yaml": yaml.__version__,
-}
-
-with open("$versions") as f:
-    versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module
-
-# aggregate versions by the module name (derived from fully-qualified process name)
-versions_by_module = {}
-for process, process_versions in versions_by_process.items():
-    module = process.split(":")[-1]
-    try:
-        if versions_by_module[module] != process_versions:
-            raise AssertionError(
-                "We assume that software versions are the same between all modules. "
-                "If you see this error-message it means you discovered an edge-case "
-                "and should open an issue in nf-core/tools. "
-            )
-    except KeyError:
-        versions_by_module[module] = process_versions
-
-versions_by_module["Workflow"] = {
-    "Nextflow": "$workflow.nextflow.version",
-    "$workflow.manifest.name": "$workflow.manifest.version",
-}
-
-versions_mqc = {
-    "id": "software_versions",
-    "section_name": "${workflow.manifest.name} Software Versions",
-    "section_href": "https://github.com/${workflow.manifest.name}",
-    "plot_type": "html",
-    "description": "are collected at run time from the software output.",
-    "data": _make_versions_html(versions_by_module),
-}
-
-with open("software_versions.yml", "w") as f:
-    yaml.dump(versions_by_module, f, default_flow_style=False)
-with open("software_versions_mqc.yml", "w") as f:
-    yaml.dump(versions_mqc, f, default_flow_style=False)
-
-with open("versions.yml", "w") as f:
-    yaml.dump(versions_this_module, f, default_flow_style=False)
diff --git a/nf_core/pipeline-template/modules/nf-core/modules/fastqc/main.nf b/nf_core/pipeline-template/modules/nf-core/modules/fastqc/main.nf
deleted file mode 100644
index ed6b8c50b1..0000000000
--- a/nf_core/pipeline-template/modules/nf-core/modules/fastqc/main.nf
+++ /dev/null
@@ -1,47 +0,0 @@
-process FASTQC {
-    tag "$meta.id"
-    label 'process_medium'
-
-    conda (params.enable_conda ? "bioconda::fastqc=0.11.9" : null)
-    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' :
-        'quay.io/biocontainers/fastqc:0.11.9--0' }"
-
-    input:
-    tuple val(meta), path(reads)
-
-    output:
-    tuple val(meta), path("*.html"), emit: html
-    tuple val(meta), path("*.zip") , emit: zip
-    path  "versions.yml"           , emit: versions
-
-    when:
-    task.ext.when == null || task.ext.when
-
-    script:
-    def args = task.ext.args ?: ''
-    // Add soft-links to original FastQs for consistent naming in pipeline
-    def prefix = task.ext.prefix ?: "${meta.id}"
-    if (meta.single_end) {
-        """
-        [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz
-        fastqc $args --threads $task.cpus ${prefix}.fastq.gz
-
-        cat <<-END_VERSIONS > versions.yml
-        "${task.process}":
-            fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" )
-        END_VERSIONS
-        """
-    } else {
-        """
-        [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz
-        [ !
-f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz - fastqc $args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz - - cat <<-END_VERSIONS > versions.yml - "${task.process}": - fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) - END_VERSIONS - """ - } -} diff --git a/nf_core/pipeline-template/modules/nf-core/modules/multiqc/main.nf b/nf_core/pipeline-template/modules/nf-core/modules/multiqc/main.nf deleted file mode 100644 index 1264aac1eb..0000000000 --- a/nf_core/pipeline-template/modules/nf-core/modules/multiqc/main.nf +++ /dev/null @@ -1,31 +0,0 @@ -process MULTIQC { - label 'process_medium' - - conda (params.enable_conda ? 'bioconda::multiqc=1.12' : null) - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/multiqc:1.12--pyhdfd78af_0' : - 'quay.io/biocontainers/multiqc:1.12--pyhdfd78af_0' }" - - input: - path multiqc_files - - output: - path "*multiqc_report.html", emit: report - path "*_data" , emit: data - path "*_plots" , optional:true, emit: plots - path "versions.yml" , emit: versions - - when: - task.ext.when == null || task.ext.when - - script: - def args = task.ext.args ?: '' - """ - multiqc -f $args . - - cat <<-END_VERSIONS > versions.yml - "${task.process}": - multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) - END_VERSIONS - """ -} diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf new file mode 100644 index 0000000000..68f66bea74 --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf @@ -0,0 +1,53 @@ +process MULTIQC { + label 'process_single' + + conda "bioconda::multiqc=1.13" + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" + + input: + path multiqc_files, stageAs: "?/*" + path(multiqc_config) + path(extra_multiqc_config) + path(multiqc_logo) + + output: + path "*multiqc_report.html", emit: report + path "*_data" , emit: data + path "*_plots" , optional:true, emit: plots + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def config = multiqc_config ? "--config $multiqc_config" : '' + def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : '' + """ + multiqc \\ + --force \\ + $args \\ + $config \\ + $extra_config \\ + . 
+ + cat <<-END_VERSIONS > versions.yml + "${task.process}": + multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) + END_VERSIONS + """ + + stub: + """ + touch multiqc_data + touch multiqc_plots + touch multiqc_report.html + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) + END_VERSIONS + """ +} diff --git a/nf_core/pipeline-template/modules/nf-core/modules/multiqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml similarity index 73% rename from nf_core/pipeline-template/modules/nf-core/modules/multiqc/meta.yml rename to nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml index 6fa891efc2..ebc29b279d 100644 --- a/nf_core/pipeline-template/modules/nf-core/modules/multiqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml @@ -12,11 +12,25 @@ tools: homepage: https://multiqc.info/ documentation: https://multiqc.info/docs/ licence: ["GPL-3.0-or-later"] + input: - multiqc_files: type: file description: | List of reports / files recognised by MultiQC, for example the html and zip output of FastQC + - multiqc_config: + type: file + description: Optional config yml for MultiQC + pattern: "*.{yml,yaml}" + - extra_multiqc_config: + type: file + description: Second optional config yml for MultiQC. Will override common sections in multiqc_config. + pattern: "*.{yml,yaml}" + - multiqc_logo: + type: file + description: Optional logo file for MultiQC + pattern: "*.{png}" + output: - report: type: file @@ -38,3 +52,4 @@ authors: - "@abhi18av" - "@bunop" - "@drpatelh" + - "@jfy133" diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 606c3a2003..1c11a8b715 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -23,7 +23,9 @@ params { // MultiQC options multiqc_config = null multiqc_title = null + multiqc_logo = null max_multiqc_email_size = '25.MB' + multiqc_methods_description = null // Boilerplate options outdir = null @@ -33,11 +35,12 @@ params { email_on_fail = null plaintext_email = false monochrome_logs = false + hook_url = null help = false + version = false validate_params = true show_hidden_params = false schema_ignore_params = 'genomes' - enable_conda = false {% if nf_core_configs %} // Config options @@ -60,10 +63,11 @@ params { lenient_mode = false } -{% if nf_core_configs %} + // Load base.config by default for all pipelines includeConfig 'conf/base.config' +{% if nf_core_configs -%} // Load nf-core custom profiles from different Institutions try { includeConfig "${params.custom_config_base}/nfcore_custom.config" @@ -78,13 +82,12 @@ try { // } catch (Exception e) { // System.err.println("WARNING: Could not load nf-core/config/{{ short_name }} profiles: ${params.custom_config_base}/pipeline/{{ short_name }}.config") // } - - {% endif %} + profiles { debug { process.beforeScript = 'echo $HOSTNAME' } conda { - params.enable_conda = true + conda.enabled = true docker.enabled = false singularity.enabled = false podman.enabled = false @@ -92,7 +95,7 @@ profiles { charliecloud.enabled = false } mamba { - params.enable_conda = true + conda.enabled = true conda.useMamba = true docker.enabled = false singularity.enabled = false @@ -103,14 +106,19 @@ profiles { docker { docker.enabled = true docker.userEmulation = true + conda.enabled = false singularity.enabled = false podman.enabled = false shifter.enabled = false charliecloud.enabled = false } + arm { + 
docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' + } singularity { singularity.enabled = true singularity.autoMounts = true + conda.enabled = false docker.enabled = false podman.enabled = false shifter.enabled = false @@ -118,6 +126,7 @@ profiles { } podman { podman.enabled = true + conda.enabled = false docker.enabled = false singularity.enabled = false shifter.enabled = false @@ -125,6 +134,7 @@ profiles { } shifter { shifter.enabled = true + conda.enabled = false docker.enabled = false singularity.enabled = false podman.enabled = false @@ -132,6 +142,7 @@ profiles { } charliecloud { charliecloud.enabled = true + conda.enabled = false docker.enabled = false singularity.enabled = false podman.enabled = false @@ -189,12 +200,13 @@ dag { manifest { name = '{{ name }}' - author = '{{ author }}' + author = """{{ author }}""" homePage = 'https://github.com/{{ name }}' - description = '{{ description }}' + description = """{{ description }}""" mainScript = 'main.nf' - nextflowVersion = '!>=21.10.3' + nextflowVersion = '!>=22.10.1' version = '{{ version }}' + doi = '' } // Load modules.config for DSL2 module specific options diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 5cd8ac489a..2743562d6c 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -176,6 +176,12 @@ "fa_icon": "fas fa-question-circle", "hidden": true }, + "version": { + "type": "boolean", + "description": "Display version and exit.", + "fa_icon": "fas fa-question-circle", + "hidden": true + }, "publish_dir_mode": { "type": "string", "default": "copy", @@ -213,12 +219,30 @@ "fa_icon": "fas fa-palette", "hidden": true }, + "hook_url": { + "type": "string", + "description": "Incoming hook URL for messaging service", + "fa_icon": "fas fa-people-group", + "help_text": "Incoming hook URL for messaging service. Currently, MS Teams and Slack are supported.", + "hidden": true + }, "multiqc_config": { "type": "string", "description": "Custom config file to supply to MultiQC.", "fa_icon": "fas fa-cog", "hidden": true }, + "multiqc_logo": { + "type": "string", + "description": "Custom logo file to supply to MultiQC. File name must also be set in the MultiQC config file", + "fa_icon": "fas fa-image", + "hidden": true + }, + "multiqc_methods_description": { + "type": "string", + "description": "Custom MultiQC yaml file containing HTML including a methods description.", + "fa_icon": "fas fa-cog" + }, "tracedir": { "type": "string", "description": "Directory to keep pipeline Nextflow logs and reports.", @@ -239,12 +263,6 @@ "description": "Show all params when using `--help`", "hidden": true, "help_text": "By default, parameters set as _hidden_ in the schema are not shown on the command line when a user runs with `--help`. Specifying this option will tell the pipeline to show all parameters." - }, - "enable_conda": { - "type": "boolean", - "description": "Run this workflow with Conda. You can also use '-profile conda' instead of providing this parameter.", - "hidden": true, - "fa_icon": "fas fa-bacon" } } } diff --git a/nf_core/pipeline-template/pyproject.toml b/nf_core/pipeline-template/pyproject.toml new file mode 100644 index 0000000000..0d62beb6f9 --- /dev/null +++ b/nf_core/pipeline-template/pyproject.toml @@ -0,0 +1,10 @@ +# Config file for Python. Mostly used to configure linting of bin/check_samplesheet.py with Black. 
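The `[tool.black]` and `[tool.isort]` tables just below pin a 120-character line length and the Black-compatible isort profile. If the `black` package is importable, the effect of these settings can be previewed directly (a sketch, not part of the template):

```python
import black

# Format a deliberately messy snippet with the same line length the template pins.
messy = "x=   {'a':1,'b':2}"
print(black.format_str(messy, mode=black.Mode(line_length=120)))
# x = {"a": 1, "b": 2}
```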
+# Should be kept the same as nf-core/tools to avoid fighting with template synchronisation. +[tool.black] +line-length = 120 +target_version = ["py37", "py38", "py39", "py310"] + +[tool.isort] +profile = "black" +known_first_party = ["nf_core"] +multi_line_output = 3 diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index 075abfb67f..6f643e6163 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -41,8 +41,10 @@ for (param in checkPathParamList) { if (param) { file(param, checkIfExists: true ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -ch_multiqc_config = file("$projectDir/assets/multiqc_config.yml", checkIfExists: true) -ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath(params.multiqc_config) : Channel.empty() +ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) +ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config, checkIfExists: true ) : Channel.empty() +ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath( params.multiqc_logo, checkIfExists: true ) : Channel.empty() +ch_multiqc_custom_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -63,9 +65,9 @@ ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath(params.multi // // MODULE: Installed directly from nf-core/modules // -include { FASTQC } from '../modules/nf-core/modules/fastqc/main' -include { MULTIQC } from '../modules/nf-core/modules/multiqc/main' -include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/modules/custom/dumpsoftwareversions/main' +include { FASTQC } from '../modules/nf-core/fastqc/main' +include { MULTIQC } from '../modules/nf-core/multiqc/main' +include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoftwareversions/main' /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -106,18 +108,22 @@ workflow {{ short_name|upper }} { workflow_summary = Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.paramsSummaryMultiqc(workflow, summary_params) ch_workflow_summary = Channel.value(workflow_summary) + methods_description = Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description) + ch_methods_description = Channel.value(methods_description) + ch_multiqc_files = Channel.empty() - ch_multiqc_files = ch_multiqc_files.mix(Channel.from(ch_multiqc_config)) - ch_multiqc_files = ch_multiqc_files.mix(ch_multiqc_custom_config.collect().ifEmpty([])) ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) + ch_multiqc_files = ch_multiqc_files.mix(ch_methods_description.collectFile(name: 'methods_description_mqc.yaml')) ch_multiqc_files = ch_multiqc_files.mix(CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect()) ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}.ifEmpty([])) MULTIQC ( - ch_multiqc_files.collect() + ch_multiqc_files.collect(), + ch_multiqc_config.toList(), + ch_multiqc_custom_config.toList(), + ch_multiqc_logo.toList() ) multiqc_report = MULTIQC.out.report.toList() - 
ch_versions = ch_versions.mix(MULTIQC.out.versions) } /* @@ -131,6 +137,9 @@ workflow.onComplete { NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) } NfcoreTemplate.summary(workflow, params, log) + if (params.hook_url) { + NfcoreTemplate.IM_notification(workflow, params, summary_params, projectDir, log) + } } /* diff --git a/nf_core/refgenie.py b/nf_core/refgenie.py index 6221efce68..a10e4fecdf 100644 --- a/nf_core/refgenie.py +++ b/nf_core/refgenie.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Update a nextflow.config file with refgenie genomes """ @@ -83,7 +82,7 @@ def _update_nextflow_home_config(refgenie_genomes_config_file, nxf_home): with open(nxf_home_config, "r") as fh: lines = fh.readlines() for line in lines: - if re.match(f"\s*includeConfig\s*'{os.path.abspath(refgenie_genomes_config_file)}'", line): + if re.match(rf"\s*includeConfig\s*'{os.path.abspath(refgenie_genomes_config_file)}'", line): has_include_statement = True break diff --git a/nf_core/schema.py b/nf_core/schema.py index 1dab8c2b0e..ba88e762ea 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Code to deal with pipeline JSON Schema """ from __future__ import print_function @@ -7,21 +6,25 @@ import json import logging import os +import tempfile import webbrowser import jinja2 import jsonschema import markdown +import rich.console import yaml from rich.prompt import Confirm +from rich.syntax import Syntax import nf_core.list import nf_core.utils +from nf_core.lint_utils import dump_json_with_prettier, run_prettier_on_file log = logging.getLogger(__name__) -class PipelineSchema(object): +class PipelineSchema: """Class to generate a schema object with functions to handle pipeline JSON Schema""" @@ -171,9 +174,7 @@ def save_schema(self, suppress_logging=False): num_params += sum(len(d.get("properties", {})) for d in self.schema.get("definitions", {}).values()) if not suppress_logging: log.info(f"Writing schema with {num_params} params: '{self.schema_filename}'") - with open(self.schema_filename, "w") as fh: - json.dump(self.schema, fh, indent=4) - fh.write("\n") + dump_json_with_prettier(self.schema_filename, self.schema) def load_input_params(self, params_path): """Load a given a path to a parameters file (JSON/YAML) @@ -184,7 +185,10 @@ def load_input_params(self, params_path): # First, try to load as JSON try: with open(params_path, "r") as fh: - params = json.load(fh) + try: + params = json.load(fh) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{params_path}' due to error {e}") self.input_params.update(params) log.debug(f"Loaded JSON input params: {params_path}") except Exception as json_e: @@ -228,7 +232,7 @@ def validate_default_params(self): schema_no_required = copy.deepcopy(self.schema) if "required" in schema_no_required: schema_no_required.pop("required") - for group_key, group in schema_no_required["definitions"].items(): + for group_key, group in schema_no_required.get("definitions", {}).items(): if "required" in group: schema_no_required["definitions"][group_key].pop("required") jsonschema.validate(self.schema_defaults, schema_no_required) @@ -247,7 +251,7 @@ def validate_default_params(self): params_ignore = [] # Go over group keys - for group_key, group in schema_no_required["definitions"].items(): + for group_key, group in schema_no_required.get("definitions", {}).items(): group_properties = group.get("properties") for param in group_properties: if param in params_ignore: @@ -343,7 
+347,7 @@ def validate_schema(self, schema=None): if "allOf" not in schema: raise AssertionError("Schema has definitions, but no allOf key") in_allOf = False - for allOf in schema["allOf"]: + for allOf in schema.get("allOf", []): if allOf["$ref"] == f"#/definitions/{d_key}": in_allOf = True if not in_allOf: @@ -361,7 +365,7 @@ def validate_schema(self, schema=None): if "definitions" not in schema: raise AssertionError("Schema has allOf, but no definitions") def_key = allOf["$ref"][14:] - if def_key not in schema["definitions"]: + if def_key not in schema.get("definitions", {}): raise AssertionError(f"Subschema `{def_key}` found in `allOf` but not `definitions`") # Check that the schema describes at least one parameter @@ -434,10 +438,10 @@ def check_for_input_mimetype(self): # Check that the input parameter is defined if "input" not in self.schema_params: - raise LookupError(f"Parameter `input` not found in schema") + raise LookupError("Parameter `input` not found in schema") # Check that the input parameter is defined in the right place if "input" not in self.schema.get("definitions", {}).get("input_output_options", {}).get("properties", {}): - raise LookupError(f"Parameter `input` is not defined in the correct subschema (input_output_options)") + raise LookupError("Parameter `input` is not defined in the correct subschema (input_output_options)") input_entry = self.schema["definitions"]["input_output_options"]["properties"]["input"] if "mimetype" not in input_entry: return None @@ -463,13 +467,21 @@ def print_documentation( if format == "html": output = self.markdown_to_html(output) - # Print to file - if output_fn: + with tempfile.NamedTemporaryFile(mode="w+") as fh: + fh.write(output) + run_prettier_on_file(fh.name) + fh.seek(0) + prettified_docs = fh.read() + + if not output_fn: + console = rich.console.Console() + console.print("\n", Syntax(prettified_docs, format), "\n") + else: if os.path.exists(output_fn) and not force: log.error(f"File '{output_fn}' exists! Please delete first, or use '--force'") return - with open(output_fn, "w") as file: - file.write(output) + with open(output_fn, "w") as fh: + fh.write(prettified_docs) log.info(f"Documentation written to '{output_fn}'") # Return as a string @@ -485,46 +497,55 @@ def schema_to_markdown(self, columns): for definition in self.schema.get("definitions", {}).values(): out += f"\n## {definition.get('title', {})}\n\n" out += f"{definition.get('description', '')}\n\n" - out += "".join([f"| {column.title()} " for column in columns]) - out += "|\n" - out += "".join(["|-----------" for columns in columns]) - out += "|\n" - for p_key, param in definition.get("properties", {}).items(): - for column in columns: - if column == "parameter": - out += f"| `{p_key}` " - elif column == "description": - out += f"| {param.get('description', '')} " - if param.get("help_text", "") != "": - out += f"
<details><summary>Help</summary><small>{param['help_text']}</small></details>
    " - elif column == "type": - out += f"| `{param.get('type', '')}` " - else: - out += f"| {param.get(column, '')} " - out += "|\n" + required = definition.get("required", []) + properties = definition.get("properties", {}) + param_table = self.markdown_param_table(properties, required, columns) + out += param_table # Top-level ungrouped parameters if len(self.schema.get("properties", {})) > 0: out += "\n## Other parameters\n\n" - out += "".join([f"| {column.title()} " for column in columns]) - out += "|\n" - out += "".join(["|-----------" for columns in columns]) - out += "|\n" + required = self.schema.get("required", []) + properties = self.schema.get("properties", {}) + param_table = self.markdown_param_table(properties, required, columns) + out += param_table - for p_key, param in self.schema.get("properties", {}).items(): - for column in columns: - if column == "parameter": - out += f"| `{p_key}` " - elif column == "description": - out += f"| {param.get('description', '')} " - if param.get("help_text", "") != "": - out += f"
<details><summary>Help</summary><small>{param['help_text']}</small></details>
    " - elif column == "type": - out += f"| `{param.get('type', '')}` " - else: - out += f"| {param.get(column, '')} " - out += "|\n" + return out + + def markdown_param_table(self, properties, required, columns): + """Creates a markdown table for params from jsonschema properties section + Args: + properties (dict): A jsonschema properties dictionary + required (list): A list of the required fields. + Should come from the same level of the jsonschema as properties + columns (list): A list of columns to write + + Returns: + str: A string with the markdown table + """ + out = "" + out += "".join([f"| {column.title()} " for column in columns]) + out += "|\n" + out += "".join(["|-----------" for _ in columns]) + out += "|\n" + for p_key, param in properties.items(): + for column in columns: + if column == "parameter": + out += f"| `{p_key}` " + elif column == "description": + desc = param.get("description", "").replace("\n", "
    ") + out += f"| {desc} " + if param.get("help_text", "") != "": + help_txt = param["help_text"].replace("\n", "
    ") + out += f"
    Help{help_txt}
    " + elif column == "type": + out += f"| `{param.get('type', '')}` " + elif column == "required": + out += f"| {p_key in required or ''} " + else: + out += f"| {param.get(column, '')} " + out += "|\n" return out def markdown_to_html(self, markdown_str): @@ -667,6 +688,14 @@ def remove_schema_empty_definitions(self): if allOf in self.schema.get("allOf", []): self.schema["allOf"].remove(allOf) + # If we don't have anything left in "allOf", remove it + if self.schema.get("allOf") == []: + del self.schema["allOf"] + + # If we don't have anything left in "definitions", remove it + if self.schema.get("definitions") == {}: + del self.schema["definitions"] + def remove_schema_notfound_configs(self): """ Go through top-level schema and all definitions sub-schemas to remove diff --git a/nf_core/subworkflow-template/subworkflows/main.nf b/nf_core/subworkflow-template/subworkflows/main.nf new file mode 100644 index 0000000000..dfebaa94fe --- /dev/null +++ b/nf_core/subworkflow-template/subworkflows/main.nf @@ -0,0 +1,36 @@ +// TODO nf-core: If in doubt look at other nf-core/subworkflows to see how we are doing things! :) +// https://github.com/nf-core/modules/tree/master/subworkflows +// You can also ask for help via your pull request or on the #subworkflows channel on the nf-core Slack workspace: +// https://nf-co.re/join +// TODO nf-core: A subworkflow SHOULD import at least two modules + +include { SAMTOOLS_SORT } from '../../../modules/nf-core/samtools/sort/main' +include { SAMTOOLS_INDEX } from '../../../modules/nf-core/samtools/index/main' + +workflow {{ subworkflow_name|upper }} { + + take: + // TODO nf-core: edit input (take) channels + ch_bam // channel: [ val(meta), [ bam ] ] + + main: + + ch_versions = Channel.empty() + + // TODO nf-core: substitute modules here for the modules of your subworkflow + + SAMTOOLS_SORT ( ch_bam ) + ch_versions = ch_versions.mix(SAMTOOLS_SORT.out.versions.first()) + + SAMTOOLS_INDEX ( SAMTOOLS_SORT.out.bam ) + ch_versions = ch_versions.mix(SAMTOOLS_INDEX.out.versions.first()) + + emit: + // TODO nf-core: edit emitted channels + bam = SAMTOOLS_SORT.out.bam // channel: [ val(meta), [ bam ] ] + bai = SAMTOOLS_INDEX.out.bai // channel: [ val(meta), [ bai ] ] + csi = SAMTOOLS_INDEX.out.csi // channel: [ val(meta), [ csi ] ] + + versions = ch_versions // channel: [ versions.yml ] +} + diff --git a/nf_core/subworkflow-template/subworkflows/meta.yml b/nf_core/subworkflow-template/subworkflows/meta.yml new file mode 100644 index 0000000000..3db57b6fb1 --- /dev/null +++ b/nf_core/subworkflow-template/subworkflows/meta.yml @@ -0,0 +1,48 @@ +name: "{{ subworkflow_name }}" +## TODO nf-core: Add a description of the subworkflow and list keywords +description: Sort SAM/BAM/CRAM file +keywords: + - sort + - bam + - sam + - cram +## TODO nf-core: Add a list of the modules used in the subworkflow +modules: + - samtools/sort + - samtools/index +## TODO nf-core: List all of the variables used as input, including their types and descriptions +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" +## TODO nf-core: List all of the variables used as output, including their types and descriptions +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - bam: + type: file + description: Sorted BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - bai: + type: file + description: BAM/CRAM/SAM samtools index + pattern: "*.{bai,crai,sai}" + - csi: + type: file + description: CSI samtools index + pattern: "*.csi" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "{{ author }}" diff --git a/nf_core/subworkflow-template/tests/main.nf b/nf_core/subworkflow-template/tests/main.nf new file mode 100644 index 0000000000..2bf63da2f3 --- /dev/null +++ b/nf_core/subworkflow-template/tests/main.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { {{ subworkflow_name|upper }} } from '../../../../subworkflows/{{ org }}/{{ subworkflow_dir }}/main.nf' + +workflow test_{{ subworkflow_name }} { + {% if has_meta %} + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + {%- else %} + input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + {%- endif %} + + {{ subworkflow_name|upper }} ( input ) +} diff --git a/nf_core/subworkflow-template/tests/nextflow.config b/nf_core/subworkflow-template/tests/nextflow.config new file mode 100644 index 0000000000..8730f1c4b9 --- /dev/null +++ b/nf_core/subworkflow-template/tests/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/nf_core/subworkflow-template/tests/test.yml b/nf_core/subworkflow-template/tests/test.yml new file mode 100644 index 0000000000..23059412da --- /dev/null +++ b/nf_core/subworkflow-template/tests/test.yml @@ -0,0 +1,12 @@ +## TODO nf-core: Please run the following command to build this file: +# nf-core subworkflows create-test-yml {{ subworkflow_name_underscore }} +- name: "{{ subworkflow_name }}" + command: nextflow run ./tests/subworkflows/{{ org }}/{{ subworkflow_dir }} -entry test_{{ subworkflow_name }} -c ./tests/config/nextflow.config + tags: + - "subworkflows" + - "subworkflows/{{ subworkflow_name }}" + files: + - path: "output/{{ subworkflow_name }}/test.bam" + md5sum: e667c7caad0bc4b7ac383fd023c654fc + - path: output/{{ subworkflow_name }}/versions.yml + md5sum: a01fe51bc4c6a3a6226fbf77b2c7cf3b diff --git a/nf_core/subworkflows/__init__.py b/nf_core/subworkflows/__init__.py new file mode 100644 index 0000000000..3c93138852 --- /dev/null +++ b/nf_core/subworkflows/__init__.py @@ -0,0 +1,8 @@ +from .create import SubworkflowCreate +from .info import SubworkflowInfo +from .install import SubworkflowInstall +from .list import SubworkflowList +from .remove import SubworkflowRemove +from .subworkflows_test import SubworkflowsTest +from .test_yml_builder import SubworkflowTestYmlBuilder +from .update import SubworkflowUpdate diff --git a/nf_core/subworkflows/create.py b/nf_core/subworkflows/create.py new file mode 100644 index 0000000000..963076455e --- /dev/null +++ b/nf_core/subworkflows/create.py @@ -0,0 +1,22 @@ +import logging + +from nf_core.components.create import ComponentCreate + +log = logging.getLogger(__name__) + + +class SubworkflowCreate(ComponentCreate): + def __init__( + self, + pipeline_dir, + component="", + author=None, + force=False, + ): + super().__init__( + "subworkflows", + pipeline_dir, + component, + author, + force=force, + ) diff --git a/nf_core/subworkflows/info.py b/nf_core/subworkflows/info.py new file mode 100644 
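Each of these new `nf_core.subworkflows` classes is a thin shim that delegates to a shared `nf_core.components` implementation. A hedged usage sketch (the directory and subworkflow name are made up, and `create()` is assumed to mirror the module counterpart exercised in the tests further down):

```python
from nf_core.subworkflows import SubworkflowCreate

# Hypothetical nf-core/modules clone and subworkflow name.
creator = SubworkflowCreate("./modules-clone", component="bam_stats_samtools", author="@me")
creator.create()  # assumed to scaffold the subworkflow skeleton plus its test stubs
```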
index 0000000000..48d78d41e8 --- /dev/null +++ b/nf_core/subworkflows/info.py @@ -0,0 +1,17 @@ +import logging + +from nf_core.components.info import ComponentInfo + +log = logging.getLogger(__name__) + + +class SubworkflowInfo(ComponentInfo): + def __init__( + self, + pipeline_dir, + component_name, + remote_url=None, + branch=None, + no_pull=False, + ): + super().__init__("subworkflows", pipeline_dir, component_name, remote_url, branch, no_pull) diff --git a/nf_core/subworkflows/install.py b/nf_core/subworkflows/install.py new file mode 100644 index 0000000000..6c5cfb12b2 --- /dev/null +++ b/nf_core/subworkflows/install.py @@ -0,0 +1,26 @@ +from nf_core.components.install import ComponentInstall + + +class SubworkflowInstall(ComponentInstall): + def __init__( + self, + pipeline_dir, + force=False, + prompt=False, + sha=None, + remote_url=None, + branch=None, + no_pull=False, + installed_by=False, + ): + super().__init__( + pipeline_dir, + "subworkflows", + force=force, + prompt=prompt, + sha=sha, + remote_url=remote_url, + branch=branch, + no_pull=no_pull, + installed_by=installed_by, + ) diff --git a/nf_core/subworkflows/list.py b/nf_core/subworkflows/list.py new file mode 100644 index 0000000000..ddf144ee00 --- /dev/null +++ b/nf_core/subworkflows/list.py @@ -0,0 +1,10 @@ +import logging + +from nf_core.components.list import ComponentList + +log = logging.getLogger(__name__) + + +class SubworkflowList(ComponentList): + def __init__(self, pipeline_dir, remote=True, remote_url=None, branch=None, no_pull=False): + super().__init__("subworkflows", pipeline_dir, remote, remote_url, branch, no_pull) diff --git a/nf_core/subworkflows/remove.py b/nf_core/subworkflows/remove.py new file mode 100644 index 0000000000..bfe61a87c1 --- /dev/null +++ b/nf_core/subworkflows/remove.py @@ -0,0 +1,10 @@ +import logging + +from nf_core.components.remove import ComponentRemove + +log = logging.getLogger(__name__) + + +class SubworkflowRemove(ComponentRemove): + def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False): + super().__init__("subworkflows", pipeline_dir, remote_url=remote_url, branch=branch, no_pull=no_pull) diff --git a/nf_core/subworkflows/subworkflows_test.py b/nf_core/subworkflows/subworkflows_test.py new file mode 100644 index 0000000000..d072ff678a --- /dev/null +++ b/nf_core/subworkflows/subworkflows_test.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +""" +The SubworkflowsTest class runs the tests locally +""" + +from nf_core.components.components_test import ComponentsTest + + +class SubworkflowsTest(ComponentsTest): + """ + Class to run module pytests. 
+ """ + + def __init__( + self, + subworkflow_name=None, + no_prompts=False, + pytest_args="", + remote_url=None, + branch=None, + no_pull=False, + ): + super().__init__( + component_type="subworkflows", + component_name=subworkflow_name, + no_prompts=no_prompts, + pytest_args=pytest_args, + remote_url=remote_url, + branch=branch, + no_pull=no_pull, + ) diff --git a/nf_core/subworkflows/test_yml_builder.py b/nf_core/subworkflows/test_yml_builder.py new file mode 100644 index 0000000000..3090b22c05 --- /dev/null +++ b/nf_core/subworkflows/test_yml_builder.py @@ -0,0 +1,398 @@ +""" +The ModulesTestYmlBuilder class handles automatic generation of the modules test.yml file +along with running the tests and creating md5 sums +""" + +from __future__ import print_function + +import errno +import gzip +import hashlib +import io +import logging +import operator +import os +import re +import shlex +import subprocess +import tempfile +from pathlib import Path + +import questionary +import rich +import yaml +from rich.syntax import Syntax + +import nf_core.utils +from nf_core.components.components_command import ComponentCommand +from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_repo import ModulesRepo + +from ..lint_utils import run_prettier_on_file + +log = logging.getLogger(__name__) + + +class SubworkflowTestYmlBuilder(ComponentCommand): + def __init__( + self, + subworkflow=None, + directory=".", + run_tests=False, + test_yml_output_path=None, + force_overwrite=False, + no_prompts=False, + remote_url=None, + branch=None, + ): + super().__init__("subworkflows", directory) + self.dir = directory + self.subworkflow = subworkflow + self.remote_url = remote_url + self.branch = branch + self.run_tests = run_tests + self.test_yml_output_path = test_yml_output_path + self.force_overwrite = force_overwrite + self.no_prompts = no_prompts + self.subworkflow_dir = None + self.subworkflow_test_main = None + self.entry_points = [] + self.tests = [] + self.errors = [] + self.modules_repo = ModulesRepo(remote_url=self.remote_url, branch=self.branch) + self.modules_json = ModulesJson(self.dir) + + def run(self): + """Run build steps""" + if not self.no_prompts: + log.info( + "[yellow]Press enter to use default values [cyan bold](shown in brackets) [yellow]or type your own responses" + ) + self.check_inputs() + self.scrape_workflow_entry_points() + self.build_all_tests() + self.print_test_yml() + if len(self.errors) > 0: + errors = "\n - ".join(self.errors) + raise UserWarning(f"Ran, but found errors:\n - {errors}") + + def check_inputs(self): + """Do more complex checks about supplied flags.""" + # Get the tool name if not specified + if self.subworkflow is None: + self.subworkflow = questionary.autocomplete( + "Subworkflow name:", + choices=self.components_from_repo(self.org), + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + self.subworkflow_dir = os.path.join("subworkflows", self.modules_repo.repo_path, self.subworkflow) + self.subworkflow_test_main = os.path.join( + "tests", "subworkflows", self.modules_repo.repo_path, self.subworkflow, "main.nf" + ) + + # First, sanity check that the module directory exists + if not os.path.isdir(self.subworkflow_dir): + raise UserWarning(f"Cannot find directory '{self.subworkflow_dir}'.") + if not os.path.exists(self.subworkflow_test_main): + raise UserWarning(f"Cannot find module test workflow '{self.subworkflow_test_main}'") + + # Check that we're running tests if no prompts + if not self.run_tests and self.no_prompts: + 
log.debug("Setting run_tests to True as running without prompts") + self.run_tests = True + + # Get the output YAML file / check it does not already exist + while self.test_yml_output_path is None: + default_val = f"tests/subworkflows/{self.modules_repo.repo_path}/{self.subworkflow}/test.yml" + if self.no_prompts: + self.test_yml_output_path = default_val + else: + self.test_yml_output_path = rich.prompt.Prompt.ask( + "[violet]Test YAML output path[/] (- for stdout)", default=default_val + ).strip() + if self.test_yml_output_path == "": + self.test_yml_output_path = None + # Check that the output YAML file does not already exist + if ( + self.test_yml_output_path is not None + and self.test_yml_output_path != "-" + and os.path.exists(self.test_yml_output_path) + and not self.force_overwrite + ): + if rich.prompt.Confirm.ask( + f"[red]File exists! [green]'{self.test_yml_output_path}' [violet]Overwrite?" + ): + self.force_overwrite = True + else: + self.test_yml_output_path = None + if os.path.exists(self.test_yml_output_path) and not self.force_overwrite: + raise UserWarning( + f"Test YAML file already exists! '{self.test_yml_output_path}'. Use '--force' to overwrite." + ) + + def scrape_workflow_entry_points(self): + """Find the test workflow entry points from main.nf""" + log.info(f"Looking for test workflow entry points: '{self.subworkflow_test_main}'") + with open(self.subworkflow_test_main, "r") as fh: + for line in fh: + match = re.match(r"workflow\s+(\S+)\s+{", line) + if match: + self.entry_points.append(match.group(1)) + if len(self.entry_points) == 0: + raise UserWarning("No workflow entry points found in 'self.module_test_main'") + + def build_all_tests(self): + """ + Go over each entry point and build structure + """ + for entry_point in self.entry_points: + ep_test = self.build_single_test(entry_point) + if ep_test: + self.tests.append(ep_test) + + def build_single_test(self, entry_point): + """Given the supplied cli flags, prompt for any that are missing. 
+ + Returns: Test command + """ + ep_test = { + "name": "", + "command": "", + "tags": [], + "files": [], + } + + # Print nice divider line + console = rich.console.Console() + console.print("[black]" + "─" * console.width) + + log.info(f"Building test meta for entry point '{entry_point}'") + + while ep_test["name"] == "": + default_val = f"{self.subworkflow} {entry_point}" + if self.no_prompts: + ep_test["name"] = default_val + else: + ep_test["name"] = rich.prompt.Prompt.ask("[violet]Test name", default=default_val).strip() + + while ep_test["command"] == "": + default_val = f"nextflow run ./tests/subworkflows/{self.modules_repo.repo_path}/{self.subworkflow} -entry {entry_point} -c ./tests/config/nextflow.config" + if self.no_prompts: + ep_test["command"] = default_val + else: + ep_test["command"] = rich.prompt.Prompt.ask("[violet]Test command", default=default_val).strip() + + while len(ep_test["tags"]) == 0: + tag_defaults = ["subworkflows"] + tag_defaults.append("subworkflows/" + self.subworkflow) + tag_defaults += self.parse_module_tags() + if self.no_prompts: + ep_test["tags"] = sorted(tag_defaults) + else: + while len(ep_test["tags"]) == 0: + prompt_tags = rich.prompt.Prompt.ask( + "[violet]Test tags[/] (comma separated)", default=",".join(sorted(tag_defaults)) + ).strip() + ep_test["tags"] = [t.strip() for t in prompt_tags.split(",")] + + ep_test["files"] = self.get_md5_sums(entry_point, ep_test["command"]) + + return ep_test + + def parse_module_tags(self): + """ + Parse the subworkflow test main.nf file to retrieve all imported modules for adding tags. + """ + tags = [] + with open(Path(self.subworkflow_dir, "main.nf"), "r") as fh: + for line in fh: + regex = re.compile( + r"include(?: *{ *)([a-zA-Z\_0-9]*)(?: *as *)?(?:[a-zA-Z\_0-9]*)?(?: *})(?: *from *)(?:'|\")(.*)(?:'|\")" + ) + match = regex.match(line) + if match and len(match.groups()) == 2: + name, link = match.groups() + if link.startswith("../../../"): + name_split = name.lower().split("_") + tags.append("/".join(name_split)) + if len(name_split) > 1: + tags.append(name_split[0]) + elif link.startswith("../"): + tags.append("subworkflows/" + name.lower()) + return list(set(tags)) + + def check_if_empty_file(self, fname): + """Check if the file is empty, or compressed empty""" + if os.path.getsize(fname) == 0: + return True + try: + with open(fname, "rb") as fh: + g_f = gzip.GzipFile(fileobj=fh, mode="rb") + if g_f.read() == b"": + return True + except Exception as e: + # Python 3.8+ + if hasattr(gzip, "BadGzipFile"): + if isinstance(e, gzip.BadGzipFile): + pass + # Python 3.7 + elif isinstance(e, OSError): + pass + else: + raise e + return False + + def _md5(self, fname): + """Generate md5 sum for file""" + hash_md5 = hashlib.md5() + with open(fname, "rb") as f: + for chunk in iter(lambda: f.read(io.DEFAULT_BUFFER_SIZE), b""): + hash_md5.update(chunk) + md5sum = hash_md5.hexdigest() + return md5sum + + def create_test_file_dict(self, results_dir, is_repeat=False): + """Walk through directory and collect md5 sums""" + test_files = [] + for root, _, files in os.walk(results_dir, followlinks=True): + for filename in files: + # Check that the file is not versions.yml + if filename == "versions.yml": + continue + file_path = os.path.join(root, filename) + # add the key here so that it comes first in the dict + test_file = {"path": file_path} + # Check that this isn't an empty file + if self.check_if_empty_file(file_path): + if not is_repeat: + self.errors.append(f"Empty file found! 
'{os.path.basename(file_path)}'") + # Add the md5 anyway, linting should fail later and can be manually removed if needed. + # Originally we skipped this if empty, but then it's too easy to miss the warning. + # Equally, if a file is legitimately empty we don't want to prevent this from working. + file_md5 = self._md5(file_path) + test_file["md5sum"] = file_md5 + # Switch out the results directory path with the expected 'output' directory + test_file["path"] = file_path.replace(results_dir, "output") + test_files.append(test_file) + + test_files = sorted(test_files, key=operator.itemgetter("path")) + + return test_files + + def get_md5_sums(self, entry_point, command, results_dir=None, results_dir_repeat=None): + """ + Recursively go through directories and subdirectories + and generate tuples of (, ) + returns: list of tuples + """ + + run_this_test = False + while results_dir is None: + if self.run_tests or run_this_test: + results_dir, results_dir_repeat = self.run_tests_workflow(command) + else: + results_dir = rich.prompt.Prompt.ask( + "[violet]Test output folder with results[/] (leave blank to run test)" + ) + if results_dir == "": + results_dir = None + run_this_test = True + elif not os.path.isdir(results_dir): + log.error(f"Directory '{results_dir}' does not exist") + results_dir = None + + test_files = self.create_test_file_dict(results_dir=results_dir) + + # If test was repeated, compare the md5 sums + if results_dir_repeat: + test_files_repeat = self.create_test_file_dict(results_dir=results_dir_repeat, is_repeat=True) + + # Compare both test.yml files + for i in range(len(test_files)): + if test_files[i].get("md5sum") and not test_files[i].get("md5sum") == test_files_repeat[i]["md5sum"]: + test_files[i].pop("md5sum") + test_files[i]["contains"] = [ + " # TODO nf-core: file md5sum was variable, please replace this text with a string found in the file instead " + ] + + if len(test_files) == 0: + raise UserWarning(f"Could not find any test result files in '{results_dir}'") + + return test_files + + def run_tests_workflow(self, command): + """Given a test workflow and an entry point, run the test workflow""" + + # The config expects $PROFILE and Nextflow fails if it's not set + if os.environ.get("PROFILE") is None: + os.environ["PROFILE"] = "" + if self.no_prompts: + log.info( + "Setting env var '$PROFILE' to an empty string as not set.\n" + "Tests will run with Docker by default. " + "To use Singularity set 'export PROFILE=singularity' in your shell before running this command." 
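The repeat-run comparison above exists to catch outputs whose checksums are not reproducible (timestamps, logs and the like): any md5 that differs between the two runs is dropped and replaced with a `contains` placeholder. A toy illustration with fabricated sums:

```python
# Fabricated md5 sums from two runs of the same test.
run1 = {"output/bam/test.bam": "d41d8cd9", "output/log/run.log": "1111aaaa"}
run2 = {"output/bam/test.bam": "d41d8cd9", "output/log/run.log": "2222bbbb"}

# Keep only sums that are identical across runs; the rest need a `contains` check.
stable = {path: md5 for path, md5 in run1.items() if run2.get(path) == md5}
print(stable)  # {'output/bam/test.bam': 'd41d8cd9'}
```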
+ ) + else: + question = { + "type": "list", + "name": "profile", + "message": "Choose software profile", + "choices": ["Docker", "Singularity", "Conda"], + } + answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style) + profile = answer["profile"].lower() + if profile in ["singularity", "conda"]: + os.environ["PROFILE"] = profile + log.info(f"Setting env var '$PROFILE' to '{profile}'") + + tmp_dir = tempfile.mkdtemp() + tmp_dir_repeat = tempfile.mkdtemp() + work_dir = tempfile.mkdtemp() + command_repeat = command + f" --outdir {tmp_dir_repeat} -work-dir {work_dir}" + command += f" --outdir {tmp_dir} -work-dir {work_dir}" + + log.info(f"Running '{self.subworkflow}' test with command:\n[violet]{command}") + try: + nfconfig_raw = subprocess.check_output(shlex.split(command)) + log.info("Repeating test ...") + nfconfig_raw = subprocess.check_output(shlex.split(command_repeat)) + + except OSError as e: + if e.errno == errno.ENOENT and command.strip().startswith("nextflow "): + raise AssertionError( + "It looks like Nextflow is not installed. It is required for most nf-core functions." + ) + except subprocess.CalledProcessError as e: + output = rich.markup.escape(e.output.decode()) + raise UserWarning(f"Error running test workflow (exit code {e.returncode})\n[red]{output}") + except Exception as e: + raise UserWarning(f"Error running test workflow: {e}") + else: + log.info("Test workflow finished!") + try: + log.debug(rich.markup.escape(nfconfig_raw)) + except TypeError: + log.debug(rich.markup.escape(nfconfig_raw.decode("utf-8"))) + + return tmp_dir, tmp_dir_repeat + + def print_test_yml(self): + """ + Generate the test yml file. + """ + with tempfile.NamedTemporaryFile(mode="w+") as fh: + yaml.dump(self.tests, fh, Dumper=nf_core.utils.custom_yaml_dumper(), width=10000000) + run_prettier_on_file(fh.name) + fh.seek(0) + prettified_yml = fh.read() + + if self.test_yml_output_path == "-": + console = rich.console.Console() + console.print("\n", Syntax(prettified_yml, "yaml"), "\n") + else: + try: + log.info(f"Writing to '{self.test_yml_output_path}'") + with open(self.test_yml_output_path, "w") as fh: + fh.write(prettified_yml) + except FileNotFoundError as e: + raise UserWarning(f"Could not create test.yml file: '{e}'") diff --git a/nf_core/subworkflows/update.py b/nf_core/subworkflows/update.py new file mode 100644 index 0000000000..3cd4ad59fd --- /dev/null +++ b/nf_core/subworkflows/update.py @@ -0,0 +1,32 @@ +from nf_core.components.update import ComponentUpdate + + +class SubworkflowUpdate(ComponentUpdate): + def __init__( + self, + pipeline_dir, + force=False, + prompt=False, + sha=None, + update_all=False, + show_diff=None, + save_diff_fn=None, + update_deps=False, + remote_url=None, + branch=None, + no_pull=False, + ): + super().__init__( + pipeline_dir, + "subworkflows", + force, + prompt, + sha, + update_all, + show_diff, + save_diff_fn, + update_deps, + remote_url, + branch, + no_pull, + ) diff --git a/nf_core/sync.py b/nf_core/sync.py index a663f1c7a7..332b524cbb 100644 --- a/nf_core/sync.py +++ b/nf_core/sync.py @@ -1,10 +1,10 @@ -#!/usr/bin/env python """Synchronise a pipeline TEMPLATE branch with the template. """ import json import logging import os +import re import shutil import git @@ -33,7 +33,7 @@ class PullRequestException(Exception): pass -class PipelineSync(object): +class PipelineSync: """Object to hold syncing information and results. 
Args: @@ -52,6 +52,7 @@ class PipelineSync(object): required_config_vars (list): List of nextflow variables required to make template pipeline gh_username (str): GitHub username gh_repo (str): GitHub repository name + template_yaml (str): Path to template.yml file for pipeline creation settings. """ def __init__( @@ -61,13 +62,15 @@ def __init__( make_pr=False, gh_repo=None, gh_username=None, + template_yaml_path=None, ): """Initialise syncing object""" self.pipeline_dir = os.path.abspath(pipeline_dir) self.from_branch = from_branch self.original_branch = None - self.merge_branch = f"nf-core-template-merge-{nf_core.__version__}" + self.original_merge_branch = f"nf-core-template-merge-{nf_core.__version__}" + self.merge_branch = self.original_merge_branch self.made_changes = False self.make_pr = make_pr self.gh_pr_returned_data = {} @@ -77,6 +80,12 @@ def __init__( self.gh_repo = gh_repo self.pr_url = "" + self.template_yaml_path = template_yaml_path + # Save contents of template.yml for using outside of git. + if self.template_yaml_path is not None: + with open(self.template_yaml_path, "r") as template_yaml: + self.template_yaml_cache = template_yaml.read() + # Set up the API auth if supplied on the command line self.gh_api = nf_core.utils.gh_api self.gh_api.lazy_init() @@ -204,7 +213,7 @@ def delete_template_branch_files(self): # Delete everything log.info("Deleting all files in 'TEMPLATE' branch") for the_file in os.listdir(self.pipeline_dir): - if the_file == ".git": + if the_file == ".git" or the_file == self.template_yaml_path: continue file_path = os.path.join(self.pipeline_dir, the_file) log.debug(f"Deleting {file_path}") @@ -225,16 +234,30 @@ def make_template_pipeline(self): # Only show error messages from pipeline creation logging.getLogger("nf_core.create").setLevel(logging.ERROR) - nf_core.create.PipelineCreate( - name=self.wf_config["manifest.name"].strip('"').strip("'"), - description=self.wf_config["manifest.description"].strip('"').strip("'"), - version=self.wf_config["manifest.version"].strip('"').strip("'"), - no_git=True, - force=True, - outdir=self.pipeline_dir, - author=self.wf_config["manifest.author"].strip('"').strip("'"), - plain=True, - ).init_pipeline() + # Re-write the template yaml from cache which may have been updated + if self.template_yaml_path and self.template_yaml_cache: + with open(self.template_yaml_path, "w") as template_path: + template_path.write(self.template_yaml_cache) + + try: + nf_core.create.PipelineCreate( + name=self.wf_config["manifest.name"].strip('"').strip("'"), + description=self.wf_config["manifest.description"].strip('"').strip("'"), + version=self.wf_config["manifest.version"].strip('"').strip("'"), + no_git=True, + force=True, + outdir=self.pipeline_dir, + author=self.wf_config["manifest.author"].strip('"').strip("'"), + template_yaml_path=self.template_yaml_path, + plain=True, + ).init_pipeline() + except Exception as err: + if self.template_yaml_path: + # If sync fails, remove template_yaml_path before raising error. + os.remove(self.template_yaml_path) + # Reset to where you were to prevent git getting messed up. 
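The caching above is what lets a `template.yml` survive the sync: its contents are read into memory before the TEMPLATE branch is wiped and written back before the pipeline is regenerated. A stripped-down sketch of that round-trip (the path is hypothetical):

```python
template_yaml_path = "template.yml"  # hypothetical path supplied on the CLI

# 1. Cache the file contents before any files are deleted.
with open(template_yaml_path) as fh:
    template_yaml_cache = fh.read()

# 2. ... every file in the TEMPLATE branch except .git and template.yml is removed ...

# 3. Restore the cached contents so pipeline creation can read them again.
with open(template_yaml_path, "w") as fh:
    fh.write(template_yaml_cache)
```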
+ self.repo.git.reset("--hard") + raise SyncException(f"Failed to rebuild pipeline from template with error:\n{err}") def commit_template_changes(self): """If we have any changes with the new template files, make a git commit""" @@ -270,17 +293,18 @@ def create_merge_base_branch(self): # Check if branch exists already branch_list = [b.name for b in self.repo.branches] if self.merge_branch in branch_list: - original_merge_branch = self.merge_branch - # Try to create new branch with number at the end - # If -2 already exists, increase the number until branch is new - branch_no = 2 - self.merge_branch = f"{original_merge_branch}-{branch_no}" - while self.merge_branch in branch_list: - branch_no += 1 - self.merge_branch = f"{original_merge_branch}-{branch_no}" - log.info( - f"Branch already existed: '{original_merge_branch}', creating branch '{self.merge_branch}' instead." + merge_branch_format = re.compile(rf"{self.original_merge_branch}-(\d+)") + max_branch = max( + [1] + + [ + int(merge_branch_format.match(branch).groups()[0]) + for branch in branch_list + if merge_branch_format.match(branch) + ] ) + new_branch = f"{self.original_merge_branch}-{max_branch+1}" + log.info(f"Branch already existed: '{self.merge_branch}', creating branch '{new_branch}' instead.") + self.merge_branch = new_branch # Create new branch and checkout log.info(f"Checking out merge base branch '{self.merge_branch}'") @@ -423,4 +447,4 @@ def reset_target_dir(self): try: self.repo.git.checkout(self.original_branch) except GitCommandError as e: - raise SyncException(f"Could not reset to original branch `{self.from_branch}`:\n{e}") + raise SyncException(f"Could not reset to original branch `{self.original_branch}`:\n{e}") diff --git a/nf_core/utils.py b/nf_core/utils.py index 1c65e45bb8..55703ab450 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Common utility functions for the nf-core python package. """ @@ -16,6 +15,7 @@ import subprocess import sys import time +from pathlib import Path import git import prompt_toolkit @@ -90,7 +90,7 @@ def rich_force_colors(): return None -class Pipeline(object): +class Pipeline: """Object to hold information about a local pipeline. Args: @@ -248,7 +248,10 @@ def fetch_wf_config(wf_path, cache_config=True): if os.path.isfile(cache_path): log.debug(f"Found a config cache, loading: {cache_path}") with open(cache_path, "r") as fh: - config = json.load(fh) + try: + config = json.load(fh) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{cache_path}' due to error {e}") return config log.debug("No config cache found") @@ -339,7 +342,7 @@ def setup_requests_cachedir(): return config -def wait_cli_function(poll_func, poll_every=20): +def wait_cli_function(poll_func, refresh_per_second=20): """ Display a command-line spinner while calling a function repeatedly. @@ -347,14 +350,14 @@ def wait_cli_function(poll_func, poll_every=20): Arguments: poll_func (function): Function to call - poll_every (int): How many tenths of a second to wait between function calls. Default: 20. + refresh_per_second (int): Refresh this many times per second. Default: 20. Returns: None. Just sits in an infite loop until the function returns True. 
""" try: spinner = Spinner("dots2", "Use ctrl+c to stop waiting and force exit.") - with Live(spinner, refresh_per_second=20): + with Live(spinner, refresh_per_second=refresh_per_second): while True: if poll_func(): break @@ -409,7 +412,7 @@ class GitHub_API_Session(requests_cache.CachedSession): such as automatically setting up GitHub authentication if we can. """ - def __init__(self): + def __init__(self): # pylint: disable=super-init-not-called self.auth_mode = None self.return_ok = [200, 201] self.return_retry = [403] @@ -750,7 +753,7 @@ def increase_indent(self, flow=False, indentless=False): See https://github.com/yaml/pyyaml/issues/234#issuecomment-765894586 """ - return super().increase_indent(flow=flow, indentless=indentless) + return super(CustomDumper, self).increase_indent(flow=flow, indentless=False) # HACK: insert blank lines between top-level objects # inspired by https://stackoverflow.com/a/44284819/3786245 @@ -869,7 +872,6 @@ def get_repo_releases_branches(pipeline, wfs): # Repo is a nf-core pipeline for wf in wfs.remote_workflows: if wf.full_name == pipeline or wf.name == pipeline: - # Set to full name just in case it didn't have the nf-core/ prefix pipeline = wf.full_name @@ -880,7 +882,6 @@ def get_repo_releases_branches(pipeline, wfs): # Arbitrary GitHub repo else: if pipeline.count("/") == 1: - # Looks like a GitHub address - try working with this repo log.debug( f"Pipeline '{pipeline}' not in nf-core, but looks like a GitHub address - fetching releases from API" @@ -924,7 +925,11 @@ def get_repo_releases_branches(pipeline, wfs): return pipeline, wf_releases, wf_branches -def load_tools_config(dir="."): +CONFIG_PATHS = [".nf-core.yml", ".nf-core.yaml"] +DEPRECATED_CONFIG_PATHS = [".nf-core-lint.yml", ".nf-core-lint.yaml"] + + +def load_tools_config(directory="."): """ Parse the nf-core.yml configuration file @@ -936,32 +941,45 @@ def load_tools_config(dir="."): Returns the loaded config dict or False, if the file couldn't be loaded """ tools_config = {} - config_fn = os.path.join(dir, ".nf-core.yml") - # Check if old config file is used - old_config_fn_yml = os.path.join(dir, ".nf-core-lint.yml") - old_config_fn_yaml = os.path.join(dir, ".nf-core-lint.yaml") + config_fn = get_first_available_path(directory, CONFIG_PATHS) - if os.path.isfile(old_config_fn_yml) or os.path.isfile(old_config_fn_yaml): - log.error( - "Deprecated `nf-core-lint.yml` file found! The file will not be loaded. Please rename the file to `.nf-core.yml`." - ) - return {} + if config_fn is None: + depr_path = get_first_available_path(directory, DEPRECATED_CONFIG_PATHS) + if depr_path: + log.error( + f"Deprecated `{depr_path.name}` file found! The file will not be loaded. " + f"Please rename the file to `{CONFIG_PATHS[0]}`." 
+ ) + else: + log.debug(f"No tools config file found: {CONFIG_PATHS[0]}") + return Path(directory, CONFIG_PATHS[0]), {} - if not os.path.isfile(config_fn): - config_fn = os.path.join(dir, ".nf-core.yaml") + with open(config_fn, "r") as fh: + tools_config = yaml.safe_load(fh) - # Load the YAML - try: - with open(config_fn, "r") as fh: - tools_config = yaml.safe_load(fh) - except FileNotFoundError: - log.debug(f"No tools config file found: {config_fn}") - return {} - if tools_config is None: - # If the file is empty - return {} - return tools_config + # If the file is empty + tools_config = tools_config or {} + + log.debug("Using config file: %s", config_fn) + return config_fn, tools_config + + +def determine_base_dir(directory="."): + base_dir = start_dir = Path(directory).absolute() + while not get_first_available_path(base_dir, CONFIG_PATHS) and base_dir != base_dir.parent: + base_dir = base_dir.parent + config_fn = get_first_available_path(base_dir, CONFIG_PATHS) + if config_fn: + break + return directory if base_dir == start_dir else base_dir + + +def get_first_available_path(directory, paths): + for p in paths: + if Path(directory, p).is_file(): + return Path(directory, p) + return None def sort_dictionary(d): diff --git a/requirements-dev.txt b/requirements-dev.txt index 011cbcc3c4..42ce780ce4 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,8 +1,9 @@ black isort myst_parser -pre-commit pytest-cov pytest-datafiles +requests-mock Sphinx -sphinx_rtd_theme +sphinx-rtd-theme +requests_mock diff --git a/requirements.txt b/requirements.txt index 0a4a5fb7e7..b3d1f251bf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,10 +1,12 @@ click +filetype galaxy-tool-util GitPython jinja2 jsonschema>=3.0 markdown>=3.3 packaging +pre-commit prompt_toolkit>=3.0.3 pytest>=7.0.0 pytest-workflow>=1.6.0 diff --git a/setup.py b/setup.py index f24b87001e..38f36810b7 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "2.5dev" +version = "2.8dev" with open("README.md") as f: readme = f.read() @@ -35,6 +35,7 @@ "console_scripts": ["nf-core=nf_core.__main__:run_nf_core"], "refgenie.hooks.post_update": ["nf-core-refgenie=nf_core.refgenie:update_config"], }, + python_requires=">=3.7, <4", install_requires=required, packages=find_packages(exclude=("docs")), include_package_data=True, diff --git a/tests/lint/actions_awsfulltest.py b/tests/lint/actions_awsfulltest.py index 234628227d..30293e31a4 100644 --- a/tests/lint/actions_awsfulltest.py +++ b/tests/lint/actions_awsfulltest.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import os import yaml diff --git a/tests/lint/actions_awstest.py b/tests/lint/actions_awstest.py index 45d7e66c3d..0e19f781aa 100644 --- a/tests/lint/actions_awstest.py +++ b/tests/lint/actions_awstest.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import os import yaml diff --git a/tests/lint/actions_ci.py b/tests/lint/actions_ci.py index 81eb26f22c..d44dbb73b5 100644 --- a/tests/lint/actions_ci.py +++ b/tests/lint/actions_ci.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import os import yaml @@ -28,19 +26,6 @@ def test_actions_ci_fail_wrong_nf(self): assert results["failed"] == ["Minimum pipeline NF version '1.2.3' is not tested in '.github/workflows/ci.yml'"] -def test_actions_ci_fail_wrong_docker_ver(self): - """Lint test: actions_actions_ci - FAIL - wrong pipeline version used for docker commands""" - - self.lint_obj._load() - self.lint_obj.nf_config["process.container"] = "'nfcore/tools:0.4'" - results = 
self.lint_obj.actions_ci() - assert results["failed"] == [ - "CI is not building the correct docker image. Should be: `docker build --no-cache . -t nfcore/tools:0.4`", - "CI is not pulling the correct docker image. Should be: `docker pull nfcore/tools:dev`", - "CI is not tagging docker image correctly. Should be: `docker tag nfcore/tools:dev nfcore/tools:0.4`", - ] - - def test_actions_ci_fail_wrong_trigger(self): """Lint test: actions_actions_ci - FAIL - workflow triggered incorrectly, NF ver not checked at all""" diff --git a/tests/lint/actions_schema_validation.py b/tests/lint/actions_schema_validation.py index d71603a56e..48bb07e4dd 100644 --- a/tests/lint/actions_schema_validation.py +++ b/tests/lint/actions_schema_validation.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import os import yaml diff --git a/tests/lint/files_exist.py b/tests/lint/files_exist.py index 02686e9add..4e5e4d3c2b 100644 --- a/tests/lint/files_exist.py +++ b/tests/lint/files_exist.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import os import nf_core.lint diff --git a/tests/lint/merge_markers.py b/tests/lint/merge_markers.py index 6ea2882417..be0d076757 100644 --- a/tests/lint/merge_markers.py +++ b/tests/lint/merge_markers.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import os import nf_core.lint diff --git a/tests/modules/bump_versions.py b/tests/modules/bump_versions.py index 388b6be424..65569efd51 100644 --- a/tests/modules/bump_versions.py +++ b/tests/modules/bump_versions.py @@ -4,13 +4,13 @@ import pytest import nf_core.modules -from nf_core.modules.module_utils import ModuleException +from nf_core.modules.modules_utils import ModuleException def test_modules_bump_versions_single_module(self): """Test updating a single module""" # Change the bpipe/test version to an older version - main_nf_path = os.path.join(self.nfcore_modules, "modules", "bpipe", "test", "main.nf") + main_nf_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf") with open(main_nf_path, "r") as fh: content = fh.read() new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content) @@ -39,7 +39,7 @@ def test_modules_bump_versions_fail(self): def test_modules_bump_versions_fail_unknown_version(self): """Fail because of an unknown version""" # Change the bpipe/test version to an older version - main_nf_path = os.path.join(self.nfcore_modules, "modules", "bpipe", "test", "main.nf") + main_nf_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf") with open(main_nf_path, "r") as fh: content = fh.read() new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content) diff --git a/tests/modules/create.py b/tests/modules/create.py index 6c1767b138..61a8777b14 100644 --- a/tests/modules/create.py +++ b/tests/modules/create.py @@ -1,43 +1,55 @@ import os import pytest +import requests_mock import nf_core.modules +from tests.utils import mock_api_calls def test_modules_create_succeed(self): """Succeed at creating the TrimGalore! 
module""" - module_create = nf_core.modules.ModuleCreate( - self.pipeline_dir, "trimgalore", "@author", "process_low", True, True, conda_name="trim-galore" - ) - module_create.create() + with requests_mock.Mocker() as mock: + mock_api_calls(mock, "trim-galore", "0.6.7") + module_create = nf_core.modules.ModuleCreate( + self.pipeline_dir, "trimgalore", "@author", "process_single", True, True, conda_name="trim-galore" + ) + module_create.create() assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "trimgalore.nf")) def test_modules_create_fail_exists(self): """Fail at creating the same module twice""" - module_create = nf_core.modules.ModuleCreate( - self.pipeline_dir, "trimgalore", "@author", "process_low", False, False, conda_name="trim-galore" - ) - module_create.create() - with pytest.raises(UserWarning) as excinfo: + with requests_mock.Mocker() as mock: + mock_api_calls(mock, "trim-galore", "0.6.7") + module_create = nf_core.modules.ModuleCreate( + self.pipeline_dir, "trimgalore", "@author", "process_single", False, False, conda_name="trim-galore" + ) module_create.create() + with pytest.raises(UserWarning) as excinfo: + module_create.create() assert "Module file exists already" in str(excinfo.value) def test_modules_create_nfcore_modules(self): """Create a module in nf-core/modules clone""" - module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "fastqc", "@author", "process_low", False, False) - module_create.create() - assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "fastqc", "main.nf")) - assert os.path.exists(os.path.join(self.nfcore_modules, "tests", "modules", "fastqc", "main.nf")) + with requests_mock.Mocker() as mock: + mock_api_calls(mock, "fastqc", "0.11.9") + module_create = nf_core.modules.ModuleCreate( + self.nfcore_modules, "fastqc", "@author", "process_low", False, False + ) + module_create.create() + assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "main.nf")) + assert os.path.exists(os.path.join(self.nfcore_modules, "tests", "modules", "nf-core", "fastqc", "main.nf")) def test_modules_create_nfcore_modules_subtool(self): """Create a tool/subtool module in a nf-core/modules clone""" - module_create = nf_core.modules.ModuleCreate( - self.nfcore_modules, "star/index", "@author", "process_medium", False, False - ) - module_create.create() - assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "star", "index", "main.nf")) - assert os.path.exists(os.path.join(self.nfcore_modules, "tests", "modules", "star", "index", "main.nf")) + with requests_mock.Mocker() as mock: + mock_api_calls(mock, "star", "2.8.10a") + module_create = nf_core.modules.ModuleCreate( + self.nfcore_modules, "star/index", "@author", "process_medium", False, False + ) + module_create.create() + assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "main.nf")) + assert os.path.exists(os.path.join(self.nfcore_modules, "tests", "modules", "nf-core", "star", "index", "main.nf")) diff --git a/tests/modules/create_test_yml.py b/tests/modules/create_test_yml.py index dfb5fb5c6c..d444ff841a 100644 --- a/tests/modules/create_test_yml.py +++ b/tests/modules/create_test_yml.py @@ -1,17 +1,18 @@ import os +from pathlib import Path import pytest import nf_core.modules -from ..utils import with_temporary_folder +from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL, with_temporary_folder @with_temporary_folder def test_modules_custom_yml_dumper(self, out_dir): """Try to create 
a yml file with the custom yml dumper""" yml_output_path = os.path.join(out_dir, "test.yml") - meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", False, "./", False, True) + meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", self.pipeline_dir, False, "./", False, True) meta_builder.test_yml_output_path = yml_output_path meta_builder.tests = [{"testname": "myname"}] meta_builder.print_test_yml() @@ -20,8 +21,8 @@ def test_modules_custom_yml_dumper(self, out_dir): @with_temporary_folder def test_modules_test_file_dict(self, test_file_dir): - """Creat dict of test files and create md5 sums""" - meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", False, "./", False, True) + """Create dict of test files and create md5 sums""" + meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", self.pipeline_dir, False, "./", False, True) with open(os.path.join(test_file_dir, "test_file.txt"), "w") as fh: fh.write("this line is just for testing") test_files = meta_builder.create_test_file_dict(test_file_dir) @@ -32,19 +33,19 @@ def test_modules_test_file_dict(self, test_file_dir): @with_temporary_folder def test_modules_create_test_yml_get_md5(self, test_file_dir): """Get md5 sums from a dummy output""" - meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", False, "./", False, True) + meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", self.pipeline_dir, False, "./", False, True) with open(os.path.join(test_file_dir, "test_file.txt"), "w") as fh: fh.write("this line is just for testing") - test_files = meta_builder.get_md5_sums( - entry_point="dummy", command="dummy", results_dir=test_file_dir, results_dir_repeat=test_file_dir - ) + test_files = meta_builder.get_md5_sums(command="dummy", results_dir=test_file_dir, results_dir_repeat=test_file_dir) assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" def test_modules_create_test_yml_entry_points(self): """Test extracting test entry points from a main.nf file""" - meta_builder = nf_core.modules.ModulesTestYmlBuilder("bpipe/test", False, "./", False, True) - meta_builder.module_test_main = os.path.join(self.nfcore_modules, "tests", "modules", "bpipe", "test", "main.nf") + meta_builder = nf_core.modules.ModulesTestYmlBuilder("bpipe/test", self.pipeline_dir, False, "./", False, True) + meta_builder.module_test_main = os.path.join( + self.nfcore_modules, "tests", "modules", "nf-core", "bpipe", "test", "main.nf" + ) meta_builder.scrape_workflow_entry_points() assert meta_builder.entry_points[0] == "test_bpipe_test" @@ -53,7 +54,7 @@ def test_modules_create_test_yml_check_inputs(self): """Test the check_inputs() function - raise UserWarning because test.yml exists""" cwd = os.getcwd() os.chdir(self.nfcore_modules) - meta_builder = nf_core.modules.ModulesTestYmlBuilder("bpipe/test", False, "./", False, True) + meta_builder = nf_core.modules.ModulesTestYmlBuilder("bpipe/test", ".", False, "./", False, True) meta_builder.module_test_main = os.path.join(self.nfcore_modules, "tests", "modules", "bpipe", "test", "main.nf") with pytest.raises(UserWarning) as excinfo: meta_builder.check_inputs() diff --git a/tests/modules/info.py b/tests/modules/info.py new file mode 100644 index 0000000000..2dbd48b240 --- /dev/null +++ b/tests/modules/info.py @@ -0,0 +1,63 @@ +from rich.console import Console + +import nf_core.modules + +from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL + + +def test_modules_info_remote(self): + """Test getting info about a remote module""" + mods_info = 
nf_core.modules.ModuleInfo(self.pipeline_dir, "fastqc") + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Module: fastqc" in output + assert "Inputs" in output + assert "Outputs" in output + + +def test_modules_info_remote_gitlab(self): + """Test getting info about a module in the remote gitlab repo""" + mods_info = nf_core.modules.ModuleInfo( + self.pipeline_dir, "fastqc", remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH + ) + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Module: fastqc" in output + assert "Inputs" in output + assert "Outputs" in output + assert "--git-remote" in output + + +def test_modules_info_local(self): + """Test getting info about a locally installed module""" + self.mods_install.install("trimgalore") + mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "trimgalore") + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Module: trimgalore" in output + assert "Inputs" in output + assert "Outputs" in output + assert "Location" in output + + +def test_modules_info_in_modules_repo(self): + """Test getting info about a module in the modules repo""" + mods_info = nf_core.modules.ModuleInfo(self.nfcore_modules, "fastqc") + mods_info.local = True + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Module: fastqc" in output + assert "Inputs" in output + assert "Outputs" in output diff --git a/tests/modules/install.py b/tests/modules/install.py index d2b13c2aee..d01459f142 100644 --- a/tests/modules/install.py +++ b/tests/modules/install.py @@ -22,7 +22,8 @@ def test_modules_install_nopipeline(self): @with_temporary_folder def test_modules_install_emptypipeline(self, tmpdir): """Test installing a module - empty dir given""" - self.mods_install.dir = tmpdir + os.mkdir(os.path.join(tmpdir, "nf-core-pipe")) + self.mods_install.dir = os.path.join(tmpdir, "nf-core-pipe") with pytest.raises(UserWarning) as excinfo: self.mods_install.install("foo") assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) @@ -36,7 +37,7 @@ def test_modules_install_nomodule(self): def test_modules_install_trimgalore(self): """Test installing a module - TrimGalore!""" assert self.mods_install.install("trimgalore") is not False - module_path = os.path.join(self.mods_install.dir, "modules", "nf-core", "modules", "trimgalore") + module_path = os.path.join(self.mods_install.dir, "modules", "nf-core", "trimgalore") assert os.path.exists(module_path) @@ -66,4 +67,19 @@ def test_modules_install_different_branch_succeed(self): # Verify that the branch entry was added correctly modules_json = ModulesJson(self.pipeline_dir) - assert modules_json.get_module_branch("fastp", GITLAB_REPO) == GITLAB_BRANCH_TEST_BRANCH + assert ( + modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) + == GITLAB_BRANCH_TEST_BRANCH + ) + + +def test_modules_install_tracking(self): + """Test installing a module and finding 'modules' in the installed_by section of modules.json""" + self.mods_install.install("trimgalore") + + # Verify that the installed_by entry was added correctly + modules_json = 
ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["trimgalore"][ + "installed_by" + ] == ["modules"] diff --git a/tests/modules/lint.py b/tests/modules/lint.py index d5793dfd05..476481a109 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -1,11 +1,26 @@ import os +from pathlib import Path import pytest import nf_core.modules -from ..utils import GITLAB_URL -from .patch import BISMARK_ALIGN, PATCH_BRANCH, setup_patch +from ..utils import GITLAB_URL, set_wd +from .patch import BISMARK_ALIGN, CORRECT_SHA, PATCH_BRANCH, REPO_NAME, modify_main_nf + + +def setup_patch(pipeline_dir, modify_module): + install_obj = nf_core.modules.ModuleInstall( + pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH, sha=CORRECT_SHA + ) + + # Install the module + install_obj.install(BISMARK_ALIGN) + + if modify_module: + # Modify the module + module_path = Path(pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + modify_main_nf(module_path / "main.nf") def test_modules_lint_trimgalore(self): @@ -20,9 +35,9 @@ def test_modules_lint_trimgalore(self): def test_modules_lint_empty(self): """Test linting a pipeline with no modules installed""" - self.mods_remove.remove("fastqc") - self.mods_remove.remove("multiqc") - self.mods_remove.remove("custom/dumpsoftwareversions") + self.mods_remove.remove("fastqc", force=True) + self.mods_remove.remove("multiqc", force=True) + self.mods_remove.remove("custom/dumpsoftwareversions", force=True) with pytest.raises(LookupError): nf_core.modules.ModuleLint(dir=self.pipeline_dir) @@ -38,6 +53,9 @@ def test_modules_lint_new_modules(self): def test_modules_lint_no_gitlab(self): """Test linting a pipeline with no modules installed""" + self.mods_remove.remove("fastqc", force=True) + self.mods_remove.remove("multiqc", force=True) + self.mods_remove.remove("custom/dumpsoftwareversions", force=True) with pytest.raises(LookupError): nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) @@ -53,6 +71,17 @@ def test_modules_lint_gitlab_modules(self): assert len(module_lint.warned) >= 0 +def test_modules_lint_multiple_remotes(self): + """Lint modules from a different remote""" + self.mods_install.install("fastqc") + self.mods_install_gitlab.install("multiqc") + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint.lint(print_results=False, all_modules=True) + assert len(module_lint.failed) == 0 + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_patched_modules(self): """ Test creating a patch file and applying it to a new version of the files """ @@ -65,11 +94,14 @@ def test_modules_lint_patched_modules(self): # temporarily change the working directory to the pipeline directory # to avoid error from try_apply_patch() during linting - wd_old = os.getcwd() - os.chdir(self.pipeline_dir) - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) - module_lint.lint(print_results=False, all_modules=True) - os.chdir(wd_old) + with set_wd(self.pipeline_dir): + module_lint = nf_core.modules.ModuleLint( + dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=PATCH_BRANCH, hide_progress=True + ) + module_lint.lint( + print_results=False, + all_modules=True, + ) assert len(module_lint.failed) == 0 assert len(module_lint.passed) > 0 diff --git a/tests/modules/list.py b/tests/modules/list.py index 
1d18b6da72..d92cd58dd5 100644 --- a/tests/modules/list.py +++ b/tests/modules/list.py @@ -2,13 +2,13 @@ import nf_core.modules -from ..utils import GITLAB_URL +from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL def test_modules_list_remote(self): """Test listing available modules""" mods_list = nf_core.modules.ModuleList(None, remote=True) - listed_mods = mods_list.list_modules() + listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) output = console.export_text() @@ -17,8 +17,8 @@ def test_modules_list_remote(self): def test_modules_list_remote_gitlab(self): """Test listing the modules in the remote gitlab repo""" - mods_list = nf_core.modules.ModuleList(None, remote=True, remote_url=GITLAB_URL) - listed_mods = mods_list.list_modules() + mods_list = nf_core.modules.ModuleList(None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) + listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) output = console.export_text() @@ -28,7 +28,7 @@ def test_modules_list_remote_gitlab(self): def test_modules_list_pipeline(self): """Test listing locally installed modules""" mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_modules() + listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) output = console.export_text() @@ -40,7 +40,7 @@ def test_modules_install_and_list_pipeline(self): """Test listing locally installed modules""" self.mods_install.install("trimgalore") mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_modules() + listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) output = console.export_text() @@ -51,7 +51,7 @@ def test_modules_install_gitlab_and_list_pipeline(self): """Test listing locally installed modules""" self.mods_install_gitlab.install("fastqc") mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_modules() + listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) output = console.export_text() diff --git a/tests/modules/module_test.py b/tests/modules/module_test.py deleted file mode 100644 index ef955d0061..0000000000 --- a/tests/modules/module_test.py +++ /dev/null @@ -1,45 +0,0 @@ -"""Test the 'modules test' command which runs module pytests.""" -import os -import shutil -from pathlib import Path - -import pytest - -import nf_core.modules - - -def test_modules_test_check_inputs(self): - """Test the check_inputs() function - raise UserWarning because module doesn't exist""" - cwd = os.getcwd() - os.chdir(self.nfcore_modules) - meta_builder = nf_core.modules.ModulesTest("none", True, "") - with pytest.raises(UserWarning) as excinfo: - meta_builder._check_inputs() - os.chdir(cwd) - assert "Cannot find directory" in str(excinfo.value) - - -def test_modules_test_no_name_no_prompts(self): - """Test the check_inputs() function - raise UserWarning prompts are deactivated and module name is not provided.""" - cwd = os.getcwd() - os.chdir(self.nfcore_modules) - meta_builder = nf_core.modules.ModulesTest(None, True, "") - with pytest.raises(UserWarning) as excinfo: - meta_builder._check_inputs() - os.chdir(cwd) - assert "Tool name not provided and prompts deactivated." 
in str(excinfo.value) - - -def test_modules_test_no_installed_modules(self): - """Test the check_inputs() function - raise UserWarning because installed modules were not found""" - cwd = os.getcwd() - os.chdir(self.nfcore_modules) - module_dir = Path(self.nfcore_modules, "modules") - shutil.rmtree(module_dir) - module_dir.mkdir() - meta_builder = nf_core.modules.ModulesTest(None, False, "") - meta_builder.repo_type = "modules" - with pytest.raises(UserWarning) as excinfo: - meta_builder._check_inputs() - os.chdir(cwd) - assert "No installed modules were found" in str(excinfo.value) diff --git a/tests/modules/modules_json.py b/tests/modules/modules_json.py index 2412f9bd2d..63ee4e743d 100644 --- a/tests/modules/modules_json.py +++ b/tests/modules/modules_json.py @@ -18,7 +18,10 @@ def test_get_modules_json(self): """Checks that the get_modules_json function returns the correct result""" mod_json_path = os.path.join(self.pipeline_dir, "modules.json") with open(mod_json_path, "r") as fh: - mod_json_sb = json.load(fh) + try: + mod_json_sb = json.load(fh) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") mod_json_obj = ModulesJson(self.pipeline_dir) mod_json = mod_json_obj.get_modules_json() @@ -32,12 +35,16 @@ def test_mod_json_update(self): mod_json_obj = ModulesJson(self.pipeline_dir) # Update the modules.json file mod_repo_obj = ModulesRepo() - mod_json_obj.update(mod_repo_obj, "MODULE_NAME", "GIT_SHA", False) + mod_json_obj.update("modules", mod_repo_obj, "MODULE_NAME", "GIT_SHA", "modules", write_file=False) mod_json = mod_json_obj.get_modules_json() - assert "MODULE_NAME" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"] - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["MODULE_NAME"] - assert "GIT_SHA" == mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["MODULE_NAME"]["git_sha"] - assert NF_CORE_MODULES_DEFAULT_BRANCH == mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["MODULE_NAME"]["branch"] + assert "MODULE_NAME" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"] + assert "GIT_SHA" == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["git_sha"] + assert ( + NF_CORE_MODULES_DEFAULT_BRANCH + == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["branch"] + ) + assert "modules" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["installed_by"] def test_mod_json_create(self): @@ -59,9 +66,9 @@ def test_mod_json_create(self): mods = ["fastqc", "multiqc"] for mod in mods: - assert mod in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"] - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"][mod] - assert "branch" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"][mod] + assert mod in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"][mod] + assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"][mod] def modify_main_nf(path): @@ -79,11 +86,11 @@ def test_mod_json_create_with_patch(self): mod_json_path = Path(self.pipeline_dir, "modules.json") # Modify the module - module_path = Path(self.pipeline_dir, "modules", "nf-core", "modules", "fastqc") + module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") modify_main_nf(module_path / 
"main.nf") # Try creating a patch file - patch_obj = ModulePatch(self.pipeline_dir) + patch_obj = ModulePatch(self.pipeline_dir, NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_DEFAULT_BRANCH) patch_obj.patch("fastqc") # Remove the existing modules.json file @@ -100,9 +107,9 @@ def test_mod_json_create_with_patch(self): mod_json = mod_json_obj.get_modules_json() # Check that fastqc is in the file - assert "fastqc" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"] - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["fastqc"] - assert "branch" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["fastqc"] + assert "fastqc" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] + assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] # Check that fastqc/main.nf maintains the changes with open(module_path / "main.nf", "r") as fh: @@ -151,7 +158,7 @@ def test_mod_json_up_to_date_reinstall_fails(self): mod_json_obj = ModulesJson(self.pipeline_dir) # Update the fastqc module entry to an invalid git_sha - mod_json_obj.update(ModulesRepo(), "fastqc", "INVALID_GIT_SHA", True) + mod_json_obj.update("modules", ModulesRepo(), "fastqc", "INVALID_GIT_SHA", "modules", write_file=True) # Remove the fastqc module fastqc_path = os.path.join(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") @@ -162,14 +169,14 @@ def test_mod_json_up_to_date_reinstall_fails(self): mod_json = mod_json_obj.get_modules_json() # Check that the module has been removed from the modules.json - assert "fastqc" not in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"] + assert "fastqc" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] def test_mod_json_repo_present(self): """Tests the repo_present function""" mod_json_obj = ModulesJson(self.pipeline_dir) - assert mod_json_obj.repo_present(NF_CORE_MODULES_NAME) is True + assert mod_json_obj.repo_present(NF_CORE_MODULES_REMOTE) is True assert mod_json_obj.repo_present("INVALID_REPO") is False @@ -177,10 +184,10 @@ def test_mod_json_module_present(self): """Tests the module_present function""" mod_json_obj = ModulesJson(self.pipeline_dir) - assert mod_json_obj.module_present("fastqc", NF_CORE_MODULES_NAME) is True - assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_NAME) is False - assert mod_json_obj.module_present("fastqc", "INVALID_REPO") is False - assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO") is False + assert mod_json_obj.module_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is True + assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is False + assert mod_json_obj.module_present("fastqc", "INVALID_REPO", "INVALID_DIR") is False + assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR") is False def test_mod_json_get_module_version(self): @@ -188,17 +195,10 @@ def test_mod_json_get_module_version(self): mod_json_obj = ModulesJson(self.pipeline_dir) mod_json = mod_json_obj.get_modules_json() assert ( - mod_json_obj.get_module_version("fastqc", NF_CORE_MODULES_NAME) - == mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["fastqc"]["git_sha"] + mod_json_obj.get_module_version("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) + == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"]["git_sha"] ) - assert 
mod_json_obj.get_module_version("INVALID_MODULE", NF_CORE_MODULES_NAME) is None - - -def test_mod_json_get_git_url(self): - """Tests the get_git_url function""" - mod_json_obj = ModulesJson(self.pipeline_dir) - assert mod_json_obj.get_git_url(NF_CORE_MODULES_NAME) == NF_CORE_MODULES_REMOTE - assert mod_json_obj.get_git_url("INVALID_REPO") is None + assert mod_json_obj.get_module_version("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is None def test_mod_json_dump(self): @@ -215,16 +215,20 @@ def test_mod_json_dump(self): # Check that the dump function writes the correct content with open(mod_json_path, "r") as f: - mod_json_new = json.load(f) + try: + mod_json_new = json.load(f) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") assert mod_json == mod_json_new def test_mod_json_with_empty_modules_value(self): # Load module.json and remove the modules entry mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json_obj.create() # Create modules.json explicitly to get correct module sha mod_json_orig = mod_json_obj.get_modules_json() mod_json = copy.deepcopy(mod_json_orig) - mod_json["repos"]["nf-core/modules"]["modules"] = "" + mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"] = "" # save the altered module.json and load it again to check if it will fix itself mod_json_obj.modules_json = mod_json mod_json_obj.dump() @@ -237,9 +241,10 @@ def test_mod_json_with_missing_modules_entry(self): # Load module.json and remove the modules entry mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json_obj.create() # Create modules.json explicitly to get correct module sha mod_json_orig = mod_json_obj.get_modules_json() mod_json = copy.deepcopy(mod_json_orig) - mod_json["repos"]["nf-core/modules"].pop("modules") + mod_json["repos"][NF_CORE_MODULES_REMOTE].pop("modules") # save the altered module.json and load it again to check if it will fix itself mod_json_obj.modules_json = mod_json mod_json_obj.dump() diff --git a/tests/modules/modules_test.py b/tests/modules/modules_test.py new file mode 100644 index 0000000000..eb207fa28b --- /dev/null +++ b/tests/modules/modules_test.py @@ -0,0 +1,41 @@ +"""Test the 'modules test' command which runs module pytests.""" +import os +import shutil +from pathlib import Path + +import pytest + +import nf_core.modules + +from ..utils import set_wd + + +def test_modules_test_check_inputs(self): + """Test the check_inputs() function - raise UserWarning because module doesn't exist""" + with set_wd(self.nfcore_modules): + meta_builder = nf_core.modules.ModulesTest("none", True, "") + with pytest.raises(UserWarning) as excinfo: + meta_builder._check_inputs() + assert "Cannot find directory" in str(excinfo.value) + + +def test_modules_test_no_name_no_prompts(self): + """Test the check_inputs() function - raise UserWarning because prompts are deactivated and module name is not provided.""" + with set_wd(self.nfcore_modules): + meta_builder = nf_core.modules.ModulesTest(None, True, "") + with pytest.raises(UserWarning) as excinfo: + meta_builder._check_inputs() + assert "Module name not provided and prompts deactivated." 
in str(excinfo.value) + + +def test_modules_test_no_installed_modules(self): + """Test the check_inputs() function - raise UserWarning because installed modules were not found""" + with set_wd(self.nfcore_modules): + module_dir = Path(self.nfcore_modules, "modules") + shutil.rmtree(module_dir) + module_dir.mkdir() + meta_builder = nf_core.modules.ModulesTest(None, False, "") + meta_builder.repo_type = "modules" + with pytest.raises(UserWarning) as excinfo: + meta_builder._check_inputs() + assert "No installed modules were found" in str(excinfo.value) diff --git a/tests/modules/patch.py b/tests/modules/patch.py index cb87c85d7a..09b892e2c8 100644 --- a/tests/modules/patch.py +++ b/tests/modules/patch.py @@ -1,12 +1,11 @@ -import json import os import tempfile from pathlib import Path import pytest +import nf_core.components.components_command import nf_core.modules -import nf_core.modules.modules_command from ..utils import GITLAB_URL @@ -17,17 +16,19 @@ testing if the update commands works correctly with patch files """ -ORG_SHA = "22c7c12dc21e2f633c00862c1291ceda0a3b7066" -SUCCEED_SHA = "f7d3a3894f67db2e2f3f8c9ba76f8e33356be8e0" -FAIL_SHA = "b4596169055700533865cefb7542108418f53100" +ORG_SHA = "775fcd090fb776a0be695044f8ab1af8896c8452" +CORRECT_SHA = "335cd32405568ca3b6d4c05ab1e8a98c21e18a4d" +SUCCEED_SHA = "f1566140c752e9c68fffc189fbe8cb9ee942b3ca" +FAIL_SHA = "1fc8b0f953d915d66ee40d28bc337ff0998d05bd" BISMARK_ALIGN = "bismark/align" -REPO_NAME = "nf-core/modules-test" +REPO_NAME = "nf-core" PATCH_BRANCH = "patch-tester" +REPO_URL = "https://gitlab.com/nf-core/modules-test.git" def setup_patch(pipeline_dir, modify_module): install_obj = nf_core.modules.ModuleInstall( - pipeline_dir, prompt=False, force=True, remote_url=GITLAB_URL, branch=PATCH_BRANCH, sha=ORG_SHA + pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH, sha=ORG_SHA ) # Install the module @@ -47,8 +48,12 @@ def modify_main_nf(path): # - tuple val(meta), path(reads) # - path index # + tuple val(meta), path(reads), path(index) - lines[10] = " tuple val(meta), path(reads), path(index)\n" - lines.pop(11) + for line_index in range(len(lines)): + if lines[line_index] == " tuple val(meta), path(reads)\n": + lines[line_index] = " tuple val(meta), path(reads), path(index)\n" + elif lines[line_index] == " path index\n": + to_pop = line_index + lines.pop(to_pop) with open(path, "w") as fh: fh.writelines(lines) @@ -69,7 +74,7 @@ def test_create_patch_no_change(self): # Check the 'modules.json' contains no patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_NAME) is None + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None def test_create_patch_change(self): @@ -88,7 +93,7 @@ def test_create_patch_change(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -121,7 +126,7 @@ def test_create_patch_try_apply_successful(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_NAME) == Path( + assert 
modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -130,7 +135,7 @@ def test_create_patch_try_apply_successful(self): ) # Install the new files install_dir = Path(tempfile.mkdtemp()) - update_obj.install_module_files(BISMARK_ALIGN, SUCCEED_SHA, update_obj.modules_repo, install_dir) + update_obj.install_component_files(BISMARK_ALIGN, SUCCEED_SHA, update_obj.modules_repo, install_dir) # Try applying the patch module_install_dir = install_dir / BISMARK_ALIGN @@ -138,14 +143,14 @@ def test_create_patch_try_apply_successful(self): assert update_obj.try_apply_patch(BISMARK_ALIGN, REPO_NAME, patch_relpath, module_path, module_install_dir) is True # Move the files from the temporary directory - update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, module_path, install_dir, REPO_NAME, SUCCEED_SHA) + update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) # Check that a patch file with the correct name has been created assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -187,7 +192,7 @@ def test_create_patch_try_apply_failed(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -196,7 +201,7 @@ def test_create_patch_try_apply_failed(self): ) # Install the new files install_dir = Path(tempfile.mkdtemp()) - update_obj.install_module_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, install_dir) + update_obj.install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, install_dir) # Try applying the patch module_install_dir = install_dir / BISMARK_ALIGN @@ -224,24 +229,29 @@ def test_create_patch_update_success(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) # Update the module update_obj = nf_core.modules.ModuleUpdate( - self.pipeline_dir, sha=SUCCEED_SHA, show_diff=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH + self.pipeline_dir, + sha=SUCCEED_SHA, + show_diff=False, + update_deps=True, + remote_url=GITLAB_URL, + branch=PATCH_BRANCH, ) - update_obj.update(BISMARK_ALIGN) + assert update_obj.update(BISMARK_ALIGN) # Check that a patch file with the correct name has been created assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ), 
modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_NAME) + ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) # Check that the correct lines are in the patch file with open(module_path / patch_fn, "r") as fh: @@ -280,7 +290,7 @@ def test_create_patch_update_fail(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -289,15 +299,15 @@ def test_create_patch_update_fail(self): patch_contents = fh.read() update_obj = nf_core.modules.ModuleUpdate( - self.pipeline_dir, sha=FAIL_SHA, show_diff=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH + self.pipeline_dir, sha=FAIL_SHA, show_diff=False, update_deps=True, remote_url=GITLAB_URL, branch=PATCH_BRANCH ) update_obj.update(BISMARK_ALIGN) # Check that the installed files have not been affected by the attempted patch temp_dir = Path(tempfile.mkdtemp()) - nf_core.modules.modules_command.ModuleCommand(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH).install_module_files( - BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, temp_dir - ) + nf_core.components.components_command.ComponentCommand( + "modules", self.pipeline_dir, GITLAB_URL, PATCH_BRANCH + ).install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, temp_dir) temp_module_dir = temp_dir / BISMARK_ALIGN for file in os.listdir(temp_module_dir): diff --git a/tests/modules/remove.py b/tests/modules/remove.py index 5e62863bef..b343a02da7 100644 --- a/tests/modules/remove.py +++ b/tests/modules/remove.py @@ -14,10 +14,9 @@ def test_modules_remove_trimgalore_uninstalled(self): assert self.mods_remove.remove("trimgalore") is False -# TODO Remove comments once external repository to have same structure as nf-core/modules -# def test_modules_remove_trimgalore_alternative_source(self): -# """Test removing TrimGalore! 
module after installing it from an alternative source""" -# self.mods_install_alt.install("trimgalore") -# module_path = os.path.join(self.mods_install.dir, "modules", "external", "trimgalore") -# assert self.mods_remove_alt.remove("trimgalore") -# assert os.path.exists(module_path) is False +def test_modules_remove_multiqc_from_gitlab(self): + """Test removing multiqc module after installing it from an alternative source""" + self.mods_install_gitlab.install("multiqc") + module_path = os.path.join(self.mods_install_gitlab.dir, "modules", "nf-core", "multiqc") + assert self.mods_remove_gitlab.remove("multiqc", force=True) + assert os.path.exists(module_path) is False diff --git a/tests/modules/update.py b/tests/modules/update.py index f49d2be257..fcfd92fc39 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -2,13 +2,17 @@ import os import shutil import tempfile +from pathlib import Path +from unittest import mock +import questionary import yaml import nf_core.utils from nf_core.modules.install import ModuleInstall from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME +from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE +from nf_core.modules.patch import ModulePatch from nf_core.modules.update import ModuleUpdate from ..utils import ( @@ -18,6 +22,7 @@ GITLAB_DEFAULT_BRANCH, GITLAB_REPO, GITLAB_URL, + OLD_TRIMGALORE_BRANCH, OLD_TRIMGALORE_SHA, ) @@ -39,13 +44,15 @@ def test_install_and_update(self): def test_install_at_hash_and_update(self): """Installs an old version of a module in the pipeline and updates it""" - self.mods_install_old.install("trimgalore") - update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False) + assert self.mods_install_old.install("trimgalore") + update_obj = ModuleUpdate( + self.pipeline_dir, show_diff=False, update_deps=True, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH + ) # Copy the module files and check that they are affected by the update tmpdir = tempfile.mkdtemp() trimgalore_tmpdir = os.path.join(tmpdir, "trimgalore") - trimgalore_path = os.path.join(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "trimgalore") + trimgalore_path = os.path.join(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") shutil.copytree(trimgalore_path, trimgalore_tmpdir) assert update_obj.update("trimgalore") is True @@ -55,8 +62,8 @@ def test_install_at_hash_and_update(self): mod_json_obj = ModulesJson(self.pipeline_dir) mod_json = mod_json_obj.get_modules_json() # Get the up-to-date git_sha for the module from the ModulesRepo object - correct_git_sha = update_obj.modules_repo.get_latest_module_version("trimgalore") - current_git_sha = mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["trimgalore"]["git_sha"] + correct_git_sha = update_obj.modules_repo.get_latest_component_version("trimgalore", "modules") + current_git_sha = mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] assert correct_git_sha == current_git_sha @@ -64,12 +71,18 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self): """Installs an old version of a module in the pipeline and updates it""" self.mods_install_old.install("trimgalore") patch_path = os.path.join(self.pipeline_dir, "trimgalore.patch") - update_obj = ModuleUpdate(self.pipeline_dir, save_diff_fn=patch_path) + update_obj = ModuleUpdate( + self.pipeline_dir, + save_diff_fn=patch_path, + sha=OLD_TRIMGALORE_SHA, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, + ) # Copy the 
module files and check that they are affected by the update tmpdir = tempfile.mkdtemp() trimgalore_tmpdir = os.path.join(tmpdir, "trimgalore") - trimgalore_path = os.path.join(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "trimgalore") + trimgalore_path = os.path.join(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") shutil.copytree(trimgalore_path, trimgalore_tmpdir) assert update_obj.update("trimgalore") is True @@ -88,33 +101,35 @@ def test_update_all(self): mod_json_obj = ModulesJson(self.pipeline_dir) mod_json = mod_json_obj.get_modules_json() # Loop through all modules and check that they are updated (according to the modules.json file) - for mod in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]: - correct_git_sha = list(update_obj.modules_repo.get_module_git_log(mod, depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_NAME]["modules"][mod]["git_sha"] + for mod in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]: + correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] assert correct_git_sha == current_git_sha def test_update_with_config_fixed_version(self): """Try updating when there are entries in the .nf-core.yml""" # Install trimgalore at the latest version - self.mods_install.install("trimgalore") + assert self.mods_install_trimgalore.install("trimgalore") # Fix the trimgalore version in the .nf-core.yml to an old version - update_config = {"nf-core/modules": {"trimgalore": OLD_TRIMGALORE_SHA}} - tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": OLD_TRIMGALORE_SHA}}} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) tools_config["update"] = update_config - with open(os.path.join(self.pipeline_dir, ".nf-core.yml"), "w") as f: + with open(os.path.join(self.pipeline_dir, config_fn), "w") as f: yaml.dump(tools_config, f) # Update all modules in the pipeline - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) + update_obj = ModuleUpdate( + self.pipeline_dir, update_all=True, show_diff=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH + ) assert update_obj.update() is True # Check that the git sha for trimgalore is correctly downgraded mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "trimgalore" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"] - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["trimgalore"] - assert mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA + assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO] + assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] + assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA def test_update_with_config_dont_update(self): @@ -123,43 +138,51 @@ def test_update_with_config_dont_update(self): self.mods_install_old.install("trimgalore") # Set the trimgalore field to no update in the .nf-core.yml - update_config = {"nf-core/modules": {"trimgalore": False}} - tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": False}}} + config_fn, tools_config = 
nf_core.utils.load_tools_config(self.pipeline_dir) tools_config["update"] = update_config - with open(os.path.join(self.pipeline_dir, ".nf-core.yml"), "w") as f: + with open(os.path.join(self.pipeline_dir, config_fn), "w") as f: yaml.dump(tools_config, f) # Update all modules in the pipeline - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) + update_obj = ModuleUpdate( + self.pipeline_dir, + update_all=True, + show_diff=False, + sha=OLD_TRIMGALORE_SHA, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, + ) assert update_obj.update() is True # Check that the git sha for trimgalore is correctly downgraded mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "trimgalore" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"] - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["trimgalore"] - assert mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA + assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO] + assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] + assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA def test_update_with_config_fix_all(self): """Fix the version of all nf-core modules""" - self.mods_install.install("trimgalore") + self.mods_install_trimgalore.install("trimgalore") # Fix the version of all nf-core modules in the .nf-core.yml to an old version - update_config = {"nf-core/modules": OLD_TRIMGALORE_SHA} - tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + update_config = {GITLAB_URL: OLD_TRIMGALORE_SHA} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) tools_config["update"] = update_config - with open(os.path.join(self.pipeline_dir, ".nf-core.yml"), "w") as f: + with open(os.path.join(self.pipeline_dir, config_fn), "w") as f: yaml.dump(tools_config, f) # Update all modules in the pipeline - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) + update_obj = ModuleUpdate( + self.pipeline_dir, update_all=True, show_diff=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH + ) assert update_obj.update() is True # Check that the git sha for trimgalore is correctly downgraded mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - for module in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]: - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"][module] - assert mod_json["repos"][NF_CORE_MODULES_NAME]["modules"][module]["git_sha"] == OLD_TRIMGALORE_SHA + assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] + assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA def test_update_with_config_no_updates(self): @@ -168,54 +191,66 @@ def test_update_with_config_no_updates(self): old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() # Fix the version of all nf-core modules in the .nf-core.yml to an old version - update_config = {"nf-core/modules": False} - tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + update_config = {GITLAB_URL: False} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) tools_config["update"] = update_config - with open(os.path.join(self.pipeline_dir, ".nf-core.yml"), "w") as f: + with open(os.path.join(self.pipeline_dir, config_fn), "w") as f: yaml.dump(tools_config, f) # Update all 
modules in the pipeline - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) + update_obj = ModuleUpdate( + self.pipeline_dir, + update_all=True, + show_diff=False, + sha=OLD_TRIMGALORE_SHA, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, + ) assert update_obj.update() is True - # Check that the git sha for trimgalore is correctly downgraded + # Check that the git sha for trimgalore is correctly downgraded and none of the modules has changed mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - for module in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]: - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"][module] + for module in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]: + assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module] assert ( - mod_json["repos"][NF_CORE_MODULES_NAME]["modules"][module]["git_sha"] - == old_mod_json["repos"][NF_CORE_MODULES_NAME]["modules"][module]["git_sha"] + mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"] + == old_mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"] ) def test_update_different_branch_single_module(self): """Try updating a module in a specific branch""" install_obj = ModuleInstall( - self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, sha=GITLAB_BRANCH_TEST_OLD_SHA + self.pipeline_dir, + prompt=False, + force=False, + remote_url=GITLAB_URL, + branch=GITLAB_BRANCH_TEST_BRANCH, + sha=GITLAB_BRANCH_TEST_OLD_SHA, ) - install_obj.install("fastp") + assert install_obj.install("fastp") + update_obj = ModuleUpdate( - self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, show_diff=False + self.pipeline_dir, update_deps=True, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, show_diff=False ) update_obj.update("fastp") # Verify that the branch entry was updated correctly modules_json = ModulesJson(self.pipeline_dir) - assert modules_json.get_module_branch("fastp", GITLAB_REPO) == GITLAB_BRANCH_TEST_BRANCH - assert modules_json.get_module_version("fastp", GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA + assert ( + modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) + == GITLAB_BRANCH_TEST_BRANCH + ) + assert modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA def test_update_different_branch_mixed_modules_main(self): """Try updating all modules where MultiQC is installed from main branch""" # Install fastp - install_obj = ModuleInstall( - self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, sha=GITLAB_BRANCH_TEST_OLD_SHA - ) - install_obj.install("fastp") + assert self.mods_install_gitlab_old.install("fastp") # Install MultiQC from gitlab default branch - install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) - install_obj.install("multiqc") + assert self.mods_install_gitlab.install("multiqc") # Try updating update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) @@ -223,25 +258,118 @@ def test_update_different_branch_mixed_modules_main(self): modules_json = ModulesJson(self.pipeline_dir) # Verify that the branch entry was updated correctly - assert modules_json.get_module_branch("fastp", GITLAB_REPO) == GITLAB_BRANCH_TEST_BRANCH - assert modules_json.get_module_version("fastp", GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA + assert ( + modules_json.get_component_branch(self.component_type, 
"fastp", GITLAB_URL, GITLAB_REPO) + == GITLAB_BRANCH_TEST_BRANCH + ) + assert modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA # MultiQC is present in both branches but should've been updated using the 'main' branch - assert modules_json.get_module_branch("multiqc", GITLAB_REPO) == GITLAB_DEFAULT_BRANCH + assert ( + modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO) + == GITLAB_DEFAULT_BRANCH + ) def test_update_different_branch_mix_modules_branch_test(self): """Try updating all modules where MultiQC is installed from branch-test branch""" # Install multiqc from the branch-test branch - install_obj = ModuleInstall( - self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, sha=GITLAB_BRANCH_TEST_OLD_SHA + assert self.mods_install_gitlab_old.install( + "multiqc" + ) # Force as the same module is installed from github nf-core modules repo + modules_json = ModulesJson(self.pipeline_dir) + update_obj = ModuleUpdate( + self.pipeline_dir, + update_all=True, + show_diff=False, + remote_url=GITLAB_URL, + branch=GITLAB_BRANCH_TEST_BRANCH, + sha=GITLAB_BRANCH_TEST_NEW_SHA, ) - install_obj.install("multiqc") - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) - update_obj.update() + assert update_obj.update() + + assert ( + modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO) + == GITLAB_BRANCH_TEST_BRANCH + ) + assert modules_json.get_module_version("multiqc", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA + +# Mock questionary answer: do not update module, only show diffs +@mock.patch.object(questionary.Question, "unsafe_ask", return_value=False) +def test_update_only_show_differences(self, mock_prompt): + """Try updating all modules showing differences. + Don't update some of them. 
+ Check that the sha in modules.json is not changed.""" modules_json = ModulesJson(self.pipeline_dir) - assert modules_json.get_module_branch("multiqc", GITLAB_REPO) == GITLAB_BRANCH_TEST_BRANCH - assert modules_json.get_module_version("multiqc", GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA + update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) + + # Update modules to a fixed old SHA + update_old = ModuleUpdate( + self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905" + ) + update_old.update() + + tmpdir = tempfile.mkdtemp() + shutil.rmtree(tmpdir) + shutil.copytree(Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME), tmpdir) + + assert update_obj.update() is True + + mod_json = modules_json.get_modules_json() + # Loop through all modules and check that they are NOT updated (according to the modules.json file) + # Modules that can be updated but shouldn't are custom/dumpsoftwareversions and fastqc + # Module multiqc is already up to date so don't check + for mod in ["custom/dumpsoftwareversions", "fastqc"]: + correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + assert correct_git_sha != current_git_sha + assert cmp_module(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True + + +# Mock questionary answer: do not update module, only show diffs +@mock.patch.object(questionary.Question, "unsafe_ask", return_value=False) +def test_update_only_show_differences_when_patch(self, mock_prompt): + """Try updating all modules showing differences when there's a patched module. + Don't update some of them. 
+ Check that the sha in modules.json is not changed.""" + modules_json = ModulesJson(self.pipeline_dir) + update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) + + # Update modules to a fixed old SHA + update_old = ModuleUpdate( + self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905" + ) + update_old.update() + + # Modify the fastqc module; it will have a patch which will be applied during the update + # We modify fastqc because it's one of the modules that can be updated and there's another one before it (custom/dumpsoftwareversions) + module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") + main_path = Path(module_path, "main.nf") + with open(main_path, "r") as fh: + lines = fh.readlines() + for line_index in range(len(lines)): + if lines[line_index] == " label 'process_medium'\n": + lines[line_index] = " label 'process_low'\n" + break + with open(main_path, "w") as fh: + fh.writelines(lines) + # Create a patch file + patch_obj = ModulePatch(self.pipeline_dir) + patch_obj.patch("fastqc") + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "fastqc.diff"} + + # Update all modules + assert update_obj.update() is True + + mod_json = modules_json.get_modules_json() + # Loop through all modules and check that they are NOT updated (according to the modules.json file) + # Modules that can be updated but shouldn't are custom/dumpsoftwareversions and fastqc + # Module multiqc is already up to date so don't check + for mod in ["custom/dumpsoftwareversions", "fastqc"]: + correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + assert correct_git_sha != current_git_sha def cmp_module(dir1, dir2): diff --git a/tests/subworkflows/__init__.py b/tests/subworkflows/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/subworkflows/create.py b/tests/subworkflows/create.py new file mode 100644 index 0000000000..60ee6add9a --- /dev/null +++ b/tests/subworkflows/create.py @@ -0,0 +1,37 @@ +import os + +import pytest + +import nf_core.subworkflows + + +def test_subworkflows_create_succeed(self): + """Succeed at creating a subworkflow from the template inside a pipeline""" + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.pipeline_dir, "test_subworkflow_local", "@author", True + ) + subworkflow_create.create() + assert os.path.exists(os.path.join(self.pipeline_dir, "subworkflows", "local", "test_subworkflow_local.nf")) + + +def test_subworkflows_create_fail_exists(self): + """Fail at creating the same subworkflow twice""" + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.pipeline_dir, "test_subworkflow2", "@author", False + ) + subworkflow_create.create() + with pytest.raises(UserWarning) as excinfo: + subworkflow_create.create() + assert "Subworkflow file exists already" in str(excinfo.value) + + +def test_subworkflows_create_nfcore_modules(self): + """Create a subworkflow in nf-core/modules clone""" + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.nfcore_modules, "test_subworkflow", "@author", force=True + ) + subworkflow_create.create() + assert os.path.exists(os.path.join(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf")) + assert os.path.exists( 
os.path.join(self.nfcore_modules, "tests", "subworkflows", "nf-core", "test_subworkflow", "main.nf") + ) diff --git a/tests/subworkflows/info.py b/tests/subworkflows/info.py new file mode 100644 index 0000000000..688120ac02 --- /dev/null +++ b/tests/subworkflows/info.py @@ -0,0 +1,64 @@ +from rich.console import Console + +import nf_core.subworkflows + +from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL + + +def test_subworkflows_info_remote(self): + """Test getting info about a remote subworkflow""" + mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools") + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Subworkflow: bam_sort_stats_samtools" in output + assert "Inputs" in output + assert "Outputs" in output + + +def test_subworkflows_info_remote_gitlab(self): + """Test getting info about a subworkflow in the remote gitlab repo""" + mods_info = nf_core.subworkflows.SubworkflowInfo( + self.pipeline_dir, "bam_sort_stats_samtools", remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Subworkflow: bam_sort_stats_samtools" in output + assert "Inputs" in output + assert "Outputs" in output + assert "--git-remote" in output + + +def test_subworkflows_info_local(self): + """Test getting info about a locally installed subworkflow""" + self.subworkflow_install.install("bam_sort_stats_samtools") + mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools") + mods_info.local = True + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Subworkflow: bam_sort_stats_samtools" in output + assert "Inputs" in output + assert "Outputs" in output + + +def test_subworkflows_info_in_modules_repo(self): + """Test getting info about a locally installed subworkflow in the modules repo""" + self.subworkflow_install.install("bam_sort_stats_samtools") + mods_info = nf_core.subworkflows.SubworkflowInfo(self.nfcore_modules, "bam_sort_stats_samtools") + mods_info.local = True + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Subworkflow: bam_sort_stats_samtools" in output + assert "Inputs" in output + assert "Outputs" in output diff --git a/tests/subworkflows/install.py b/tests/subworkflows/install.py new file mode 100644 index 0000000000..6c04c9ad22 --- /dev/null +++ b/tests/subworkflows/install.py @@ -0,0 +1,142 @@ +import os + +import pytest + +from nf_core.modules.modules_json import ModulesJson +from nf_core.subworkflows.install import SubworkflowInstall + +from ..utils import ( + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_REPO, + GITLAB_SUBWORKFLOWS_BRANCH, + GITLAB_URL, + with_temporary_folder, +) + + +def test_subworkflow_install_nopipeline(self): + """Test installing a subworkflow - no pipeline given""" + self.subworkflow_install.dir = None + assert self.subworkflow_install.install("foo") is False + + +@with_temporary_folder +def test_subworkflows_install_emptypipeline(self, tmpdir): + """Test installing a subworkflow - empty dir given""" + os.mkdir(os.path.join(tmpdir, "nf-core-pipe")) + self.subworkflow_install.dir = os.path.join(tmpdir, 
"nf-core-pipe") + with pytest.raises(UserWarning) as excinfo: + self.subworkflow_install.install("foo") + assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) + + +def test_subworkflows_install_nosubworkflow(self): + """Test installing a subworkflow - unrecognised subworkflow given""" + assert self.subworkflow_install.install("foo") is False + + +def test_subworkflows_install_bam_sort_stats_samtools(self): + """Test installing a subworkflow - bam_sort_stats_samtools""" + assert self.subworkflow_install.install("bam_sort_stats_samtools") is not False + subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + sub_subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_stats_samtools") + samtools_index_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + samtools_sort_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "sort") + samtools_stats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + samtools_idxstats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats") + samtools_flagstat_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat") + assert os.path.exists(subworkflow_path) + assert os.path.exists(sub_subworkflow_path) + assert os.path.exists(samtools_index_path) + assert os.path.exists(samtools_sort_path) + assert os.path.exists(samtools_stats_path) + assert os.path.exists(samtools_idxstats_path) + assert os.path.exists(samtools_flagstat_path) + + +def test_subworkflows_install_bam_sort_stats_samtools_twice(self): + """Test installing a subworkflow - bam_sort_stats_samtools already there""" + self.subworkflow_install.install("bam_sort_stats_samtools") + assert self.subworkflow_install.install("bam_sort_stats_samtools") is False + + +def test_subworkflows_install_from_gitlab(self): + """Test installing a subworkflow from GitLab""" + assert self.subworkflow_install_gitlab.install("bam_stats_samtools") is True + # Verify that the branch entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + assert ( + modules_json.get_component_branch(self.component_type, "bam_stats_samtools", GITLAB_URL, GITLAB_REPO) + == GITLAB_SUBWORKFLOWS_BRANCH + ) + + +def test_subworkflows_install_different_branch_fail(self): + """Test installing a subworkflow from a different branch""" + install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) + # The bam_stats_samtools subworkflow does not exists in the branch-test branch + assert install_obj.install("bam_stats_samtools") is False + + +def test_subworkflows_install_tracking(self): + """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" + self.subworkflow_install.install("bam_sort_stats_samtools") + + # Verify that the installed_by entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ + "bam_sort_stats_samtools" + ]["installed_by"] == ["subworkflows"] + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ + "installed_by" + ] == ["bam_sort_stats_samtools"] + assert 
mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/stats"][ + "installed_by" + ] == ["bam_stats_samtools"] + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/sort"][ + "installed_by" + ] == ["bam_sort_stats_samtools"] + + # Clean directory + self.subworkflow_remove.remove("bam_sort_stats_samtools") + + +def test_subworkflows_install_tracking_added_already_installed(self): + """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" + self.subworkflow_install.install("bam_sort_stats_samtools") + self.subworkflow_install.install("bam_stats_samtools") + + # Verify that the installed_by entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ + "bam_sort_stats_samtools" + ]["installed_by"] == ["subworkflows"] + assert sorted( + mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ + "installed_by" + ] + ) == sorted(["bam_sort_stats_samtools", "subworkflows"]) + + # Clean directory + self.subworkflow_remove.remove("bam_sort_stats_samtools") + self.subworkflow_remove.remove("bam_stats_samtools") + + +def test_subworkflows_install_tracking_added_super_subworkflow(self): + """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" + self.subworkflow_install.install("bam_stats_samtools") + self.subworkflow_install.install("bam_sort_stats_samtools") + + # Verify that the installed_by entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ + "bam_sort_stats_samtools" + ]["installed_by"] == ["subworkflows"] + assert sorted( + mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ + "installed_by" + ] + ) == sorted(["subworkflows", "bam_sort_stats_samtools"]) diff --git a/tests/subworkflows/list.py b/tests/subworkflows/list.py new file mode 100644 index 0000000000..c65999d42c --- /dev/null +++ b/tests/subworkflows/list.py @@ -0,0 +1,49 @@ +from rich.console import Console + +import nf_core.subworkflows + +from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL + + +def test_subworkflows_list_remote(self): + """Test listing available subworkflows""" + subworkflows_list = nf_core.subworkflows.SubworkflowList(None, remote=True) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output + + +def test_subworkflows_list_remote_gitlab(self): + """Test listing the subworkflows in the remote gitlab repo""" + subworkflows_list = nf_core.subworkflows.SubworkflowList( + None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output + + +def test_subworkflows_install_and_list_subworkflows(self): + """Test listing locally installed subworkflows""" + self.subworkflow_install.install("bam_sort_stats_samtools") + subworkflows_list = 
nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output + + +def test_subworkflows_install_gitlab_and_list_subworkflows(self): + """Test listing subworkflows installed locally from GitLab""" + self.subworkflow_install_gitlab.install("bam_sort_stats_samtools") + subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output diff --git a/tests/subworkflows/remove.py b/tests/subworkflows/remove.py new file mode 100644 index 0000000000..53a948778b --- /dev/null +++ b/tests/subworkflows/remove.py @@ -0,0 +1,98 @@ +from pathlib import Path + +from rich.console import Console + +from nf_core.modules.modules_json import ModulesJson + + +def test_subworkflows_remove_uninstalled_subworkflow(self): + """Test removing subworkflow without installing it""" + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") is False + + +def test_subworkflows_remove_subworkflow(self): + """Test removing subworkflow and all its dependencies after installing it""" + self.subworkflow_install.install("bam_sort_stats_samtools") + + subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") + mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() + assert Path.exists(subworkflow_path) is False + assert Path.exists(bam_sort_stats_samtools_path) is False + assert Path.exists(bam_stats_samtools_path) is False + assert Path.exists(samtools_index_path) is False + assert mod_json_before != mod_json_after + # assert subworkflows key is removed from modules.json + assert "subworkflows" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"].keys() + assert "samtools/index" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"].keys() + + +def test_subworkflows_remove_subworkflow_keep_installed_module(self): + """Test removing subworkflow and all its dependencies after installing it, except for a separately installed module""" + self.subworkflow_install.install("bam_sort_stats_samtools") + self.mods_install.install("samtools/index") + + subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + + mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") + mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() + + assert Path.exists(subworkflow_path) is False + assert Path.exists(bam_sort_stats_samtools_path) is False + assert 
Path.exists(bam_stats_samtools_path) is False + assert Path.exists(samtools_index_path) is True + assert mod_json_before != mod_json_after + # assert subworkflows key is removed from modules.json + assert "subworkflows" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"].keys() + assert ( + "samtools/index" + in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"].keys() + ) + + +def test_subworkflows_remove_one_of_two_subworkflow(self): + """Test removing one of two installed subworkflows, keeping the dependencies shared with the remaining one""" + self.subworkflow_install.install("bam_sort_stats_samtools") + self.subworkflow_install.install("bam_stats_samtools") + subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") + + assert Path.exists(subworkflow_path) is True + assert Path.exists(bam_sort_stats_samtools_path) is False + assert Path.exists(bam_stats_samtools_path) is True + assert Path.exists(samtools_index_path) is False + assert Path.exists(samtools_stats_path) is True + self.subworkflow_remove.remove("bam_stats_samtools") + + +def test_subworkflows_remove_included_subworkflow(self): + """Test that removing a subworkflow which is installed by another subworkflow fails and leaves its dependencies in place.""" + self.subworkflow_install.install("bam_sort_stats_samtools") + subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + + assert self.subworkflow_remove.remove("bam_stats_samtools") is False + + assert Path.exists(subworkflow_path) is True + assert Path.exists(bam_sort_stats_samtools_path) is True + assert Path.exists(bam_stats_samtools_path) is True + assert Path.exists(samtools_index_path) is True + assert Path.exists(samtools_stats_path) is True + self.subworkflow_remove.remove("bam_sort_stats_samtools") diff --git a/tests/subworkflows/subworkflows_test.py b/tests/subworkflows/subworkflows_test.py new file mode 100644 index 0000000000..adb0989b33 --- /dev/null +++ b/tests/subworkflows/subworkflows_test.py @@ -0,0 +1,41 @@ +"""Test the 'subworkflows test' command which runs subworkflow pytests.""" +import os +import shutil +from pathlib import Path + +import pytest + +import nf_core.subworkflows + +from ..utils import set_wd + + +def test_subworkflows_test_check_inputs(self): + """Test the check_inputs() function - raise UserWarning because the subworkflow doesn't exist""" + with set_wd(self.nfcore_modules): + meta_builder = nf_core.subworkflows.SubworkflowsTest("none", True, "") + with pytest.raises(UserWarning) as excinfo: + meta_builder._check_inputs() + assert "Cannot find directory" in str(excinfo.value) + + +def test_subworkflows_test_no_name_no_prompts(self): + """Test the check_inputs() function - raise UserWarning because prompts are deactivated and the subworkflow name 
is not provided.""" + with set_wd(self.nfcore_modules): + meta_builder = nf_core.subworkflows.SubworkflowsTest(None, True, "") + with pytest.raises(UserWarning) as excinfo: + meta_builder._check_inputs() + assert "Subworkflow name not provided and prompts deactivated." in str(excinfo.value) + + +def test_subworkflows_test_no_installed_subworkflows(self): + """Test the check_inputs() function - raise UserWarning because no installed subworkflows were found""" + with set_wd(self.nfcore_modules): + module_dir = Path(self.nfcore_modules, "subworkflows") + shutil.rmtree(module_dir) + module_dir.mkdir() + meta_builder = nf_core.subworkflows.SubworkflowsTest(None, False, "") + meta_builder.repo_type = "modules" + with pytest.raises(UserWarning) as excinfo: + meta_builder._check_inputs() + assert "No installed subworkflows were found" in str(excinfo.value) diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py new file mode 100644 index 0000000000..698086e186 --- /dev/null +++ b/tests/subworkflows/update.py @@ -0,0 +1,334 @@ +import filecmp +import shutil +import tempfile +from pathlib import Path + +import yaml + +import nf_core.utils +from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE +from nf_core.modules.remove import ModuleRemove +from nf_core.modules.update import ModuleUpdate +from nf_core.subworkflows.update import SubworkflowUpdate + +from ..utils import OLD_SUBWORKFLOWS_SHA + + +def test_install_and_update(self): + """Installs a subworkflow in the pipeline and updates it (no change)""" + self.subworkflow_install.install("bam_stats_samtools") + update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False) + + # Copy the sw files and check that they are unaffected by the update + tmpdir = tempfile.mkdtemp() + shutil.rmtree(tmpdir) + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "bam_stats_samtools") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("bam_stats_samtools") is True + assert cmp_component(tmpdir, sw_path) is True + + +def test_install_at_hash_and_update(self): + """Installs an old version of a subworkflow in the pipeline and updates it""" + assert self.subworkflow_install_old.install("fastq_align_bowtie2") + update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False, update_deps=True) + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Copy the sw files and check that they are affected by the update + tmpdir = tempfile.mkdtemp() + shutil.rmtree(tmpdir) + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("fastq_align_bowtie2") is True + assert cmp_component(tmpdir, sw_path) is False + + # Check that the modules.json is correctly updated + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Get the up-to-date git_sha for the sw from the ModulesRepo object + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + ) + + +def test_install_at_hash_and_update_and_save_diff_to_file(self): + """Installs an old version of a subworkflow in the pipeline and updates it. 
Save differences to a file.""" + assert self.subworkflow_install_old.install("fastq_align_bowtie2") + patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch") + update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True) + + # Copy the sw files and check that they are not affected by the update (the diff is only saved to a file) + tmpdir = tempfile.mkdtemp() + shutil.rmtree(tmpdir) + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("fastq_align_bowtie2") is True + assert cmp_component(tmpdir, sw_path) is True + + with open(patch_path, "r") as fh: + line = fh.readline() + assert line.startswith( + "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" + ) + + +def test_update_all(self): + """Updates all subworkflows present in the pipeline""" + # Install subworkflows fastq_align_bowtie2, bam_sort_stats_samtools, bam_stats_samtools + self.subworkflow_install.install("fastq_align_bowtie2") + # Update all subworkflows + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + # We must reload the modules.json to get the updated version + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + # Loop through all subworkflows and check that they are updated (according to the modules.json file) + for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]: + correct_git_sha = list(update_obj.modules_repo.get_component_git_log(sw, "subworkflows", depth=1))[0]["git_sha"] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + assert correct_git_sha == current_git_sha + + +def test_update_with_config_fixed_version(self): + """Try updating when there are entries in the .nf-core.yml""" + # Install subworkflow at the latest version + assert self.subworkflow_install.install("fastq_align_bowtie2") + + # Fix the subworkflow version in the .nf-core.yml to an old version + update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": OLD_SUBWORKFLOWS_SHA}}} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + tools_config["update"] = update_config + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config, f) + + # Update all subworkflows in the pipeline + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + # Check that the git sha for fastq_align_bowtie2 is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME] + assert ( + "git_sha" + in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] + ) + assert ( + mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + == OLD_SUBWORKFLOWS_SHA + ) + + +def test_update_with_config_dont_update(self): + """Try updating when the subworkflow is to be ignored""" + # Install an old version of fastq_align_bowtie2 + self.subworkflow_install_old.install("fastq_align_bowtie2") + + # Set the fastq_align_bowtie2 field to no update in the .nf-core.yml + update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: 
{"fastq_align_bowtie2": False}}} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + tools_config["update"] = update_config + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config, f) + + # Update all modules in the pipeline + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + # Check that the git sha for fastq_align_bowtie2 is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME] + assert ( + "git_sha" + in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] + ) + assert ( + mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + == OLD_SUBWORKFLOWS_SHA + ) + + +def test_update_with_config_fix_all(self): + """Fix the version of all nf-core subworkflows""" + # Install subworkflow at the latest version + assert self.subworkflow_install.install("fastq_align_bowtie2") + + # Fix the version of all nf-core subworkflows in the .nf-core.yml to an old version + update_config = {NF_CORE_MODULES_REMOTE: OLD_SUBWORKFLOWS_SHA} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + tools_config["update"] = update_config + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config, f) + + # Update all subworkflows in the pipeline + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + # Check that the git sha for fastq_align_bowtie2 is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert ( + "git_sha" + in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] + ) + assert ( + mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + == OLD_SUBWORKFLOWS_SHA + ) + + +def test_update_with_config_no_updates(self): + """Don't update any nf-core subworkflows""" + # Install an old version of fastq_align_bowtie2 + self.subworkflow_install_old.install("fastq_align_bowtie2") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Set all repository updates to False + update_config = {NF_CORE_MODULES_REMOTE: False} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + tools_config["update"] = update_config + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config, f) + + # Update all subworkflows in the pipeline + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + # Check that the git sha for fastq_align_bowtie2 is correctly downgraded and none of the subworkflows has changed + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]: + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw] + assert ( + mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + == old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + ) + + +def test_update_all_linked_components_from_subworkflow(self): + 
"""Update a subworkflow and all modules and subworkflows used on it""" + # Install an old version of fastq_align_bowtie2 + self.subworkflow_install_old.install("fastq_align_bowtie2") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Copy the sw files and check that they are affected by the update + tmpdir = tempfile.mkdtemp() + shutil.rmtree(tmpdir) + subworkflows_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME) + modules_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME) + shutil.copytree(subworkflows_path, Path(tmpdir, "subworkflows")) + shutil.copytree(modules_path, Path(tmpdir, "modules")) + + # Update fastq_align_bowtie2 and all modules and subworkflows used by that + update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) + assert update_obj.update("fastq_align_bowtie2") is True + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Loop through all modules and subworkflows used in fastq_align_bowtie2 + # check that they are updated (according to the modules.json file) + for sw in ["fastq_align_bowtie2", "bam_sort_stats_samtools", "bam_stats_samtools"]: + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + ) + for mod in [ + "bowtie2/align", + "samtools/index", + "samtools/sort", + "samtools/flagstat", + "samtools/idxstats", + "samtools/stats", + ]: + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + ) + # Check that the subworkflow files are updated + assert ( + cmp_component( + Path(tmpdir, "subworkflows", "fastq_align_bowtie2"), Path(subworkflows_path, "fastq_align_bowtie2") + ) + is False + ) + + +def test_update_all_subworkflows_from_module(self): + """Update a module and all subworkflows that use this module""" + # Install an old version of fastq_align_bowtie2 and thus all modules used by it (bowtie2/align) + self.subworkflow_install_old.install("fastq_align_bowtie2") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Copy the sw files and check that they are affected by the update + tmpdir = tempfile.mkdtemp() + shutil.rmtree(tmpdir) + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, Path(tmpdir, "fastq_align_bowtie2")) + + # Update bowtie2/align and all subworkflows using it + update_obj = ModuleUpdate(self.pipeline_dir, update_deps=True, show_diff=False) + assert update_obj.update("bowtie2/align") is True + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Check that bowtie2/align and fastq_align_bowtie2 are updated + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + ) + assert cmp_component(Path(tmpdir, "fastq_align_bowtie2"), sw_path) is False + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] + ) + + +def test_update_change_of_included_modules(self): + """Update a subworkflow 
which has a module change in the new version.""" + # Install an old version of vcf_annotate_ensemblvep with tabix/bgziptabix and without tabix/tabix + self.subworkflow_install_module_change.install("vcf_annotate_ensemblvep") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Check that tabix/bgziptabix is there + assert "tabix/bgziptabix" in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() + # Check that tabix/tabix is not there + assert "tabix/tabix" not in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() + + # Update vcf_annotate_ensemblvep without tabix/bgziptabix and with tabix/tabix + update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) + assert update_obj.update("vcf_annotate_ensemblvep") is True + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Check that tabix/bgziptabix is not there + assert "tabix/bgziptabix" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() + # Check that tabix/tabix is there + assert "tabix/tabix" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() + # Check that ensemblvep is not there but instead we have ensemblvep/vep (due to a file re-naming) + assert "ensemblvep" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert "ensemblvep/vep" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "ensemblvep/vep").is_dir() + + +def cmp_component(dir1, dir2): + """Compare two versions of the same component""" + files = ["main.nf", "meta.yml"] + return all(filecmp.cmp(Path(dir1, f), Path(dir2, f), shallow=False) for f in files) diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py index 96eb1240a0..658a2339d4 100644 --- a/tests/test_bump_version.py +++ b/tests/test_bump_version.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """Some tests covering the bump_version code. """ import os diff --git a/tests/test_cli.py b/tests/test_cli.py index a98fe8a407..2bd8af5c59 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,12 +1,17 @@ -#!/usr/bin/env python """ Tests covering the command-line code. + +Most tests check the cli arguments are passed along and that some action is +taken. """ +import tempfile +import unittest from unittest import mock from click.testing import CliRunner import nf_core.__main__ +import nf_core.utils @mock.patch("nf_core.__main__.nf_core_cli") @@ -15,18 +20,333 @@ def test_header(mock_cli): nf_core.__main__.run_nf_core() -def test_cli_help(): - """Test the main launch function with --help""" - runner = CliRunner() - result = runner.invoke(nf_core.__main__.nf_core_cli, ["--help"]) - assert result.exit_code == 0 - assert "Show the version and exit." 
in result.output +@mock.patch("nf_core.__main__.nf_core_cli") +@mock.patch("nf_core.utils.check_if_outdated", return_value=(True, None, "dummy_version")) +def test_header_outdated(mock_check_outdated, mock_nf_core_cli, capsys): + """Check cli notifies the user when nf_core is outdated""" + nf_core.__main__.run_nf_core() + captured = capsys.readouterr() + assert "There is a new version of nf-core/tools available! (dummy_version)" in captured.err + + +class TestCli(unittest.TestCase): + """Class for testing the commandline interface""" + + def setUp(self): + self.runner = CliRunner() + + def assemble_params(self, params): + """Assemble a dictionary of parameters into a list of arguments for the cli + + Note: + if the value of a parameter is None, it will be considered a flag. + Booleans were not used to avoid conflicting with the click.BOOL type. + + Args: + params (dict): dict of parameters to assemble""" + arg_list = [] + for key, value in params.items(): + if value is not None: + arg_list += [f"--{key}", value] + else: + arg_list += [f"--{key}"] + + return arg_list + + def invoke_cli(self, cmd): + """Invoke the commandline interface using a list of parameters + + Args: + cmd (list): commandline to execute + """ + return self.runner.invoke(nf_core.__main__.nf_core_cli, cmd) + + def test_cli_help(self): + """Test the main launch function with --help""" + result = self.invoke_cli(["--help"]) + assert result.exit_code == 0 + assert "Show the version and exit." in result.output + + def test_cli_bad_subcommand(self): + """Test the main launch function with an unrecognised argument""" + result = self.invoke_cli(["foo"]) + assert result.exit_code == 2 + + def test_cli_verbose(self): + """Test the main launch function with verbose flag""" + result = self.invoke_cli(["-v"]) + # Checks that -v was considered valid + assert "No such option: -v" not in nf_core.utils.strip_ansi_codes(result.output) + + @mock.patch("nf_core.list.list_workflows", return_value="pipeline test list") + def test_cli_list(self, mock_list_workflows): + """Test nf-core pipelines are listed and cli parameters are passed on.""" + params = { + "sort": "name", + "json": None, + "show-archived": None, + } + cmd = ["list"] + self.assemble_params(params) + ["kw1", "kw2"] + result = self.invoke_cli(cmd) + + mock_list_workflows.assert_called_once_with( + tuple(cmd[-2:]), params["sort"], "json" in params, "show-archived" in params + ) + assert result.exit_code == 0 + assert "pipeline test list" in result.output + + @mock.patch("nf_core.launch.Launch") + def test_cli_launch(self, mock_launcher): + """Test nf-core pipeline is launched and cli parameters are passed on.""" + mock_launcher.return_value.launch_pipeline.return_value = True + + temp_params_in = tempfile.NamedTemporaryFile() + params = { + "revision": "abcdef", + "id": "idgui", + "command-only": None, + "params-out": "/path/params/out", + "params-in": temp_params_in.name, + "save-all": None, + "show-hidden": None, + "url": "builder_url", + } + cmd = ["launch"] + self.assemble_params(params) + ["pipeline_name"] + result = self.invoke_cli(cmd) + + assert result.exit_code == 0 + + mock_launcher.assert_called_once_with( + cmd[-1], + params["revision"], + "command-only" in params, + params["params-in"], + params["params-out"], + "save-all" in params, + "show-hidden" in params, + params["url"], + params["id"], + ) + + mock_launcher.return_value.launch_pipeline.assert_called_once() + + @mock.patch("nf_core.launch.Launch") + def test_cli_launch_no_params_in(self, mock_launcher): + 
"""Test nf-core pipeline fails when params-in does not exist""" + mock_launcher.return_value.launch_pipeline.return_value = True + + params = { + "params-in": "/fake/path", + } + cmd = ["launch"] + self.assemble_params(params) + ["pipeline_name"] + result = self.invoke_cli(cmd) + + assert result.exit_code == 2 + assert ( + f"Invalid value for '-p' / '--params-in': Path '{params['params-in']}' does not exist." + in nf_core.utils.strip_ansi_codes(result.output) + ) + + mock_launcher.assert_not_called() + + @mock.patch("nf_core.launch.Launch") + def test_cli_launch_fail(self, mock_launcher): + """Test nf-core pipeline fails with exit code 1 when pipeline fails.""" + mock_launcher.return_value.launch_pipeline.return_value = False + cmd = ["launch", "pipeline_name"] + result = self.invoke_cli(cmd) + assert result.exit_code == 1 + + @mock.patch("nf_core.download.DownloadWorkflow") + def test_cli_download(self, mock_dl): + """Test nf-core pipeline is downloaded and cli parameters are passed on.""" + params = { + "revision": "abcdef", + "outdir": "/path/outdir", + "compress": "tar.gz", + "force": None, + "container": "singularity", + "singularity-cache-only": None, + "parallel-downloads": 2, + } + + cmd = ["download"] + self.assemble_params(params) + ["pipeline_name"] + result = self.invoke_cli(cmd) + + assert result.exit_code == 0 + + mock_dl.assert_called_once_with( + cmd[-1], + params["revision"], + params["outdir"], + params["compress"], + "force" in params, + params["container"], + "singularity-cache-only" in params, + params["parallel-downloads"], + ) + + mock_dl.return_value.download_workflow.assert_called_once() + + @mock.patch("nf_core.licences.WorkflowLicences") + def test_licences(self, mock_lic): + """Test nf-core pipeline licence is printed out and cli parameters are passed on.""" + licence_text = "dummy licence text" + mock_lic.return_value.run_licences.return_value = licence_text + + params = { + "json": None, + } + + cmd = ["licences"] + self.assemble_params(params) + ["pipeline_name"] + result = self.invoke_cli(cmd) + + assert result.exit_code == 0 + assert licence_text in result.output + + mock_lic.assert_called_once_with(cmd[-1]) + + @mock.patch("nf_core.licences.WorkflowLicences") + def test_licences_log_error(self, mock_lic): + """Test LookupError is logged""" + error_txt = "LookupError has been raised" + mock_lic.return_value.run_licences.side_effect = LookupError(error_txt) + + cmd = ["licences", "pipeline_name"] + with self.assertLogs() as captured_logs: + result = self.invoke_cli(cmd) + + assert result.exit_code == 1 + assert error_txt in captured_logs.output[-1] + assert captured_logs.records[-1].levelname == "ERROR" + + @mock.patch("nf_core.create.PipelineCreate") + def test_create(self, mock_create): + """Test nf-core pipeline is created and cli parameters are passed on.""" + params = { + "name": "pipeline name", + "description": "pipeline description", + "author": "Kalle Anka", + "version": "1.2.3", + "no-git": None, + "force": None, + "outdir": "/path/outdir", + "template-yaml": "file.yaml", + "plain": None, + } + + cmd = ["create"] + self.assemble_params(params) + result = self.invoke_cli(cmd) + + assert result.exit_code == 0 + mock_create.assert_called_once_with( + params["name"], + params["description"], + params["author"], + params["version"], + "no-git" in params, + "force" in params, + params["outdir"], + params["template-yaml"], + "plain" in params, + ) + mock_create.return_value.init_pipeline.assert_called_once() + + 
@mock.patch("nf_core.utils.is_pipeline_directory") + @mock.patch("nf_core.lint.run_linting") + def test_lint(self, mock_lint, mock_is_pipeline): + """Test nf-core lint""" + mock_lint_results = (mock.MagicMock, mock.MagicMock) + mock_lint_results[0].failed = [] + mock_lint_results[1].failed = [] + mock_lint.return_value = mock_lint_results + + temp_pipeline_dir = tempfile.NamedTemporaryFile() + params = { + "dir": temp_pipeline_dir.name, + "release": None, + "fix": "fix test", + "key": "key test", + "show-passed": None, + "fail-ignored": None, + "fail-warned": None, + "markdown": "output_file.md", + "json": "output_file.json", + } + + cmd = ["lint"] + self.assemble_params(params) + result = self.invoke_cli(cmd) + + assert result.exit_code == 0 + mock_lint.assert_called_once_with( + params["dir"], + "release" in params, + (params["fix"],), + (params["key"],), + "show-passed" in params, + "fail-ignored" in params, + "fail-warned" in params, + "test", + params["markdown"], + params["json"], + "hide-progress" in params, + ) + + def test_lint_no_dir(self): + """Test nf-core lint fails if --dir does not exist""" + params = { + "dir": "/bad/path", + } + + cmd = ["lint"] + self.assemble_params(params) + result = self.invoke_cli(cmd) + + assert result.exit_code == 2 + assert ( + f"Invalid value for '-d' / '--dir': Path '{params['dir']}' does not exist." + in nf_core.utils.strip_ansi_codes(result.output) + ) + + @mock.patch("nf_core.utils.is_pipeline_directory") + def test_lint_dir_is_not_pipeline(self, mock_is_pipeline): + """Test nf-core lint logs an error if not called from a pipeline directory.""" + error_txt = "UserWarning has been raised" + mock_is_pipeline.side_effect = UserWarning(error_txt) + + cmd = ["lint"] + with self.assertLogs() as captured_logs: + result = self.invoke_cli(cmd) + + assert result.exit_code == 1 + assert error_txt in captured_logs.output[-1] + assert captured_logs.records[-1].levelname == "ERROR" + + @mock.patch("nf_core.utils.is_pipeline_directory") + @mock.patch("nf_core.lint.run_linting") + def test_lint_log_assert_error(self, mock_lint, mock_is_pipeline): + """Test nf-core lint logs assertion errors""" + error_txt = "AssertionError has been raised" + mock_lint.side_effect = AssertionError(error_txt) + + cmd = ["lint"] + with self.assertLogs() as captured_logs: + result = self.invoke_cli(cmd) + + assert result.exit_code == 1 + assert error_txt in captured_logs.output[-1] + assert captured_logs.records[-1].levelname == "CRITICAL" + + @mock.patch("nf_core.utils.is_pipeline_directory") + @mock.patch("nf_core.lint.run_linting") + def test_lint_log_user_warning(self, mock_lint, mock_is_pipeline): + """Test nf-core lint logs assertion errors""" + error_txt = "AssertionError has been raised" + mock_lint.side_effect = UserWarning(error_txt) + cmd = ["lint"] + with self.assertLogs() as captured_logs: + result = self.invoke_cli(cmd) -def test_cli_bad_subcommand(): - """Test the main launch function with verbose flag and an unrecognised argument""" - runner = CliRunner() - result = runner.invoke(nf_core.__main__.nf_core_cli, ["-v", "foo"]) - assert result.exit_code == 2 - # Checks that -v was considered valid - assert "No such command" in result.output + assert result.exit_code == 1 + assert error_txt in captured_logs.output[-1] + assert captured_logs.records[-1].levelname == "ERROR" diff --git a/tests/test_create.py b/tests/test_create.py index 5f8f6546f2..baac509d74 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -1,9 +1,10 @@ -#!/usr/bin/env python """Some 
tests covering the pipeline creation sub command. """ import os import unittest +import git + import nf_core.create from .utils import with_temporary_folder @@ -16,6 +17,7 @@ def setUp(self, tmp_path): self.pipeline_description = "just for 4w3s0m3 tests" self.pipeline_author = "Chuck Norris" self.pipeline_version = "1.0.0" + self.default_branch = "default" self.pipeline = nf_core.create.PipelineCreate( name=self.pipeline_name, @@ -26,6 +28,7 @@ def setUp(self, tmp_path): force=True, outdir=tmp_path, plain=True, + default_branch=self.default_branch, ) def test_pipeline_creation(self): @@ -37,3 +40,4 @@ def test_pipeline_creation(self): def test_pipeline_creation_initiation(self): self.pipeline.init_pipeline() assert os.path.isdir(os.path.join(self.pipeline.outdir, ".git")) + assert f" {self.default_branch}\n" in git.Repo.init(self.pipeline.outdir).git.branch() diff --git a/tests/test_download.py b/tests/test_download.py index feb486e2c8..e2ae882394 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """Tests for the download subcommand of nf-core tools """ @@ -19,7 +18,6 @@ class DownloadTest(unittest.TestCase): - # # Tests for 'get_release_hash' # diff --git a/tests/test_launch.py b/tests/test_launch.py index a438f98c2f..d830311ba3 100644 --- a/tests/test_launch.py +++ b/tests/test_launch.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Tests covering the pipeline launch code. """ @@ -8,6 +7,8 @@ import unittest from unittest import mock +import pytest + import nf_core.create import nf_core.launch @@ -71,7 +72,7 @@ def test_make_pipeline_schema(self, tmp_path): """Create a workflow, but delete the schema file, then try to load it""" test_pipeline_dir = os.path.join(tmp_path, "wf") create_obj = nf_core.create.PipelineCreate( - "test_pipeline", "", "", outdir=test_pipeline_dir, no_git=True, plain=True + "testpipeline", "", "", outdir=test_pipeline_dir, no_git=True, plain=True ) create_obj.init_pipeline() os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json")) @@ -135,11 +136,9 @@ def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api): """Check the code that opens the web browser""" self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() - try: + with pytest.raises(AssertionError) as exc_info: self.launcher.launch_web_gui() - raise UserWarning("Should have hit an AssertionError") - except AssertionError as e: - assert e.args[0].startswith("Web launch response not recognised:") + assert exc_info.value.args[0].startswith("Web launch response not recognised:") @mock.patch( "nf_core.utils.poll_nfcore_web_api", side_effect=[{"api_url": "foo", "web_url": "bar", "status": "recieved"}] @@ -155,20 +154,16 @@ def test_launch_web_gui(self, mock_poll_nfcore_web_api, mock_webbrowser, mock_wa @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "error", "message": "foo"}]) def test_get_web_launch_response_error(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - status error""" - try: + with pytest.raises(AssertionError) as exc_info: self.launcher.get_web_launch_response() - raise UserWarning("Should have hit an AssertionError") - except AssertionError as e: - assert e.args[0] == "Got error from launch API (foo)" + assert exc_info.value.args[0] == "Got error from launch API (foo)" @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "foo"}]) def test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api): """Test polling the website 
for a launch response - unexpected status""" - try: + with pytest.raises(AssertionError) as exc_info: self.launcher.get_web_launch_response() - raise UserWarning("Should have hit an AssertionError") - except AssertionError as e: - assert e.args[0].startswith("Web launch GUI returned unexpected status (foo): ") + assert exc_info.value.args[0].startswith("Web launch GUI returned unexpected status (foo): ") @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "waiting_for_user"}]) def test_get_web_launch_response_waiting(self, mock_poll_nfcore_web_api): @@ -178,11 +173,9 @@ def test_get_web_launch_response_waiting(self, mock_poll_nfcore_web_api): @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "launch_params_complete"}]) def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - complete, but missing keys""" - try: + with pytest.raises(AssertionError) as exc_info: self.launcher.get_web_launch_response() - raise UserWarning("Should have hit an AssertionError") - except AssertionError as e: - assert e.args[0] == "Missing return key from web API: 'nxf_flags'" + assert exc_info.value.args[0] == "Missing return key from web API: 'nxf_flags'" @mock.patch( "nf_core.utils.poll_nfcore_web_api", @@ -334,7 +327,11 @@ def test_build_command_params(self): ) # Check saved parameters file with open(self.nf_params_fn, "r") as fh: - saved_json = json.load(fh) + try: + saved_json = json.load(fh) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{self.nf_params_fn}' due to error {e}") + assert saved_json == {"input": "custom_input"} def test_build_command_params_cl(self): diff --git a/tests/test_licenses.py b/tests/test_licenses.py index a2dde1639b..4fb58a107c 100644 --- a/tests/test_licenses.py +++ b/tests/test_licenses.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """Some tests covering the pipeline creation sub command. """ # import json diff --git a/tests/test_lint.py b/tests/test_lint.py index db9bf0fc36..e4e93bd1f4 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """Some tests covering the linting code. 
""" import fnmatch @@ -135,7 +134,10 @@ def test_json_output(self, tmp_dir): # Load created JSON file and check its contents with open(json_fn, "r") as fh: - saved_json = json.load(fh) + try: + saved_json = json.load(fh) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{json_fn}' due to error {e}") assert saved_json["num_tests_pass"] > 0 assert saved_json["num_tests_warned"] > 0 assert saved_json["num_tests_ignored"] == 0 @@ -186,7 +188,6 @@ def test_sphinx_md_files(self): test_actions_awstest_pass, ) from .lint.actions_ci import ( - test_actions_ci_fail_wrong_docker_ver, test_actions_ci_fail_wrong_nf, test_actions_ci_fail_wrong_trigger, test_actions_ci_pass, diff --git a/tests/test_lint_utils.py b/tests/test_lint_utils.py new file mode 100644 index 0000000000..2b624a3ecf --- /dev/null +++ b/tests/test_lint_utils.py @@ -0,0 +1,65 @@ +import shutil + +import git +import pytest + +import nf_core.lint_utils + +JSON_WITH_SYNTAX_ERROR = "{'a':1, 1}" +JSON_MALFORMED = "{'a':1}" +JSON_FORMATTED = '{ "a": 1 }\n' + +WHICH_PRE_COMMIT = shutil.which("pre-commit") + + +@pytest.fixture() +def temp_git_repo(tmp_path_factory): + tmp_git_dir = tmp_path_factory.mktemp("tmp_git_dir") + repo = git.Repo.init(tmp_git_dir) + return tmp_git_dir, repo + + +@pytest.fixture(name="formatted_json") +def git_dir_with_json(temp_git_repo): + tmp_git_dir, repo = temp_git_repo + file = tmp_git_dir / "formatted.json" + with open(file, "w", encoding="utf-8") as f: + f.write(JSON_FORMATTED) + repo.git.add(file) + return file + + +@pytest.fixture(name="malformed_json") +def git_dir_with_json_malformed(temp_git_repo): + tmp_git_dir, repo = temp_git_repo + file = tmp_git_dir / "malformed.json" + with open(file, "w", encoding="utf-8") as f: + f.write(JSON_MALFORMED) + repo.git.add(file) + return file + + +@pytest.fixture(name="syntax_error_json") +def git_dir_with_json_syntax_error(temp_git_repo): + tmp_git_dir, repo = temp_git_repo + file = tmp_git_dir / "synthax-error.json" + with open(file, "w", encoding="utf-8") as f: + f.write(JSON_WITH_SYNTAX_ERROR) + repo.git.add(file) + return file + + +def test_run_prettier_on_formatted_file(formatted_json): + nf_core.lint_utils.run_prettier_on_file(formatted_json) + assert formatted_json.read_text() == JSON_FORMATTED + + +def test_run_prettier_on_malformed_file(malformed_json): + nf_core.lint_utils.run_prettier_on_file(malformed_json) + assert malformed_json.read_text() == JSON_FORMATTED + + +def test_run_prettier_on_syntax_error_file(syntax_error_json, caplog): + nf_core.lint_utils.run_prettier_on_file(syntax_error_json) + expected_critical_log = "SyntaxError: Unexpected token (1:10)" + assert expected_critical_log in caplog.text diff --git a/tests/test_list.py b/tests/test_list.py index f71863cbca..70af3fada5 100644 --- a/tests/test_list.py +++ b/tests/test_list.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Tests covering the workflow listing code. 
""" @@ -22,7 +21,7 @@ tmp_nxf_str = str(tmp_nxf) -class TestLint(unittest.TestCase): +class TestList(unittest.TestCase): """Class for list tests""" @mock.patch("subprocess.check_output") diff --git a/tests/test_modules.py b/tests/test_modules.py index cf8c1c82f6..c50c1f2ba8 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Tests covering the modules commands """ @@ -7,27 +6,40 @@ import tempfile import unittest +import requests_mock + import nf_core.create import nf_core.modules -from .utils import GITLAB_URL, OLD_TRIMGALORE_SHA +from .utils import ( + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_BRANCH_TEST_OLD_SHA, + GITLAB_DEFAULT_BRANCH, + GITLAB_URL, + OLD_TRIMGALORE_BRANCH, + OLD_TRIMGALORE_SHA, + mock_api_calls, +) def create_modules_repo_dummy(tmp_dir): """Create a dummy copy of the nf-core/modules repo""" root_dir = os.path.join(tmp_dir, "modules") - os.makedirs(os.path.join(root_dir, "modules")) - os.makedirs(os.path.join(root_dir, "tests", "modules")) + os.makedirs(os.path.join(root_dir, "modules", "nf-core")) + os.makedirs(os.path.join(root_dir, "tests", "modules", "nf-core")) os.makedirs(os.path.join(root_dir, "tests", "config")) with open(os.path.join(root_dir, "tests", "config", "pytest_modules.yml"), "w") as fh: fh.writelines(["test:", "\n - modules/test/**", "\n - tests/modules/test/**"]) with open(os.path.join(root_dir, ".nf-core.yml"), "w") as fh: - fh.writelines(["repository_type: modules", "\n"]) + fh.writelines(["repository_type: modules", "\n", "org_path: nf-core", "\n"]) - # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules - module_create = nf_core.modules.ModuleCreate(root_dir, "bpipe/test", "@author", "process_medium", False, False) - module_create.create() + # mock biocontainers and anaconda response + with requests_mock.Mocker() as mock: + mock_api_calls(mock, "bpipe", "0.9.11--hdfd78af_0") + # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules + module_create = nf_core.modules.ModuleCreate(root_dir, "bpipe/test", "@author", "process_single", False, False) + module_create.create() return root_dir @@ -38,6 +50,7 @@ class TestModules(unittest.TestCase): def setUp(self): """Create a new PipelineSchema and Launch objects""" self.tmp_dir = tempfile.mkdtemp() + self.component_type = "modules" # Set up the schema root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) @@ -50,15 +63,43 @@ def setUp(self): self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True) self.mods_install_alt = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=True, force=True) self.mods_install_old = nf_core.modules.ModuleInstall( - self.pipeline_dir, prompt=False, force=False, sha=OLD_TRIMGALORE_SHA + self.pipeline_dir, + prompt=False, + force=False, + sha=OLD_TRIMGALORE_SHA, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, + ) + self.mods_install_trimgalore = nf_core.modules.ModuleInstall( + self.pipeline_dir, + prompt=False, + force=True, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, ) self.mods_install_gitlab = nf_core.modules.ModuleInstall( - self.pipeline_dir, prompt=False, force=True, remote_url=GITLAB_URL + self.pipeline_dir, + prompt=False, + force=True, + remote_url=GITLAB_URL, + branch=GITLAB_DEFAULT_BRANCH, + ) + self.mods_install_gitlab_old = nf_core.modules.ModuleInstall( + self.pipeline_dir, + prompt=False, + force=True, + remote_url=GITLAB_URL, + 
branch=GITLAB_BRANCH_TEST_BRANCH, + sha=GITLAB_BRANCH_TEST_OLD_SHA, ) # Set up remove objects self.mods_remove = nf_core.modules.ModuleRemove(self.pipeline_dir) - self.mods_remove_alt = nf_core.modules.ModuleRemove(self.pipeline_dir) + self.mods_remove_gitlab = nf_core.modules.ModuleRemove( + self.pipeline_dir, + remote_url=GITLAB_URL, + branch=GITLAB_DEFAULT_BRANCH, + ) # Set up the nf-core/modules repo dummy self.nfcore_modules = create_modules_repo_dummy(self.tmp_dir) @@ -72,7 +113,7 @@ def tearDown(self): def test_modulesrepo_class(self): """Initialise a modules repo object""" modrepo = nf_core.modules.ModulesRepo() - assert modrepo.fullname == "nf-core/modules" + assert modrepo.repo_path == "nf-core" assert modrepo.branch == "master" ############################################ @@ -98,6 +139,12 @@ def test_modulesrepo_class(self): test_modules_custom_yml_dumper, test_modules_test_file_dict, ) + from .modules.info import ( + test_modules_info_in_modules_repo, + test_modules_info_local, + test_modules_info_remote, + test_modules_info_remote_gitlab, + ) from .modules.install import ( test_modules_install_different_branch_fail, test_modules_install_different_branch_succeed, @@ -105,12 +152,14 @@ def test_modulesrepo_class(self): test_modules_install_from_gitlab, test_modules_install_nomodule, test_modules_install_nopipeline, + test_modules_install_tracking, test_modules_install_trimgalore, test_modules_install_trimgalore_twice, ) from .modules.lint import ( test_modules_lint_empty, test_modules_lint_gitlab_modules, + test_modules_lint_multiple_remotes, test_modules_lint_new_modules, test_modules_lint_no_gitlab, test_modules_lint_patched_modules, @@ -123,17 +172,11 @@ def test_modulesrepo_class(self): test_modules_list_remote, test_modules_list_remote_gitlab, ) - from .modules.module_test import ( - test_modules_test_check_inputs, - test_modules_test_no_installed_modules, - test_modules_test_no_name_no_prompts, - ) from .modules.modules_json import ( test_get_modules_json, test_mod_json_create, test_mod_json_create_with_patch, test_mod_json_dump, - test_mod_json_get_git_url, test_mod_json_get_module_version, test_mod_json_module_present, test_mod_json_repo_present, @@ -144,6 +187,11 @@ def test_modulesrepo_class(self): test_mod_json_with_empty_modules_value, test_mod_json_with_missing_modules_entry, ) + from .modules.modules_test import ( + test_modules_test_check_inputs, + test_modules_test_no_installed_modules, + test_modules_test_no_name_no_prompts, + ) from .modules.patch import ( test_create_patch_change, test_create_patch_no_change, @@ -153,6 +201,7 @@ def test_modulesrepo_class(self): test_create_patch_update_success, ) from .modules.remove import ( + test_modules_remove_multiqc_from_gitlab, test_modules_remove_trimgalore, test_modules_remove_trimgalore_uninstalled, ) @@ -164,6 +213,8 @@ def test_modulesrepo_class(self): test_update_different_branch_mix_modules_branch_test, test_update_different_branch_mixed_modules_main, test_update_different_branch_single_module, + test_update_only_show_differences, + test_update_only_show_differences_when_patch, test_update_with_config_dont_update, test_update_with_config_fix_all, test_update_with_config_fixed_version, diff --git a/tests/test_mullled.py b/tests/test_mulled.py similarity index 100% rename from tests/test_mullled.py rename to tests/test_mulled.py diff --git a/tests/test_refgenie.py b/tests/test_refgenie.py index 9314b44eef..1ff2683416 100644 --- a/tests/test_refgenie.py +++ b/tests/test_refgenie.py @@ -1,4 +1,3 @@ -#!/usr/bin/env 
python """ Tests covering the refgenie integration code """ diff --git a/tests/test_schema.py b/tests/test_schema.py index 4f829875e7..d3b4fda817 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Tests covering the pipeline schema code. """ @@ -31,7 +30,7 @@ def setUp(self): self.tmp_dir = tempfile.mkdtemp() self.template_dir = os.path.join(self.tmp_dir, "wf") create_obj = nf_core.create.PipelineCreate( - "test_pipeline", "", "", outdir=self.template_dir, no_git=True, plain=True + "testpipeline", "", "", outdir=self.template_dir, no_git=True, plain=True ) create_obj.init_pipeline() @@ -179,11 +178,9 @@ def test_validate_schema_fail_duplicate_ids(self): "definitions": {"groupOne": {"properties": {"foo": {}}}, "groupTwo": {"properties": {"foo": {}}}}, "allOf": [{"$ref": "#/definitions/groupOne"}, {"$ref": "#/definitions/groupTwo"}], } - try: + with pytest.raises(AssertionError) as exc_info: self.schema_obj.validate_schema(self.schema_obj.schema) - raise UserWarning("Expected AssertionError") - except AssertionError as e: - assert e.args[0] == "Duplicate parameter found in schema `definitions`: `foo`" + assert exc_info.value.args[0] == "Duplicate parameter found in schema `definitions`: `foo`" def test_validate_schema_fail_missing_def(self): """ @@ -193,11 +190,9 @@ def test_validate_schema_fail_missing_def(self): "definitions": {"groupOne": {"properties": {"foo": {}}}, "groupTwo": {"properties": {"bar": {}}}}, "allOf": [{"$ref": "#/definitions/groupOne"}], } - try: + with pytest.raises(AssertionError) as exc_info: self.schema_obj.validate_schema(self.schema_obj.schema) - raise UserWarning("Expected AssertionError") - except AssertionError as e: - assert e.args[0] == "Definition subschema `groupTwo` not included in schema `allOf`" + assert exc_info.value.args[0] == "Definition subschema `groupTwo` not included in schema `allOf`" def test_validate_schema_fail_unexpected_allof(self): """ @@ -211,11 +206,9 @@ def test_validate_schema_fail_unexpected_allof(self): {"$ref": "#/definitions/groupThree"}, ], } - try: + with pytest.raises(AssertionError) as exc_info: self.schema_obj.validate_schema(self.schema_obj.schema) - raise UserWarning("Expected AssertionError") - except AssertionError as e: - assert e.args[0] == "Subschema `groupThree` found in `allOf` but not `definitions`" + assert exc_info.value.args[0] == "Subschema `groupThree` found in `allOf` but not `definitions`" def test_make_skeleton_schema(self): """Test making a new schema skeleton""" @@ -383,20 +376,17 @@ def __init__(self, data, status_code): def test_launch_web_builder_404(self, mock_post): """Mock launching the web builder""" self.schema_obj.web_schema_build_url = "invalid_url" - try: + with pytest.raises(AssertionError) as exc_info: self.schema_obj.launch_web_builder() - raise UserWarning("Should have hit an AssertionError") - except AssertionError as e: - assert e.args[0] == "Could not access remote API results: invalid_url (HTML 404 Error)" + assert exc_info.value.args[0] == "Could not access remote API results: invalid_url (HTML 404 Error)" @mock.patch("requests.post", side_effect=mocked_requests_post) def test_launch_web_builder_invalid_status(self, mock_post): """Mock launching the web builder""" self.schema_obj.web_schema_build_url = "valid_url_error" - try: + with pytest.raises(AssertionError) as exc_info: self.schema_obj.launch_web_builder() - except AssertionError as e: - assert e.args[0].startswith("Pipeline schema builder response not recognised") + assert 
exc_info.value.args[0].startswith("Pipeline schema builder response not recognised") @mock.patch("requests.post", side_effect=mocked_requests_post) @mock.patch("requests.get") @@ -404,12 +394,10 @@ def test_launch_web_builder_invalid_status(self, mock_post): def test_launch_web_builder_success(self, mock_post, mock_get, mock_webbrowser): """Mock launching the web builder""" self.schema_obj.web_schema_build_url = "valid_url_success" - try: + # Assertion error comes from the get_web_builder_response() function + with pytest.raises(AssertionError) as exc_info: self.schema_obj.launch_web_builder() - raise UserWarning("Should have hit an AssertionError") - except AssertionError as e: - # Assertion error comes from get_web_builder_response() function - assert e.args[0].startswith("Could not access remote API results: https://nf-co.re") + assert exc_info.value.args[0].startswith("Could not access remote API results: https://nf-co.re") def mocked_requests_get(*args, **kwargs): """Helper function to emulate GET requests responses from the web""" @@ -438,21 +426,17 @@ def __init__(self, data, status_code): def test_get_web_builder_response_404(self, mock_post): """Mock launching the web builder""" self.schema_obj.web_schema_build_api_url = "invalid_url" - try: + with pytest.raises(AssertionError) as exc_info: self.schema_obj.get_web_builder_response() - raise UserWarning("Should have hit an AssertionError") - except AssertionError as e: - assert e.args[0] == "Could not access remote API results: invalid_url (HTML 404 Error)" + assert exc_info.value.args[0] == "Could not access remote API results: invalid_url (HTML 404 Error)" @mock.patch("requests.get", side_effect=mocked_requests_get) def test_get_web_builder_response_error(self, mock_post): """Mock launching the web builder""" self.schema_obj.web_schema_build_api_url = "valid_url_error" - try: + with pytest.raises(AssertionError) as exc_info: self.schema_obj.get_web_builder_response() - raise UserWarning("Should have hit an AssertionError") - except AssertionError as e: - assert e.args[0] == "Got error from schema builder: 'testing URL failure'" + assert exc_info.value.args[0] == "Got error from schema builder: 'testing URL failure'" @mock.patch("requests.get", side_effect=mocked_requests_get) def test_get_web_builder_response_waiting(self, mock_post): @@ -464,10 +448,7 @@ def test_get_web_builder_response_waiting(self, mock_post): def test_get_web_builder_response_saved(self, mock_post): """Mock launching the web builder""" self.schema_obj.web_schema_build_api_url = "valid_url_saved" - try: + with pytest.raises(AssertionError) as exc_info: self.schema_obj.get_web_builder_response() - raise UserWarning("Should have hit an AssertionError") - except AssertionError as e: - # Check that this is the expected AssertionError, as there are several - assert e.args[0].startswith("Response from schema builder did not pass validation") + assert exc_info.value.args[0].startswith("Response from schema builder did not pass validation") assert self.schema_obj.schema == {"foo": "bar"} diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py new file mode 100644 index 0000000000..552a2ab176 --- /dev/null +++ b/tests/test_subworkflows.py @@ -0,0 +1,143 @@ +""" Tests covering the subworkflows commands +""" + +import os +import shutil +import tempfile +import unittest + +import requests_mock + +import nf_core.create +import nf_core.modules +import nf_core.subworkflows + +from .utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL, OLD_SUBWORKFLOWS_SHA + + 
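The recurring refactor in the test_schema.py hunks above replaces the `try` / `raise UserWarning` / `except` scaffolding with `pytest.raises`, which both fails the test if no exception is raised and captures the exception for inspection. A minimal self-contained sketch of the pattern (the validator is a toy stand-in, not the real `nf_core.schema` code):

```python
import pytest


def validate_schema(schema):
    """Toy stand-in for the real validator: reject duplicate parameter names."""
    seen = set()
    for group in schema.get("definitions", {}).values():
        for param in group.get("properties", {}):
            assert param not in seen, f"Duplicate parameter found in schema `definitions`: `{param}`"
            seen.add(param)


def test_duplicate_params_rejected():
    schema = {
        "definitions": {
            "groupOne": {"properties": {"foo": {}}},
            "groupTwo": {"properties": {"foo": {}}},
        }
    }
    # pytest.raises fails the test on its own if nothing is raised, so the old
    # `raise UserWarning("Expected AssertionError")` guard becomes redundant.
    with pytest.raises(AssertionError) as exc_info:
        validate_schema(schema)
    assert exc_info.value.args[0] == "Duplicate parameter found in schema `definitions`: `foo`"
```

The same shape covers the `startswith` checks used in the web-builder tests.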
+def create_modules_repo_dummy(tmp_dir): + """Create a dummy copy of the nf-core/modules repo""" + + root_dir = os.path.join(tmp_dir, "modules") + os.makedirs(os.path.join(root_dir, "modules")) + os.makedirs(os.path.join(root_dir, "subworkflows")) + os.makedirs(os.path.join(root_dir, "subworkflows", "nf-core")) + os.makedirs(os.path.join(root_dir, "tests", "modules")) + os.makedirs(os.path.join(root_dir, "tests", "subworkflows")) + os.makedirs(os.path.join(root_dir, "tests", "config")) + with open(os.path.join(root_dir, "tests", "config", "pytest_modules.yml"), "w") as fh: + fh.writelines(["test:", "\n - modules/test/**", "\n - tests/modules/test/**"]) + with open(os.path.join(root_dir, ".nf-core.yml"), "w") as fh: + fh.writelines(["repository_type: modules", "\n", "org_path: nf-core", "\n"]) + + with requests_mock.Mocker() as mock:  # "mock" is unused here; the Mocker just keeps HTTP traffic off the network + subworkflow_create = nf_core.subworkflows.SubworkflowCreate(root_dir, "test_subworkflow", "@author", True) + subworkflow_create.create() + + return root_dir + + +class TestSubworkflows(unittest.TestCase): + """Class for subworkflows tests""" + + def setUp(self): + """Create a new pipeline and set up the subworkflow test objects""" + self.tmp_dir = tempfile.mkdtemp() + self.component_type = "subworkflows" + + # Set up the pipeline structure + root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") + self.pipeline_dir = os.path.join(self.tmp_dir, "mypipeline") + nf_core.create.PipelineCreate( + "mypipeline", "it is mine", "me", no_git=True, outdir=self.pipeline_dir, plain=True + ).init_pipeline() + + # Set up the nf-core/modules repo dummy + self.nfcore_modules = create_modules_repo_dummy(self.tmp_dir) + + # Set up install objects + self.subworkflow_install = nf_core.subworkflows.SubworkflowInstall(self.pipeline_dir, prompt=False, force=False) + self.subworkflow_install_gitlab = nf_core.subworkflows.SubworkflowInstall( + self.pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + self.subworkflow_install_old = nf_core.subworkflows.SubworkflowInstall( + self.pipeline_dir, + prompt=False, + force=False, + sha=OLD_SUBWORKFLOWS_SHA, + ) + self.subworkflow_install_module_change = nf_core.subworkflows.SubworkflowInstall( + self.pipeline_dir, + prompt=False, + force=False, + sha="8c343b3c8a0925949783dc547666007c245c235b", + ) + self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True) + + # Set up remove objects + self.subworkflow_remove = nf_core.subworkflows.SubworkflowRemove(self.pipeline_dir) + + def tearDown(self): + """Clean up temporary files and folders""" + + if os.path.exists(self.tmp_dir): + shutil.rmtree(self.tmp_dir) + + ################################################ + # Test of the individual subworkflow commands. 
# + ################################################ + + from .subworkflows.create import ( + test_subworkflows_create_fail_exists, + test_subworkflows_create_nfcore_modules, + test_subworkflows_create_succeed, + ) + from .subworkflows.info import ( + test_subworkflows_info_in_modules_repo, + test_subworkflows_info_local, + test_subworkflows_info_remote, + test_subworkflows_info_remote_gitlab, + ) + from .subworkflows.install import ( + test_subworkflow_install_nopipeline, + test_subworkflows_install_bam_sort_stats_samtools, + test_subworkflows_install_bam_sort_stats_samtools_twice, + test_subworkflows_install_different_branch_fail, + test_subworkflows_install_emptypipeline, + test_subworkflows_install_from_gitlab, + test_subworkflows_install_nosubworkflow, + test_subworkflows_install_tracking, + test_subworkflows_install_tracking_added_already_installed, + test_subworkflows_install_tracking_added_super_subworkflow, + ) + from .subworkflows.list import ( + test_subworkflows_install_and_list_subworkflows, + test_subworkflows_install_gitlab_and_list_subworkflows, + test_subworkflows_list_remote, + test_subworkflows_list_remote_gitlab, + ) + from .subworkflows.remove import ( + test_subworkflows_remove_included_subworkflow, + test_subworkflows_remove_one_of_two_subworkflow, + test_subworkflows_remove_subworkflow, + test_subworkflows_remove_subworkflow_keep_installed_module, + ) + from .subworkflows.subworkflows_test import ( + test_subworkflows_test_check_inputs, + test_subworkflows_test_no_installed_subworkflows, + test_subworkflows_test_no_name_no_prompts, + ) + from .subworkflows.update import ( + test_install_and_update, + test_install_at_hash_and_update, + test_install_at_hash_and_update_and_save_diff_to_file, + test_update_all, + test_update_all_linked_components_from_subworkflow, + test_update_all_subworkflows_from_module, + test_update_change_of_included_modules, + test_update_with_config_dont_update, + test_update_with_config_fix_all, + test_update_with_config_fixed_version, + test_update_with_config_no_updates, + ) diff --git a/tests/test_sync.py b/tests/test_sync.py index 2779f9e356..597e4375d3 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Tests covering the sync command """ @@ -7,8 +6,12 @@ import shutil import tempfile import unittest +from pathlib import Path from unittest import mock +import git +import pytest + import nf_core.create import nf_core.sync @@ -21,11 +24,17 @@ class TestModules(unittest.TestCase): def setUp(self): """Create a new pipeline to test""" self.tmp_dir = tempfile.mkdtemp() - self.pipeline_dir = os.path.join(self.tmp_dir, "test_pipeline") + self.pipeline_dir = os.path.join(self.tmp_dir, "testpipeline") + default_branch = "master" self.create_obj = nf_core.create.PipelineCreate( - "testing", "test pipeline", "tester", outdir=self.pipeline_dir, plain=True + "testing", "test pipeline", "tester", outdir=self.pipeline_dir, plain=True, default_branch=default_branch ) self.create_obj.init_pipeline() + self.remote_path = os.path.join(self.tmp_dir, "remote_repo") + self.remote_repo = git.Repo.init(self.remote_path, bare=True) + + if self.remote_repo.active_branch.name != "master": + self.remote_repo.active_branch.rename(default_branch) def tearDown(self): if os.path.exists(self.tmp_dir): @@ -35,54 +44,45 @@ def tearDown(self): def test_inspect_sync_dir_notgit(self, tmp_dir): """Try syncing an empty directory""" psync = nf_core.sync.PipelineSync(tmp_dir) - try: + with pytest.raises(nf_core.sync.SyncException) as 
exc_info: psync.inspect_sync_dir() - raise UserWarning("Should have hit an exception") - except nf_core.sync.SyncException as e: - assert "does not appear to be a git repository" in e.args[0] + assert "does not appear to be a git repository" in exc_info.value.args[0] def test_inspect_sync_dir_dirty(self): """Try syncing a pipeline with uncommitted changes""" # Add an empty file, uncommitted - test_fn = os.path.join(self.pipeline_dir, "uncommitted") - open(test_fn, "a").close() + test_fn = Path(self.pipeline_dir) / "uncommitted" + test_fn.touch() # Try to sync, check we halt with the right error psync = nf_core.sync.PipelineSync(self.pipeline_dir) try: - psync.inspect_sync_dir() - raise UserWarning("Should have hit an exception") - except nf_core.sync.SyncException as e: - os.remove(test_fn) - assert e.args[0].startswith("Uncommitted changes found in pipeline directory!") - except Exception as e: + with pytest.raises(nf_core.sync.SyncException) as exc_info: + psync.inspect_sync_dir() + assert exc_info.value.args[0].startswith("Uncommitted changes found in pipeline directory!") + finally: os.remove(test_fn) - raise e def test_get_wf_config_no_branch(self): """Try getting a workflow config when the branch doesn't exist""" # Try to sync, check we halt with the right error psync = nf_core.sync.PipelineSync(self.pipeline_dir, from_branch="foo") - try: + with pytest.raises(nf_core.sync.SyncException) as exc_info: psync.inspect_sync_dir() psync.get_wf_config() - raise UserWarning("Should have hit an exception") - except nf_core.sync.SyncException as e: - assert e.args[0] == "Branch `foo` not found!" + assert exc_info.value.args[0] == "Branch `foo` not found!" def test_get_wf_config_missing_required_config(self): """Try getting a workflow config, then make it miss a required config option""" # Try to sync, check we halt with the right error psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.required_config_vars = ["fakethisdoesnotexist"] - try: + with pytest.raises(nf_core.sync.SyncException) as exc_info: psync.inspect_sync_dir() psync.get_wf_config() - raise UserWarning("Should have hit an exception") - except nf_core.sync.SyncException as e: - # Check that we did actually get some config back - assert psync.wf_config["params.validate_params"] == "true" - # Check that we raised because of the missing fake config var - assert e.args[0] == "Workflow config variable `fakethisdoesnotexist` not found!" + # Check that we did actually get some config back + assert psync.wf_config["params.validate_params"] == "true" + # Check that we raised because of the missing fake config var + assert exc_info.value.args[0] == "Workflow config variable `fakethisdoesnotexist` not found!" 
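Two patterns recur in the test_sync.py hunks above: `pytest.raises` with `exc_info` for the expected-failure checks, and a `try`/`finally` wrapper where a temporary file must be cleaned up whether or not the exception fires. A small sketch with a toy stand-in for `inspect_sync_dir()` (none of the names below come from the real module):

```python
import shutil
import tempfile
from pathlib import Path

import pytest


def fail_if_dirty(directory):
    """Toy stand-in for inspect_sync_dir(): raise if any file is present."""
    if any(Path(directory).iterdir()):
        raise RuntimeError("Uncommitted changes found in pipeline directory!")


def test_fail_if_dirty():
    work_dir = tempfile.mkdtemp()
    (Path(work_dir) / "uncommitted").touch()
    try:
        # pytest.raises sits inside try/finally, so the cleanup runs whether
        # or not the expected exception actually fires.
        with pytest.raises(RuntimeError) as exc_info:
            fail_if_dirty(work_dir)
        assert exc_info.value.args[0].startswith("Uncommitted changes")
    finally:
        shutil.rmtree(work_dir)
```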
def test_checkout_template_branch(self): """Try checking out the TEMPLATE branch of the pipeline""" @@ -91,6 +91,18 @@ def test_checkout_template_branch(self): psync.get_wf_config() psync.checkout_template_branch() + def test_checkout_template_branch_no_template(self): + """Try checking out the TEMPLATE branch of the pipeline when it does not exist""" + psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + psync.get_wf_config() + + psync.repo.delete_head("TEMPLATE") + + with pytest.raises(nf_core.sync.SyncException) as exc_info: + psync.checkout_template_branch() + assert exc_info.value.args[0] == "Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'" + def test_delete_template_branch_files(self): """Confirm that we can delete all files in the TEMPLATE branch""" psync = nf_core.sync.PipelineSync(self.pipeline_dir) @@ -132,8 +144,8 @@ def test_commit_template_changes_changes(self): psync.get_wf_config() psync.checkout_template_branch() # Add an empty file, uncommitted - test_fn = os.path.join(self.pipeline_dir, "uncommitted") - open(test_fn, "a").close() + test_fn = Path(self.pipeline_dir) / "uncommitted" + test_fn.touch() # Check that we have uncommitted changes assert psync.repo.is_dirty(untracked_files=True) is True # Function returns True if no changes were made @@ -149,15 +161,68 @@ def test_push_template_branch_error(self): psync.get_wf_config() psync.checkout_template_branch() # Add an empty file and commit it - test_fn = os.path.join(self.pipeline_dir, "uncommitted") - open(test_fn, "a").close() + test_fn = Path(self.pipeline_dir) / "uncommitted" + test_fn.touch() psync.commit_template_changes() # Try to push changes - try: + with pytest.raises(nf_core.sync.PullRequestException) as exc_info: psync.push_template_branch() - raise UserWarning("Should have hit an exception") - except nf_core.sync.PullRequestException as e: - assert e.args[0].startswith("Could not push TEMPLATE branch") + assert exc_info.value.args[0].startswith("Could not push TEMPLATE branch") + + def test_create_merge_base_branch(self): + """Try creating a merge base branch""" + psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + psync.get_wf_config() + + psync.create_merge_base_branch() + + assert psync.merge_branch in psync.repo.branches + + def test_create_merge_base_branch_thrice(self): + """Try creating a merge base branch thrice + + This is needed because the first time this function is called, the + merge branch does not exist yet (it is only created at the end of the + create_merge_base_branch function) and the if-statement is ignored. + Also, the second time this function is called, the existing merge + branch only has the base format, i.e. without the -{branch_no} at the + end, so it is needed to call it a third time to make sure this is + picked up. 
+ """ + psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + psync.get_wf_config() + + for _ in range(3): + psync.create_merge_base_branch() + + assert psync.merge_branch in psync.repo.branches + for branch_no in [2, 3]: + assert f"{psync.original_merge_branch}-{branch_no}" in psync.repo.branches + + def test_push_merge_branch(self): + """Try pushing merge branch""" + psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + psync.get_wf_config() + psync.repo.create_remote("origin", self.remote_path) + + psync.create_merge_base_branch() + psync.push_merge_branch() + + assert psync.merge_branch in [b.name for b in self.remote_repo.branches] + + def test_push_merge_branch_without_create_branch(self): + """Try pushing merge branch without creating first""" + psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + psync.get_wf_config() + psync.repo.create_remote("origin", self.remote_path) + + with pytest.raises(nf_core.sync.PullRequestException) as exc_info: + psync.push_merge_branch() + assert exc_info.value.args[0].startswith(f"Could not push branch '{psync.merge_branch}'") def mocked_requests_get(url, **kwargs): """Helper function to emulate POST requests responses from the web""" @@ -175,10 +240,28 @@ def __init__(self, data, status_code): def json(self): return self.data - url_template = "https://api.github.com/repos/{}/response/pulls?head=TEMPLATE&base=None" - if url == url_template.format("no_existing_pr"): + url_template = "https://api.github.com/repos/{}/response/" + if url == os.path.join(url_template.format("no_existing_pr"), "pulls?head=TEMPLATE&base=None"): response_data = [] return MockResponse(response_data, 200) + if url == os.path.join(url_template.format("list_prs"), "pulls"): + response_data = [ + { + "state": "closed", + "head": {"ref": "nf-core-template-merge-2"}, + "base": {"ref": "master"}, + "html_url": "pr_url", + } + ] + [ + { + "state": "open", + "head": {"ref": f"nf-core-template-merge-{branch_no}"}, + "base": {"ref": "master"}, + "html_url": "pr_url", + } + for branch_no in range(3, 7) + ] + return MockResponse(response_data, 200) return MockResponse({"html_url": url}, 404) @@ -246,8 +329,97 @@ def test_make_pull_request_bad_response(self, mock_post, mock_get): psync.gh_username = "bad_url" psync.gh_repo = "bad_url/response" os.environ["GITHUB_AUTH_TOKEN"] = "test" - try: + with pytest.raises(nf_core.sync.PullRequestException) as exc_info: psync.make_pull_request() - raise UserWarning("Should have hit an exception") - except nf_core.sync.PullRequestException as e: - assert e.args[0].startswith("Something went badly wrong - GitHub API PR failed - got return code 404") + assert exc_info.value.args[0].startswith( + "Something went badly wrong - GitHub API PR failed - got return code 404" + ) + + @mock.patch("nf_core.utils.gh_api.get", side_effect=mocked_requests_get) + def test_close_open_template_merge_prs(self, mock_get): + """Try closing all open prs""" + psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + psync.get_wf_config() + psync.gh_api.get = mock_get + psync.gh_username = "list_prs" + psync.gh_repo = "list_prs/response" + os.environ["GITHUB_AUTH_TOKEN"] = "test" + + with mock.patch("nf_core.sync.PipelineSync.close_open_pr") as mock_close_open_pr: + psync.close_open_template_merge_prs() + + prs = mock_get(f"https://api.github.com/repos/{psync.gh_repo}/pulls").data + for pr in prs: + if pr["state"] == "open": + mock_close_open_pr.assert_any_call(pr) + 
+ @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) + @mock.patch("nf_core.utils.gh_api.patch", side_effect=mocked_requests_patch) + def test_close_open_pr(self, mock_patch, mock_post): + psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + psync.get_wf_config() + psync.gh_api.post = mock_post + psync.gh_api.patch = mock_patch + psync.gh_username = "bad_url" + psync.gh_repo = "bad_url/response" + os.environ["GITHUB_AUTH_TOKEN"] = "test" + pr = { + "state": "open", + "head": {"ref": "nf-core-template-merge-3"}, + "base": {"ref": "master"}, + "html_url": "pr_html_url", + "url": "url_to_update_pr", + "comments_url": "pr_comments_url", + } + + assert psync.close_open_pr(pr) + assert mock_patch.called_once_with("url_to_update_pr") + + @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) + @mock.patch("nf_core.utils.gh_api.patch", side_effect=mocked_requests_patch) + def test_close_open_pr_fail(self, mock_patch, mock_post): + psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + psync.get_wf_config() + psync.gh_api.post = mock_post + psync.gh_api.patch = mock_patch + psync.gh_username = "bad_url" + psync.gh_repo = "bad_url/response" + os.environ["GITHUB_AUTH_TOKEN"] = "test" + pr = { + "state": "open", + "head": {"ref": "nf-core-template-merge-3"}, + "base": {"ref": "master"}, + "html_url": "pr_html_url", + "url": "bad_url_to_update_pr", + "comments_url": "pr_comments_url", + } + + assert not psync.close_open_pr(pr) + assert mock_patch.called_once_with("bad_url_to_update_pr") + + def test_reset_target_dir(self): + """Try resetting target pipeline directory""" + psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + psync.get_wf_config() + + psync.repo.git.checkout("dev") + + psync.reset_target_dir() + + assert psync.repo.heads[0].name == "TEMPLATE" + + def test_reset_target_dir_fake_branch(self): + """Try resetting target pipeline directory but original branch does not exist""" + psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + psync.get_wf_config() + + psync.original_branch = "fake_branch" + + with pytest.raises(nf_core.sync.SyncException) as exc_info: + psync.reset_target_dir() + assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`") diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py new file mode 100644 index 0000000000..c4e3d49ae0 --- /dev/null +++ b/tests/test_test_utils.py @@ -0,0 +1,49 @@ +import tempfile +from pathlib import Path + +import pytest + +from .utils import set_wd, with_temporary_file, with_temporary_folder + + +def test_with_temporary_file(): + @with_temporary_file + def tmp_file_exists(tmp_file): + assert Path(tmp_file.name).exists() + + tmp_file_exists() + + +def test_does_not_exist_after(): + tmp_file = with_temporary_file(lambda x: x.name)() + assert not Path(tmp_file).exists() + + +def test_with_temporary_folder(): + @with_temporary_folder + def tmp_folder_exists(tmp_folder): + assert Path(tmp_folder).exists() + + tmp_folder_exists() + + +def test_tmp_folder_does_not_exist_after(): + tmp_folder = with_temporary_folder(lambda x: x)() + assert not Path(tmp_folder).exists() + + +def test_set_wd(): + with tempfile.TemporaryDirectory() as tmpdirname: + with set_wd(tmpdirname): + context_wd = Path().resolve() + assert context_wd == Path(tmpdirname).resolve() + assert context_wd != Path().resolve() + + +def test_set_wd_revert_on_raise(): + wd_before_context = 
Path().resolve() + with tempfile.TemporaryDirectory() as tmpdirname: + with pytest.raises(Exception): + with set_wd(tmpdirname): + raise Exception + assert wd_before_context == Path().resolve() diff --git a/tests/test_utils.py b/tests/test_utils.py index f914d675a5..2ab5b64bfc 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Tests covering for utility functions. """ @@ -120,7 +119,7 @@ def test_list_files_no_git(self, tmpdir): """Test listing pipeline files without `git-ls`""" # Create a test file in a temporary directory tmp_fn = os.path.join(tmpdir, "testfile") - open(tmp_fn, "a").close() + Path(tmp_fn).touch() pipeline_obj = nf_core.utils.Pipeline(tmpdir) pipeline_obj._list_files() assert tmp_fn in pipeline_obj.files diff --git a/tests/utils.py b/tests/utils.py index 03bfe272a0..77e94be464 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,25 +1,29 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- """ Helper functions for tests """ import functools +import os import tempfile +from contextlib import contextmanager +from pathlib import Path -OLD_TRIMGALORE_SHA = "20d8250d9f39ddb05dfb437603aaf99b5c0b2b41" +OLD_TRIMGALORE_SHA = "06348dffce2a732fc9e656bdc5c64c3e02d302cb" +OLD_TRIMGALORE_BRANCH = "mimic-old-trimgalore" GITLAB_URL = "https://gitlab.com/nf-core/modules-test.git" -GITLAB_REPO = "nf-core/modules-test" +GITLAB_REPO = "nf-core" GITLAB_DEFAULT_BRANCH = "main" +GITLAB_SUBWORKFLOWS_BRANCH = "subworkflows" +OLD_SUBWORKFLOWS_SHA = "f3c078809a2513f1c95de14f6633fe1f03572fdb" # Branch test stuff GITLAB_BRANCH_TEST_BRANCH = "branch-tester" -GITLAB_BRANCH_TEST_OLD_SHA = "eb4bc244de7eaef8e8ff0d451e4ca2e4b2c29821" -GITLAB_BRANCH_TEST_NEW_SHA = "e43448a2cc17d59e085c4d3f77489af5a4dcc26d" +GITLAB_BRANCH_TEST_OLD_SHA = "bce3f17980b8d1beae5e917cfd3c65c0c69e04b5" +GITLAB_BRANCH_TEST_NEW_SHA = "2f5f180f6e705bb81d6e7742dc2f24bf4a0c721e" def with_temporary_folder(func): """ - Call the decorated funtion under the tempfile.TemporaryDirectory + Call the decorated function under the tempfile.TemporaryDirectory context manager. Pass the temporary directory name to the decorated function """ @@ -34,7 +38,7 @@ def wrapper(*args, **kwargs): def with_temporary_file(func): """ - Call the decorated funtion under the tempfile.NamedTemporaryFile + Call the decorated function under the tempfile.NamedTemporaryFile context manager. Pass the opened file handle to the decorated function """ @@ -44,3 +48,55 @@ def wrapper(*args, **kwargs): return func(*args, tmpfile, **kwargs) return wrapper + + +@contextmanager +def set_wd(path: Path): + """Sets the working directory for this context. + + Arguments + --------- + + path : Path + Path to the working directory to be used inside this context. 
+ """ + start_wd = Path().absolute() + os.chdir(path) + try: + yield + finally: + os.chdir(start_wd) + + +def mock_api_calls(mock, module, version): + """Mock biocontainers and anaconda api calls for module""" + biocontainers_api_url = ( + f"https://api.biocontainers.pro/ga4gh/trs/v2/tools/{module}/versions/{module}-{version.split('--')[0]}" + ) + anaconda_api_url = f"https://api.anaconda.org/package/bioconda/{module}" + anaconda_mock = { + "status_code": 200, + "latest_version": version.split("--")[0], + "summary": "", + "doc_url": "", + "dev_url": "", + "files": [{"version": version.split("--")[0]}], + "license": "", + } + biocontainers_mock = { + "status_code": 200, + "images": [ + { + "image_type": "Singularity", + "image_name": f"https://depot.galaxyproject.org/singularity/{module}:{version}", + "updated": "2021-09-04T00:00:00Z", + }, + { + "image_type": "Docker", + "image_name": f"quay.io/biocontainers/{module}:{version}", + "updated": "2021-09-04T00:00:00Z", + }, + ], + } + mock.register_uri("GET", anaconda_api_url, json=anaconda_mock) + mock.register_uri("GET", biocontainers_api_url, json=biocontainers_mock)