diff --git a/.editorconfig b/.editorconfig
index 014c2383bd..449f446a3b 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -10,3 +10,13 @@ indent_style = space
 [*.{md,yml,yaml,html,css,scss,js,cff}]
 indent_size = 2
+
+# don't enforce indent style for python files, certain markdown files and Makefiles
+[*.py]
+indent_style = unset
+
+[**/{CONTRIBUTING,README}.md]
+indent_style = unset
+
+[**/Makefile]
+indent_style = unset
diff --git a/.github/.coveragerc b/.github/.coveragerc
index 522a29eb62..24a419ae07 100644
--- a/.github/.coveragerc
+++ b/.github/.coveragerc
@@ -1,2 +1,5 @@
 [run]
-omit = nf_core/pipeline-template/*
+omit = nf_core/*-template/*
+source = nf_core
+relative_files = True
+
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index 75da414db6..04d327bd8c 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -35,45 +35,28 @@ pip install -e .

 ## Code formatting

-### Black
+### Ruff

-All Python code in nf-core/tools must be passed through the [Black Python code formatter](https://black.readthedocs.io/en/stable/).
+All Python code in nf-core/tools must be passed through the [Ruff code linter and formatter](https://github.com/astral-sh/ruff).
 This ensures a harmonised code formatting style throughout the package, from all contributors.

-You can run Black on the command line (it's included in `requirements-dev.txt`) - eg. to run recursively on the whole repository:
+You can run Ruff on the command line (it's included in `requirements-dev.txt`) - e.g. to run recursively on the whole repository:

 ```bash
-black .
+ruff format .
 ```

-Alternatively, Black has [integrations for most common editors](https://black.readthedocs.io/en/stable/editor_integration.html)
+Alternatively, Ruff has [integrations for most common editors](https://github.com/astral-sh/ruff-lsp) and [VSCode](https://github.com/astral-sh/ruff-vscode)
 to automatically format code when you hit save.
-You can also set it up to run when you [make a commit](https://black.readthedocs.io/en/stable/version_control_integration.html).

 There is an automated CI check that runs when you open a pull-request to nf-core/tools that will fail if
-any code does not adhere to Black formatting.
+any code does not adhere to Ruff formatting.

-### isort
-
-All Python code must also be passed through [isort](https://pycqa.github.io/isort/index.html).
-This ensures a harmonised imports throughout the package, from all contributors.
-
-To run isort on the command line recursively on the whole repository you can use:
-
-```bash
-isort .
-```
-
-isort also has [plugins for most common editors](https://github.com/pycqa/isort/wiki/isort-Plugins)
-to automatically format code when you hit save.
-Or [version control integration](https://pycqa.github.io/isort/docs/configuration/pre-commit.html) to set it up to run when you make a commit.
-
-There is an automated CI check that runs when you open a pull-request to nf-core/tools that will fail if
-any code does not adhere to isort formatting.
+Ruff has been adopted for linting and formatting, replacing Black, isort (for import sorting) and pyupgrade. It also covers the Flake8 rule set.

 ### pre-commit hooks

-This repository comes with [pre-commit](https://pre-commit.com/) hooks for black, isort and Prettier. pre-commit automatically runs checks before a commit is committed into the git history. If all checks pass, the commit is made, if files are changed by the pre-commit hooks, the user is informed and has to stage the changes and attempt the commit again.
+This repository comes with [pre-commit](https://pre-commit.com/) hooks for ruff and Prettier. pre-commit automatically runs checks before a commit is committed into the git history. If all checks pass, the commit is made; if files are changed by the pre-commit hooks, the user is informed and has to stage the changes and attempt the commit again.

 You can use the pre-commit hooks if you like, but you don't have to. The CI on Github will run the same checks as the tools installed with pre-commit. If the pre-commit checks pass, then the same checks in the CI will pass, too.
diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml
index dd64ffa3e5..54dee6df16 100644
--- a/.github/workflows/branch.yml
+++ b/.github/workflows/branch.yml
@@ -18,7 +18,7 @@ jobs:
       # If the above check failed, post a comment on the PR explaining the failure
       - name: Post PR comment
         if: failure()
-        uses: mshick/add-pr-comment@v1
+        uses: mshick/add-pr-comment@v2
         with:
           message: |
             ## This PR is against the `master` branch :x:
diff --git a/.github/workflows/changelog.py b/.github/workflows/changelog.py
new file mode 100644
index 0000000000..2ce14e60b3
--- /dev/null
+++ b/.github/workflows/changelog.py
@@ -0,0 +1,226 @@
+"""
+Taken from https://github.com/MultiQC/MultiQC/blob/main/.github/workflows/changelog.py and updated for nf-core
+
+To be called by a CI action. Assumes the following environment variables are set:
+PR_TITLE, PR_NUMBER, GITHUB_WORKSPACE.
+
+Adds a line into the CHANGELOG.md:
+* Looks for the section to add the line to, based on the PR title, e.g. `Template:`, `Modules:`.
+* All other changes will go under the "### General" section.
+* If an entry for the PR is already added, it will not run.
+
+Other assumptions:
+- CHANGELOG.md has a running section for an ongoing "dev" version
+(i.e. titled "## nf-core vX.Ydev").
+"""
+
+import os
+import re
+import subprocess
+import sys
+from pathlib import Path
+from typing import List
+
+REPO_URL = "https://github.com/nf-core/tools"
+
+# Assumes the environment is set by the GitHub action.
+pr_title = os.environ["PR_TITLE"]
+pr_number = os.environ["PR_NUMBER"]
+comment = os.environ.get("COMMENT", "")
+workspace_path = Path(os.environ.get("GITHUB_WORKSPACE", ""))
+
+assert pr_title, pr_title
+assert pr_number, pr_number
+
+# Trim the PR number added when GitHub squashes commits, e.g. "Template: Updated (#2026)"
+pr_title = pr_title.removesuffix(f" (#{pr_number})")
+
+changelog_path = workspace_path / "CHANGELOG.md"
+
+if any(
+    line in pr_title.lower()
+    for line in [
+        "skip changelog",
+        "skip change log",
+        "no changelog",
+        "no change log",
+        "bump version",
+    ]
+):
+    print("Skipping changelog update")
+    sys.exit(0)
+
+
+def _run_cmd(cmd):
+    print(cmd)
+    result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
+    if result.returncode != 0:
+        raise RuntimeError(f"Error executing command: {result.stderr}")
+    return result
+
+
+def _determine_change_type(pr_title) -> str:
+    """
+    Determine the type of the PR: Template, Download, Linting, Modules, Subworkflows, or General
+    Returns the matching section header.
+    """
+    sections = {
+        "Template": "### Template updates",
+        "Download": "### Download updates",
+        "Linting": "### Linting updates",
+        "Modules": "### Modules",
+        "Subworkflows": "### Subworkflows",
+    }
+    current_section = "### General"
+
+    # Check if the PR title matches any of the sections.
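+    # (e.g. "Linting" reduces to "lint" and "Modules" to "module", so a PR
+    # titled "Update module inputs" is filed under "### Modules")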
+    for section, section_header in sections.items():
+        # check if the PR title contains any of the section headers, with some loose matching, e.g. removing plural and suffixes
+        if re.sub(r"s$", "", section.lower().replace("ing", "")) in pr_title.lower():
+            current_section = section_header
+
+    return current_section
+
+
+# Determine the type of the PR: new module, module update, or core update.
+section = _determine_change_type(pr_title)
+
+# Remove section indicator from the PR title.
+pr_title = re.sub(rf"{section}[:\s]*", "", pr_title, flags=re.IGNORECASE)
+
+# Prepare the change log entry.
+pr_link = f"([#{pr_number}]({REPO_URL}/pull/{pr_number}))"
+
+# Handle manual changelog entries through comments.
+if comment := comment.removeprefix("@nf-core-bot changelog").strip():
+    pr_title = comment
+new_lines = [
+    f"- {pr_title} {pr_link}\n",
+]
+
+# Finally, updating the changelog.
+# Read the current changelog lines. We will print them back as is, except for one new
+# entry, corresponding to this new PR.
+with changelog_path.open("r") as f:
+    orig_lines = f.readlines()
+updated_lines: List[str] = []
+
+
+def _skip_existing_entry_for_this_pr(line: str, same_section: bool = True) -> str:
+    if line.strip().endswith(pr_link):
+        existing_lines = [line]
+        if new_lines and new_lines == existing_lines and same_section:
+            print(f"Found existing identical entry for this pull request #{pr_number} in the same section:")
+            print("".join(existing_lines))
+            sys.exit(0)  # Just leaving the CHANGELOG intact
+        else:
+            print(
+                f"Found existing entry for this pull request #{pr_number}. It will be replaced and/or moved to the proper section"
+            )
+            print("".join(existing_lines))
+            for _ in range(len(existing_lines)):
+                try:
+                    line = orig_lines.pop(0)
+                except IndexError:
+                    break
+    return line
+
+
+# Find the next version header line in the changelog, e.g. "# v2.12dev".
+# If it doesn't exist, exit with code 1 (let's assume that a new section is added
+# manually or by CI when a release is pushed).
+# Else, find the next line that matches the `section` variable, and insert a new line
+# under it (we also assume that section headers are added already).
+inside_version_dev = False
+already_added_entry = False
+while orig_lines:
+    line = orig_lines.pop(0)
+
+    # If the line already contains a link to the PR, don't add it again.
+    line = _skip_existing_entry_for_this_pr(line, same_section=False)
+
+    if line.startswith("# ") and not line.strip() == "# nf-core/tools: Changelog":  # Version header, e.g. "# v2.12dev"
+        updated_lines.append(line)
+
+        # Parse version from the line `# v2.12dev` or
+        # `# [v2.11.1 - Magnesium Dragon Patch](https://github.com/nf-core/tools/releases/tag/2.11) - [2023-12-20]` ...
+        if not (m := re.match(r".*(v\d+\.\d+(dev)?).*", line)):
+            print(f"Cannot parse version from line {line.strip()}.", file=sys.stderr)
+            sys.exit(1)
+        version = m.group(1)
+
+        if not inside_version_dev:
+            if not version.endswith("dev"):
+                print(
+                    "Can't find a 'dev' version section in the changelog. Make sure "
+                    "it's created, and all the required sections, e.g. `### Template` are created under it.",
+                    file=sys.stderr,
+                )
+                sys.exit(1)
+            inside_version_dev = True
+        else:
+            if version.endswith("dev"):
+                print(
+                    f"Found another 'dev' version section in the changelog, make "
+                    f"sure to change it to a 'release' stable version tag. "
+                    f"Line: {line.strip()}",
+                    file=sys.stderr,
+                )
+                sys.exit(1)
+            # We are past the dev version, so just add back the rest of the lines and break.
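+            # (while copying, any stray entry for this PR in an older section is
+            # still dropped via _skip_existing_entry_for_this_pr)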
+            while orig_lines:
+                line = orig_lines.pop(0)
+                line = _skip_existing_entry_for_this_pr(line, same_section=False)
+                if line:
+                    updated_lines.append(line)
+            break
+        continue
+
+    if inside_version_dev and line.lower().startswith(section.lower()):  # Section of interest header
+        if already_added_entry:
+            print(
+                f"Already added new lines into section {section}, is the section duplicated?",
+                file=sys.stderr,
+            )
+            sys.exit(1)
+        updated_lines.append(line)
+        # Collecting lines until the next section.
+        section_lines: List[str] = []
+        while True:
+            line = orig_lines.pop(0)
+            if line.startswith("#"):
+                # Found the next section header, so we need to write out all the lines we collected.
+                updated_lines.append("\n")
+                _updated_lines = [_l for _l in section_lines + new_lines if _l.strip()]
+                updated_lines.extend(_updated_lines)
+                updated_lines.append("\n")
+                if new_lines:
+                    print(f"Updated {changelog_path} section '{section}' with lines:\n" + "".join(new_lines))
+                else:
+                    print(f"Removed existing entry from {changelog_path} section '{section}'")
+                already_added_entry = True
+                # Pushing back the next section header line
+                orig_lines.insert(0, line)
+                break
+            # If the line already contains a link to the PR, don't add it again.
+            line = _skip_existing_entry_for_this_pr(line, same_section=True)
+            section_lines.append(line)
+    else:
+        updated_lines.append(line)
+
+
+def collapse_newlines(lines: List[str]) -> List[str]:
+    updated = []
+    for idx in range(len(lines)):
+        if idx != 0 and not lines[idx].strip() and not lines[idx - 1].strip():
+            continue
+        updated.append(lines[idx])
+    return updated
+
+
+updated_lines = collapse_newlines(updated_lines)
+
+
+# Finally, writing the updated lines back.
+with changelog_path.open("w") as f:
+    f.writelines(updated_lines)
diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml
new file mode 100644
index 0000000000..4e632fbe4d
--- /dev/null
+++ b/.github/workflows/changelog.yml
@@ -0,0 +1,88 @@
+name: Update CHANGELOG.md
+on:
+  issue_comment:
+    types: [created]
+  pull_request_target:
+    types: [opened]
+
+jobs:
+  update_changelog:
+    runs-on: ubuntu-latest
+    # Run if comment is on a PR in the main repo, and if it contains the magic keywords.
+    # Or run on PR creation, unless asked otherwise in the title.
+    if: |
+      github.repository_owner == 'nf-core' && (
+        github.event_name == 'pull_request_target' ||
+        github.event.issue.pull_request && startsWith(github.event.comment.body, '@nf-core-bot changelog')
+      )
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }}
+
+      # Action runs on the issue comment, so we don't get the PR by default.
+ # Use the GitHub CLI to check out the PR: + - name: Checkout Pull Request + env: + GH_TOKEN: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} + run: | + if [[ "${{ github.event_name }}" == "issue_comment" ]]; then + PR_NUMBER="${{ github.event.issue.number }}" + elif [[ "${{ github.event_name }}" == "pull_request_target" ]]; then + PR_NUMBER="${{ github.event.pull_request.number }}" + fi + gh pr checkout $PR_NUMBER + + - uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install packages + run: | + python -m pip install --upgrade pip + pip install pyyaml + + - name: Update CHANGELOG.md from the PR title + env: + COMMENT: ${{ github.event.comment.body }} + GH_TOKEN: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} + run: | + if [[ "${{ github.event_name }}" == "issue_comment" ]]; then + export PR_NUMBER='${{ github.event.issue.number }}' + export PR_TITLE='${{ github.event.issue.title }}' + elif [[ "${{ github.event_name }}" == "pull_request_target" ]]; then + export PR_NUMBER='${{ github.event.pull_request.number }}' + export PR_TITLE='${{ github.event.pull_request.title }}' + fi + python ${GITHUB_WORKSPACE}/.github/workflows/changelog.py + + - name: Check if CHANGELOG.md actually changed + run: | + git diff --exit-code ${GITHUB_WORKSPACE}/CHANGELOG.md || echo "changed=YES" >> $GITHUB_ENV + echo "File changed: ${{ env.changed }}" + + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: 3.11 + cache: "pip" + + - name: Install pre-commit + run: pip install pre-commit + + - name: Run pre-commit checks + if: env.changed == 'YES' + run: | + pre-commit run --all-files + + - name: Commit and push changes + if: env.changed == 'YES' + run: | + git config user.email "core@nf-co.re" + git config user.name "nf-core-bot" + git config push.default upstream + git add ${GITHUB_WORKSPACE}/CHANGELOG.md + git status + git commit -m "[automated] Update CHANGELOG.md [no ci]" + git push diff --git a/.github/workflows/clean-up.yml b/.github/workflows/clean-up.yml index 4b55c5e4aa..ff311f9df8 100644 --- a/.github/workflows/clean-up.yml +++ b/.github/workflows/clean-up.yml @@ -10,7 +10,7 @@ jobs: issues: write pull-requests: write steps: - - uses: actions/stale@v7 + - uses: actions/stale@v9 with: stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days." stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful." 
diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 57dbe86d65..0119efcd41 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -26,15 +26,22 @@ jobs: - "23.04.0" - "latest-everything" steps: + - name: go to subdirectory and change nextflow workdir + run: | + mkdir -p create-lint-wf + cd create-lint-wf + export NXF_WORK=$(pwd) + # Get the repo code - uses: actions/checkout@v4 name: Check out source-code repository # Set up nf-core/tools - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 + cache: pip - name: Install python dependencies run: | @@ -47,14 +54,6 @@ jobs: with: version: ${{ matrix.NXF_VER }} - # Install the Prettier linting tools - - uses: actions/setup-node@v4 - with: - node-version: "20" - - - name: Install Prettier and editorconfig-checker - run: npm install -g prettier editorconfig-checker - # Build a pipeline from the template - name: nf-core create run: | @@ -73,11 +72,8 @@ jobs: working-directory: create-lint-wf # Run code style linting - - name: Run Prettier --check - run: prettier --check create-lint-wf/nf-core-testpipeline - - - name: Run ECLint check - run: editorconfig-checker -exclude README.md $(find nf-core-testpipeline/.* -type f | grep -v '.git\|.py\|md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile') + - name: run pre-commit + run: pre-commit run --all-files working-directory: create-lint-wf # Update modules to the latest version @@ -142,7 +138,11 @@ jobs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: nf-core-log-file + name: nf-core-log-file-${{ matrix.NXF_VER }} path: create-lint-wf/log.txt + + - name: Cleanup work directory + run: sudo rm -rf create-lint-wf + if: always() diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 37cbf65c7d..3805c1a240 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -20,24 +20,37 @@ env: jobs: RunTestWorkflow: - runs-on: ubuntu-latest + runs-on: ${{ matrix.runner }} env: NXF_ANSI_LOG: false strategy: matrix: TEMPLATE: - - "template_skip_all.yml" - "template_skip_github_badges.yml" - "template_skip_igenomes.yml" - "template_skip_ci.yml" - - "template_skip_nf_core_configs.yml" + runner: ["self-hosted"] + profile: ["self_hosted_runner"] + include: + - TEMPLATE: "template_skip_all.yml" + runner: ubuntu-latest + profile: "docker" + - TEMPLATE: "template_skip_nf_core_configs.yml" + runner: ubuntu-latest + profile: "docker" steps: + - name: go to working directory + run: | + mkdir -p create-lint-wf-template + cd create-lint-wf-template + export NXF_WORK=$(pwd) + - uses: actions/checkout@v4 name: Check out source-code repository - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 @@ -51,18 +64,6 @@ jobs: with: version: latest-everything - # Install the Prettier linting tools - - uses: actions/setup-node@v4 - with: - node-version: "20" - - - name: Install Prettier - run: npm install -g prettier - - # Install the editorconfig linting tools - - name: Install editorconfig-checker - run: npm install -g editorconfig-checker - # Create template files - name: Create template skip all (except github) run: | @@ -95,7 +96,7 @@ jobs: - name: run the pipeline 
run: | cd create-test-lint-wf - nextflow run my-prefix-testpipeline -profile test,docker --outdir ./results + nextflow run my-prefix-testpipeline -profile test,${{matrix.profile}} --outdir ./results # Remove results folder before linting - name: remove results folder @@ -107,11 +108,8 @@ jobs: run: nf-core --log-file log.txt sync --dir create-test-lint-wf/my-prefix-testpipeline/ # Run code style linting - - name: Run Prettier --check - run: prettier --check create-test-lint-wf/my-prefix-testpipeline - - - name: Run ECLint check - run: editorconfig-checker -exclude README.md $(find my-prefix-testpipeline/.* -type f | grep -v '.git\|.py\|md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile') + - name: Run pre-commit + run: pre-commit run --all-files working-directory: create-test-lint-wf # Remove TODO statements @@ -145,7 +143,11 @@ jobs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: nf-core-log-file + name: nf-core-log-file-${{ matrix.TEMPLATE }} path: create-test-lint-wf/artifact_files.tar + + - name: Cleanup work directory + run: sudo rm -rf create-test-lint-wf + if: always() diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 026b0a889b..e128e16a36 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -17,7 +17,7 @@ env: jobs: RunTestWorkflow: - runs-on: ubuntu-latest + runs-on: self-hosted env: NXF_ANSI_LOG: false strategy: @@ -26,11 +26,17 @@ jobs: - "23.04.0" - "latest-everything" steps: + - name: go to working directory + run: | + mkdir -p create-test-wf + cd create-test-wf + export NXF_WORK=$(pwd) + - uses: actions/checkout@v4 name: Check out source-code repository - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 @@ -49,11 +55,16 @@ jobs: mkdir create-test-wf && cd create-test-wf export NXF_WORK=$(pwd) nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain - nextflow run nf-core-testpipeline -profile test,docker --outdir ./results + nextflow run nf-core-testpipeline -profile test,self_hosted_runner --outdir ./results - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: nf-core-log-file + name: nf-core-log-file-${{ matrix.NXF_VER }} path: create-test-wf/log.txt + + - name: Cleanup work directory + # cleanup work directory + run: sudo rm -rf create-test-wf + if: always() diff --git a/.github/workflows/deploy-pypi.yml b/.github/workflows/deploy-pypi.yml index 62c53508d8..8d3a154d80 100644 --- a/.github/workflows/deploy-pypi.yml +++ b/.github/workflows/deploy-pypi.yml @@ -17,7 +17,7 @@ jobs: name: Check out source-code repository - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml index 4184bc5e59..5e857e8dbb 100644 --- a/.github/workflows/fix-linting.yml +++ b/.github/workflows/fix-linting.yml @@ -10,7 +10,7 @@ jobs: contains(github.event.comment.html_url, '/pull/') && contains(github.event.comment.body, '@nf-core-bot fix linting') && github.repository == 'nf-core/tools' - runs-on: ubuntu-latest + runs-on: self-hosted steps: # Use the @nf-core-bot token to check out so we can push later - uses: actions/checkout@v4 @@ 
-24,31 +24,17 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} - - uses: actions/setup-node@v4 - with: - node-version: "20" - - - name: Install Prettier - run: npm install -g prettier @prettier/plugin-php - - - name: Run 'prettier --write' - run: prettier --write ${GITHUB_WORKSPACE} - - - name: Run Black - uses: psf/black@stable - with: - # Override to remove the default --check flag so that we make changes - options: "--color" - - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 - - name: python-isort - uses: isort/isort-action@v1.0.0 - with: - isortVersion: "latest" - requirementsFiles: "requirements.txt requirements-dev.txt" + cache: "pip" + + - name: Install pre-commit + run: pip install pre-commit + + - name: Run pre-commit + run: pre-commit run --all-files - name: Commit & push changes run: | diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml index 23972c56f6..d9847dd365 100644 --- a/.github/workflows/lint-code.yml +++ b/.github/workflows/lint-code.yml @@ -13,119 +13,21 @@ concurrency: cancel-in-progress: true jobs: - EditorConfig: - runs-on: ["self-hosted"] + Pre-commit: + runs-on: ubuntu-latest + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 - with: - node-version: "20" - - - name: Install editorconfig-checker - run: npm install -g editorconfig-checker - - # Run editor config check only on files not covered by a linter - - name: Run ECLint check - run: editorconfig-checker -exclude README.md $(git ls-files | grep -v 'test\|.py\|md\|json\|yml\|yaml\|html\|css\|Makefile') - - Prettier: - runs-on: ["self-hosted"] - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-node@v4 - with: - node-version: "20" - - - name: Install Prettier - run: npm install -g prettier - - - name: Run Prettier --check - run: prettier --check ${GITHUB_WORKSPACE} - - PythonBlack: - runs-on: ["self-hosted"] - steps: - - uses: actions/checkout@v4 - - - name: Check code lints with Black - uses: psf/black@stable - - # If the above check failed, post a comment on the PR explaining the failure - - name: Post PR comment - if: failure() - uses: mshick/add-pr-comment@v1 - with: - message: | - ## Python linting (`black`) is failing - - To keep the code consistent with lots of contributors, we run automated code consistency checks. - To fix this CI test, please run: - - * Install [`black`](https://black.readthedocs.io/en/stable/): `pip install black` - * Fix formatting errors in your pipeline: `black .` - - Once you push these changes the test should pass, and you can hide this comment :+1: - - We highly recommend setting up Black in your code editor so that this formatting is done automatically on save. Ask about it on Slack for help! - - Thanks again for your contribution! 
- repo-token: ${{ secrets.GITHUB_TOKEN }} - allow-repeats: false - - isort: - runs-on: ["self-hosted"] - steps: - - name: Check out source-code repository - uses: actions/checkout@v4 - - name: Set up Python 3.11 - uses: actions/setup-python@v4 - with: - python-version: 3.11 - - name: python-isort - uses: isort/isort-action@v1.1.0 - with: - isortVersion: "latest" - requirementsFiles: "requirements.txt requirements-dev.txt" - - static-type-check: - runs-on: ["self-hosted"] - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 cache: "pip" - - name: Install dependencies - run: | - python -m pip install --upgrade pip -r requirements-dev.txt - pip install -e . + - name: Install pre-commit + run: pip install pre-commit - - name: Cache nf-test installation - id: cache-software - uses: actions/cache@v3 - with: - path: | - /usr/local/bin/nf-test - /home/runner/.nf-test/nf-test.jar - key: ${{ runner.os }}-${{ env.NFTEST_VER }}-nftest - - - name: Install nf-test - if: steps.cache-software.outputs.cache-hit != 'true' - run: | - wget -qO- https://code.askimed.com/install/nf-test | bash - sudo mv nf-test /usr/local/bin/ - - - name: Get Python changed files - id: changed-py-files - uses: tj-actions/changed-files@v23 - with: - files: | - *.py - **/*.py - - name: Run if any of the listed files above is changed - if: steps.changed-py-files.outputs.any_changed == 'true' - run: mypy ${{ steps.changed-py-files.outputs.all_changed_files }} + - name: Run pre-commit + run: pre-commit run --all-files diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml index 1230bfc9d3..169a917d83 100644 --- a/.github/workflows/push_dockerhub_dev.yml +++ b/.github/workflows/push_dockerhub_dev.yml @@ -13,7 +13,7 @@ concurrency: jobs: push_dockerhub: name: Push new Docker image to Docker Hub (dev) - runs-on: self-hosted + runs-on: ubuntu-latest # Only run for the nf-core repo, for releases and merged PRs if: ${{ github.repository == 'nf-core/tools' }} env: diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index b6f3592165..8cec6a63d7 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -14,8 +14,9 @@ on: - "CHANGELOG.md" release: types: [published] + workflow_dispatch: -# Cancel if a newer run is started +# Cancel if a newer run with the same workflow name is queued concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true @@ -25,7 +26,7 @@ env: jobs: setup: - runs-on: ["ubuntu-latest"] + runs-on: "ubuntu-latest" strategy: matrix: python-version: ["3.8", "3.11"] @@ -44,20 +45,45 @@ jobs: runner: ${{ matrix.runner }} run-tests: ${{ steps.conditions.outputs.run-tests }} + # create a test matrix based on all python files in /tests + list_tests: + name: Get test file matrix + runs-on: "ubuntu-latest" + steps: + - uses: actions/checkout@v4 + name: Check out source-code repository + + - name: List tests + id: list_tests + run: | + echo "tests=$(find tests/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT + outputs: + tests: ${{ steps.list_tests.outputs.tests }} + test: - name: Test with Python ${{ needs.setup.outputs.python-version }} on ${{ needs.setup.outputs.runner }} - needs: setup - if: ${{ needs.setup.outputs.run-tests}} - runs-on: ${{ needs.setup.outputs.runner }} + name: Run ${{matrix.test}} with Python ${{ needs.setup.outputs.python-version }} on ${{ 
needs.setup.outputs.runner }}
+    needs: [setup, list_tests]
+    if: ${{ needs.setup.outputs.run-tests }}
+    runs-on: self-hosted
+    strategy:
+      matrix: ${{ fromJson(needs.list_tests.outputs.tests) }}
+      fail-fast: false # run all tests even if one fails
     steps:
-      - uses: actions/checkout@v2
+      - name: go to subdirectory and change nextflow workdir
+        run: |
+          mkdir -p pytest
+          cd pytest
+          export NXF_WORK=$(pwd)
+
+      - uses: actions/checkout@v4
         name: Check out source-code repository

       - name: Set up Python ${{ needs.setup.outputs.python-version }}
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ needs.setup.outputs.python-version }}
           cache: "pip"
+          token: ${{ secrets.GITHUB_TOKEN }}

       - name: Install dependencies
         run: |
@@ -71,16 +97,25 @@ jobs:
           sudo apt remove -y git git-man
           sudo add-apt-repository --remove ppa:git-core/ppa
           sudo apt install -y git
+
       - name: Get current date
         id: date
         run: echo "date=$(date +'%Y-%m')" >> $GITHUB_ENV

       - name: Install Nextflow
         uses: nf-core/setup-nextflow@v1
-        with:
-          version: "latest-everything"
+
+      - name: Check if nf-test is already installed and record it in an env variable
+        id: check-nftest
+        run: |
+          if [ -f /usr/local/bin/nf-test ]; then
+            echo "nftest_installed=true" >> $GITHUB_ENV
+          else
+            echo "nftest_installed=false" >> $GITHUB_ENV
+          fi

       - name: Cache nf-test installation
+        if: env.nftest_installed != 'true'
         id: cache-software
         uses: actions/cache@v3
         with:
@@ -90,16 +125,70 @@
           key: ${{ runner.os }}-nftest-${{ env.date }}

       - name: Install nf-test
-        if: steps.cache-software.outputs.cache-hit != 'true'
+        if: steps.cache-software.outputs.cache-hit != 'true' && env.nftest_installed != 'true'
         run: |
           wget -qO- https://code.askimed.com/install/nf-test | bash
           sudo mv nf-test /usr/local/bin/

+      - name: move coveragerc file up
+        run: |
+          mv .github/.coveragerc .
+
       - name: Test with pytest
-        run: python3 -m pytest tests/ --color=yes --cov-report=xml --cov-config=.github/.coveragerc --cov=nf_core
+        run: |
+          python3 -m pytest tests/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0 || exit_code=$?
+          # don't fail if no tests were collected, e.g. for test_licence.py
+          if [ "${exit_code}" -eq 5 ]; then
+            echo "No tests were collected"
+            exit 0
+          elif [ "${exit_code}" -ne 0 ]; then
+            echo "Tests failed with exit code ${exit_code}"
+            exit 1
+          fi
+
+      - name: Upload coverage
+        uses: actions/upload-artifact@v4
+        with:
+          name: coverage_${{ matrix.test }}
+          path: .coverage
+
+  coverage:
+    needs: test
+    runs-on: self-hosted
+    steps:
+      - name: go to subdirectory
+        run: |
+          mkdir -p pytest
+          cd pytest
+
+      - uses: actions/checkout@v4
+      - name: Set up Python 3.11
+        uses: actions/setup-python@v5
+        env:
+          AGENT_TOOLSDIRECTORY: /opt/actions-runner/_work/tools/tools/
+        with:
+          python-version: 3.11
+          cache: "pip"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip -r requirements-dev.txt
+          pip install -e .
+
+      - name: move coveragerc file up
+        run: |
+          mv .github/.coveragerc .
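+      # each test job above uploaded its .coverage file as an artifact named
+      # coverage_<test>, so downloading all artifacts yields the
+      # coverage_*/.coverage files that `coverage combine` expects below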
+ + - name: Download all artifacts + uses: actions/download-artifact@v4 + - name: Run coverage + run: | + coverage combine --keep coverage*/.coverage* + coverage report + coverage xml - - uses: codecov/codecov-action@v1 - name: Upload code coverage report + - uses: codecov/codecov-action@v3 with: - if: success() - token: ${{ secrets.CODECOV_TOKEN }} + files: coverage.xml + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/rich-codex.yml b/.github/workflows/rich-codex.yml index f5f289b73e..e0981e8241 100644 --- a/.github/workflows/rich-codex.yml +++ b/.github/workflows/rich-codex.yml @@ -8,7 +8,7 @@ jobs: - name: Check out the repo uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.x cache: pip diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index 94f8ee54e2..9bbc12cf4b 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -16,7 +16,7 @@ concurrency: jobs: get-pipelines: - runs-on: ubuntu-latest + runs-on: self-hosted outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} steps: @@ -49,7 +49,7 @@ jobs: fetch-depth: "0" - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 @@ -78,7 +78,7 @@ jobs: - name: Upload sync log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: sync_log_${{ matrix.pipeline }} path: sync_log_${{ matrix.pipeline }}.txt diff --git a/.github/workflows/tools-api-docs-dev.yml b/.github/workflows/tools-api-docs-dev.yml index 51c25fa250..91396b2a25 100644 --- a/.github/workflows/tools-api-docs-dev.yml +++ b/.github/workflows/tools-api-docs-dev.yml @@ -20,14 +20,14 @@ concurrency: jobs: api-docs: name: Build & push Sphinx API docs - runs-on: self-hosted + runs-on: ubuntu-latest steps: - name: Check out source-code repository uses: actions/checkout@v4 - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 @@ -43,7 +43,7 @@ jobs: - name: Sync dev docs # Only sync with the website if it was a push from nf-core/tools dev branch if: github.repository == 'nf-core/tools' && github.event_name == 'push' && github.event.ref == 'refs/heads/dev' - uses: SamKirkland/FTP-Deploy-Action@4.0.0 + uses: SamKirkland/FTP-Deploy-Action@v4.3.4 with: server: ${{ secrets.ftp_server }} username: ${{ secrets.ftp_username}} diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml index b0869190d9..2183db3fcf 100644 --- a/.github/workflows/tools-api-docs-release.yml +++ b/.github/workflows/tools-api-docs-release.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v4 - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 @@ -37,7 +37,7 @@ jobs: - name: Sync release docs if: github.repository == 'nf-core/tools' - uses: SamKirkland/FTP-Deploy-Action@4.0.0 + uses: SamKirkland/FTP-Deploy-Action@v4.3.4 with: server: ${{ secrets.ftp_server }} username: ${{ secrets.ftp_username}} diff --git a/.gitpod.yml b/.gitpod.yml index 3c8b6b5303..39fb530da2 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -9,6 +9,7 @@ tasks: - name: unset JAVA_TOOL_OPTIONS command: | unset JAVA_TOOL_OPTIONS + vscode: extensions: # based on nf-core.nf-core-extensionpack - codezombiech.gitignore # Language support for .gitignore files @@ -21,3 +22,4 @@ vscode: # - nextflow.nextflow # 
Nextflow syntax highlighting - oderwat.indent-rainbow # Highlight indentation level - streetsidesoftware.code-spell-checker # Spelling checker for source code + - charliermarsh.ruff # Code linter Ruff diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ad23a3c895..fb2a67bace 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,23 +1,23 @@ repos: - - repo: https://github.com/psf/black - rev: 23.1.0 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.1.11 hooks: - - id: black - - repo: https://github.com/pycqa/isort - rev: 5.12.0 - hooks: - - id: isort + - id: ruff # linter + args: [--fix, --exit-non-zero-on-fix] # sort imports and fix + - id: ruff-format # formatter - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v2.7.1" + rev: "v3.1.0" hooks: - id: prettier - - repo: https://github.com/asottile/pyupgrade - rev: v3.15.0 + + - repo: https://github.com/editorconfig-checker/editorconfig-checker.python + rev: "2.7.3" hooks: - - id: pyupgrade - args: [--py38-plus] + - id: editorconfig-checker + alias: ec + - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.7.1" # Use the sha / tag you want to point at + rev: "v1.8.0" hooks: - id: mypy additional_dependencies: diff --git a/.prettierignore b/.prettierignore index 344cafca6e..a55074abfb 100644 --- a/.prettierignore +++ b/.prettierignore @@ -6,3 +6,7 @@ testing nf_core/module-template/meta.yml nf_core/module-template/tests/tags.yml nf_core/subworkflow-template/tests/tags.yml +# don't run on things handled by ruff +*.py +*.pyc + diff --git a/CHANGELOG.md b/CHANGELOG.md index c6fd827245..dc96df408f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ ### Template - Add a Github Action Workflow to the pipeline template that tests a successful download with 'nf-core download' ([#2618](https://github.com/nf-core/tools/pull/2618)) +- Use `pre-commit` to lint files in GitHub CI ([#2635](https://github.com/nf-core/tools/pull/2635)) ### Download @@ -16,6 +17,13 @@ ### General +- Run CI-pytests for nf-core tools on self-hosted runners ([#2550](https://github.com/nf-core/tools/pull/2550)) +- Add Ruff linter and formatter replacing Black, isort and pyupgrade ([#2620](https://github.com/nf-core/tools/pull/2620)) +- Update pre-commit hook pre-commit/mirrors-mypy to v1.8.0 ([#2630](https://github.com/nf-core/tools/pull/2630)) +- Update mshick/add-pr-comment action to v2 ([#2632](https://github.com/nf-core/tools/pull/2632)) +- update python image version in docker file ([#2636](https://github.com/nf-core/tools/pull/2636)) +- Set pdiff as nf-test differ in Docker image for Gitpod ([#2642](https://github.com/nf-core/tools/pull/2642)) + # [v2.11.1 - Magnesium Dragon Patch](https://github.com/nf-core/tools/releases/tag/2.11) - [2023-12-20] ### Template diff --git a/Dockerfile b/Dockerfile index 95d544b26f..9c9770c25f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.11.5-slim +FROM python:3.11-slim LABEL authors="phil.ewels@scilifelab.se,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for the nfcore tools" diff --git a/README.md b/README.md index 653f5295ea..3a9b31187a 100644 --- a/README.md +++ b/README.md @@ -2,9 +2,8 @@ [![Python tests](https://github.com/nf-core/tools/workflows/Python%20tests/badge.svg?branch=master&event=push)](https://github.com/nf-core/tools/actions?query=workflow%3A%22Python+tests%22+branch%3Amaster) 
[![codecov](https://codecov.io/gh/nf-core/tools/branch/master/graph/badge.svg)](https://codecov.io/gh/nf-core/tools) -[![code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![code style: prettier](https://img.shields.io/badge/code%20style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) -[![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://pycqa.github.io/isort/) +[![code style: Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v1.json)](https://github.com/charliermarsh/ruff) [![install with Bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/recipes/nf-core/README.html) [![install with PyPI](https://img.shields.io/badge/install%20with-PyPI-blue.svg)](https://pypi.org/project/nf-core/) diff --git a/codecov.yml b/codecov.yml index 1ecf8960c0..11a63f8bbf 100644 --- a/codecov.yml +++ b/codecov.yml @@ -4,3 +4,6 @@ coverage: default: threshold: 5% patch: off +comment: + layout: "condensed_header, condensed_files, condensed_footer" + require_changes: true diff --git a/docs/api/Makefile b/docs/api/Makefile index f961e4ded1..ab30a5051e 100644 --- a/docs/api/Makefile +++ b/docs/api/Makefile @@ -16,4 +16,4 @@ help: # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index 27eaf9bcb3..bfdbd7888d 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # Configuration file for the Sphinx documentation builder. # @@ -16,9 +15,10 @@ import sys from typing import Dict -sys.path.insert(0, os.path.abspath("../../../nf_core")) import nf_core +sys.path.insert(0, os.path.abspath("../../../nf_core")) + # -- Project information ----------------------------------------------------- project = "nf-core/tools" diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 8cfacf7399..194b2030be 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -934,7 +934,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, Test modules within a pipeline or a clone of the nf-core/modules repository. """ - from nf_core.components.lint import LintException + from nf_core.components.lint import LintExceptionError from nf_core.modules import ModuleLint try: @@ -960,7 +960,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, ) if len(module_lint.failed) > 0: sys.exit(1) - except LintException as e: + except LintExceptionError as e: log.error(e) sys.exit(1) except (UserWarning, LookupError) as e: @@ -1020,7 +1020,7 @@ def bump_versions(ctx, tool, dir, all, show_all): the nf-core/modules repo. 
""" from nf_core.modules.bump_versions import ModuleVersionBumper - from nf_core.modules.modules_utils import ModuleException + from nf_core.modules.modules_utils import ModuleExceptionError try: version_bumper = ModuleVersionBumper( @@ -1030,7 +1030,7 @@ def bump_versions(ctx, tool, dir, all, show_all): ctx.obj["modules_repo_no_pull"], ) version_bumper.bump_versions(module=tool, all_modules=all, show_uptodate=show_all) - except ModuleException as e: + except ModuleExceptionError as e: log.error(e) sys.exit(1) except (UserWarning, LookupError) as e: @@ -1207,7 +1207,7 @@ def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, lo Test subworkflows within a pipeline or a clone of the nf-core/modules repository. """ - from nf_core.components.lint import LintException + from nf_core.components.lint import LintExceptionError from nf_core.subworkflows import SubworkflowLint try: @@ -1232,7 +1232,7 @@ def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, lo ) if len(subworkflow_lint.failed) > 0: sys.exit(1) - except LintException as e: + except LintExceptionError as e: log.error(e) sys.exit(1) except (UserWarning, LookupError) as e: @@ -1647,7 +1647,7 @@ def sync(dir, from_branch, pull_request, github_repository, username, template_y the pipeline. It is run automatically for all pipelines when ever a new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. """ - from nf_core.sync import PipelineSync, PullRequestException, SyncException + from nf_core.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError from nf_core.utils import is_pipeline_directory # Check if pipeline directory contains necessary files @@ -1657,7 +1657,7 @@ def sync(dir, from_branch, pull_request, github_repository, username, template_y sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml) try: sync_obj.sync() - except (SyncException, PullRequestException) as e: + except (SyncExceptionError, PullRequestExceptionError) as e: log.error(e) sys.exit(1) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 40c8f8984f..c5e8931fbc 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -68,7 +68,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: [ ( f"/releases/tag/{current_version}", - f"/tree/dev", + "/tree/dev", ) ], ) @@ -78,7 +78,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: pipeline_obj, [ ( - f"/tree/dev", + "/tree/dev", f"/releases/tag/{multiqc_new_version}", ) ], @@ -187,7 +187,7 @@ def update_file_version(filename: Union[str, Path], pipeline_obj: Pipeline, patt fn = pipeline_obj._fp(filename) content = "" try: - with open(fn, "r") as fh: + with open(fn) as fh: content = fh.read() except FileNotFoundError: log.warning(f"File not found: '{fn}'") diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 44924a2704..8332429835 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -227,7 +227,7 @@ def check_patch_paths(self, patch_path: Path, module_name: str) -> None: if patch_path.exists(): log.info(f"Modules {module_name} contains a patch file.") rewrite = False - with open(patch_path, "r") as fh: + with open(patch_path) as fh: lines = fh.readlines() for index, line in enumerate(lines): # Check if there are old paths in the patch file and replace @@ -264,7 +264,7 @@ def 
check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[
         if self.repo_type == "pipeline":
             workflow_files = Path(self.dir, "workflows").glob("*.nf")
             for workflow_file in workflow_files:
-                with open(workflow_file, "r") as fh:
+                with open(workflow_file) as fh:
                     # Check if component path is in the file using mmap
                     with mmap.mmap(fh.fileno(), 0, access=mmap.ACCESS_READ) as s:
                         if s.find(component_path.encode()) != -1:
diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py
index eec533ce60..01650a643d 100644
--- a/nf_core/components/components_utils.py
+++ b/nf_core/components/components_utils.py
@@ -53,7 +53,7 @@ def get_repo_info(directory: str, use_prompt: Optional[bool] = True) -> Tuple[st
         raise UserWarning("Repository type could not be established")

     # Check if it's a valid answer
-    if not repo_type in ["pipeline", "modules"]:
+    if repo_type not in ["pipeline", "modules"]:
         raise UserWarning(f"Invalid repository type: '{repo_type}'")

     # Check for org if modules repo
@@ -138,7 +138,7 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str
     """
     modules = []
     subworkflows = []
-    with open(Path(subworkflow_dir, "main.nf"), "r") as fh:
+    with open(Path(subworkflow_dir, "main.nf")) as fh:
         for line in fh:
             regex = re.compile(
                 r"include(?: *{ *)([a-zA-Z\_0-9]*)(?: *as *)?(?:[a-zA-Z\_0-9]*)?(?: *})(?: *from *)(?:'|\")(.*)(?:'|\")"
diff --git a/nf_core/components/create.py b/nf_core/components/create.py
index 568ca22af5..32f6d1a433 100644
--- a/nf_core/components/create.py
+++ b/nf_core/components/create.py
@@ -2,7 +2,6 @@
 The ComponentCreate class handles generating of module and subworkflow templates
 """

-from __future__ import print_function

 import glob
 import json
@@ -440,12 +439,13 @@ def _copy_old_files(self, component_old_path):
         pytest_dir = Path(self.directory, "tests", self.component_type, self.org, self.component_dir)
         nextflow_config = pytest_dir / "nextflow.config"
         if nextflow_config.is_file():
-            with open(nextflow_config, "r") as fh:
+            with open(nextflow_config) as fh:
                 config_lines = ""
                 for line in fh:
-                    if "publishDir" not in line:
+                    if "publishDir" not in line and line.strip() != "":
                         config_lines += line
-            if len(config_lines) > 0:
+            # if the nextflow.config file only contained publishDir, config_lines will be 11 characters long (`process {\n}`)
+            if len(config_lines) > 11:
                 log.debug("Copying nextflow.config file from pytest tests")
                 with open(
                     Path(self.directory, self.component_type, self.org, self.component_dir, "tests", "nextflow.config"),
@@ -460,7 +460,7 @@ def _print_and_delete_pytest_files(self):
             "[violet]Do you want to delete the pytest files?[/]\nPytest file 'main.nf' will be printed to standard output to allow migrating the tests manually to 'main.nf.test'.",
             default=False,
         ):
-            with open(pytest_dir / "main.nf", "r") as fh:
+            with open(pytest_dir / "main.nf") as fh:
                 log.info(fh.read())
             shutil.rmtree(pytest_dir)
             log.info(
@@ -475,7 +475,7 @@ def _print_and_delete_pytest_files(self):
         )
         # Delete tags from pytest_modules.yml
         modules_yml = Path(self.directory, "tests", "config", "pytest_modules.yml")
-        with open(modules_yml, "r") as fh:
+        with open(modules_yml) as fh:
             yml_file = yaml.safe_load(fh)
         yml_key = str(self.component_dir) if self.component_type == "modules" else f"subworkflows/{self.component_dir}"
         if yml_key in yml_file:
diff --git a/nf_core/components/info.py b/nf_core/components/info.py
index e4d8038b87..54fc0004dc 100644
--- a/nf_core/components/info.py
+++
b/nf_core/components/info.py @@ -184,7 +184,7 @@ def get_local_yaml(self): meta_fn = Path(comp_dir, "meta.yml") if meta_fn.exists(): log.debug(f"Found local file: {meta_fn}") - with open(meta_fn, "r") as fh: + with open(meta_fn) as fh: self.local_path = comp_dir return yaml.safe_load(fh) @@ -196,7 +196,7 @@ def get_local_yaml(self): meta_fn = Path(comp_dir, "meta.yml") if meta_fn.exists(): log.debug(f"Found local file: {meta_fn}") - with open(meta_fn, "r") as fh: + with open(meta_fn) as fh: self.local_path = comp_dir return yaml.safe_load(fh) log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally") diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index efffc28e85..3c2fb9dde3 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -3,7 +3,6 @@ in nf-core pipelines """ -from __future__ import print_function import logging import operator @@ -27,7 +26,7 @@ log = logging.getLogger(__name__) -class LintException(Exception): +class LintExceptionError(Exception): """Exception raised when there was an error with module or subworkflow linting""" pass @@ -216,7 +215,7 @@ def _print_results(self, show_passed=False, sort_by="test"): try: for lint_result in tests: max_name_len = max(len(lint_result.component_name), max_name_len) - except: + except Exception: pass # Helper function to format test links nicely diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 874fa570bc..2f73afe9d3 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -81,7 +81,7 @@ def __init__( def _get_main_nf_tags(self, test_main_nf: Union[Path, str]): """Collect all tags from the main.nf.test file.""" tags = [] - with open(test_main_nf, "r") as fh: + with open(test_main_nf) as fh: for line in fh: if line.strip().startswith("tag"): tags.append(line.strip().split()[1].strip('"')) @@ -90,7 +90,7 @@ def _get_main_nf_tags(self, test_main_nf: Union[Path, str]): def _get_included_components(self, main_nf: Union[Path, str]): """Collect all included components from the main.nf file.""" included_components = [] - with open(main_nf, "r") as fh: + with open(main_nf) as fh: for line in fh: if line.strip().startswith("include"): # get tool/subtool or subworkflow name from include statement, can be in the form @@ -107,7 +107,7 @@ def _get_included_components(self, main_nf: Union[Path, str]): def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, str]): """Collect all included components from the main.nf file.""" included_components = [] - with open(main_nf_test, "r") as fh: + with open(main_nf_test) as fh: for line in fh: if line.strip().startswith("script"): # get tool/subtool or subworkflow name from script statement, can be: @@ -151,7 +151,7 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st def get_inputs_from_main_nf(self): """Collect all inputs from the main.nf file.""" inputs = [] - with open(self.main_nf, "r") as f: + with open(self.main_nf) as f: data = f.read() # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo # regex matches: @@ -168,17 +168,19 @@ def get_inputs_from_main_nf(self): input_data = data.split("input:")[1].split("output:")[0] regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" matches = re.finditer(regex, input_data, re.MULTILINE) - for matchNum, match in 
enumerate(matches, start=1): + for _, match in enumerate(matches, start=1): if match.group(3): - inputs.append(match.group(3)) + input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + inputs.append(input_val) elif match.group(4): - inputs.append(match.group(4)) + input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + inputs.append(input_val) log.info(f"Found {len(inputs)} inputs in {self.main_nf}") self.inputs = inputs def get_outputs_from_main_nf(self): outputs = [] - with open(self.main_nf, "r") as f: + with open(self.main_nf) as f: data = f.read() # get output values from main.nf after "output:". the names are always after "emit:" if "output:" not in data: @@ -187,7 +189,7 @@ def get_outputs_from_main_nf(self): output_data = data.split("output:")[1].split("when:")[0] regex = r"emit:\s*([^)\s,]+)" matches = re.finditer(regex, output_data, re.MULTILINE) - for matchNum, match in enumerate(matches, start=1): + for _, match in enumerate(matches, start=1): outputs.append(match.group(1)) log.info(f"Found {len(outputs)} outputs in {self.main_nf}") self.outputs = outputs diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 28f2f886b1..55d5747451 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -35,7 +35,7 @@ def _parameter_checks(self, component): if component is not None and component not in component_names: component_dir = [dir for dir, m in components if m == component][0] raise UserWarning( - f"{self.component_type[:-1].title()} '{Path(self.component_type, component_dir, module)}' does not exist in the pipeline" + f"{self.component_type[:-1].title()} '{Path(self.component_type, component_dir, component)}' does not exist in the pipeline" ) def patch(self, component=None): @@ -220,5 +220,5 @@ def remove(self, component): ): log.error( f"Module files do not appear to match the remote for the commit sha in the 'module.json': {component_version}\n" - f"Recommend reinstalling with 'nf-core modules install --force --sha {component_version} {module}' " + f"Recommend reinstalling with 'nf-core modules install --force --sha {component_version} {component}' " ) diff --git a/nf_core/create.py b/nf_core/create.py index 56d0912a07..0d5b7e4d7b 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -108,7 +108,7 @@ def create_param_dict(self, name, description, author, version, template_yaml_pa # Obtain template customization info from template yaml file or `.nf-core.yml` config file try: if template_yaml_path is not None: - with open(template_yaml_path, "r") as f: + with open(template_yaml_path) as f: template_yaml = yaml.safe_load(f) elif "template" in config_yml: template_yaml = config_yml["template"] @@ -395,7 +395,7 @@ def remove_nf_core_in_bug_report_template(self): """ bug_report_path = self.outdir / ".github" / "ISSUE_TEMPLATE" / "bug_report.yml" - with open(bug_report_path, "r") as fh: + with open(bug_report_path) as fh: contents = yaml.load(fh, Loader=yaml.FullLoader) # Remove the first item in the body, which is the information about the docs diff --git a/nf_core/download.py b/nf_core/download.py index 08bef935ba..4c0bc97f42 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -1,6 +1,5 @@ """Downloads a nf-core pipeline to the local file system.""" -from __future__ import print_function import concurrent.futures import io @@ -21,7 +20,7 @@ import rich import rich.progress from git.exc import GitCommandError, InvalidGitRepositoryError -from pkg_resources import 
parse_version as VersionParser +from pkg_resources import parse_version as version_parser import nf_core import nf_core.list @@ -551,7 +550,7 @@ def read_remote_containers(self): self.containers_remote = sorted(list(set(self.containers_remote))) except (FileNotFoundError, LookupError) as e: log.error(f"[red]Issue with reading the specified remote $NXF_SINGULARITY_CACHE index:[/]\n{e}\n") - if stderr.is_interactive and rich.prompt.Confirm.ask(f"[blue]Specify a new index file and try again?"): + if stderr.is_interactive and rich.prompt.Confirm.ask("[blue]Specify a new index file and try again?"): self.container_cache_index = None # reset chosen path to index file. self.prompt_singularity_cachedir_remote() else: @@ -640,7 +639,7 @@ def wf_use_local_configs(self, revision_dirname): log.debug(f"Editing 'params.custom_config_base' in '{nfconfig_fn}'") # Load the nextflow.config file into memory - with open(nfconfig_fn, "r") as nfconfig_fh: + with open(nfconfig_fn) as nfconfig_fh: nfconfig = nfconfig_fh.read() # Replace the target string @@ -700,7 +699,7 @@ def find_container_images(self, workflow_directory): if bool(config_findings_dsl2): # finding fill always be a tuple of length 2, first the quote used and second the enquoted value. for finding in config_findings_dsl2: - config_findings.append((finding + (self.nf_config, "Nextflow configs"))) + config_findings.append(finding + (self.nf_config, "Nextflow configs")) else: # no regex match, likely just plain string """ Append string also as finding-like tuple for consistency @@ -719,7 +718,7 @@ def find_container_images(self, workflow_directory): for file in files: if file.endswith(".nf"): file_path = os.path.join(subdir, file) - with open(file_path, "r") as fh: + with open(file_path) as fh: # Look for any lines with container "xxx" or container 'xxx' search_space = fh.read() """ @@ -744,7 +743,7 @@ def find_container_images(self, workflow_directory): for finding in local_module_findings: # append finding since we want to collect them from all modules # also append search_space because we need to start over later if nothing was found. - module_findings.append((finding + (search_space, file_path))) + module_findings.append(finding + (search_space, file_path)) # Not sure if there will ever be multiple container definitions per module, but beware DSL3. # Like above run on shallow copy, because length may change at runtime. 
@@ -853,7 +852,7 @@ def rectify_raw_container_matches(self, raw_findings):
             ['https://depot.galaxyproject.org/singularity/scanpy:1.7.2--pyhdfd78af_0', 'biocontainers/scanpy:1.7.2--pyhdfd78af_0']
             """
             container_value_defs = [
-                capture for _, capture in container_value_defs[:] if not capture in ["singularity", "apptainer"]
+                capture for _, capture in container_value_defs[:] if capture not in ["singularity", "apptainer"]
             ]

             """
@@ -1066,10 +1065,10 @@ def get_singularity_images(self, current_revision=""):
                                 self.singularity_pull_image(*container, library, progress)
                                 # Pulling the image was successful, no ContainerError was raised, break the library loop
                                 break
-                            except ContainerError.ImageExists as e:
+                            except ContainerError.ImageExistsError:
                                 # Pulling not required
                                 break
-                            except ContainerError.RegistryNotFound as e:
+                            except ContainerError.RegistryNotFoundError as e:
                                 self.container_library.remove(library)
                                 # The only library was removed
                                 if not self.container_library:
@@ -1079,13 +1078,13 @@ def get_singularity_images(self, current_revision=""):
                                 else:
                                     # Other libraries can be used
                                     continue
-                            except ContainerError.ImageNotFound as e:
+                            except ContainerError.ImageNotFoundError as e:
                                 # Try other registries
                                 if e.error_log.absolute_URI:
                                     break  # there no point in trying other registries if absolute URI was specified.
                                 else:
                                     continue
-                            except ContainerError.InvalidTag as e:
+                            except ContainerError.InvalidTagError:
                                 # Try other registries
                                 continue
                             except ContainerError.OtherError as e:
@@ -1524,7 +1523,7 @@ def tidy_tags_and_branches(self):
         else:
             # desired revisions may contain arbitrary branch names that do not correspond to valid sematic versioning patterns.
             valid_versions = [
-                VersionParser(v)
+                version_parser(v)
                 for v in desired_revisions
                 if re.match(r"\d+\.\d+(?:\.\d+)*(?:[\w\-_])*", v)
             ]
@@ -1583,7 +1582,7 @@ def __init__(self, container, registry, address, absolute_URI, out_path, singula
         for line in error_msg:
             if re.search(r"dial\stcp.*no\ssuch\shost", line):
-                self.error_type = self.RegistryNotFound(self)
+                self.error_type = self.RegistryNotFoundError(self)
                 break
             elif (
                 re.search(r"requested\saccess\sto\sthe\sresource\sis\sdenied", line)
@@ -1595,13 +1594,13 @@ def __init__(self, container, registry, address, absolute_URI, out_path, singula
                 # unauthorized: authentication required
                 # Quay.io: StatusCode: 404, \n']
                 # ghcr.io: Requesting bearer token: invalid status code from registry 400 (Bad Request)
-                self.error_type = self.ImageNotFound(self)
+                self.error_type = self.ImageNotFoundError(self)
                 break
             elif re.search(r"manifest\sunknown", line):
-                self.error_type = self.InvalidTag(self)
+                self.error_type = self.InvalidTagError(self)
                 break
             elif re.search(r"Image\sfile\salready\sexists", line):
-                self.error_type = self.ImageExists(self)
+                self.error_type = self.ImageExistsError(self)
                 break
             else:
                 continue
@@ -1615,7 +1614,7 @@ def __init__(self, container, registry, address, absolute_URI, out_path, singula

             raise self.error_type

-    class RegistryNotFound(ConnectionRefusedError):
+    class RegistryNotFoundError(ConnectionRefusedError):
         """The specified registry does not resolve to a valid IP address"""

         def __init__(self, error_log):
@@ -1628,7 +1627,7 @@ def __init__(self, error_log):
             )
             super().__init__(self.message, self.helpmessage, self.error_log)

-    class ImageNotFound(FileNotFoundError):
+    class ImageNotFoundError(FileNotFoundError):
         """The image can not be found in the registry"""

         def __init__(self, error_log):
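The wave of `ImageExists` → `ImageExistsError` style renames here (and `ModuleException`/`LintException` later in the diff) matches Ruff's pep8-naming rule N818, which wants custom exception names to end in `Error`. A simplified sketch of the pattern, not the PR's actual class bodies:

```python
class ContainerError(Exception):
    """Base class for errors while pulling containers (schematic only)."""

class RegistryNotFoundError(ConnectionRefusedError):
    """Registry hostname does not resolve (renamed from RegistryNotFound)."""

class ImageNotFoundError(FileNotFoundError):
    """Image missing from the registry (renamed from ImageNotFound)."""

# Callers catch the new names; the `as e` binding is dropped wherever the
# exception object itself is unused (another Ruff fix visible above).
try:
    raise ImageNotFoundError("fastqc:0.11.9--0")
except ImageNotFoundError as e:
    print(f"pull failed: {e}")
```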
@@ -1644,7 +1643,7 @@ def __init__(self, error_log):

             super().__init__(self.message)

-    class InvalidTag(AttributeError):
+    class InvalidTagError(AttributeError):
         """Image and registry are valid, but the (version) tag is not"""

         def __init__(self, error_log):
@@ -1653,7 +1652,7 @@ def __init__(self, error_log):
             self.helpmessage = f'Please chose a different library than {self.error_log.registry}\nor try to locate the "{self.error_log.address.split(":")[-1]}" version of "{self.error_log.container}" manually.\nPlease troubleshoot the command \n"{" ".join(self.error_log.singularity_command)}" manually.\n'
             super().__init__(self.message)

-    class ImageExists(FileExistsError):
+    class ImageExistsError(FileExistsError):
         """Image already exists in cache/output directory."""

         def __init__(self, error_log):
diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile
index e721f210d0..ad4bed5052 100644
--- a/nf_core/gitpod/gitpod.Dockerfile
+++ b/nf_core/gitpod/gitpod.Dockerfile
@@ -47,19 +47,23 @@ RUN conda config --add channels defaults && \
     conda config --add channels conda-forge && \
     conda config --set channel_priority strict && \
     conda install --quiet --yes --name base \
-        mamba \
-        nextflow \
-        nf-core \
-        nf-test \
-        black \
-        prettier \
-        pre-commit \
-        openjdk \
-        pytest-workflow && \
+    mamba \
+    nextflow \
+    nf-core \
+    nf-test \
+    prettier \
+    pre-commit \
+    ruff \
+    openjdk \
+    pytest-workflow && \
     conda clean --all --force-pkgs-dirs --yes

 # Update Nextflow
 RUN nextflow self-update

 # Install nf-core
-RUN python -m pip install .
+RUN python -m pip install . --no-cache-dir
+
+# Setup pdiff for nf-test diffs
+RUN export NFT_DIFF="pdiff" && \
+    export NFT_DIFF_ARGS="--line-numbers --expand-tabs=2"
diff --git a/nf_core/launch.py b/nf_core/launch.py
index 363506c448..25bb4c150c 100644
--- a/nf_core/launch.py
+++ b/nf_core/launch.py
@@ -1,6 +1,5 @@
 """ Launch a pipeline, interactively collecting params """

-from __future__ import print_function

 import copy
 import json
@@ -428,7 +427,7 @@ def prompt_param(self, param_id, param_obj, is_required, answers):
             answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style)

         # If required and got an empty reponse, ask again
-        while type(answer[param_id]) is str and answer[param_id].strip() == "" and is_required:
+        while isinstance(answer[param_id], str) and answer[param_id].strip() == "" and is_required:
             log.error(f"'--{param_id}' is required")
             answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style)

@@ -546,14 +545,14 @@ def single_param_to_questionary(self, param_id, param_obj, answers=None, print_h
         # Start with the default from the param object
         if "default" in param_obj:
             # Boolean default is cast back to a string later - this just normalises all inputs
-            if param_obj["type"] == "boolean" and type(param_obj["default"]) is str:
+            if param_obj["type"] == "boolean" and isinstance(param_obj["default"], str):
                 question["default"] = param_obj["default"].lower() == "true"
             else:
                 question["default"] = param_obj["default"]

         # Overwrite default with parsed schema, includes --params-in etc
         if self.schema_obj is not None and param_id in self.schema_obj.input_params:
-            if param_obj["type"] == "boolean" and type(self.schema_obj.input_params[param_id]) is str:
+            if param_obj["type"] == "boolean" and isinstance(self.schema_obj.input_params[param_id], str):
                 question["default"] = "true" == self.schema_obj.input_params[param_id].lower()
             else:
                 question["default"] = self.schema_obj.input_params[param_id]
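The `launch.py` changes above swap `type(x) is str` for `isinstance(x, str)`, the idiom flake8/Ruff prefer (E721): `isinstance` also accepts subclasses and reads as a type check rather than an identity test. For example:

```python
class Token(str):
    """A str subclass, e.g. a tagged parameter value (hypothetical)."""

value = Token("true")

print(type(value) is str)      # False - exact type comparison only
print(isinstance(value, str))  # True  - accepts subclasses too
```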
diff --git a/nf_core/licences.py b/nf_core/licences.py
index d686a56178..a8a35334dd 100644
--- a/nf_core/licences.py
+++ b/nf_core/licences.py
@@ -1,6 +1,5 @@
 """Lists software licences for a given workflow."""

-from __future__ import print_function

 import json
 import logging
diff --git a/nf_core/lint/actions_awsfulltest.py b/nf_core/lint/actions_awsfulltest.py
index e8e1c951b1..66aa3f99bf 100644
--- a/nf_core/lint/actions_awsfulltest.py
+++ b/nf_core/lint/actions_awsfulltest.py
@@ -32,7 +32,7 @@ def actions_awsfulltest(self):
     fn = os.path.join(self.wf_path, ".github", "workflows", "awsfulltest.yml")
     if os.path.isfile(fn):
         try:
-            with open(fn, "r") as fh:
+            with open(fn) as fh:
                 wf = yaml.safe_load(fh)
         except Exception as e:
             return {"failed": [f"Could not parse yaml file: {fn}, {e}"]}
diff --git a/nf_core/lint/actions_awstest.py b/nf_core/lint/actions_awstest.py
index ccdf0abf6a..7c55998944 100644
--- a/nf_core/lint/actions_awstest.py
+++ b/nf_core/lint/actions_awstest.py
@@ -27,7 +27,7 @@ def actions_awstest(self):
         return {"ignored": [f"'awstest.yml' workflow not found: `{fn}`"]}

     try:
-        with open(fn, "r") as fh:
+        with open(fn) as fh:
             wf = yaml.safe_load(fh)
     except Exception as e:
         return {"failed": [f"Could not parse yaml file: {fn}, {e}"]}
diff --git a/nf_core/lint/actions_ci.py b/nf_core/lint/actions_ci.py
index e669eceb8c..a3e7d54b66 100644
--- a/nf_core/lint/actions_ci.py
+++ b/nf_core/lint/actions_ci.py
@@ -1,5 +1,4 @@
 import os
-import re

 import yaml

@@ -48,7 +47,7 @@ def actions_ci(self):
         return {"ignored": ["'.github/workflows/ci.yml' not found"]}

     try:
-        with open(fn, "r") as fh:
+        with open(fn) as fh:
             ciwf = yaml.safe_load(fh)
     except Exception as e:
         return {"failed": [f"Could not parse yaml file: {fn}, {e}"]}
@@ -62,7 +61,7 @@ def actions_ci(self):
         if not (
             pr_subtree is None
             or ("branches" in pr_subtree and "dev" in pr_subtree["branches"])
-            or ("ignore_branches" in pr_subtree and not "dev" in pr_subtree["ignore_branches"])
+            or ("ignore_branches" in pr_subtree and "dev" not in pr_subtree["ignore_branches"])
         ):
             raise AssertionError()
         if "published" not in ciwf[True]["release"]["types"]:
diff --git a/nf_core/lint/actions_schema_validation.py b/nf_core/lint/actions_schema_validation.py
index 9d49b84c6b..fa4471d98c 100644
--- a/nf_core/lint/actions_schema_validation.py
+++ b/nf_core/lint/actions_schema_validation.py
@@ -36,7 +36,7 @@ def actions_schema_validation(self):

         # load workflow
         try:
-            with open(wf_path, "r") as fh:
+            with open(wf_path) as fh:
                 wf_json = yaml.safe_load(fh)
         except Exception as e:
             failed.append(f"Could not parse yaml file: {wf}, {e}")
diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py
index 2b64d62638..82b286fb44 100644
--- a/nf_core/lint/files_unchanged.py
+++ b/nf_core/lint/files_unchanged.py
@@ -187,16 +187,16 @@ def _tf(file_path):
         else:
             for f in files:
                 try:
-                    with open(_pf(f), "r") as fh:
+                    with open(_pf(f)) as fh:
                         pipeline_file = fh.read()
-                    with open(_tf(f), "r") as fh:
+                    with open(_tf(f)) as fh:
                         template_file = fh.read()
                     if template_file in pipeline_file:
                         passed.append(f"`{f}` matches the template")
                     else:
                         if "files_unchanged" in self.fix:
                             # Try to fix the problem by overwriting the pipeline file
-                            with open(_tf(f), "r") as fh:
+                            with open(_tf(f)) as fh:
                                 template_file = fh.read()
                             with open(_pf(f), "w") as fh:
                                 fh.write(template_file)
diff --git a/nf_core/lint/merge_markers.py b/nf_core/lint/merge_markers.py
index f33a5095d8..8ef425234b 100644
--- a/nf_core/lint/merge_markers.py
+++ b/nf_core/lint/merge_markers.py
@@ -1,5 +1,4 @@
 import fnmatch
-import io
 import logging
 import os
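Most of the lint-module churn above is one mechanical change, Ruff's UP015 rule: `open()` already defaults to text-mode reading, so the explicit `"r"` argument is redundant. A self-contained sketch (the file is created just for the demo):

```python
import yaml

with open("example.yml", "w") as fh:  # throwaway file for the demo
    fh.write("on: [push]\n")

# Before (flagged by UP015): open("example.yml", "r")
# After - identical behaviour, since "r" is the default mode:
with open("example.yml") as fh:
    wf = yaml.safe_load(fh)

# Note: PyYAML parses the bare key `on` as boolean True, which is why
# actions_ci.py above indexes the parsed workflow as ciwf[True].
print(wf)  # {True: ['push']}
```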
@@ -23,9 +22,9 @@ def merge_markers(self):
     ignore = [".git"]
     if os.path.isfile(os.path.join(self.wf_path, ".gitignore")):
-        with io.open(os.path.join(self.wf_path, ".gitignore"), "rt", encoding="latin1") as fh:
-            for l in fh:
-                ignore.append(os.path.basename(l.strip().rstrip("/")))
+        with open(os.path.join(self.wf_path, ".gitignore"), encoding="latin1") as fh:
+            for line in fh:
+                ignore.append(os.path.basename(line.strip().rstrip("/")))
     for root, dirs, files in os.walk(self.wf_path, topdown=True):
         # Ignore files
         for i_base in ignore:
@@ -41,12 +40,12 @@ def merge_markers(self):
             if nf_core.utils.is_file_binary(os.path.join(root, fname)):
                 continue
             try:
-                with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh:
-                    for l in fh:
-                        if ">>>>>>>" in l:
-                            failed.append(f"Merge marker '>>>>>>>' in `{os.path.join(root, fname)}`: {l[:30]}")
-                        if "<<<<<<<" in l:
-                            failed.append(f"Merge marker '<<<<<<<' in `{os.path.join(root, fname)}`: {l[:30]}")
+                with open(os.path.join(root, fname), encoding="latin1") as fh:
+                    for line in fh:
+                        if ">>>>>>>" in line:
+                            failed.append(f"Merge marker '>>>>>>>' in `{os.path.join(root, fname)}`: {line[:30]}")
+                        if "<<<<<<<" in line:
+                            failed.append(f"Merge marker '<<<<<<<' in `{os.path.join(root, fname)}`: {line[:30]}")
             except FileNotFoundError:
                 log.debug(f"Could not open file {os.path.join(root, fname)} in merge_markers lint test")
     if len(failed) == 0:
diff --git a/nf_core/lint/multiqc_config.py b/nf_core/lint/multiqc_config.py
index cbbeae07a8..b2f1a89a1b 100644
--- a/nf_core/lint/multiqc_config.py
+++ b/nf_core/lint/multiqc_config.py
@@ -34,7 +34,7 @@ def multiqc_config(self) -> Dict[str, List[str]]:
         return {"ignored": ["'assets/multiqc_config.yml' not found"]}

     try:
-        with open(fn, "r") as fh:
+        with open(fn) as fh:
             mqc_yml = yaml.safe_load(fh)
     except Exception as e:
         return {"failed": [f"Could not parse yaml file: {fn}, {e}"]}
diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py
index 24f1e5c12f..328bc03759 100644
--- a/nf_core/lint/nextflow_config.py
+++ b/nf_core/lint/nextflow_config.py
@@ -300,7 +300,7 @@ def nextflow_config(self):
         ]
         path = os.path.join(self.wf_path, "nextflow.config")
         i = 0
-        with open(path, "r") as f:
+        with open(path) as f:
             for line in f:
                 if lines[i] in line:
                     i += 1
@@ -320,7 +320,7 @@ def nextflow_config(self):
             )

     # Check for the availability of the "test" configuration profile by parsing nextflow.config
-    with open(os.path.join(self.wf_path, "nextflow.config"), "r") as f:
+    with open(os.path.join(self.wf_path, "nextflow.config")) as f:
         content = f.read()

         # Remove comments
diff --git a/nf_core/lint/pipeline_todos.py b/nf_core/lint/pipeline_todos.py
index 890e227fa1..ba6ec79150 100644
--- a/nf_core/lint/pipeline_todos.py
+++ b/nf_core/lint/pipeline_todos.py
@@ -1,5 +1,4 @@
 import fnmatch
-import io
 import logging
 import os

@@ -41,9 +40,9 @@ def pipeline_todos(self, root_dir=None):

     ignore = [".git"]
     if os.path.isfile(os.path.join(root_dir, ".gitignore")):
-        with io.open(os.path.join(root_dir, ".gitignore"), "rt", encoding="latin1") as fh:
-            for l in fh:
-                ignore.append(os.path.basename(l.strip().rstrip("/")))
+        with open(os.path.join(root_dir, ".gitignore"), encoding="latin1") as fh:
+            for line in fh:
+                ignore.append(os.path.basename(line.strip().rstrip("/")))
     for root, dirs, files in os.walk(root_dir, topdown=True):
         # Ignore files
         for i_base in ignore:
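Both `merge_markers.py` and `pipeline_todos.py` lean on the same traversal trick visible in their hunks: with `os.walk(..., topdown=True)`, assigning to the slices `dirs[:]`/`files[:]` prunes the walk in place, so ignored directories are never descended into. A standalone sketch (directory and pattern names are illustrative):

```python
import fnmatch
import os

ignore_dirs = [".git", "work"]

for root, dirs, files in os.walk(".", topdown=True):
    # Slice assignment mutates the very lists os.walk iterates over,
    # which is what makes the pruning take effect during the walk.
    dirs[:] = [d for d in dirs if d not in ignore_dirs]
    files[:] = [f for f in files if not fnmatch.fnmatch(f, "*.log")]
    print(root, files)
```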
@@ -52,18 +51,18 @@ def pipeline_todos(self, root_dir=None):
                 files[:] = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)]
         for fname in files:
             try:
-                with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh:
-                    for l in fh:
-                        if "TODO nf-core" in l:
-                            l = (
-                                l.replace("<!--", "")
-                                .replace("-->", "")
-                                .replace("# TODO nf-core: ", "")
-                                .replace("// TODO nf-core: ", "")
-                                .replace("TODO nf-core: ", "")
-                                .strip()
-                            )
-                            warned.append(f"TODO string in `{fname}`: _{l}_")
+                with open(os.path.join(root, fname), encoding="latin1") as fh:
+                    for line in fh:
+                        if "TODO nf-core" in line:
+                            line = (
+                                line.replace("<!--", "")
+                                .replace("-->", "")
+                                .replace("# TODO nf-core: ", "")
+                                .replace("// TODO nf-core: ", "")
+                                .replace("TODO nf-core: ", "")
+                                .strip()
+                            )
+                            warned.append(f"TODO string in `{fname}`: _{line}_")
                 file_paths.append(os.path.join(root, fname))
             except FileNotFoundError:
                 log.debug(f"Could not open file {fname} in pipeline_todos lint test")
diff --git a/nf_core/lint/readme.py b/nf_core/lint/readme.py
index 55060442b1..cade9ca3ea 100644
--- a/nf_core/lint/readme.py
+++ b/nf_core/lint/readme.py
@@ -31,7 +31,7 @@ def readme(self):
     # Remove field that should be ignored according to the linting config
     ignore_configs = self.lint_config.get("readme", [])

-    with open(os.path.join(self.wf_path, "README.md"), "r") as fh:
+    with open(os.path.join(self.wf_path, "README.md")) as fh:
         content = fh.read()

     if "nextflow_badge" not in ignore_configs:
diff --git a/nf_core/lint/system_exit.py b/nf_core/lint/system_exit.py
index 56a526d97b..435a2452d0 100644
--- a/nf_core/lint/system_exit.py
+++ b/nf_core/lint/system_exit.py
@@ -25,9 +25,9 @@ def system_exit(self):
     for file in to_check:
         try:
             with file.open() as fh:
-                for i, l in enumerate(fh.readlines(), start=1):
-                    if "System.exit" in l and not "System.exit(0)" in l:
-                        warned.append(f"`System.exit` in {file.name}: _{l.strip()}_ [line {i}]")
+                for i, line in enumerate(fh.readlines(), start=1):
+                    if "System.exit" in line and "System.exit(0)" not in line:
+                        warned.append(f"`System.exit` in {file.name}: _{line.strip()}_ [line {i}]")
         except FileNotFoundError:
             log.debug(f"Could not open file {file.name} in system_exit lint test")
diff --git a/nf_core/lint/template_strings.py b/nf_core/lint/template_strings.py
index fb1f0f32e5..3467229362 100644
--- a/nf_core/lint/template_strings.py
+++ b/nf_core/lint/template_strings.py
@@ -1,4 +1,3 @@
-import io
 import mimetypes
 import re

@@ -30,11 +29,11 @@ def template_strings(self):
         if encoding is not None or (ftype is not None and any([ftype.startswith(ft) for ft in binary_ftypes])):
             continue

-        with io.open(fn, "r", encoding="latin1") as fh:
+        with open(fn, encoding="latin1") as fh:
             lnum = 0
-            for l in fh:
+            for line in fh:
                 lnum += 1
-                cc_matches = re.findall(r"[^$]{{[^:}]*}}", l)
+                cc_matches = re.findall(r"[^$]{{[^:}]*}}", line)
                 if len(cc_matches) > 0:
                     for cc_match in cc_matches:
                         failed.append(f"Found a Jinja template string in `{fn}` L{lnum}: {cc_match}")
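The `template_strings` check above hinges on the regex `r"[^$]{{[^:}]*}}"`, which matches `{{ ... }}` placeholders unless the character immediately before them is `$` (the escape used for GitHub Actions expressions). A quick demonstration of what it does and does not match:

```python
import re

pattern = r"[^$]{{[^:}]*}}"

# A leftover Jinja placeholder is caught:
print(re.findall(pattern, "subject: {{ subject }}"))
# [' {{ subject }}']

# GitHub Actions syntax is skipped, because '{{' follows a '$':
print(re.findall(pattern, "token: ${{ secrets.GITHUB_TOKEN }}"))
# []
```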
Found {}".format(", ".join(["{} = {}".format(k, v) for k, v in versions.items()])) + "The versioning is not consistent between container, release tag " "and config. Found {}".format( + ", ".join([f"{k} = {v}" for k, v in versions.items()]) + ) ) passed.append("Version tags are numeric and consistent between container, release tag and config.") diff --git a/nf_core/list.py b/nf_core/list.py index 94d9d8e043..d0b59319a3 100644 --- a/nf_core/list.py +++ b/nf_core/list.py @@ -1,6 +1,5 @@ """Lists available nf-core pipelines and versions.""" -from __future__ import print_function import json import logging @@ -205,7 +204,7 @@ def print_summary(self): def sort_pulled_date(wf): try: return wf.local_wf.last_pull * -1 - except: + except Exception: return 0 filtered_workflows.sort(key=sort_pulled_date) diff --git a/nf_core/module-template/tests/main.nf.test b/nf_core/module-template/tests/main.nf.test index 5a2e6cdc63..e1b1dadf12 100644 --- a/nf_core/module-template/tests/main.nf.test +++ b/nf_core/module-template/tests/main.nf.test @@ -26,12 +26,12 @@ nextflow_process { """ // TODO nf-core: define inputs of the process here. Example: {% if has_meta %} - input = [ + input[0] = [ [ id:'test', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] {%- else %} - input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + input[0] = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) {%- endif %} """ } @@ -58,12 +58,12 @@ nextflow_process { """ // TODO nf-core: define inputs of the process here. Example: {% if has_meta %} - input = [ + input[0] = [ [ id:'test', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] {%- else %} - input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + input[0] = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) {%- endif %} """ } diff --git a/nf_core/modules/__init__.py b/nf_core/modules/__init__.py index 4b36f302bd..6be871ece8 100644 --- a/nf_core/modules/__init__.py +++ b/nf_core/modules/__init__.py @@ -6,7 +6,7 @@ from .list import ModuleList from .modules_json import ModulesJson from .modules_repo import ModulesRepo -from .modules_utils import ModuleException +from .modules_utils import ModuleExceptionError from .patch import ModulePatch from .remove import ModuleRemove from .update import ModuleUpdate diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 25259f1a16..b9003be974 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -4,8 +4,6 @@ """ -from __future__ import print_function - import logging import os import re @@ -24,9 +22,8 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.utils import custom_yaml_dumper +from nf_core.utils import custom_yaml_dumper, rich_force_colors from nf_core.utils import plural_s as _s -from nf_core.utils import rich_force_colors log = logging.getLogger(__name__) @@ -74,7 +71,7 @@ def bump_versions( # Verify that this is not a pipeline if not self.repo_type == "modules": - raise nf_core.modules.modules_utils.ModuleException( + raise nf_core.modules.modules_utils.ModuleExceptionError( "This command only works on the nf-core/modules repository, not on 
pipelines!" ) @@ -105,12 +102,14 @@ def bump_versions( if module: self.show_up_to_date = True if all_modules: - raise nf_core.modules.modules_utils.ModuleException( + raise nf_core.modules.modules_utils.ModuleExceptionError( "You cannot specify a tool and request all tools to be bumped." ) nfcore_modules = [m for m in nfcore_modules if m.component_name == module] if len(nfcore_modules) == 0: - raise nf_core.modules.modules_utils.ModuleException(f"Could not find the specified module: '{module}'") + raise nf_core.modules.modules_utils.ModuleExceptionError( + f"Could not find the specified module: '{module}'" + ) progress_bar = Progress( "[bold blue]{task.description}", @@ -146,10 +145,10 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: except FileNotFoundError: # try it in the main.nf instead try: - with open(module.main_nf, "r") as fh: - for l in fh: - if "bioconda::" in l: - bioconda_packages = [b for b in l.split() if "bioconda::" in b] + with open(module.main_nf) as fh: + for line in fh: + if "bioconda::" in line: + bioconda_packages = [b for b in line.split() if "bioconda::" in b] except FileNotFoundError: log.error( f"Neither `environment.yml` nor `main.nf` of {module.component_name} module could be read to get bioconada version of used tools." @@ -208,7 +207,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: ), ] - with open(module.main_nf, "r") as fh: + with open(module.main_nf) as fh: content = fh.read() # Go over file content of main.nf and find replacements @@ -241,7 +240,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: fh.write(content) # change version in environment.yml - with open(module.environment_yml, "r") as fh: + with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) re.sub(bioconda_packages[0], f"'bioconda::{bioconda_tool_name}={last_ver}'", env_yml["dependencies"]) with open(module.environment_yml, "w") as fh: @@ -266,7 +265,7 @@ def get_bioconda_version(self, module: NFCoreComponent) -> List[str]: # Check whether file exists and load it bioconda_packages = [] try: - with open(module.environment_yml, "r") as fh: + with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) bioconda_packages = env_yml.get("dependencies", []) except FileNotFoundError: @@ -289,7 +288,7 @@ def _print_results(self) -> None: for m in [self.up_to_date, self.updated, self.failed]: try: max_mod_name_len = max(len(m[2]), max_mod_name_len) - except: + except Exception: pass def format_result(module_updates: List[Tuple[str, str]], table: Table) -> Table: diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 68a38cc0cd..866e6312aa 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -6,7 +6,6 @@ nf-core modules lint """ -from __future__ import print_function import logging import os @@ -16,7 +15,7 @@ import nf_core.modules.modules_utils import nf_core.utils -from nf_core.components.lint import ComponentLint, LintException, LintResult +from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult from nf_core.lint_utils import console log = logging.getLogger(__name__) @@ -119,11 +118,11 @@ def lint( # Only lint the given module if module: if all_modules: - raise LintException("You cannot specify a tool and request all tools to be linted.") + raise LintExceptionError("You cannot specify a tool and request all tools to be linted.") local_modules = [] remote_modules = [m for m in self.all_remote_components if m.component_name == module] 
@@ -119,11 +118,11 @@ def lint(
         # Only lint the given module
         if module:
             if all_modules:
-                raise LintException("You cannot specify a tool and request all tools to be linted.")
+                raise LintExceptionError("You cannot specify a tool and request all tools to be linted.")
             local_modules = []
             remote_modules = [m for m in self.all_remote_components if m.component_name == module]
             if len(remote_modules) == 0:
-                raise LintException(f"Could not find the specified module: '{module}'")
+                raise LintExceptionError(f"Could not find the specified module: '{module}'")
         else:
             local_modules = self.all_local_components
             remote_modules = self.all_remote_components
diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py
index a052425539..c2fc9384e5 100644
--- a/nf_core/modules/lint/environment_yml.py
+++ b/nf_core/modules/lint/environment_yml.py
@@ -23,14 +23,14 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent)
     env_yml = None
     #  load the environment.yml file
     try:
-        with open(Path(module.component_dir, "environment.yml"), "r") as fh:
+        with open(Path(module.component_dir, "environment.yml")) as fh:
             env_yml = yaml.safe_load(fh)

         module.passed.append(("environment_yml_exists", "Module's `environment.yml` exists", module.environment_yml))

     except FileNotFoundError:
         # check if the module's main.nf requires a conda environment
-        with open(Path(module.component_dir, "main.nf"), "r") as fh:
+        with open(Path(module.component_dir, "main.nf")) as fh:
             main_nf = fh.read()
             if 'conda "${moduleDir}/environment.yml"' in main_nf:
                 module.failed.append(
@@ -49,9 +49,7 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent)
     if env_yml:
         valid_env_yml = False
         try:
-            with open(
-                Path(module_lint_object.modules_repo.local_repo_dir, "modules/environment-schema.json"), "r"
-            ) as fh:
+            with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/environment-schema.json")) as fh:
                 schema = json.load(fh)
             validators.validate(instance=env_yml, schema=schema)
             module.passed.append(
@@ -92,7 +90,7 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent)
                 yaml.dump(env_yml, fh, Dumper=custom_yaml_dumper())

     # Check that the name in the environment.yml file matches the name in the meta.yml file
-    with open(Path(module.component_dir, "meta.yml"), "r") as fh:
+    with open(Path(module.component_dir, "meta.yml")) as fh:
         meta_yml = yaml.safe_load(fh)

     if env_yml["name"] == meta_yml["name"]:
diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py
index 56a9e99925..fd4d81f7f2 100644
--- a/nf_core/modules/lint/main_nf.py
+++ b/nf_core/modules/lint/main_nf.py
@@ -55,7 +55,7 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar):
     if lines is None:
         try:
             # Check whether file exists and load it
-            with open(module.main_nf, "r") as fh:
+            with open(module.main_nf) as fh:
                 lines = fh.readlines()
             module.passed.append(("main_nf_exists", "Module file exists", module.main_nf))
         except FileNotFoundError:
@@ -81,39 +81,39 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar):
     script_lines = []
     shell_lines = []
     when_lines = []
-    for l in lines:
-        if re.search(r"^\s*process\s*\w*\s*{", l) and state == "module":
+    for line in lines:
+        if re.search(r"^\s*process\s*\w*\s*{", line) and state == "module":
             state = "process"
-        if re.search(r"input\s*:", l) and state in ["process"]:
+        if re.search(r"input\s*:", line) and state in ["process"]:
             state = "input"
             continue
-        if re.search(r"output\s*:", l) and state in ["input", "process"]:
+        if re.search(r"output\s*:", line) and state in ["input", "process"]:
             state = "output"
             continue
-        if re.search(r"when\s*:", l) and state in ["input", "output", "process"]:
+        if re.search(r"when\s*:", line) and state in ["input", "output", "process"]:
             state = "when"
             continue
-        if re.search(r"script\s*:", l) and state in ["input", "output", "when", "process"]:
["input", "output", "when", "process"]: + if re.search(r"script\s*:", line) and state in ["input", "output", "when", "process"]: state = "script" continue - if re.search(r"shell\s*:", l) and state in ["input", "output", "when", "process"]: + if re.search(r"shell\s*:", line) and state in ["input", "output", "when", "process"]: state = "shell" continue # Perform state-specific linting checks - if state == "process" and not _is_empty(l): - process_lines.append(l) - if state == "input" and not _is_empty(l): - inputs.extend(_parse_input(module, l)) - if state == "output" and not _is_empty(l): - outputs += _parse_output(module, l) + if state == "process" and not _is_empty(line): + process_lines.append(line) + if state == "input" and not _is_empty(line): + inputs.extend(_parse_input(module, line)) + if state == "output" and not _is_empty(line): + outputs += _parse_output(module, line) outputs = list(set(outputs)) # remove duplicate 'meta's - if state == "when" and not _is_empty(l): - when_lines.append(l) - if state == "script" and not _is_empty(l): - script_lines.append(l) - if state == "shell" and not _is_empty(l): - shell_lines.append(l) + if state == "when" and not _is_empty(line): + when_lines.append(line) + if state == "script" and not _is_empty(line): + script_lines.append(line) + if state == "shell" and not _is_empty(line): + shell_lines.append(line) # Check that we have required sections if not len(outputs): @@ -140,7 +140,7 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): # Check that shell uses a template if len(shell_lines): - if any("template" in l for l in shell_lines): + if any("template" in line for line in shell_lines): module.passed.append(("main_nf_shell_template", "`template` found in `shell` block", module.main_nf)) else: module.failed.append(("main_nf_shell_template", "No `template` found in `shell` block", module.main_nf)) @@ -249,21 +249,21 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): # Deprecated enable_conda for i, raw_line in enumerate(lines): url = None - l = raw_line.strip(" \n'\"}:") + line = raw_line.strip(" \n'\"}:") # Catch preceeding "container " - if l.startswith("container"): - l = l.replace("container", "").strip(" \n'\"}:") + if line.startswith("container"): + line = line.replace("container", "").strip(" \n'\"}:") - if _container_type(l) == "conda": - if "bioconda::" in l: - bioconda_packages = [b for b in l.split() if "bioconda::" in b] - match = re.search(r"params\.enable_conda", l) + if _container_type(line) == "conda": + if "bioconda::" in line: + bioconda_packages = [b for b in line.split() if "bioconda::" in b] + match = re.search(r"params\.enable_conda", line) if match is None: self.passed.append( ( "deprecated_enable_conda", - f"Deprecated parameter 'params.enable_conda' correctly not found in the conda definition", + "Deprecated parameter 'params.enable_conda' correctly not found in the conda definition", self.main_nf, ) ) @@ -271,35 +271,35 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): self.failed.append( ( "deprecated_enable_conda", - f"Found deprecated parameter 'params.enable_conda' in the conda definition", + "Found deprecated parameter 'params.enable_conda' in the conda definition", self.main_nf, ) ) - if _container_type(l) == "singularity": + if _container_type(line) == "singularity": # e.g. "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img -> v1.2.0_cv1 # e.g. 
"https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0 -> 0.11.9--0 # Please god let's find a better way to do this than regex - match = re.search(r"(?:[:.])?([A-Za-z\d\-_.]+?)(?:\.img)?(?:\.sif)?$", l) + match = re.search(r"(?:[:.])?([A-Za-z\d\-_.]+?)(?:\.img)?(?:\.sif)?$", line) if match is not None: singularity_tag = match.group(1) self.passed.append(("singularity_tag", f"Found singularity tag: {singularity_tag}", self.main_nf)) else: self.failed.append(("singularity_tag", "Unable to parse singularity tag", self.main_nf)) singularity_tag = None - url = urlparse(l.split("'")[0]) + url = urlparse(line.split("'")[0]) - if _container_type(l) == "docker": + if _container_type(line) == "docker": # e.g. "quay.io/biocontainers/krona:2.7.1--pl526_5 -> 2.7.1--pl526_5 # e.g. "biocontainers/biocontainers:v1.2.0_cv1 -> v1.2.0_cv1 - match = re.search(r":([A-Za-z\d\-_.]+)$", l) + match = re.search(r":([A-Za-z\d\-_.]+)$", line) if match is not None: docker_tag = match.group(1) self.passed.append(("docker_tag", f"Found docker tag: {docker_tag}", self.main_nf)) else: self.failed.append(("docker_tag", "Unable to parse docker tag", self.main_nf)) docker_tag = None - if l.startswith(registry): - l_stripped = re.sub(r"\W+$", "", l) + if line.startswith(registry): + l_stripped = re.sub(r"\W+$", "", line) self.failed.append( ( "container_links", @@ -308,15 +308,15 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): ) ) else: - self.passed.append(("container_links", f"Container prefix is correct", self.main_nf)) + self.passed.append(("container_links", "Container prefix is correct", self.main_nf)) # Guess if container name is simple one (e.g. nfcore/ubuntu:20.04) # If so, add quay.io as default container prefix - if l.count("/") == 1 and l.count(":") == 1: - l = "/".join([registry, l]).replace("//", "/") - url = urlparse(l.split("'")[0]) + if line.count("/") == 1 and line.count(":") == 1: + line = "/".join([registry, line]).replace("//", "/") + url = urlparse(line.split("'")[0]) - if l.startswith("container") or _container_type(l) == "docker" or _container_type(l) == "singularity": + if line.startswith("container") or _container_type(line) == "docker" or _container_type(line) == "singularity": check_container_link_line(self, raw_line, registry) # Try to connect to container URLs @@ -348,7 +348,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): # Get bioconda packages from environment.yml try: - with open(Path(self.component_dir, "environment.yml"), "r") as fh: + with open(Path(self.component_dir, "environment.yml")) as fh: env_yml = yaml.safe_load(fh) if "dependencies" in env_yml: bioconda_packages = [x for x in env_yml["dependencies"] if isinstance(x, str) and "bioconda::" in x] @@ -424,7 +424,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): def check_process_labels(self, lines): correct_process_labels = ["process_single", "process_low", "process_medium", "process_high", "process_long"] - all_labels = [l.strip() for l in lines if l.lstrip().startswith("label ")] + all_labels = [line.strip() for line in lines if line.lstrip().startswith("label ")] bad_labels = [] good_labels = [] if len(all_labels) > 0: @@ -475,14 +475,14 @@ def check_process_labels(self, lines): def check_container_link_line(self, raw_line, registry): """Look for common problems in the container name / URL, for docker and singularity.""" - l = raw_line.strip(" \n'\"}:") + line = raw_line.strip(" \n'\"}:") # lint double quotes - if l.count('"') > 
@@ -475,14 +475,14 @@ def check_process_labels(self, lines):

 def check_container_link_line(self, raw_line, registry):
     """Look for common problems in the container name / URL, for docker and singularity."""

-    l = raw_line.strip(" \n'\"}:")
+    line = raw_line.strip(" \n'\"}:")

     # lint double quotes
-    if l.count('"') > 2:
+    if line.count('"') > 2:
         self.failed.append(
             (
                 "container_links",
-                f"Too many double quotes found when specifying container: {l.lstrip('container ')}",
+                f"Too many double quotes found when specifying container: {line.lstrip('container ')}",
                 self.main_nf,
             )
         )
@@ -490,7 +490,7 @@ def check_container_link_line(self, raw_line, registry):
         self.passed.append(
             (
                 "container_links",
-                f"Correct number of double quotes found when specifying container: {l.lstrip('container ')}",
+                f"Correct number of double quotes found when specifying container: {line.lstrip('container ')}",
                 self.main_nf,
             )
         )
@@ -524,7 +524,9 @@ def check_container_link_line(self, raw_line, registry):
     )

     # lint more than one container in the same line
-    if ("https://containers" in l or "https://depot" in l) and ("biocontainers/" in l or l.startswith(registry)):
+    if ("https://containers" in line or "https://depot" in line) and (
+        "biocontainers/" in line or line.startswith(registry)
+    ):
         self.warned.append(
             (
                 "container_links",
@@ -576,7 +578,7 @@ def _parse_output(self, line):
     output = []
     if "meta" in line:
         output.append("meta")
-    if not "emit:" in line:
+    if "emit:" not in line:
         self.failed.append(("missing_emit", f"Missing emit statement: {line.strip()}", self.main_nf))
     else:
         output.append(line.split("emit:")[1].strip())
@@ -605,14 +607,14 @@ def _fix_module_version(self, current_version, latest_version, singularity_tag,
     # Get latest build
     build = _get_build(response)

-    with open(self.main_nf, "r") as source:
+    with open(self.main_nf) as source:
         lines = source.readlines()

     # Check if the new version + build exist and replace
     new_lines = []
     for line in lines:
-        l = line.strip(" '\"")
-        build_type = _container_type(l)
+        line_stripped = line.strip(" '\"")
+        build_type = _container_type(line_stripped)
         if build_type == "conda":
             new_lines.append(re.sub(rf"{current_version}", f"{latest_version}", line))
         elif build_type in ("singularity", "docker"):
diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py
index 7552c1ceae..551a978f4d 100644
--- a/nf_core/modules/lint/meta_yml.py
+++ b/nf_core/modules/lint/meta_yml.py
@@ -54,7 +54,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None
         meta_yaml = yaml.safe_load("".join(lines))
     if meta_yaml is None:
         try:
-            with open(module.meta_yml, "r") as fh:
+            with open(module.meta_yml) as fh:
                 meta_yaml = yaml.safe_load(fh)
             module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml))
         except FileNotFoundError:
@@ -64,7 +64,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None
     # Confirm that the meta.yml file is valid according to the JSON schema
     valid_meta_yml = False
     try:
-        with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/meta-schema.json"), "r") as fh:
+        with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/meta-schema.json")) as fh:
             schema = json.load(fh)
         validators.validate(instance=meta_yaml, schema=schema)
         module.passed.append(("meta_yml_valid", "Module `meta.yml` is valid", module.meta_yml))
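`meta_yml.py` and `environment_yml.py` both validate a YAML document against a JSON schema shipped with the modules repo. The core of that pattern, shown with the `jsonschema` package and a toy schema/document:

```python
import jsonschema
import yaml

schema = {
    "type": "object",
    "properties": {"name": {"type": "string"}},
    "required": ["name"],
}

meta_yaml = yaml.safe_load("name: samtools_sort\ndescription: Sort BAM files")

# Raises jsonschema.ValidationError if the document does not conform.
jsonschema.validate(instance=meta_yaml, schema=schema)
print("meta.yml is valid")
```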
diff --git a/nf_core/modules/lint/module_patch.py b/nf_core/modules/lint/module_patch.py
index d52962eabb..29bf78a66b 100644
--- a/nf_core/modules/lint/module_patch.py
+++ b/nf_core/modules/lint/module_patch.py
@@ -40,7 +40,7 @@ def check_patch_valid(module, patch_path):
     Returns:
         (bool): False if any test failed, True otherwise
     """
-    with open(patch_path, "r") as fh:
+    with open(patch_path) as fh:
         patch_lines = fh.readlines()

     # Check that the file contains a patch for at least one file
@@ -170,8 +170,8 @@ def patch_reversible(module_lint_object, module, patch_path):
         )
     except LookupError:
         # Patch failed. Save the patch file by moving to the install dir
-        module.failed.append((("patch_reversible", "Patch file is outdated or edited", patch_path)))
+        module.failed.append(("patch_reversible", "Patch file is outdated or edited", patch_path))
         return False

-    module.passed.append((("patch_reversible", "Patch agrees with module files", patch_path)))
+    module.passed.append(("patch_reversible", "Patch agrees with module files", patch_path))
     return True
diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py
index 87033e3f49..520f8cf0a2 100644
--- a/nf_core/modules/lint/module_tests.py
+++ b/nf_core/modules/lint/module_tests.py
@@ -44,7 +44,7 @@ def module_tests(_, module: NFCoreComponent):
     if module.nftest_main_nf.is_file():
         # Check if main.nf.test.snap file exists, if 'snap(' is inside main.nf.test
-        with open(module.nftest_main_nf, "r") as fh:
+        with open(module.nftest_main_nf) as fh:
             if "snapshot(" in fh.read():
                 snap_file = module.nftest_testdir / "main.nf.test.snap"
                 if snap_file.is_file():
@@ -52,7 +52,7 @@ def module_tests(_, module: NFCoreComponent):
                         ("test_snapshot_exists", "snapshot file `main.nf.test.snap` exists", snap_file)
                     )
                     # Validate no empty files
-                    with open(snap_file, "r") as snap_fh:
+                    with open(snap_file) as snap_fh:
                         try:
                             snap_content = json.load(snap_fh)
                             for test_name in snap_content.keys():
@@ -145,7 +145,7 @@ def module_tests(_, module: NFCoreComponent):
     pytest_yml_path = module.base_dir / "tests" / "config" / "pytest_modules.yml"
     if pytest_yml_path.is_file() and not is_pytest:
         try:
-            with open(pytest_yml_path, "r") as fh:
+            with open(pytest_yml_path) as fh:
                 pytest_yml = yaml.safe_load(fh)
                 if module.component_name in pytest_yml.keys():
                     module.failed.append(
@@ -165,7 +165,7 @@ def module_tests(_, module: NFCoreComponent):
     if module.tags_yml.is_file():
         # Check that tags.yml exists and it has the correct entry
         module.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", module.tags_yml))
-        with open(module.tags_yml, "r") as fh:
+        with open(module.tags_yml) as fh:
             tags_yml = yaml.safe_load(fh)
             if module.component_name in tags_yml.keys():
                 module.passed.append(("test_tags_yml", "correct entry in tags.yml", module.tags_yml))
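The `modules_differ.py` changes that follow sit on top of `difflib.unified_diff`, which produces the same `---`/`+++`/hunk output the code shows against `/dev/null` for created or removed files. A minimal sketch of the call:

```python
import difflib

old_lines = ["process FOO {\n", "    label 'process_low'\n", "}\n"]
new_lines = ["process FOO {\n", "    label 'process_single'\n", "}\n"]

# fromfile/tofile become the ---/+++ headers of the unified diff.
diff = difflib.unified_diff(old_lines, new_lines, fromfile="a/main.nf", tofile="b/main.nf")
print("".join(diff))
```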
diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py
index efce3868e5..a97229ff62 100644
--- a/nf_core/modules/modules_differ.py
+++ b/nf_core/modules/modules_differ.py
@@ -74,9 +74,9 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d
         temp_path = Path(to_dir, file)
         curr_path = Path(from_dir, file)
         if temp_path.exists() and curr_path.exists() and temp_path.is_file():
-            with open(temp_path, "r") as fh:
+            with open(temp_path) as fh:
                 new_lines = fh.readlines()
-            with open(curr_path, "r") as fh:
+            with open(curr_path) as fh:
                 old_lines = fh.readlines()

             if new_lines == old_lines:
@@ -93,7 +93,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d
                 diffs[file] = (ModulesDiffer.DiffEnum.CHANGED, diff)

         elif temp_path.exists():
-            with open(temp_path, "r") as fh:
+            with open(temp_path) as fh:
                 new_lines = fh.readlines()
             # The file was created
             # Show file against /dev/null
@@ -108,7 +108,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d
         elif curr_path.exists():
             # The file was removed
             # Show file against /dev/null
-            with open(curr_path, "r") as fh:
+            with open(curr_path) as fh:
                 old_lines = fh.readlines()
             diff = difflib.unified_diff(
                 old_lines,
@@ -279,7 +279,7 @@ def per_file_patch(patch_fn):
         dict[str, str]: A dictionary indexed by the filenames with the file patches as values
     """
-    with open(patch_fn, "r") as fh:
+    with open(patch_fn) as fh:
         lines = fh.readlines()

     patches = {}
@@ -447,7 +447,7 @@ def try_apply_patch(module, repo_path, patch_path, module_dir, reverse=False):
         log.debug(f"Applying patch to {file}")
         fn = Path(file).relative_to(module_relpath)
         file_path = module_dir / fn
-        with open(file_path, "r") as fh:
+        with open(file_path) as fh:
             file_lines = fh.readlines()
         patched_new_lines = ModulesDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse)
         new_files[str(fn)] = patched_new_lines
diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py
index 32eb8736d6..f68c27b2d8 100644
--- a/nf_core/modules/modules_json.py
+++ b/nf_core/modules/modules_json.py
@@ -637,7 +637,7 @@ def load(self):
             UserWarning: If the modules.json file is not found
         """
         try:
-            with open(self.modules_json_path, "r") as fh:
+            with open(self.modules_json_path) as fh:
                 try:
                     self.modules_json = json.load(fh)
                 except json.JSONDecodeError as e:
diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py
index 152ed7b0c0..204c20fd71 100644
--- a/nf_core/modules/modules_repo.py
+++ b/nf_core/modules/modules_repo.py
@@ -1,8 +1,6 @@
-import filecmp
 import logging
 import os
 import shutil
-from pathlib import Path

 import git
 import rich
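The `modules_json.py` hunk above wraps `json.load` so that a corrupt `modules.json` surfaces as a user-facing warning rather than a raw traceback. Stripped to its essentials; the function name and messages here are illustrative, not the class's actual API:

```python
import json

def load_modules_json(path):
    """Minimal sketch of the load pattern: corrupt or missing files
    become a UserWarning instead of an unhandled exception."""
    try:
        with open(path) as fh:
            try:
                return json.load(fh)
            except json.JSONDecodeError as e:
                raise UserWarning(f"Unable to load JSON file '{path}' due to error {e}")
    except FileNotFoundError:
        raise UserWarning(f"File '{path}' is missing")

try:
    load_modules_json("modules.json")  # hypothetical path
except UserWarning as e:
    print(e)
```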
diff --git a/nf_core/modules/modules_utils.py b/nf_core/modules/modules_utils.py
index 3ae01e9eef..ca8993483b 100644
--- a/nf_core/modules/modules_utils.py
+++ b/nf_core/modules/modules_utils.py
@@ -9,7 +9,7 @@
 log = logging.getLogger(__name__)


-class ModuleException(Exception):
+class ModuleExceptionError(Exception):
     """Exception raised when there was an error with module commands"""

     pass
@@ -69,12 +69,12 @@ def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], Lis
     if os.path.exists(nfcore_modules_dir):
         for m in sorted([m for m in os.listdir(nfcore_modules_dir) if not m == "lib"]):
             if not os.path.isdir(os.path.join(nfcore_modules_dir, m)):
-                raise ModuleException(
+                raise ModuleExceptionError(
                     f"File found in '{nfcore_modules_dir}': '{m}'! This directory should only contain module directories."
                 )
             m_content = os.listdir(os.path.join(nfcore_modules_dir, m))
             # Not a module, but contains sub-modules
-            if not "main.nf" in m_content:
+            if "main.nf" not in m_content:
                 for tool in m_content:
                     nfcore_modules_names.append(os.path.join(m, tool))
             else:
diff --git a/nf_core/params_file.py b/nf_core/params_file.py
index 5c50c53fb9..51986821b5 100644
--- a/nf_core/params_file.py
+++ b/nf_core/params_file.py
@@ -1,6 +1,5 @@
 """ Create a YAML parameter file """

-from __future__ import print_function

 import json
 import logging
@@ -9,8 +8,6 @@
 from typing import Literal, Optional

 import questionary
-import rich
-import rich.columns

 import nf_core.list
 import nf_core.utils
@@ -196,7 +193,7 @@ def format_param(self, name, properties, required_properties=(), show_hidden=Fal
         description = properties.get("description", "")
         self.schema_obj.get_schema_defaults()
         default = properties.get("default")
-        typ = properties.get("type")
+        type = properties.get("type")
         required = name in required_properties

         out += _print_wrapped(name, "-", mode="both")
@@ -204,8 +201,11 @@ def format_param(self, name, properties, required_properties=(), show_hidden=Fal
         if description:
             out += _print_wrapped(description + "\n", mode="none", indent=4)

-        if typ:
-            out += _print_wrapped(f"Type: {typ}", mode="none", indent=4)
+        if type:
+            out += _print_wrapped(f"Type: {type}", mode="none", indent=4)
+
+        if required:
+            out += _print_wrapped("Required", mode="none", indent=4)

         out += _print_wrapped("\n", mode="end")
         out += f"# {name} = {json.dumps(default)}\n"
diff --git a/nf_core/pipeline-template/.editorconfig b/nf_core/pipeline-template/.editorconfig
index b6b3190776..9b990088ab 100644
--- a/nf_core/pipeline-template/.editorconfig
+++ b/nf_core/pipeline-template/.editorconfig
@@ -22,3 +22,11 @@ indent_size = unset

 [/assets/email*]
 indent_size = unset
+
+# ignore Readme
+[README.md]
+indent_style = unset
+
+# ignore python
+[*.{py}]
+indent_style = unset
diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml
index 2f83a0962c..4c9fd69fcc 100644
--- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml
+++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml
@@ -31,7 +31,7 @@ jobs:
           }
           profiles: test_full

-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
         with:
           name: Tower debug log file
           path: |
diff --git a/nf_core/pipeline-template/.github/workflows/awstest.yml b/nf_core/pipeline-template/.github/workflows/awstest.yml
index 9a0bf4afbc..25726aa1c9 100644
--- a/nf_core/pipeline-template/.github/workflows/awstest.yml
+++ b/nf_core/pipeline-template/.github/workflows/awstest.yml
@@ -25,7 +25,7 @@ jobs:
           }
           profiles: test

-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
         with:
           name: Tower debug log file
           path: |
diff --git a/nf_core/pipeline-template/.github/workflows/branch.yml b/nf_core/pipeline-template/.github/workflows/branch.yml
index 8edfa540c9..057016e4be 100644
--- a/nf_core/pipeline-template/.github/workflows/branch.yml
+++ b/nf_core/pipeline-template/.github/workflows/branch.yml
@@ -19,7 +19,7 @@ jobs:
       # NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets
       - name: Post PR comment
         if: failure()
-        uses: mshick/add-pr-comment@v1
+        uses: mshick/add-pr-comment@v2
         with:
           message: |
             ## This PR is against the `master` branch :x:
diff --git a/nf_core/pipeline-template/.github/workflows/clean-up.yml b/nf_core/pipeline-template/.github/workflows/clean-up.yml
index 427aad5087..8feb3fb017 100644
--- a/nf_core/pipeline-template/.github/workflows/clean-up.yml
+++ b/nf_core/pipeline-template/.github/workflows/clean-up.yml
@@ -10,7 +10,7 @@ jobs:
       issues: write
       pull-requests: write
     steps:
-      - uses: actions/stale@v7
+      - uses: actions/stale@v9
        with:
           stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days."
           stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful."
diff --git a/nf_core/pipeline-template/.github/workflows/fix-linting.yml b/nf_core/pipeline-template/.github/workflows/fix-linting.yml
index 31e8cd2b36..d9986bd30f 100644
--- a/nf_core/pipeline-template/.github/workflows/fix-linting.yml
+++ b/nf_core/pipeline-template/.github/workflows/fix-linting.yml
@@ -24,32 +24,25 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }}

-      - uses: actions/setup-node@v4
-
-      - name: Install Prettier
-        run: npm install -g prettier @prettier/plugin-php
+      - name: Set up Python 3.11
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.11
+          cache: "pip"

-      # Check that we actually need to fix something
-      - name: Run 'prettier --check'
-        id: prettier_status
-        run: |
-          if prettier --check ${GITHUB_WORKSPACE}; then
-            echo "result=pass" >> $GITHUB_OUTPUT
-          else
-            echo "result=fail" >> $GITHUB_OUTPUT
-          fi
+      - name: Install pre-commit
+        run: pip install pre-commit

-      - name: Run 'prettier --write'
-        if: steps.prettier_status.outputs.result == 'fail'
-        run: prettier --write ${GITHUB_WORKSPACE}
+      - name: Run pre-commit
+        run: pre-commit run --all-files || echo "status=fail" >> $GITHUB_ENV

       - name: Commit & push changes
-        if: steps.prettier_status.outputs.result == 'fail'
+        if: env.status == 'fail'
         run: |
           git config user.email "core@nf-co.re"
           git config user.name "nf-core-bot"
           git config push.default upstream
           git add .
           git status
-          git commit -m "[automated] Fix linting with Prettier"
+          git commit -m "[automated] Fix linting with pre-commit"
           git push
 {%- endraw %}
diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml
index 94aa5278be..a267f1ec14 100644
--- a/nf_core/pipeline-template/.github/workflows/linting.yml
+++ b/nf_core/pipeline-template/.github/workflows/linting.yml
@@ -11,61 +11,22 @@ on:
     types: [published]

 jobs:
-  EditorConfig:
+  pre-commit:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4

-      - uses: actions/setup-node@v4
-
-      - name: Install editorconfig-checker
-        run: npm install -g editorconfig-checker
-
-      - name: Run ECLint check
-        run: editorconfig-checker -exclude README.md $(find .* -type f | grep -v '.git\|.py\|.md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile')
-
-  Prettier:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-
-      - uses: actions/setup-node@v4
-
-      - name: Install Prettier
-        run: npm install -g prettier
-
-      - name: Run Prettier --check
-        run: prettier --check ${GITHUB_WORKSPACE}
-
-  PythonBlack:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Check code lints with Black
-        uses: psf/black@stable
-
-      # If the above check failed, post a comment on the PR explaining the failure
-      - name: Post PR comment
-        if: failure()
-        uses: mshick/add-pr-comment@v1
+      - name: Set up Python 3.11
+        uses: actions/setup-python@v5
         with:
-          message: |
-            ## Python linting (`black`) is failing
-
-            To keep the code consistent with lots of contributors, we run automated code consistency checks.
-            To fix this CI test, please run:
-
-            * Install [`black`](https://black.readthedocs.io/en/stable/): `pip install black`
-            * Fix formatting errors in your pipeline: `black .`
-
-            Once you push these changes the test should pass, and you can hide this comment :+1:
+          python-version: 3.11
+          cache: "pip"

-            We highly recommend setting up Black in your code editor so that this formatting is done automatically on save. Ask about it on Slack for help!
+      - name: Install pre-commit
+        run: pip install pre-commit

-            Thanks again for your contribution!
-          repo-token: ${{ secrets.GITHUB_TOKEN }}
-          allow-repeats: false
+      - name: Run pre-commit
+        run: pre-commit run --all-files

   nf-core:
     runs-on: ubuntu-latest
@@ -76,7 +37,7 @@ jobs:
       - name: Install Nextflow
         uses: nf-core/setup-nextflow@v1

-      - uses: actions/setup-python@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.11"
           architecture: "x64"
@@ -99,7 +60,7 @@ jobs:

       - name: Upload linting log file artifact
         if: ${{ always() }}
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: linting-logs
           path: |
diff --git a/nf_core/pipeline-template/.github/workflows/linting_comment.yml b/nf_core/pipeline-template/.github/workflows/linting_comment.yml
index 09f8c423e5..e5528b29cf 100644
--- a/nf_core/pipeline-template/.github/workflows/linting_comment.yml
+++ b/nf_core/pipeline-template/.github/workflows/linting_comment.yml
@@ -11,7 +11,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Download lint results
-        uses: dawidd6/action-download-artifact@v2
+        uses: dawidd6/action-download-artifact@v3
         with:
           workflow: linting.yml
           workflow_conclusion: completed
diff --git a/nf_core/pipeline-template/.github/workflows/release-announcements.yml b/nf_core/pipeline-template/.github/workflows/release-announcements.yml
index ad497db4e1..1dd48b123f 100644
--- a/nf_core/pipeline-template/.github/workflows/release-announcements.yml
+++ b/nf_core/pipeline-template/.github/workflows/release-announcements.yml
@@ -24,7 +24,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/setup-python@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.10"
       - name: Install dependencies
@@ -56,7 +56,7 @@ jobs:
   bsky-post:
     runs-on: ubuntu-latest
     steps:
-      - uses: zentered/bluesky-post-action@v0.0.2
+      - uses: zentered/bluesky-post-action@v0.1.0
         with:
           post: |
             Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}!
diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml
index acf7269536..363d5b1d42 100644
--- a/nf_core/pipeline-template/.gitpod.yml
+++ b/nf_core/pipeline-template/.gitpod.yml
@@ -7,6 +7,7 @@ tasks:
   - name: unset JAVA_TOOL_OPTIONS
     command: |
       unset JAVA_TOOL_OPTIONS
+
 vscode:
   extensions: # based on nf-core.nf-core-extensionpack
     - codezombiech.gitignore # Language support for .gitignore files
diff --git a/nf_core/pipeline-template/.pre-commit-config.yaml b/nf_core/pipeline-template/.pre-commit-config.yaml
index 0c31cdb99f..984321ff26 100644
--- a/nf_core/pipeline-template/.pre-commit-config.yaml
+++ b/nf_core/pipeline-template/.pre-commit-config.yaml
@@ -1,5 +1,10 @@
 repos:
   - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: "v2.7.1"
+    rev: "v3.1.1"
     hooks:
       - id: prettier
+  - repo: https://github.com/editorconfig-checker/editorconfig-checker.python
+    rev: "2.7.3"
+    hooks:
+      - id: editorconfig-checker
+        alias: ec
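The pipeline template's `pyproject.toml` below selects the Ruff rule families `I`, `E1`/`E4`/`E7`/`E9`, `F`, `UP` and `N`; nearly every Python hunk in this diff is an instance of one of them. A rough, runnable cheat-sheet (illustrative snippets, not lines from the repo):

```python
items = ["main.nf", "meta.yml"]

# E7 (pycodestyle): membership tests read `x not in y`, not `not x in y`.
print("environment.yml" not in items)

# F (pyflakes): f-strings without any placeholder lose the `f` prefix.
print("Container prefix is correct")

# UP (pyupgrade): open(path, "r") -> open(path); __future__ imports and
#                 .format() calls are modernised.
# N  (pep8-naming): exception classes are renamed to end in "Error".
# I  (isort): imports are sorted and merged, replacing standalone isort.
```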
-[tool.black] +[tool.ruff] line-length = 120 -target_version = ["py37", "py38", "py39", "py310"] +target-version = "py38" +select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"] +cache-dir = "~/.cache/ruff" -[tool.isort] -profile = "black" -known_first_party = ["nf_core"] -multi_line_output = 3 +[tool.ruff.isort] +known-first-party = ["nf_core"] + +[tool.ruff.per-file-ignores] +"__init__.py" = ["E402", "F401"] diff --git a/nf_core/refgenie.py b/nf_core/refgenie.py index 6f09b75532..de9201bcd6 100644 --- a/nf_core/refgenie.py +++ b/nf_core/refgenie.py @@ -2,7 +2,6 @@ Update a nextflow.config file with refgenie genomes """ -import json import logging import os import re @@ -86,7 +85,7 @@ def _update_nextflow_home_config(refgenie_genomes_config_file, nxf_home): if os.path.exists(nxf_home_config): # look for include statement in config has_include_statement = False - with open(nxf_home_config, "r") as fh: + with open(nxf_home_config) as fh: lines = fh.readlines() for line in lines: if re.match(rf"\s*includeConfig\s*'{os.path.abspath(refgenie_genomes_config_file)}'", line): diff --git a/nf_core/schema.py b/nf_core/schema.py index 7e4726f189..b0c5dc04b6 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -1,6 +1,5 @@ """ Code to deal with pipeline JSON Schema """ -from __future__ import print_function import copy import json @@ -107,7 +106,7 @@ def load_lint_schema(self): def load_schema(self): """Load a pipeline schema from a file""" - with open(self.schema_filename, "r") as fh: + with open(self.schema_filename) as fh: self.schema = json.load(fh) self.schema_defaults = {} self.schema_params = {} @@ -189,7 +188,7 @@ def load_input_params(self, params_path): """ # First, try to load as JSON try: - with open(params_path, "r") as fh: + with open(params_path) as fh: try: params = json.load(fh) except json.JSONDecodeError as e: @@ -200,7 +199,7 @@ def load_input_params(self, params_path): log.debug(f"Could not load input params as JSON: {json_e}") # This failed, try to load as YAML try: - with open(params_path, "r") as fh: + with open(params_path) as fh: params = yaml.safe_load(fh) self.input_params.update(params) log.debug(f"Loaded YAML input params: {params_path}") @@ -317,7 +316,7 @@ def validate_config_default_parameter(self, param, schema_param, config_default) param ] = f"String should not be set to `{config_default}`" if schema_param["type"] == "boolean": - if not str(config_default) in ["false", "true"]: + if str(config_default) not in ["false", "true"]: self.invalid_nextflow_config_default_parameters[ param ] = f"Booleans should only be true or false, not `{config_default}`" @@ -757,9 +756,7 @@ def prompt_remove_schema_notfound_config(self, p_key): if self.no_prompts or self.schema_from_scratch: return True if Confirm.ask( - ":question: Unrecognised [bold]'params.{}'[/] found in the schema but not in the pipeline config! [yellow]Remove it?".format( - p_key - ) + f":question: Unrecognised [bold]'params.{p_key}'[/] found in the schema but not in the pipeline config! [yellow]Remove it?" 
): return True return False diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index ffba41f9da..3a87190422 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -6,7 +6,6 @@ nf-core subworkflows lint """ -from __future__ import print_function import logging import os @@ -16,7 +15,7 @@ import nf_core.modules.modules_utils import nf_core.utils -from nf_core.components.lint import ComponentLint, LintException, LintResult +from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult from nf_core.lint_utils import console log = logging.getLogger(__name__) @@ -114,11 +113,11 @@ def lint( # Only lint the given module if subworkflow: if all_subworkflows: - raise LintException("You cannot specify a tool and request all tools to be linted.") + raise LintExceptionError("You cannot specify a tool and request all tools to be linted.") local_subworkflows = [] remote_subworkflows = [s for s in self.all_remote_components if s.component_name == subworkflow] if len(remote_subworkflows) == 0: - raise LintException(f"Could not find the specified subworkflow: '{subworkflow}'") + raise LintExceptionError(f"Could not find the specified subworkflow: '{subworkflow}'") else: local_subworkflows = self.all_local_components remote_subworkflows = self.all_remote_components diff --git a/nf_core/subworkflows/lint/main_nf.py b/nf_core/subworkflows/lint/main_nf.py index c7ce77490d..f59e1e4279 100644 --- a/nf_core/subworkflows/lint/main_nf.py +++ b/nf_core/subworkflows/lint/main_nf.py @@ -32,7 +32,7 @@ def main_nf(_, subworkflow): if lines is None: try: # Check whether file exists and load it - with open(subworkflow.main_nf, "r") as fh: + with open(subworkflow.main_nf) as fh: lines = fh.readlines() subworkflow.passed.append(("main_nf_exists", "Subworkflow file exists", subworkflow.main_nf)) except FileNotFoundError: @@ -45,30 +45,30 @@ def main_nf(_, subworkflow): subworkflow_lines = [] workflow_lines = [] main_lines = [] - for l in lines: - if re.search(r"^\s*workflow\s*\w*\s*{", l) and state == "subworkflow": + for line in lines: + if re.search(r"^\s*workflow\s*\w*\s*{", line) and state == "subworkflow": state = "workflow" - if re.search(r"take\s*:", l) and state in ["workflow"]: + if re.search(r"take\s*:", line) and state in ["workflow"]: state = "take" continue - if re.search(r"main\s*:", l) and state in ["take", "workflow"]: + if re.search(r"main\s*:", line) and state in ["take", "workflow"]: state = "main" continue - if re.search(r"emit\s*:", l) and state in ["take", "main", "workflow"]: + if re.search(r"emit\s*:", line) and state in ["take", "main", "workflow"]: state = "emit" continue # Perform state-specific linting checks - if state == "subworkflow" and not _is_empty(l): - subworkflow_lines.append(l) - if state == "workflow" and not _is_empty(l): - workflow_lines.append(l) - if state == "take" and not _is_empty(l): - inputs.extend(_parse_input(subworkflow, l)) - if state == "emit" and not _is_empty(l): - outputs.extend(_parse_output(subworkflow, l)) - if state == "main" and not _is_empty(l): - main_lines.append(l) + if state == "subworkflow" and not _is_empty(line): + subworkflow_lines.append(line) + if state == "workflow" and not _is_empty(line): + workflow_lines.append(line) + if state == "take" and not _is_empty(line): + inputs.extend(_parse_input(subworkflow, line)) + if state == "emit" and not _is_empty(line): + outputs.extend(_parse_output(subworkflow, line)) + if state == "main" and not _is_empty(line): 
+ main_lines.append(line) # Check that we have required sections if not len(outputs): @@ -177,9 +177,9 @@ def check_subworkflow_section(self, lines): ) includes = [] - for l in lines: - if l.strip().startswith("include"): - component_name = l.split("{")[1].split("}")[0].strip() + for line in lines: + if line.strip().startswith("include"): + component_name = line.split("{")[1].split("}")[0].strip() if " as " in component_name: component_name = component_name.split(" as ")[1].strip() includes.append(component_name) diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py index 4944b26188..24e75eddbf 100644 --- a/nf_core/subworkflows/lint/meta_yml.py +++ b/nf_core/subworkflows/lint/meta_yml.py @@ -26,7 +26,7 @@ def meta_yml(subworkflow_lint_object, subworkflow): """ # Read the meta.yml file try: - with open(subworkflow.meta_yml, "r") as fh: + with open(subworkflow.meta_yml) as fh: meta_yaml = yaml.safe_load(fh) subworkflow.passed.append(("meta_yml_exists", "Subworkflow `meta.yml` exists", subworkflow.meta_yml)) except FileNotFoundError: @@ -36,9 +36,7 @@ def meta_yml(subworkflow_lint_object, subworkflow): # Confirm that the meta.yml file is valid according to the JSON schema valid_meta_yml = True try: - with open( - Path(subworkflow_lint_object.modules_repo.local_repo_dir, "subworkflows/yaml-schema.json"), "r" - ) as fh: + with open(Path(subworkflow_lint_object.modules_repo.local_repo_dir, "subworkflows/yaml-schema.json")) as fh: schema = json.load(fh) jsonschema.validators.validate(instance=meta_yaml, schema=schema) subworkflow.passed.append(("meta_yml_valid", "Subworkflow `meta.yml` is valid", subworkflow.meta_yml)) @@ -118,7 +116,7 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.failed.append( ( "meta_modules_deprecated", - f"Deprecated section 'modules' found in `meta.yml`, use 'components' instead", + "Deprecated section 'modules' found in `meta.yml`, use 'components' instead", subworkflow.meta_yml, ) ) @@ -126,7 +124,7 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.passed.append( ( "meta_modules_deprecated", - f"Deprecated section 'modules' not found in `meta.yml`", + "Deprecated section 'modules' not found in `meta.yml`", subworkflow.meta_yml, ) ) diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index 1ebced6d42..f7284320ea 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -52,14 +52,14 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): ) if subworkflow.nftest_main_nf.is_file(): - with open(subworkflow.nftest_main_nf, "r") as fh: + with open(subworkflow.nftest_main_nf) as fh: # Check if main.nf.test.snap file exists, if 'snap(' is inside main.nf.test if "snapshot(" in fh.read(): snap_file = subworkflow.nftest_testdir / "main.nf.test.snap" if snap_file.is_file(): subworkflow.passed.append(("test_snapshot_exists", "test `main.nf.test.snap` exists", snap_file)) # Validate no empty files - with open(snap_file, "r") as snap_fh: + with open(snap_file) as snap_fh: try: snap_content = json.load(snap_fh) for test_name in snap_content.keys(): @@ -158,7 +158,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): pytest_yml_path = subworkflow.base_dir / "tests" / "config" / "pytest_modules.yml" if pytest_yml_path.is_file() and not is_pytest: try: - with open(pytest_yml_path, "r") as fh: + with open(pytest_yml_path) as fh: pytest_yml = yaml.safe_load(fh) if "subworkflows/" + 
subworkflow.component_name in pytest_yml.keys(): subworkflow.failed.append( @@ -178,7 +178,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): if subworkflow.tags_yml.is_file(): # Check tags.yml exists and it has the correct entry subworkflow.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", subworkflow.tags_yml)) - with open(subworkflow.tags_yml, "r") as fh: + with open(subworkflow.tags_yml) as fh: tags_yml = yaml.safe_load(fh) if "subworkflows/" + subworkflow.component_name in tags_yml.keys(): subworkflow.passed.append(("test_tags_yml", "correct entry in tags.yml", subworkflow.tags_yml)) diff --git a/nf_core/sync.py b/nf_core/sync.py index 5402a6121d..995baeacd2 100644 --- a/nf_core/sync.py +++ b/nf_core/sync.py @@ -23,13 +23,13 @@ log = logging.getLogger(__name__) -class SyncException(Exception): +class SyncExceptionError(Exception): """Exception raised when there was an error with TEMPLATE branch synchronisation""" pass -class PullRequestException(Exception): +class PullRequestExceptionError(Exception): """Exception raised when there was an error creating a Pull-Request on GitHub.com""" pass @@ -96,7 +96,7 @@ def __init__( default=False, ).unsafe_ask() if overwrite_template or "template" not in self.config_yml: - with open(template_yaml_path, "r") as f: + with open(template_yaml_path) as f: self.config_yml["template"] = yaml.safe_load(f) with open(self.config_yml_path, "w") as fh: yaml.safe_dump(self.config_yml, fh) @@ -138,20 +138,20 @@ def sync(self): try: # Check that we have an API auth token if os.environ.get("GITHUB_AUTH_TOKEN", "") == "": - raise PullRequestException("GITHUB_AUTH_TOKEN not set!") + raise PullRequestExceptionError("GITHUB_AUTH_TOKEN not set!") # Check that we know the github username and repo name if self.gh_username is None and self.gh_repo is None: - raise PullRequestException("Could not find GitHub username and repo name") + raise PullRequestExceptionError("Could not find GitHub username and repo name") self.push_template_branch() self.create_merge_base_branch() self.push_merge_branch() self.make_pull_request() self.close_open_template_merge_prs() - except PullRequestException as e: + except PullRequestExceptionError as e: self.reset_target_dir() - raise PullRequestException(e) + raise PullRequestExceptionError(e) self.reset_target_dir() @@ -170,7 +170,7 @@ def inspect_sync_dir(self): try: self.repo = git.Repo(self.pipeline_dir) except InvalidGitRepositoryError: - raise SyncException(f"'{self.pipeline_dir}' does not appear to be a git repository") + raise SyncExceptionError(f"'{self.pipeline_dir}' does not appear to be a git repository") # get current branch so we can switch back later self.original_branch = self.repo.active_branch.name @@ -178,7 +178,7 @@ def inspect_sync_dir(self): # Check to see if there are uncommitted changes on current branch if self.repo.is_dirty(untracked_files=True): - raise SyncException( + raise SyncExceptionError( "Uncommitted changes found in pipeline directory!\nPlease commit these before running nf-core sync" ) @@ -192,7 +192,7 @@ def get_wf_config(self): log.info(f"Checking out workflow branch '{self.from_branch}'") self.repo.git.checkout(self.from_branch) except GitCommandError: - raise SyncException(f"Branch `{self.from_branch}` not found!") + raise SyncExceptionError(f"Branch `{self.from_branch}` not found!") # If not specified, get the name of the active branch if not self.from_branch: @@ -208,7 +208,7 @@ def get_wf_config(self): # Check that we have the required variables for rvar in 
self.required_config_vars: if rvar not in self.wf_config: - raise SyncException(f"Workflow config variable `{rvar}` not found!") + raise SyncExceptionError(f"Workflow config variable `{rvar}` not found!") def checkout_template_branch(self): """ @@ -223,7 +223,7 @@ def checkout_template_branch(self): try: self.repo.git.checkout("TEMPLATE") except GitCommandError: - raise SyncException("Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'") + raise SyncExceptionError("Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'") def delete_template_branch_files(self): """ @@ -242,7 +242,7 @@ def delete_template_branch_files(self): elif os.path.isdir(file_path): shutil.rmtree(file_path) except Exception as e: - raise SyncException(e) + raise SyncExceptionError(e) def make_template_pipeline(self): """ @@ -272,7 +272,7 @@ def make_template_pipeline(self): except Exception as err: # Reset to where you were to prevent git getting messed up. self.repo.git.reset("--hard") - raise SyncException(f"Failed to rebuild pipeline from template with error:\n{err}") + raise SyncExceptionError(f"Failed to rebuild pipeline from template with error:\n{err}") def commit_template_changes(self): """If we have any changes with the new template files, make a git commit""" @@ -287,7 +287,7 @@ def commit_template_changes(self): self.made_changes = True log.info("Committed changes to 'TEMPLATE' branch") except Exception as e: - raise SyncException(f"Could not commit changes to TEMPLATE:\n{e}") + raise SyncExceptionError(f"Could not commit changes to TEMPLATE:\n{e}") return True def push_template_branch(self): @@ -299,7 +299,7 @@ def push_template_branch(self): try: self.repo.git.push() except GitCommandError as e: - raise PullRequestException(f"Could not push TEMPLATE branch:\n {e}") + raise PullRequestExceptionError(f"Could not push TEMPLATE branch:\n {e}") def create_merge_base_branch(self): """Create a new branch from the updated TEMPLATE branch @@ -326,7 +326,7 @@ def create_merge_base_branch(self): try: self.repo.create_head(self.merge_branch) except GitCommandError as e: - raise SyncException(f"Could not create new branch '{self.merge_branch}'\n{e}") + raise SyncExceptionError(f"Could not create new branch '{self.merge_branch}'\n{e}") def push_merge_branch(self): """Push the newly created merge branch to the remote repository""" @@ -335,7 +335,7 @@ def push_merge_branch(self): origin = self.repo.remote() origin.push(self.merge_branch) except GitCommandError as e: - raise PullRequestException(f"Could not push branch '{self.merge_branch}':\n {e}") + raise PullRequestExceptionError(f"Could not push branch '{self.merge_branch}':\n {e}") def make_pull_request(self): """Create a pull request to a base branch (default: dev), @@ -374,7 +374,7 @@ def make_pull_request(self): ) except Exception as e: stderr.print_exception() - raise PullRequestException(f"Something went badly wrong - {e}") + raise PullRequestExceptionError(f"Something went badly wrong - {e}") else: self.gh_pr_returned_data = r.json() self.pr_url = self.gh_pr_returned_data["html_url"] @@ -395,7 +395,7 @@ def close_open_template_merge_prs(self): try: list_prs_json = json.loads(list_prs_request.content) list_prs_pp = json.dumps(list_prs_json, indent=4) - except: + except Exception: list_prs_json = list_prs_request.content list_prs_pp = list_prs_request.content @@ -438,7 +438,7 @@ def close_open_pr(self, pr): try: pr_request_json = json.loads(pr_request.content) pr_request_pp = json.dumps(pr_request_json, indent=4) - except: + except Exception: 
pr_request_json = pr_request.content pr_request_pp = pr_request.content @@ -462,4 +462,4 @@ def reset_target_dir(self): try: self.repo.git.checkout(self.original_branch) except GitCommandError as e: - raise SyncException(f"Could not reset to original branch `{self.original_branch}`:\n{e}") + raise SyncExceptionError(f"Could not reset to original branch `{self.original_branch}`:\n{e}") diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index a2107f633c..ac0f467e66 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -6,7 +6,6 @@ from typing import Dict import git -import rich.progress from git.exc import GitCommandError from nf_core.utils import load_tools_config @@ -117,8 +116,6 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa self.remote_url = remote_url - self.fullname = nf_core.modules.modules_utils.repo_full_name_from_remote(self.remote_url) - self.setup_local_repo(remote_url, branch, hide_progress) config_fn, repo_config = load_tools_config(self.local_repo_dir) diff --git a/nf_core/utils.py b/nf_core/utils.py index bcc8faa3fd..10b21018d2 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -139,7 +139,7 @@ def __init__(self, wf_path): try: repo = git.Repo(self.wf_path) self.git_sha = repo.head.object.hexsha - except: + except Exception: log.debug(f"Could not find git hash for pipeline: {self.wf_path}") # Overwrite if we have the last commit from the PR - otherwise we get a merge commit hash @@ -181,14 +181,14 @@ def _load_pipeline_config(self): self.pipeline_prefix, self.pipeline_name = self.nf_config.get("manifest.name", "").strip("'").split("/") - nextflowVersionMatch = re.search(r"[0-9\.]+(-edge)?", self.nf_config.get("manifest.nextflowVersion", "")) - if nextflowVersionMatch: - self.minNextflowVersion = nextflowVersionMatch.group(0) + nextflow_version_match = re.search(r"[0-9\.]+(-edge)?", self.nf_config.get("manifest.nextflowVersion", "")) + if nextflow_version_match: + self.minNextflowVersion = nextflow_version_match.group(0) def _load_conda_environment(self): """Try to load the pipeline environment.yml file, if it exists""" try: - with open(os.path.join(self.wf_path, "environment.yml"), "r") as fh: + with open(os.path.join(self.wf_path, "environment.yml")) as fh: self.conda_config = yaml.safe_load(fh) except FileNotFoundError: log.debug("No conda `environment.yml` file found.") @@ -262,7 +262,7 @@ def fetch_wf_config(wf_path, cache_config=True): cache_path = os.path.join(cache_basedir, cache_fn) if os.path.isfile(cache_path) and cache_config is True: log.debug(f"Found a config cache, loading: {cache_path}") - with open(cache_path, "r") as fh: + with open(cache_path) as fh: try: config = json.load(fh) except json.JSONDecodeError as e: @@ -274,8 +274,8 @@ def fetch_wf_config(wf_path, cache_config=True): result = run_cmd("nextflow", f"config -flat {wf_path}") if result is not None: nfconfig_raw, _ = result - for l in nfconfig_raw.splitlines(): - ul = l.decode("utf-8") + for line in nfconfig_raw.splitlines(): + ul = line.decode("utf-8") try: k, v = ul.split(" = ", 1) config[k] = v.strip("'\"") @@ -286,9 +286,9 @@ def fetch_wf_config(wf_path, cache_config=True): # Values in this file are likely to be complex, so don't both trying to capture them. Just get the param name. 
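Pausing before the `main.nf` scraping block that the comment above introduces: the wave of `SyncException` → `SyncExceptionError` and `PullRequestException` → `PullRequestExceptionError` renames above (and the `LintExceptionError`/`ModuleExceptionError` ones elsewhere in this diff) corresponds to pep8-naming's N818, which wants custom exception names to end in `Error`; similarly, `nextflowVersionMatch` → `nextflow_version_match` is N806 (lowercase locals) and `test_init_PipelineLint` → `test_init_pipeline_lint` later on is N802. A minimal sketch of the N818 pattern, with invented names:

```python
class SyncError(Exception):
    """Raised when TEMPLATE branch synchronisation fails (sketch only)."""


def checkout_branch(repo, branch):
    try:
        repo.git.checkout(branch)
    except Exception as err:  # the real code catches git.GitCommandError
        raise SyncError(f"Could not check out branch '{branch}'") from err
```

Chaining with `from err` is my addition for illustration; the diff re-raises without it.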
try: main_nf = os.path.join(wf_path, "main.nf") - with open(main_nf, "r") as fh: - for l in fh: - match = re.match(r"^\s*(params\.[a-zA-Z0-9_]+)\s*=", l) + with open(main_nf) as fh: + for line in fh: + match = re.match(r"^\s*(params\.[a-zA-Z0-9_]+)\s*=", line) if match: config[match.group(1)] = "null" except FileNotFoundError as e: @@ -312,7 +312,7 @@ def run_cmd(executable: str, cmd: str) -> Union[Tuple[bytes, bytes], None]: full_cmd = f"{executable} {cmd}" log.debug(f"Running command: {full_cmd}") try: - proc = subprocess.run(shlex.split(full_cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) + proc = subprocess.run(shlex.split(full_cmd), capture_output=True, check=True) return (proc.stdout, proc.stderr) except OSError as e: if e.errno == errno.ENOENT: @@ -433,7 +433,7 @@ def poll_nfcore_web_api(api_url, post_data=None): return web_response -class GitHub_API_Session(requests_cache.CachedSession): +class GitHubAPISession(requests_cache.CachedSession): """ Class to provide a single session for interacting with the GitHub API for a run. Inherits the requests_cache.CachedSession and adds additional functionality, @@ -480,7 +480,7 @@ def __call__(self, r): gh_cli_config_fn = os.path.expanduser("~/.config/gh/hosts.yml") if self.auth is None and os.path.exists(gh_cli_config_fn): try: - with open(gh_cli_config_fn, "r") as fh: + with open(gh_cli_config_fn) as fh: gh_cli_config = yaml.safe_load(fh) self.auth = requests.auth.HTTPBasicAuth( gh_cli_config["github.com"]["user"], gh_cli_config["github.com"]["oauth_token"] @@ -590,7 +590,7 @@ def request_retry(self, url, post_data=None): # Single session object to use for entire codebase. Not sure if there's a better way to do this? -gh_api = GitHub_API_Session() +gh_api = GitHubAPISession() def anaconda_package(dep, dep_channels=None): @@ -666,18 +666,18 @@ def parse_anaconda_licence(anaconda_response, version=None): # Clean up / standardise licence names clean_licences = [] - for l in licences: - l = re.sub(r"GNU General Public License v\d \(([^\)]+)\)", r"\1", l) - l = re.sub(r"GNU GENERAL PUBLIC LICENSE", "GPL", l, flags=re.IGNORECASE) - l = l.replace("GPL-", "GPLv") - l = re.sub(r"GPL\s*([\d\.]+)", r"GPL v\1", l) # Add v prefix to GPL version if none found - l = re.sub(r"GPL\s*v(\d).0", r"GPL v\1", l) # Remove superflous .0 from GPL version - l = re.sub(r"GPL \(([^\)]+)\)", r"GPL \1", l) - l = re.sub(r"GPL\s*v", "GPL v", l) # Normalise whitespace to one space between GPL and v - l = re.sub(r"\s*(>=?)\s*(\d)", r" \1\2", l) # Normalise whitespace around >= GPL versions - l = l.replace("Clause", "clause") # BSD capitilisation - l = re.sub(r"-only$", "", l) # Remove superflous GPL "only" version suffixes - clean_licences.append(l) + for license in licences: + license = re.sub(r"GNU General Public License v\d \(([^\)]+)\)", r"\1", license) + license = re.sub(r"GNU GENERAL PUBLIC LICENSE", "GPL", license, flags=re.IGNORECASE) + license = license.replace("GPL-", "GPLv") + license = re.sub(r"GPL\s*([\d\.]+)", r"GPL v\1", license) # Add v prefix to GPL version if none found + license = re.sub(r"GPL\s*v(\d).0", r"GPL v\1", license) # Remove superflous .0 from GPL version + license = re.sub(r"GPL \(([^\)]+)\)", r"GPL \1", license) + license = re.sub(r"GPL\s*v", "GPL v", license) # Normalise whitespace to one space between GPL and v + license = re.sub(r"\s*(>=?)\s*(\d)", r" \1\2", license) # Normalise whitespace around >= GPL versions + license = license.replace("Clause", "clause") # BSD capitilisation + license = re.sub(r"-only$", "", license) 
# Remove superflous GPL "only" version suffixes + clean_licences.append(license) return clean_licences @@ -792,7 +792,7 @@ def increase_indent(self, flow=False, indentless=False): See https://github.com/yaml/pyyaml/issues/234#issuecomment-765894586 """ - return super(CustomDumper, self).increase_indent(flow=flow, indentless=False) + return super().increase_indent(flow=flow, indentless=False) # HACK: insert blank lines between top-level objects # inspired by https://stackoverflow.com/a/44284819/3786245 @@ -1025,7 +1025,7 @@ def load_tools_config(directory: Union[str, Path] = "."): log.debug(f"No tools config file found: {CONFIG_PATHS[0]}") return Path(directory, CONFIG_PATHS[0]), {} - with open(config_fn, "r") as fh: + with open(config_fn) as fh: tools_config = yaml.safe_load(fh) # If the file is empty @@ -1145,7 +1145,7 @@ def validate_file_md5(file_name, expected_md5hex): if file_md5hex.upper() == expected_md5hex.upper(): log.debug(f"md5 sum of image matches expected: {expected_md5hex}") else: - raise IOError(f"{file_name} md5 does not match remote: {expected_md5hex} - {file_md5hex}") + raise OSError(f"{file_name} md5 does not match remote: {expected_md5hex} - {file_md5hex}") return True diff --git a/pyproject.toml b/pyproject.toml index 2380073107..d75ae89df6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,10 +5,6 @@ requires = [ "wheel" ] -[tool.black] -line-length = 120 -target_version = ["py37", "py38", "py39", "py310"] - [tool.pytest.ini_options] markers = [ "datafiles: load datafiles" @@ -16,7 +12,18 @@ markers = [ testpaths = ["tests"] norecursedirs = [ ".*", "build", "dist", "*.egg", "data", "__pycache__", ".github", "nf_core", "docs"] -[tool.isort] -profile = "black" -known_first_party = ["nf_core"] -multi_line_output = 3 +[tool.ruff] +line-length = 120 +target-version = "py38" +select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"] +cache-dir = "~/.cache/ruff" + +[tool.ruff.isort] +known-first-party = ["nf_core"] + +[tool.ruff.per-file-ignores] +"__init__.py" = ["E402", "F401"] + +[tool.ruff.lint.pep8-naming] +extend-ignore-names = ["mocked_*", "*allOf", "*URI*"] + diff --git a/requirements-dev.txt b/requirements-dev.txt index 13dba6f30d..6d890f0e1f 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,3 @@ -black -isort myst_parser pytest-cov pytest-datafiles @@ -8,8 +6,8 @@ Sphinx sphinx-rtd-theme mypy types-PyYAML -pyupgrade types-requests types-jsonschema types-Markdown types-setuptools +ruff diff --git a/tests/components/generate_snapshot.py b/tests/components/generate_snapshot.py index 46fd63fe3f..c5067d7210 100644 --- a/tests/components/generate_snapshot.py +++ b/tests/components/generate_snapshot.py @@ -26,7 +26,7 @@ def test_generate_snapshot_module(self): snap_path = Path("modules", "nf-core-test", "fastqc", "tests", "main.nf.test.snap") assert snap_path.exists() - with open(snap_path, "r") as fh: + with open(snap_path) as fh: snap_content = json.load(fh) assert "versions" in snap_content assert "content" in snap_content["versions"] @@ -48,7 +48,7 @@ def test_generate_snapshot_subworkflow(self): snap_path = Path("subworkflows", "nf-core-test", "bam_sort_stats_samtools", "tests", "main.nf.test.snap") assert snap_path.exists() - with open(snap_path, "r") as fh: + with open(snap_path) as fh: snap_content = json.load(fh) assert "test_bam_sort_stats_samtools_paired_end_flagstats" in snap_content assert ( @@ -86,7 +86,7 @@ def test_update_snapshot_module(self): with set_wd(self.nfcore_modules): snap_path = Path("modules", "nf-core-test", "bwa", 
"mem", "tests", "main.nf.test.snap") - with open(snap_path, "r") as fh: + with open(snap_path) as fh: snap_content = json.load(fh) original_timestamp = snap_content["Single-End"]["timestamp"] # delete the timestamp in json @@ -103,7 +103,7 @@ def test_update_snapshot_module(self): ) snap_generator.run() - with open(snap_path, "r") as fh: + with open(snap_path) as fh: snap_content = json.load(fh) assert "Single-End" in snap_content assert snap_content["Single-End"]["timestamp"] != original_timestamp diff --git a/tests/lint/actions_awsfulltest.py b/tests/lint/actions_awsfulltest.py index 30293e31a4..bbda92a4d1 100644 --- a/tests/lint/actions_awsfulltest.py +++ b/tests/lint/actions_awsfulltest.py @@ -19,7 +19,7 @@ def test_actions_awsfulltest_pass(self): # Edit .github/workflows/awsfulltest.yml to use -profile test_full new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: awsfulltest_yml = fh.read() awsfulltest_yml = awsfulltest_yml.replace("-profile test ", "-profile test_full ") with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: @@ -44,7 +44,7 @@ def test_actions_awsfulltest_fail(self): # Edit .github/workflows/awsfulltest.yml to use -profile test_full new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: awsfulltest_yml = yaml.safe_load(fh) del awsfulltest_yml[True]["release"] with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: diff --git a/tests/lint/actions_awstest.py b/tests/lint/actions_awstest.py index 0e19f781aa..7bfa6052f8 100644 --- a/tests/lint/actions_awstest.py +++ b/tests/lint/actions_awstest.py @@ -20,7 +20,7 @@ def test_actions_awstest_fail(self): # Edit .github/workflows/awsfulltest.yml to use -profile test_full new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = yaml.safe_load(fh) awstest_yml[True]["push"] = ["master"] with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: diff --git a/tests/lint/actions_ci.py b/tests/lint/actions_ci.py index d44dbb73b5..8734b2f78b 100644 --- a/tests/lint/actions_ci.py +++ b/tests/lint/actions_ci.py @@ -31,7 +31,7 @@ def test_actions_ci_fail_wrong_trigger(self): # Edit .github/workflows/actions_ci.yml to mess stuff up! 
new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml")) as fh: ci_yml = yaml.safe_load(fh) ci_yml[True]["push"] = ["dev", "patch"] ci_yml["jobs"]["test"]["strategy"]["matrix"] = {"nxf_versionnn": ["foo", ""]} diff --git a/tests/lint/actions_schema_validation.py b/tests/lint/actions_schema_validation.py index 48bb07e4dd..ad65d90018 100644 --- a/tests/lint/actions_schema_validation.py +++ b/tests/lint/actions_schema_validation.py @@ -9,7 +9,7 @@ def test_actions_schema_validation_missing_jobs(self): """Missing 'jobs' field should result in failure""" new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = yaml.safe_load(fh) awstest_yml.pop("jobs") with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: @@ -27,7 +27,7 @@ def test_actions_schema_validation_missing_on(self): """Missing 'on' field should result in failure""" new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = yaml.safe_load(fh) awstest_yml.pop(True) with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: @@ -46,7 +46,7 @@ def test_actions_schema_validation_fails_for_additional_property(self): """Missing 'jobs' field should result in failure""" new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = yaml.safe_load(fh) awstest_yml["not_jobs"] = awstest_yml["jobs"] with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: diff --git a/tests/lint/merge_markers.py b/tests/lint/merge_markers.py index be0d076757..64a62e25c3 100644 --- a/tests/lint/merge_markers.py +++ b/tests/lint/merge_markers.py @@ -7,7 +7,7 @@ def test_merge_markers_found(self): """Missing 'jobs' field should result in failure""" new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, "main.nf"), "r") as fh: + with open(os.path.join(new_pipeline, "main.nf")) as fh: main_nf_content = fh.read() main_nf_content = ">>>>>>>\n" + main_nf_content with open(os.path.join(new_pipeline, "main.nf"), "w") as fh: diff --git a/tests/lint/multiqc_config.py b/tests/lint/multiqc_config.py index 446b4378b0..721560ce81 100644 --- a/tests/lint/multiqc_config.py +++ b/tests/lint/multiqc_config.py @@ -18,7 +18,7 @@ def test_multiqc_config_exists_ignore(self): def test_multiqc_config_missing_report_section_order(self): """Test that linting fails if the multiqc_config.yml file is missing the report_section_order""" new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: mqc_yml = yaml.safe_load(fh) mqc_yml_tmp = mqc_yml mqc_yml.pop("report_section_order") @@ -36,7 +36,7 @@ def test_multiqc_config_missing_report_section_order(self): def test_multiqc_incorrect_export_plots(self): """Test that linting fails if the multiqc_config.yml file has an incorrect value for 
export_plots""" new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: mqc_yml = yaml.safe_load(fh) mqc_yml_tmp = mqc_yml mqc_yml["export_plots"] = False @@ -54,7 +54,7 @@ def test_multiqc_incorrect_export_plots(self): def test_multiqc_config_report_comment_fail(self): """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment""" new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: mqc_yml = yaml.safe_load(fh) mqc_yml_tmp = mqc_yml mqc_yml["report_comment"] = "This is a test" @@ -73,7 +73,7 @@ def test_multiqc_config_report_comment_fail(self): def test_multiqc_config_report_comment_release_fail(self): """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment for a release version""" new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: mqc_yml = yaml.safe_load(fh) mqc_yml_tmp = mqc_yml with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: diff --git a/tests/lint/nextflow_config.py b/tests/lint/nextflow_config.py index 1542b8cf65..5d5f8e7345 100644 --- a/tests/lint/nextflow_config.py +++ b/tests/lint/nextflow_config.py @@ -43,7 +43,7 @@ def test_nextflow_config_missing_test_profile_failed(self): new_pipeline = self._make_pipeline_copy() # Change the name of the test profile so there is no such profile nf_conf_file = os.path.join(new_pipeline, "nextflow.config") - with open(nf_conf_file, "r") as f: + with open(nf_conf_file) as f: content = f.read() fail_content = re.sub(r"\btest\b", "testfail", content) with open(nf_conf_file, "w") as f: diff --git a/tests/modules/bump_versions.py b/tests/modules/bump_versions.py index 3c19041f63..ce8c6dbe11 100644 --- a/tests/modules/bump_versions.py +++ b/tests/modules/bump_versions.py @@ -2,17 +2,16 @@ import re import pytest -import yaml import nf_core.modules -from nf_core.modules.modules_utils import ModuleException +from nf_core.modules.modules_utils import ModuleExceptionError def test_modules_bump_versions_single_module(self): """Test updating a single module""" # Change the bpipe/test version to an older version env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") - with open(env_yml_path, "r") as fh: + with open(env_yml_path) as fh: content = fh.read() new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content) with open(env_yml_path, "w") as fh: @@ -32,7 +31,7 @@ def test_modules_bump_versions_all_modules(self): def test_modules_bump_versions_fail(self): """Fail updating a module with wrong name""" version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) - with pytest.raises(ModuleException) as excinfo: + with pytest.raises(ModuleExceptionError) as excinfo: version_bumper.bump_versions(module="no/module") assert "Could not find the specified module:" in str(excinfo.value) @@ -41,7 +40,7 @@ def test_modules_bump_versions_fail_unknown_version(self): """Fail because of an unknown version""" # Change the bpipe/test version to an older version env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") - with 
open(env_yml_path, "r") as fh: + with open(env_yml_path) as fh: content = fh.read() new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content) with open(env_yml_path, "w") as fh: diff --git a/tests/modules/create.py b/tests/modules/create.py index 74e5ec3896..460a1439cb 100644 --- a/tests/modules/create.py +++ b/tests/modules/create.py @@ -1,4 +1,3 @@ -import filecmp import os import shutil from pathlib import Path @@ -87,9 +86,9 @@ def test_modules_migrate(self, mock_rich_ask): # Clone modules repo with pytests shutil.rmtree(self.nfcore_modules) Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - with open(module_dir / "main.nf", "r") as fh: + with open(module_dir / "main.nf") as fh: old_main_nf = fh.read() - with open(module_dir / "meta.yml", "r") as fh: + with open(module_dir / "meta.yml") as fh: old_meta_yml = fh.read() # Create a module with --migrate-pytest @@ -97,9 +96,9 @@ def test_modules_migrate(self, mock_rich_ask): module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) module_create.create() - with open(module_dir / "main.nf", "r") as fh: + with open(module_dir / "main.nf") as fh: new_main_nf = fh.read() - with open(module_dir / "meta.yml", "r") as fh: + with open(module_dir / "meta.yml") as fh: new_meta_yml = fh.read() nextflow_config = module_dir / "tests" / "nextflow.config" diff --git a/tests/modules/lint.py b/tests/modules/lint.py index a8a775e6f6..a5d8567b76 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -333,7 +333,7 @@ def test_modules_lint_snapshot_file_missing_fail(self): def test_modules_lint_snapshot_file_not_needed(self): """Test linting a module which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "r") as fh: + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")) as fh: content = fh.read() new_content = content.replace("snapshot(", "snap (") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh: @@ -372,7 +372,7 @@ def test_modules_environment_yml_file_sorted_correctly(self): def test_modules_environment_yml_file_sorted_incorrectly(self): """Test linting a module with an incorrectly sorted environment.yml file""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "r") as fh: + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh: yaml_content = yaml.safe_load(fh) # Add a new dependency to the environment.yml file and reverse the order yaml_content["dependencies"].append("z") @@ -548,7 +548,7 @@ def test_modules_missing_test_main_nf(self): def test_modules_missing_required_tag(self): """Test linting a module with a missing required tag""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "r") as fh: + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")) as fh: content = fh.read() new_content = content.replace("modules_nfcore", "foo") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh: @@ -581,7 +581,7 @@ def test_modules_missing_tags_yml(self): def test_modules_incorrect_tags_yml_key(self): """Test linting a module with an 
incorrect key in tags.yml file""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "r") as fh: + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml")) as fh: content = fh.read() new_content = content.replace("bpipe/test:", "bpipe_test:") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh: @@ -598,7 +598,7 @@ def test_modules_incorrect_tags_yml_key(self): def test_modules_incorrect_tags_yml_values(self): """Test linting a module with an incorrect path in tags.yml file""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "r") as fh: + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml")) as fh: content = fh.read() new_content = content.replace("modules/nf-core/bpipe/test/**", "foo") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh: diff --git a/tests/modules/modules_json.py b/tests/modules/modules_json.py index 63ee4e743d..a054b6b131 100644 --- a/tests/modules/modules_json.py +++ b/tests/modules/modules_json.py @@ -17,7 +17,7 @@ def test_get_modules_json(self): """Checks that the get_modules_json function returns the correct result""" mod_json_path = os.path.join(self.pipeline_dir, "modules.json") - with open(mod_json_path, "r") as fh: + with open(mod_json_path) as fh: try: mod_json_sb = json.load(fh) except json.JSONDecodeError as e: @@ -73,7 +73,7 @@ def test_mod_json_create(self): def modify_main_nf(path): """Modify a file to test patch creation""" - with open(path, "r") as fh: + with open(path) as fh: lines = fh.readlines() # Modify $meta.id to $meta.single_end lines[1] = ' tag "$meta.single_end"\n' @@ -112,7 +112,7 @@ def test_mod_json_create_with_patch(self): assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] # Check that fastqc/main.nf maintains the changes - with open(module_path / "main.nf", "r") as fh: + with open(module_path / "main.nf") as fh: lines = fh.readlines() assert lines[1] == ' tag "$meta.single_end"\n' @@ -214,7 +214,7 @@ def test_mod_json_dump(self): assert os.path.exists(mod_json_path) # Check that the dump function writes the correct content - with open(mod_json_path, "r") as f: + with open(mod_json_path) as f: try: mod_json_new = json.load(f) except json.JSONDecodeError as e: diff --git a/tests/modules/patch.py b/tests/modules/patch.py index 338d890f2f..dc939c7ea7 100644 --- a/tests/modules/patch.py +++ b/tests/modules/patch.py @@ -43,7 +43,7 @@ def setup_patch(pipeline_dir, modify_module): def modify_main_nf(path): """Modify a file to test patch creation""" - with open(path, "r") as fh: + with open(path) as fh: lines = fh.readlines() # We want a patch file that looks something like: # - tuple val(meta), path(reads) @@ -99,7 +99,7 @@ def test_create_patch_change(self): ) # Check that the correct lines are in the patch file - with open(module_path / patch_fn, "r") as fh: + with open(module_path / patch_fn) as fh: patch_lines = fh.readlines() module_relpath = module_path.relative_to(self.pipeline_dir) assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines, module_relpath / "main.nf" @@ -157,7 +157,7 @@ def test_create_patch_try_apply_successful(self): ) # Check that the correct lines are in the patch file - with open(module_path / patch_fn, "r") as fh: + with open(module_path / patch_fn) as fh: patch_lines = fh.readlines() 
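These patch tests all follow the same shape: generate the module patch, read it back, and assert that specific `-`/`+` payload lines are present. If that pattern keeps recurring it could be factored into a small helper along these lines; `diff_contains` is an invented sketch, not nf-core code:

```python
def diff_contains(patch_path, removed=(), added=()):
    """Check a unified diff for expected removed/added payload lines (invented helper)."""
    with open(patch_path) as fh:
        lines = fh.readlines()
    return all(f"-{text}\n" in lines for text in removed) and all(
        f"+{text}\n" in lines for text in added
    )
```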
module_relpath = module_path.relative_to(self.pipeline_dir) assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines @@ -167,7 +167,7 @@ def test_create_patch_try_apply_successful(self): assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines # Check that 'main.nf' is updated correctly - with open(module_path / "main.nf", "r") as fh: + with open(module_path / "main.nf") as fh: main_nf_lines = fh.readlines() # These lines should have been removed by the patch assert " tuple val(meta), path(reads)\n" not in main_nf_lines @@ -258,7 +258,7 @@ def test_create_patch_update_success(self): ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) # Check that the correct lines are in the patch file - with open(module_path / patch_fn, "r") as fh: + with open(module_path / patch_fn) as fh: patch_lines = fh.readlines() module_relpath = module_path.relative_to(self.pipeline_dir) assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines @@ -268,7 +268,7 @@ def test_create_patch_update_success(self): assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines # Check that 'main.nf' is updated correctly - with open(module_path / "main.nf", "r") as fh: + with open(module_path / "main.nf") as fh: main_nf_lines = fh.readlines() # These lines should have been removed by the patch assert " tuple val(meta), path(reads)\n" not in main_nf_lines @@ -300,7 +300,7 @@ def test_create_patch_update_fail(self): ) # Save the file contents for downstream comparison - with open(module_path / patch_fn, "r") as fh: + with open(module_path / patch_fn) as fh: patch_contents = fh.read() update_obj = nf_core.modules.ModuleUpdate( @@ -317,14 +317,14 @@ def test_create_patch_update_fail(self): temp_module_dir = temp_dir / BISMARK_ALIGN for file in os.listdir(temp_module_dir): assert file in os.listdir(module_path) - with open(module_path / file, "r") as fh: + with open(module_path / file) as fh: installed = fh.read() - with open(temp_module_dir / file, "r") as fh: + with open(temp_module_dir / file) as fh: shouldbe = fh.read() assert installed == shouldbe # Check that the patch file is unaffected - with open(module_path / patch_fn, "r") as fh: + with open(module_path / patch_fn) as fh: new_patch_contents = fh.read() assert patch_contents == new_patch_contents diff --git a/tests/modules/update.py b/tests/modules/update.py index 399e9cc12c..5208070fa5 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -345,7 +345,7 @@ def test_update_only_show_differences_when_patch(self, mock_prompt): # We modify fastqc because it's one of the modules that can be updated and there's another one before it (custom/dumpsoftwareversions) module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") main_path = Path(module_path, "main.nf") - with open(main_path, "r") as fh: + with open(main_path) as fh: lines = fh.readlines() for line_index in range(len(lines)): if lines[line_index] == " label 'process_medium'\n": diff --git a/tests/subworkflows/create.py b/tests/subworkflows/create.py index fc628df34f..002b889671 100644 --- a/tests/subworkflows/create.py +++ b/tests/subworkflows/create.py @@ -1,4 +1,3 @@ -import filecmp import os import shutil from pathlib import Path @@ -53,9 +52,9 @@ def test_subworkflows_migrate(self, mock_rich_ask): # Clone modules repo with pytests shutil.rmtree(self.nfcore_modules) Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - with open(subworkflow_dir / "main.nf", "r") as fh: + with open(subworkflow_dir / 
"main.nf") as fh: old_main_nf = fh.read() - with open(subworkflow_dir / "meta.yml", "r") as fh: + with open(subworkflow_dir / "meta.yml") as fh: old_meta_yml = fh.read() # Create a subworkflow with --migrate-pytest @@ -65,9 +64,9 @@ def test_subworkflows_migrate(self, mock_rich_ask): ) subworkflow_create.create() - with open(subworkflow_dir / "main.nf", "r") as fh: + with open(subworkflow_dir / "main.nf") as fh: new_main_nf = fh.read() - with open(subworkflow_dir / "meta.yml", "r") as fh: + with open(subworkflow_dir / "meta.yml") as fh: new_meta_yml = fh.read() nextflow_config = subworkflow_dir / "tests" / "nextflow.config" diff --git a/tests/subworkflows/lint.py b/tests/subworkflows/lint.py index 1380db2260..b53fef7f0e 100644 --- a/tests/subworkflows/lint.py +++ b/tests/subworkflows/lint.py @@ -1,4 +1,3 @@ -import os from pathlib import Path import pytest @@ -87,9 +86,7 @@ def test_subworkflows_lint_snapshot_file_missing_fail(self): def test_subworkflows_lint_snapshot_file_not_needed(self): """Test linting a subworkflow which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" - with open( - Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test"), "r" - ) as fh: + with open(Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test")) as fh: content = fh.read() new_content = content.replace("snapshot(", "snap (") with open( diff --git a/tests/subworkflows/remove.py b/tests/subworkflows/remove.py index 53a948778b..dec67875bd 100644 --- a/tests/subworkflows/remove.py +++ b/tests/subworkflows/remove.py @@ -1,7 +1,5 @@ from pathlib import Path -from rich.console import Console - from nf_core.modules.modules_json import ModulesJson @@ -18,7 +16,7 @@ def test_subworkflows_remove_subworkflow(self): bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - mod_json_obj = ModulesJson(self.pipeline_dir) + ModulesJson(self.pipeline_dir) mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() assert self.subworkflow_remove.remove("bam_sort_stats_samtools") mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py index 698086e186..32a69ba180 100644 --- a/tests/subworkflows/update.py +++ b/tests/subworkflows/update.py @@ -8,7 +8,6 @@ import nf_core.utils from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE -from nf_core.modules.remove import ModuleRemove from nf_core.modules.update import ModuleUpdate from nf_core.subworkflows.update import SubworkflowUpdate @@ -73,7 +72,7 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self): assert update_obj.update("fastq_align_bowtie2") is True assert cmp_component(tmpdir, sw_path) is True - with open(patch_path, "r") as fh: + with open(patch_path) as fh: line = fh.readline() assert line.startswith( "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" diff --git a/tests/test_download.py b/tests/test_download.py index 7c9532e977..7f34f7fbc6 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -1,7 +1,6 @@ """Tests for the download subcommand of nf-core tools """ -import hashlib import os import re 
import shutil @@ -16,9 +15,9 @@ import nf_core.utils from nf_core.download import ContainerError, DownloadWorkflow, WorkflowRepo from nf_core.synced_repo import SyncedRepo -from nf_core.utils import NFCORE_CACHE_DIR, NFCORE_DIR, run_cmd +from nf_core.utils import run_cmd -from .utils import with_temporary_file, with_temporary_folder +from .utils import with_temporary_folder class DownloadTest(unittest.TestCase): @@ -160,8 +159,8 @@ def test__find_container_images_config_nextflow(self, tmp_path, mock_fetch_wf_co if result is not None: nfconfig_raw, _ = result config = {} - for l in nfconfig_raw.splitlines(): - ul = l.decode("utf-8") + for line in nfconfig_raw.splitlines(): + ul = line.decode("utf-8") try: k, v = ul.split(" = ", 1) config[k] = v.strip("'\"") @@ -259,7 +258,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p ) # Pull again, but now the image already exists - with pytest.raises(ContainerError.ImageExists): + with pytest.raises(ContainerError.ImageExistsError): download_obj.singularity_pull_image( "hello-world", f"{tmp_dir}/hello-world.sif", None, "docker.io", mock_rich_progress ) @@ -269,8 +268,8 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p "docker.io/bschiffthaler/sed", f"{tmp_dir}/sed.sif", None, "docker.io", mock_rich_progress ) - # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExists is raised before attempting to pull.) - with pytest.raises(ContainerError.RegistryNotFound): + # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExistsError is raised before attempting to pull.) + with pytest.raises(ContainerError.RegistryNotFoundError): download_obj.singularity_pull_image( "hello-world", f"{tmp_dir}/hello-world_new.sif", @@ -280,23 +279,23 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p ) # test Image not found for several registries - with pytest.raises(ContainerError.ImageNotFound): + with pytest.raises(ContainerError.ImageNotFoundError): download_obj.singularity_pull_image( "a-container", f"{tmp_dir}/acontainer.sif", None, "quay.io", mock_rich_progress ) - with pytest.raises(ContainerError.ImageNotFound): + with pytest.raises(ContainerError.ImageNotFoundError): download_obj.singularity_pull_image( "a-container", f"{tmp_dir}/acontainer.sif", None, "docker.io", mock_rich_progress ) - with pytest.raises(ContainerError.ImageNotFound): + with pytest.raises(ContainerError.ImageNotFoundError): download_obj.singularity_pull_image( "a-container", f"{tmp_dir}/acontainer.sif", None, "ghcr.io", mock_rich_progress ) # test Image not found for absolute URI. - with pytest.raises(ContainerError.ImageNotFound): + with pytest.raises(ContainerError.ImageNotFoundError): download_obj.singularity_pull_image( "docker.io/bschiffthaler/nothingtopullhere", f"{tmp_dir}/nothingtopullhere.sif", @@ -306,7 +305,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p ) # Traffic from Github Actions to GitHub's Container Registry is unlimited, so no harm should be done here. 
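The `ContainerError.ImageExists` → `ContainerError.ImageExistsError` style renames above are N818 again, here applied to exception classes namespaced as attributes of a parent `ContainerError`. A stub of that shape, assuming the inner classes are plain nested exceptions (the real ones carry more context):

```python
import pytest


class ContainerError(Exception):
    """Parent for container fetch failures (stub only)."""

    class ImageExistsError(Exception):
        """The target .sif file already exists."""

    class RegistryNotFoundError(Exception):
        """The requested registry could not be reached."""


def pull_image(image, out_path, already_exists=False):
    if already_exists:
        raise ContainerError.ImageExistsError(f"{out_path} exists, refusing to pull {image}")


def test_pull_existing_image():
    with pytest.raises(ContainerError.ImageExistsError):
        pull_image("hello-world", "hello-world.sif", already_exists=True)
```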
- with pytest.raises(ContainerError.InvalidTag): + with pytest.raises(ContainerError.InvalidTagError): download_obj.singularity_pull_image( "ewels/multiqc:go-rewrite", f"{tmp_dir}/umi-transfer.sif", @@ -343,9 +342,8 @@ def test_get_singularity_images(self, tmp_path, mock_fetch_wf_config): container_library=("mirage-the-imaginative-registry.io", "quay.io", "ghcr.io", "docker.io"), ) mock_fetch_wf_config.return_value = { - "process.mapping.container": "helloworld", - "process.mapping.container": "helloworld", - "process.mapping.container": "helloooooooworld", + "process.helloworld.container": "helloworld", + "process.hellooworld.container": "helloooooooworld", "process.mapping.container": "ewels/multiqc:gorewrite", } download_obj.find_container_images("workflow") diff --git a/tests/test_launch.py b/tests/test_launch.py index 03c6a8b692..dc8d6b147c 100644 --- a/tests/test_launch.py +++ b/tests/test_launch.py @@ -86,7 +86,7 @@ def test_get_pipeline_defaults(self): self.launcher.get_pipeline_schema() self.launcher.set_schema_inputs() assert len(self.launcher.schema_obj.input_params) > 0 - assert self.launcher.schema_obj.input_params["validate_params"] == True + assert self.launcher.schema_obj.input_params["validate_params"] is True @with_temporary_file def test_get_pipeline_defaults_input_params(self, tmp_file): @@ -119,12 +119,12 @@ def test_ob_to_questionary_string(self): @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Web based"}]) def test_prompt_web_gui_true(self, mock_prompt): """Check the prompt to launch the web schema or use the cli""" - assert self.launcher.prompt_web_gui() == True + assert self.launcher.prompt_web_gui() is True @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Command line"}]) def test_prompt_web_gui_false(self, mock_prompt): """Check the prompt to launch the web schema or use the cli""" - assert self.launcher.prompt_web_gui() == False + assert self.launcher.prompt_web_gui() is False @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{}]) def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api): @@ -144,7 +144,7 @@ def test_launch_web_gui(self, mock_poll_nfcore_web_api, mock_webbrowser, mock_wa """Check the code that opens the web browser""" self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() - assert self.launcher.launch_web_gui() == None + assert self.launcher.launch_web_gui() is None @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "error", "message": "foo"}]) def test_get_web_launch_response_error(self, mock_poll_nfcore_web_api): @@ -163,7 +163,7 @@ def test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api): @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "waiting_for_user"}]) def test_get_web_launch_response_waiting(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - status waiting_for_user""" - assert self.launcher.get_web_launch_response() == False + assert self.launcher.get_web_launch_response() is False @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "launch_params_complete"}]) def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): @@ -191,7 +191,7 @@ def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): def test_get_web_launch_response_valid(self, mock_poll_nfcore_web_api, mock_sanitise): """Test polling the website for a launch response - complete, valid response""" 
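Before this test body continues, two of the fixes just above deserve unpacking. The mocked `fetch_wf_config` dict repeated the literal key `"process.mapping.container"` three times; that is legal Python, but the last value silently wins (pyflakes warns about repeated dict keys), so two of the mock containers were never actually exercised. And the `== True` → `is True` swaps (pycodestyle E712) are stricter than they look, because `bool` is an `int` subclass. Both in miniature, with invented values; every assert here passes:

```python
flag = 1
assert flag == True       # passes: bool is an int subclass, so 1 == True
assert flag is not True   # identity is stricter and exposes the type slip

config = {
    "process.mapping.container": "helloworld",
    "process.mapping.container": "multiqc",  # repeated key: the last value silently wins
}
assert config == {"process.mapping.container": "multiqc"}
```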
self.launcher.get_pipeline_schema() - assert self.launcher.get_web_launch_response() == True + assert self.launcher.get_web_launch_response() is True def test_sanitise_web_response(self): """Check that we can properly sanitise results from the web""" @@ -201,7 +201,7 @@ def test_sanitise_web_response(self): self.launcher.schema_obj.input_params["max_cpus"] = "12" self.launcher.sanitise_web_response() assert "-name" not in self.launcher.nxf_flags - assert self.launcher.schema_obj.input_params["igenomes_ignore"] == True + assert self.launcher.schema_obj.input_params["igenomes_ignore"] is True assert self.launcher.schema_obj.input_params["max_cpus"] == 12 def test_ob_to_questionary_bool(self): @@ -216,12 +216,12 @@ def test_ob_to_questionary_bool(self): assert result["message"] == "" assert result["choices"] == ["True", "False"] assert result["default"] == "True" - assert result["filter"]("True") == True - assert result["filter"]("true") == True - assert result["filter"](True) == True - assert result["filter"]("False") == False - assert result["filter"]("false") == False - assert result["filter"](False) == False + assert result["filter"]("True") is True + assert result["filter"]("true") is True + assert result["filter"](True) is True + assert result["filter"]("False") is False + assert result["filter"]("false") is False + assert result["filter"](False) is False def test_ob_to_questionary_number(self): """Check converting a python dict to a pyenquirer format - with enum""" @@ -234,7 +234,7 @@ def test_ob_to_questionary_number(self): assert result["validate"]("") is True assert result["validate"]("123.56.78") == "Must be a number" assert result["validate"]("123.56sdkfjb") == "Must be a number" - assert result["filter"]("123.456") == float(123.456) + assert result["filter"]("123.456") == 123.456 assert result["filter"]("") == "" def test_ob_to_questionary_integer(self): @@ -248,7 +248,7 @@ def test_ob_to_questionary_integer(self): assert result["validate"]("") is True assert result["validate"]("123.45") == "Must be an integer" assert result["validate"]("123.56sdkfjb") == "Must be an integer" - assert result["filter"]("123") == int(123) + assert result["filter"]("123") == 123 assert result["filter"]("") == "" def test_ob_to_questionary_range(self): @@ -321,7 +321,7 @@ def test_build_command_params(self): == f'nextflow run {self.pipeline_dir} -params-file "{os.path.relpath(self.nf_params_fn)}"' ) # Check saved parameters file - with open(self.nf_params_fn, "r") as fh: + with open(self.nf_params_fn) as fh: try: saved_json = json.load(fh) except json.JSONDecodeError as e: diff --git a/tests/test_lint.py b/tests/test_lint.py index b2e7f3b574..32913bda0d 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -56,9 +56,9 @@ def test_run_linting_function(self): We don't really check any of this code as it's just a series of function calls and we're testing each of those individually. This is mostly to check for syntax errors.""" - lint_obj = nf_core.lint.run_linting(self.test_pipeline_dir, False) + nf_core.lint.run_linting(self.test_pipeline_dir, False) - def test_init_PipelineLint(self): + def test_init_pipeline_lint(self): """Simply create a PipelineLint object. 
This checks that all of the lint test imports are working properly, @@ -134,7 +134,7 @@ def test_json_output(self, tmp_dir): self.lint_obj._save_json_results(json_fn) # Load created JSON file and check its contents - with open(json_fn, "r") as fh: + with open(json_fn) as fh: try: saved_json = json.load(fh) except json.JSONDecodeError as e: diff --git a/tests/test_modules.py b/tests/test_modules.py index 92c8dfda3f..f7ada2a483 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -47,7 +47,7 @@ def create_modules_repo_dummy(tmp_dir): # Remove doi from meta.yml which makes lint fail meta_yml_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml") - with open(meta_yml_path, "r") as fh: + with open(meta_yml_path) as fh: meta_yml = yaml.safe_load(fh) del meta_yml["tools"][0]["bpipe"]["doi"] with open(meta_yml_path, "w") as fh: @@ -60,7 +60,7 @@ def create_modules_repo_dummy(tmp_dir): # remove "TODO" statements from main.nf main_nf_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "main.nf") - with open(main_nf_path, "r") as fh: + with open(main_nf_path) as fh: main_nf = fh.read() main_nf = main_nf.replace("TODO", "") with open(main_nf_path, "w") as fh: @@ -68,7 +68,7 @@ def create_modules_repo_dummy(tmp_dir): # remove "TODO" statements from main.nf.test main_nf_test_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test") - with open(main_nf_test_path, "r") as fh: + with open(main_nf_test_path) as fh: main_nf_test = fh.read() main_nf_test = main_nf_test.replace("TODO", "") with open(main_nf_test_path, "w") as fh: diff --git a/tests/test_params_file.py b/tests/test_params_file.py index 824e8fe345..13c82f5188 100644 --- a/tests/test_params_file.py +++ b/tests/test_params_file.py @@ -31,7 +31,7 @@ def setup_class(cls): cls.invalid_template_schema = os.path.join(cls.template_dir, "nextflow_schema_invalid.json") # Remove the allOf section to make the schema invalid - with open(cls.template_schema, "r") as fh: + with open(cls.template_schema) as fh: o = json.load(fh) del o["allOf"] @@ -49,7 +49,7 @@ def test_build_template(self): assert os.path.exists(outfile) - with open(outfile, "r") as fh: + with open(outfile) as fh: out = fh.read() assert "nf-core/testpipeline" in out @@ -68,7 +68,7 @@ def test_build_template_file_exists(self, caplog): # Creates a new empty file outfile = Path(self.tmp_dir) / "params-file.yml" - with open(outfile, "w") as fp: + with open(outfile, "w"): pass res = self.params_template_builder.write_params_file(outfile) diff --git a/tests/test_refgenie.py b/tests/test_refgenie.py index 73fbcb863f..5440c1c477 100644 --- a/tests/test_refgenie.py +++ b/tests/test_refgenie.py @@ -7,8 +7,6 @@ import tempfile import unittest -import yaml - class TestRefgenie(unittest.TestCase): """Class for refgenie tests""" @@ -26,7 +24,7 @@ def setUp(self): # avoids adding includeConfig statement to config file outside the current tmpdir try: self.NXF_HOME_ORIGINAL = os.environ["NXF_HOME"] - except: + except Exception: self.NXF_HOME_ORIGINAL = None os.environ["NXF_HOME"] = self.NXF_HOME diff --git a/tests/test_schema.py b/tests/test_schema.py index 105cd9473e..89fcc98b66 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -305,7 +305,7 @@ def test_build_schema(self): Build a new schema param from a pipeline Run code to ensure it doesn't crash. Individual functions tested separately. 
""" - param = self.schema_obj.build_schema(self.template_dir, True, False, None) + self.schema_obj.build_schema(self.template_dir, True, False, None) @with_temporary_folder def test_build_schema_from_scratch(self, tmp_dir): @@ -319,7 +319,7 @@ def test_build_schema_from_scratch(self, tmp_dir): shutil.copytree(self.template_dir, test_pipeline_dir) os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json")) - param = self.schema_obj.build_schema(test_pipeline_dir, True, False, None) + self.schema_obj.build_schema(test_pipeline_dir, True, False, None) @mock.patch("requests.post") def test_launch_web_builder_timeout(self, mock_post): diff --git a/tests/test_sync.py b/tests/test_sync.py index 597e4375d3..51a27653ab 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -44,7 +44,7 @@ def tearDown(self): def test_inspect_sync_dir_notgit(self, tmp_dir): """Try syncing an empty directory""" psync = nf_core.sync.PipelineSync(tmp_dir) - with pytest.raises(nf_core.sync.SyncException) as exc_info: + with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() assert "does not appear to be a git repository" in exc_info.value.args[0] @@ -56,7 +56,7 @@ def test_inspect_sync_dir_dirty(self): # Try to sync, check we halt with the right error psync = nf_core.sync.PipelineSync(self.pipeline_dir) try: - with pytest.raises(nf_core.sync.SyncException) as exc_info: + with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() assert exc_info.value.args[0].startswith("Uncommitted changes found in pipeline directory!") finally: @@ -66,7 +66,7 @@ def test_get_wf_config_no_branch(self): """Try getting a workflow config when the branch doesn't exist""" # Try to sync, check we halt with the right error psync = nf_core.sync.PipelineSync(self.pipeline_dir, from_branch="foo") - with pytest.raises(nf_core.sync.SyncException) as exc_info: + with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() psync.get_wf_config() assert exc_info.value.args[0] == "Branch `foo` not found!" 
@@ -76,7 +76,7 @@ def test_get_wf_config_missing_required_config(self): # Try to sync, check we halt with the right error psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.required_config_vars = ["fakethisdoesnotexist"] - with pytest.raises(nf_core.sync.SyncException) as exc_info: + with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() psync.get_wf_config() # Check that we did actually get some config back @@ -99,7 +99,7 @@ def test_checkout_template_branch_no_template(self): psync.repo.delete_head("TEMPLATE") - with pytest.raises(nf_core.sync.SyncException) as exc_info: + with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: psync.checkout_template_branch() assert exc_info.value.args[0] == "Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'" @@ -165,7 +165,7 @@ def test_push_template_branch_error(self): test_fn.touch() psync.commit_template_changes() # Try to push changes - with pytest.raises(nf_core.sync.PullRequestException) as exc_info: + with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info: psync.push_template_branch() assert exc_info.value.args[0].startswith("Could not push TEMPLATE branch") @@ -220,7 +220,7 @@ def test_push_merge_branch_without_create_branch(self): psync.get_wf_config() psync.repo.create_remote("origin", self.remote_path) - with pytest.raises(nf_core.sync.PullRequestException) as exc_info: + with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info: psync.push_merge_branch() assert exc_info.value.args[0].startswith(f"Could not push branch '{psync.merge_branch}'") @@ -329,7 +329,7 @@ def test_make_pull_request_bad_response(self, mock_post, mock_get): psync.gh_username = "bad_url" psync.gh_repo = "bad_url/response" os.environ["GITHUB_AUTH_TOKEN"] = "test" - with pytest.raises(nf_core.sync.PullRequestException) as exc_info: + with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info: psync.make_pull_request() assert exc_info.value.args[0].startswith( "Something went badly wrong - GitHub API PR failed - got return code 404" @@ -420,6 +420,6 @@ def test_reset_target_dir_fake_branch(self): psync.original_branch = "fake_branch" - with pytest.raises(nf_core.sync.SyncException) as exc_info: + with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: psync.reset_target_dir() assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`") diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index 154a31fca6..c7088b9282 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -1,8 +1,5 @@ -import tempfile from pathlib import Path -import pytest - from .utils import with_temporary_file, with_temporary_folder diff --git a/tests/test_utils.py b/tests/test_utils.py index 90d1886dbd..3079d75808 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -134,7 +134,7 @@ def test_request_cant_create_cache(self, mock_mkd, mock_exists): def test_pip_package_pass(self): result = nf_core.utils.pip_package("multiqc=1.10") - assert type(result) == dict + assert isinstance(result, dict) @mock.patch("requests.get") def test_pip_package_timeout(self, mock_get): diff --git a/tests/utils.py b/tests/utils.py index 198ac3d583..89c1328818 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -5,9 +5,7 @@ import functools import os import tempfile -from contextlib import contextmanager -from pathlib import Path -from typing import Any, Callable, Generator, Tuple +from typing import Any, Callable, Tuple import responses @@ -27,7 
+25,7 @@ GITLAB_BRANCH_ORG_PATH_BRANCH = "org-path" GITLAB_BRANCH_TEST_OLD_SHA = "e772abc22c1ff26afdf377845c323172fb3c19ca" GITLAB_BRANCH_TEST_NEW_SHA = "7d73e21f30041297ea44367f2b4fd4e045c0b991" -GITLAB_NFTEST_BRANCH = "nf-test-tests" +GITLAB_NFTEST_BRANCH = "nf-test-tests-self-hosted-runners" def with_temporary_folder(func: Callable[..., Any]) -> Callable[..., Any]:
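The remaining mechanical changes in `tests/test_utils.py` and earlier test files follow the same linter-driven pattern: `isinstance()` instead of a `type(...) == dict` comparison (pycodestyle E721), unused imports dropped (Pyflakes F401), and default `open()` modes removed (pyupgrade UP015). One last illustrative sketch with hypothetical values:

```python
import tempfile
from collections import OrderedDict

# E721: `type(x) == dict` is an exact-type check and rejects subclasses,
# while isinstance() accepts them -- the behaviour the pip_package test wants.
result = OrderedDict(name="multiqc", version="1.10")
assert isinstance(result, dict)   # True: OrderedDict subclasses dict
assert type(result) is not dict   # an exact-type comparison would have failed

# UP015: "r" is the default mode, so open(path) is equivalent to open(path, "r").
with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as tmp:
    tmp.write("{}")
with open(tmp.name) as fh:        # no redundant "r" argument
    assert fh.read() == "{}"
```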