diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 68a6fa3ed7..1827289b76 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.2 + rev: v0.8.6 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix @@ -19,7 +19,7 @@ repos: alias: ec - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.13.0" + rev: "v1.14.1" hooks: - id: mypy additional_dependencies: diff --git a/CHANGELOG.md b/CHANGELOG.md index 95b7d3783b..704124baa9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # nf-core/tools: Changelog -## v3.2.0dev +## v3.1.2dev ### Template @@ -12,10 +12,15 @@ ### Modules +- Fix bump-versions: only append module name if it is a dir and contains main.nf ([#3384](https://github.com/nf-core/tools/pull/3384)) + ### Subworkflows ### General +- Parameters schema validation: allow oneOf, anyOf and allOf with `required` ([#3386](https://github.com/nf-core/tools/pull/3386)) +- Run pre-commit when rendering template for pipelines sync ([#3371](https://github.com/nf-core/tools/pull/3371)) + ### Version updates ## [v3.1.1 - Brass Boxfish Patch](https://github.com/nf-core/tools/releases/tag/3.1.1) - [2024-12-20] diff --git a/Dockerfile b/Dockerfile index f2141145b8..f5a6796a27 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:2b0079146a74e23bf4ae8f6a28e1b484c6292f6fb904cbb51825b4a19812fcd8 +FROM python:3.12-slim@sha256:10f3aaab98db50cba827d3b33a91f39dc9ec2d02ca9b85cbc5008220d07b17f3 LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" diff --git a/nf_core/modules/modules_utils.py b/nf_core/modules/modules_utils.py index ecfe5f24ee..0f42d1bcea 100644 --- a/nf_core/modules/modules_utils.py +++ b/nf_core/modules/modules_utils.py @@ -65,19 +65,20 @@ def get_installed_modules(directory: Path, 
repo_type="modules") -> Tuple[List[st local_modules = sorted([x for x in local_modules if x.endswith(".nf")]) # Get nf-core modules - if os.path.exists(nfcore_modules_dir): - for m in sorted([m for m in os.listdir(nfcore_modules_dir) if not m == "lib"]): - if not os.path.isdir(os.path.join(nfcore_modules_dir, m)): + if nfcore_modules_dir.exists(): + for m in sorted([m for m in nfcore_modules_dir.iterdir() if not m == "lib"]): + if not m.is_dir(): raise ModuleExceptionError( f"File found in '{nfcore_modules_dir}': '{m}'! This directory should only contain module directories." ) - m_content = os.listdir(os.path.join(nfcore_modules_dir, m)) + m_content = [d.name for d in m.iterdir()] # Not a module, but contains sub-modules if "main.nf" not in m_content: for tool in m_content: - nfcore_modules_names.append(os.path.join(m, tool)) + if (m / tool).is_dir() and "main.nf" in [d.name for d in (m / tool).iterdir()]: + nfcore_modules_names.append(str(Path(m.name, tool))) else: - nfcore_modules_names.append(m) + nfcore_modules_names.append(m.name) # Make full (relative) file paths and create NFCoreComponent objects if local_modules_dir: diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 9db393d9f0..f3a1073843 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -46,6 +46,8 @@ jobs: steps: - name: Check out pipeline code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + with: + fetch-depth: 0 - name: Set up Nextflow uses: nf-core/setup-nextflow@v2 diff --git a/nf_core/pipeline-template/conf/test.config b/nf_core/pipeline-template/conf/test.config index bea6f670d0..ebe720f295 100644 --- a/nf_core/pipeline-template/conf/test.config +++ b/nf_core/pipeline-template/conf/test.config @@ -27,7 +27,8 @@ params { // TODO nf-core: Give any required params for the test so that command line flags are not needed input = 
params.pipelines_testdata_base_path + 'viralrecon/samplesheet/samplesheet_test_illumina_amplicon.csv' - {% if igenomes -%} + {%- if igenomes -%} + // Genome references genome = 'R64-1-1' {%- endif %} diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 4f90ca17f9..86ac022772 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -386,6 +386,10 @@ def render_template(self) -> None: yaml.dump(config_yml.model_dump(exclude_none=True), fh, Dumper=custom_yaml_dumper()) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") + # Run prettier on files for pipelines sync + log.debug("Running prettier on pipeline files") + run_prettier_on_file([str(f) for f in self.outdir.glob("**/*")]) + def fix_linting(self): """ Updates the .nf-core.yml with linting configurations diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index e46c43ad6b..c0f6e8c2f3 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1543,7 +1543,7 @@ def singularity_pull_image( progress.remove_task(task) - def compress_download(self) -> None: + def compress_download(self): """Take the downloaded files and make a compressed .tar.gz archive.""" log.debug(f"Creating archive: {self.output_filename}") diff --git a/nf_core/pipelines/lint_utils.py b/nf_core/pipelines/lint_utils.py index a6b98b1899..d41cf16b12 100644 --- a/nf_core/pipelines/lint_utils.py +++ b/nf_core/pipelines/lint_utils.py @@ -70,6 +70,15 @@ def print_fixes(lint_obj): ) +def check_git_repo() -> bool: + """Check if the current directory is a git repository.""" + try: + subprocess.check_output(["git", "rev-parse", "--is-inside-work-tree"]) + return True + except subprocess.CalledProcessError: + return False + + def run_prettier_on_file(file: Union[Path, str, List[str]]) -> None: """Run the pre-commit hook prettier on a file. 
@@ -80,6 +89,8 @@ def run_prettier_on_file(file: Union[Path, str, List[str]]) -> None: If Prettier is not installed, a warning is logged. """ + is_git = check_git_repo() + nf_core_pre_commit_config = Path(nf_core.__file__).parent / ".pre-commit-prettier-config.yaml" args = ["pre-commit", "run", "--config", str(nf_core_pre_commit_config), "prettier"] if isinstance(file, List): @@ -87,21 +98,24 @@ def run_prettier_on_file(file: Union[Path, str, List[str]]) -> None: else: args.extend(["--files", str(file)]) - try: - subprocess.run(args, capture_output=True, check=True) - log.debug(f"${subprocess.STDOUT}") - except subprocess.CalledProcessError as e: - if ": SyntaxError: " in e.stdout.decode(): - log.critical(f"Can't format {file} because it has a syntax error.\n{e.stdout.decode()}") - elif "files were modified by this hook" in e.stdout.decode(): - all_lines = [line for line in e.stdout.decode().split("\n")] - files = "\n".join(all_lines[3:]) - log.debug(f"The following files were modified by prettier:\n {files}") - else: - log.warning( - "There was an error running the prettier pre-commit hook.\n" - f"STDOUT: {e.stdout.decode()}\nSTDERR: {e.stderr.decode()}" - ) + if is_git: + try: + proc = subprocess.run(args, capture_output=True, check=True) + log.debug(f"{proc.stdout.decode()}") + except subprocess.CalledProcessError as e: + if ": SyntaxError: " in e.stdout.decode(): + log.critical(f"Can't format {file} because it has a syntax error.\n{e.stdout.decode()}") + elif "files were modified by this hook" in e.stdout.decode(): + all_lines = [line for line in e.stdout.decode().split("\n")] + files = "\n".join(all_lines[3:]) + log.debug(f"The following files were modified by prettier:\n {files}") + else: + log.warning( + "There was an error running the prettier pre-commit hook.\n" + f"STDOUT: {e.stdout.decode()}\nSTDERR: {e.stderr.decode()}" + ) + else: + log.debug("Not in a git repository, skipping pre-commit hook.") def dump_json_with_prettier(file_name, file_content): 
diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index a08dd0a2d0..b425ec64ed 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -327,11 +327,32 @@ def validate_default_params(self): schema_no_required = copy.deepcopy(self.schema) if "required" in schema_no_required: schema_no_required.pop("required") + for keyword in ["allOf", "anyOf", "oneOf"]: + if keyword in schema_no_required: + for i, kw_content in enumerate(schema_no_required[keyword]): + if "required" in kw_content: + schema_no_required[keyword][i].pop("required") + schema_no_required[keyword] = [ + kw_content for kw_content in schema_no_required[keyword] if kw_content + ] + if not schema_no_required[keyword]: + schema_no_required.pop(keyword) for group_key, group in schema_no_required.get(self.defs_notation, {}).items(): if "required" in group: schema_no_required[self.defs_notation][group_key].pop("required") + for keyword in ["allOf", "anyOf", "oneOf"]: + if keyword in group: + for i, kw_content in enumerate(group[keyword]): + if "required" in kw_content: + schema_no_required[self.defs_notation][group_key][keyword][i].pop("required") + schema_no_required[self.defs_notation][group_key][keyword] = [ + kw_content for kw_content in group[keyword] if kw_content + ] + if not group[keyword]: + schema_no_required[self.defs_notation][group_key].pop(keyword) jsonschema.validate(self.schema_defaults, schema_no_required) except jsonschema.exceptions.ValidationError as e: + log.debug(f"Complete error message:\n{e}") raise AssertionError(f"Default parameters are invalid: {e.message}") for param, default in self.schema_defaults.items(): if default in ("null", "", None, "None") or default is False: diff --git a/setup.py b/setup.py index 4a142c20fd..ebf167a22c 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "3.2.0dev" +version = "3.1.2dev" with open("README.md") as f: readme = f.read() diff --git 
a/tests/modules/test_modules_utils.py b/tests/modules/test_modules_utils.py new file mode 100644 index 0000000000..763725337b --- /dev/null +++ b/tests/modules/test_modules_utils.py @@ -0,0 +1,20 @@ +import nf_core.modules.modules_utils + +from ..test_modules import TestModules + + +class TestModulesUtils(TestModules): + def test_get_installed_modules(self): + """Test getting installed modules""" + _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.nfcore_modules) + assert len(nfcore_modules) == 1 + assert nfcore_modules[0].component_name == "bpipe/test" + + def test_get_installed_modules_with_files(self): + """Test getting installed modules. When a module contains a file in its directory, it shouldn't be picked up as a tool/subtool""" + # Create a file in the module directory + with open(self.nfcore_modules / "modules" / "nf-core" / "bpipe" / "test_file.txt", "w") as fh: + fh.write("test") + + _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.nfcore_modules) + assert len(nfcore_modules) == 1 diff --git a/tests/pipelines/test_schema.py b/tests/pipelines/test_schema.py index ab543d8b90..efc2798969 100644 --- a/tests/pipelines/test_schema.py +++ b/tests/pipelines/test_schema.py @@ -285,6 +285,89 @@ def test_remove_schema_notfound_configs_childschema(self): assert len(params_removed) == 1 assert "foo" in params_removed + def test_validate_defaults(self): + """Test validating default values""" + self.schema_obj.schema = { + "properties": {"foo": {"type": "string"}, "bar": {"type": "string"}}, + "required": ["foo"], + } + self.schema_obj.schema_defaults = {"foo": "foo", "bar": "bar"} + self.schema_obj.no_prompts = True + try: + self.schema_obj.validate_default_params() + except AssertionError: + self.fail("Error validating schema defaults") + + def test_validate_defaults_required(self): + """Test validating default values when required params don't have a default""" + self.schema_obj.schema = { + "properties": {"foo": 
{"type": "string"}, "bar": {"type": "string"}}, + "required": ["foo"], + } + self.schema_obj.schema_defaults = {} + self.schema_obj.no_prompts = True + try: + self.schema_obj.validate_default_params() + except AssertionError: + self.fail("Error validating schema defaults") + + def test_validate_defaults_required_inside_group(self): + """Test validating default values when required params don't have a default, inside a group""" + self.schema_obj.schema = { + "$defs": { + "subSchemaId": { + "properties": {"foo": {"type": "string"}, "bar": {"type": "string"}}, + "required": ["foo"], + }, + } + } + self.schema_obj.schema_defaults = {} + self.schema_obj.no_prompts = True + try: + self.schema_obj.validate_default_params() + except AssertionError: + self.fail("Error validating schema defaults") + + def test_validate_defaults_required_inside_group_with_anyof(self): + """Test validating default values when required params don't have a default, inside a group with anyOf""" + self.schema_obj.schema = { + "$defs": { + "subSchemaId": { + "anyOf": [{"required": ["foo"]}, {"required": ["bar"]}], + "properties": {"foo": {"type": "string"}, "bar": {"type": "string"}}, + }, + } + } + self.schema_obj.schema_defaults = {} + self.schema_obj.no_prompts = True + try: + self.schema_obj.validate_default_params() + except AssertionError: + self.fail("Error validating schema defaults") + + def test_validate_defaults_required_with_anyof(self): + """Test validating default values when required params don't have a default, with anyOf""" + self.schema_obj.schema = { + "properties": {"foo": {"type": "string"}, "bar": {"type": "string"}, "baz": {"type": "string"}}, + "anyOf": [{"required": ["foo"]}, {"required": ["bar"]}], + } + self.schema_obj.schema_defaults = {"baz": "baz"} + self.schema_obj.no_prompts = True + try: + self.schema_obj.validate_default_params() + except AssertionError: + self.fail("Error validating schema defaults") + + def test_validate_defaults_error(self): + """Test validating 
default raises an exception when a default is not valid""" + self.schema_obj.schema = { + "properties": {"foo": {"type": "string"}}, + } + self.schema_obj.schema_defaults = {"foo": 1} + self.schema_obj.no_prompts = True + with self.assertRaises(AssertionError): + self.schema_obj.validate_default_params() + def test_add_schema_found_configs(self): """Try adding a new parameter to the schema from the config""" self.schema_obj.pipeline_params = {"foo": "bar"}