Commit
Merge branch 'dev' into patch
mirpedrol authored Oct 10, 2022
2 parents bfbbea0 + 909caa5 commit 87949fb
Showing 23 changed files with 1,419 additions and 30 deletions.
6 changes: 5 additions & 1 deletion CHANGELOG.md
@@ -11,7 +11,9 @@
### General

- Fix error in tagging GitPod docker images during releases
- Don't remove local copy of modules repo, only update it with fetch ([#1879](https://github.com/nf-core/tools/pull/1879))
- Don't remove local copy of modules repo, only update it with fetch ([#1881](https://github.com/nf-core/tools/pull/1881))
- Add subworkflow commands create-test-yml, create and install ([#1897](https://github.com/nf-core/tools/pull/1897))
- Update subworkflows install so it installs also imported modules and subworkflows ([#1904](https://github.com/nf-core/tools/pull/1904))

### Modules

@@ -21,6 +23,7 @@

### Template

- Add template for subworkflows
- Add `actions/upload-artifact` step to the awstest workflows, to expose the debug log file
- Add `prettier` as a requirement to Gitpod Dockerimage
- Bioconda incompatible conda channel setups now result in more informative error messages ([#1812](https://github.com/nf-core/tools/pull/1812))
@@ -43,6 +46,7 @@
- Schema: Remove `allOf` if no definition groups are left.
- Use contextlib to temporarily change working directories ([#1819](https://github.com/nf-core/tools/pull/1819))
- More helpful error messages if `nf-core download` can't parse a singularity image download
- Add `nf-core subworkflows create` command

### Modules

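The `### General` entries above introduce a new `nf-core subworkflows` command group. As a quick orientation, here is a minimal sketch (not part of the commit) that checks the new commands are wired up, assuming nf-core/tools from this branch is installed; Click's test runner is used purely for illustration.

```python
# Minimal sketch (not from this commit): confirm the new "subworkflows" group
# and its subcommands are registered on the nf-core CLI.
from click.testing import CliRunner

from nf_core.__main__ import nf_core_cli

runner = CliRunner()

# The group help should list the new subcommands.
result = runner.invoke(nf_core_cli, ["subworkflows", "--help"])
assert result.exit_code == 0
for subcommand in ("create", "create-test-yml", "install"):
    assert subcommand in result.output
    # Each subcommand also has its own help text.
    assert runner.invoke(nf_core_cli, ["subworkflows", subcommand, "--help"]).exit_code == 0
```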
1 change: 1 addition & 0 deletions MANIFEST.in
@@ -2,4 +2,5 @@ include LICENSE
include README.md
graft nf_core/module-template
graft nf_core/pipeline-template
graft nf_core/subworkflow-template
include requirements.txt
145 changes: 144 additions & 1 deletion nf_core/__main__.py
@@ -20,6 +20,7 @@
import nf_core.list
import nf_core.modules
import nf_core.schema
import nf_core.subworkflows
import nf_core.sync
import nf_core.utils

@@ -54,6 +55,16 @@
"commands": ["create", "create-test-yml", "lint", "bump-versions", "mulled", "test"],
},
],
"nf-core subworkflows": [
{
"name": "For pipelines",
"commands": ["install"],
},
{
"name": "Developing new subworkflows",
"commands": ["create", "create-test-yml"],
},
],
}
click.rich_click.OPTION_GROUPS = {
"nf-core modules list local": [{"options": ["--dir", "--json", "--help"]}],
@@ -89,7 +100,7 @@ def run_nf_core():
log.debug(f"Could not check latest version: {e}")
stderr.print("\n")

# Lanch the click cli
# Launch the click cli
nf_core_cli(auto_envvar_prefix="NFCORE")


@@ -383,6 +394,38 @@ def modules(ctx, git_remote, branch, no_pull):
ctx.obj["modules_repo_no_pull"] = no_pull


# nf-core subworkflows click command
@nf_core_cli.group()
@click.option(
"-g",
"--git-remote",
type=str,
default=nf_core.modules.modules_repo.NF_CORE_MODULES_REMOTE,
help="Remote git repo to fetch files from",
)
@click.option("-b", "--branch", type=str, default=None, help="Branch of git repository hosting modules.")
@click.option(
"-N",
"--no-pull",
is_flag=True,
default=False,
help="Do not pull in latest changes to local clone of modules repository.",
)
@click.pass_context
def subworkflows(ctx, git_remote, branch, no_pull):
"""
Commands to manage Nextflow DSL2 subworkflows (tool wrappers).
"""
# ensure that ctx.obj exists and is a dict (in case `cli()` is called
# by means other than the `if` block below)
ctx.ensure_object(dict)

# Place the arguments in a context object
ctx.obj["modules_repo_url"] = git_remote
ctx.obj["modules_repo_branch"] = branch
ctx.obj["modules_repo_no_pull"] = no_pull

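Worth noting from the group definition above: the `-g`/`-b`/`-N` options are declared on the `subworkflows` group itself and stashed on `ctx.obj`, so `create`, `create-test-yml` and `install` all see the same remote-repo settings. A hedged sketch of what that looks like on the command line (the URL is just the default spelled out; the test runner is only for illustration):

```python
# Group-level options come before the subcommand name; install() then reads
# ctx.obj["modules_repo_url"] / ["modules_repo_branch"] / ["modules_repo_no_pull"].
from click.testing import CliRunner

from nf_core.__main__ import nf_core_cli

runner = CliRunner()
result = runner.invoke(
    nf_core_cli,
    ["subworkflows", "-g", "https://github.com/nf-core/modules.git", "-b", "master", "install", "--help"],
)
print(result.output)
```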

# nf-core modules list subcommands
@modules.group()
@click.pass_context
@@ -855,6 +898,106 @@ def test_module(ctx, tool, no_prompts, pytest_args):
        sys.exit(1)


# nf-core subworkflows create
@subworkflows.command("create")
@click.pass_context
@click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="<directory>")
@click.option("-a", "--author", type=str, metavar="<author>", help="Module author's GitHub username prefixed with '@'")
@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite any files if they already exist")
def create_subworkflow(ctx, subworkflow, dir, author, force):
"""
Create a new subworkflow from the nf-core template.
If the specified directory is a pipeline, this function creates a file called
'subworkflows/local/<subworkflow_name>.nf'
If the specified directory is a clone of nf-core/modules, it creates or modifies files
in 'subworkflows/', 'tests/subworkflows' and 'tests/config/pytest_modules.yml'
"""

# Run function
try:
subworkflow_create = nf_core.subworkflows.SubworkflowCreate(dir, subworkflow, author, force)
subworkflow_create.create()
except UserWarning as e:
log.critical(e)
sys.exit(1)
except LookupError as e:
log.error(e)
sys.exit(1)
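The same code path can be driven without Click, which is roughly what the command body above does. A minimal sketch, assuming the current directory is a pipeline or an nf-core/modules clone; the subworkflow name and author handle are placeholders:

```python
# Hedged sketch of using SubworkflowCreate directly (names are illustrative).
import nf_core.subworkflows

subworkflow_create = nf_core.subworkflows.SubworkflowCreate(
    ".",                   # pipeline dir or nf-core/modules clone
    "bam_stats_samtools",  # new subworkflow name (placeholder)
    "@example-user",       # author GitHub handle (placeholder)
    False,                 # force: don't overwrite existing files
)
subworkflow_create.create()
```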


# nf-core subworkflows create-test-yml
@subworkflows.command("create-test-yml")
@click.pass_context
@click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
@click.option("-t", "--run-tests", is_flag=True, default=False, help="Run the test workflows")
@click.option("-o", "--output", type=str, help="Path for output YAML file")
@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output YAML file if it already exists")
@click.option("-p", "--no-prompts", is_flag=True, default=False, help="Use defaults without prompting")
def create_test_yml(ctx, subworkflow, run_tests, output, force, no_prompts):
"""
Auto-generate a test.yml file for a new subworkflow.
Given the name of a module, runs the Nextflow test command and automatically generate
the required `test.yml` file based on the output files.
"""
try:
meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder(
subworkflow=subworkflow,
run_tests=run_tests,
test_yml_output_path=output,
force_overwrite=force,
no_prompts=no_prompts,
)
meta_builder.run()
except (UserWarning, LookupError) as e:
log.critical(e)
sys.exit(1)
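For reference, a minimal sketch of calling the builder directly with the same keyword arguments the command passes through; the subworkflow name and output path are placeholders, and the test run is left off so no Nextflow execution is started:

```python
# Hedged sketch of SubworkflowTestYmlBuilder usage (paths and names are illustrative).
import nf_core.subworkflows

meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder(
    subworkflow="bam_stats_samtools",
    run_tests=False,
    test_yml_output_path="tests/subworkflows/nf-core/bam_stats_samtools/test.yml",
    force_overwrite=True,
    no_prompts=True,
)
meta_builder.run()
```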


# nf-core subworkflows install
@subworkflows.command()
@click.pass_context
@click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
@click.option(
"-d",
"--dir",
type=click.Path(exists=True),
default=".",
help=r"Pipeline directory. [dim]\[default: current working directory][/]",
)
@click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the subworkflow")
@click.option(
"-f", "--force", is_flag=True, default=False, help="Force reinstallation of subworkflow if it already exists"
)
@click.option("-s", "--sha", type=str, metavar="<commit sha>", help="Install subworkflow at commit SHA")
def install(ctx, subworkflow, dir, prompt, force, sha):
"""
Install DSL2 subworkflow within a pipeline.
Fetches and installs subworkflow files from a remote repo e.g. nf-core/modules.
"""
try:
subworkflow_install = nf_core.subworkflows.SubworkflowInstall(
dir,
force,
prompt,
sha,
ctx.obj["modules_repo_url"],
ctx.obj["modules_repo_branch"],
ctx.obj["modules_repo_no_pull"],
)
        exit_status = subworkflow_install.install(subworkflow)
        if not exit_status:
            sys.exit(1)
    except (UserWarning, LookupError) as e:
        log.error(e)
        sys.exit(1)
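And a minimal sketch of the install path used above, with the constructor arguments written out positionally as the command passes them; the pipeline directory and subworkflow name are placeholders, and the remote/branch/no-pull values mirror the CLI defaults normally supplied via `ctx.obj`:

```python
# Hedged sketch of SubworkflowInstall usage (directory and name are illustrative).
import sys

import nf_core.subworkflows
from nf_core.modules.modules_repo import NF_CORE_MODULES_REMOTE

subworkflow_install = nf_core.subworkflows.SubworkflowInstall(
    ".",                     # pipeline directory
    False,                   # force reinstallation
    False,                   # prompt for version
    None,                    # sha: None means latest
    NF_CORE_MODULES_REMOTE,  # remote URL (CLI default)
    None,                    # branch: None means repo default
    False,                   # no_pull
)
if not subworkflow_install.install("bam_stats_samtools"):
    sys.exit(1)
```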


# nf-core schema subcommands
@nf_core_cli.group()
def schema():
6 changes: 3 additions & 3 deletions nf_core/module-template/modules/main.nf
@@ -1,5 +1,5 @@
// TODO nf-core: If in doubt look at other nf-core/modules to see how we are doing things! :)
// https://github.com/nf-core/modules/tree/master/modules
// https://github.com/nf-core/modules/tree/master/modules/nf-core/
// You can also ask for help via your pull request or on the #modules channel on the nf-core Slack workspace:
// https://nf-co.re/join
// TODO nf-core: A module file SHOULD only define input and output files as command-line parameters.
@@ -32,7 +32,7 @@ process {{ tool_name_underscore|upper }} {
// TODO nf-core: Where applicable all sample-specific information e.g. "id", "single_end", "read_group"
// MUST be provided as an input via a Groovy Map called "meta".
// This information may not be required in some instances e.g. indexing reference genome files:
// https://github.com/nf-core/modules/blob/master/modules/bwa/index/main.nf
// https://github.com/nf-core/modules/blob/master/modules/nf-core/bwa/index/main.nf
// TODO nf-core: Where applicable please provide/convert compressed files as input/output
// e.g. "*.fastq.gz" and NOT "*.fastq", "*.bam" and NOT "*.sam" etc.
{{ 'tuple val(meta), path(bam)' if has_meta else 'path bam' }}
@@ -53,7 +53,7 @@ process {{ tool_name_underscore|upper }} {
{%- endif %}
// TODO nf-core: Where possible, a command MUST be provided to obtain the version number of the software e.g. 1.10
// If the software is unable to output a version number on the command-line then it can be manually specified
// e.g. https://github.com/nf-core/modules/blob/master/modules/homer/annotatepeaks/main.nf
// e.g. https://github.com/nf-core/modules/blob/master/modules/nf-core/homer/annotatepeaks/main.nf
// Each software used MUST provide the software name and version number in the YAML version file (versions.yml)
// TODO nf-core: It MUST be possible to pass additional parameters to the tool as a command-line string via the "task.ext.args" directive
// TODO nf-core: If the tool supports multi-threading then you MUST provide the appropriate parameter
6 changes: 3 additions & 3 deletions nf_core/modules/create.py
@@ -142,7 +142,7 @@ def create(self):

self.tool_name_underscore = self.tool_name.replace("/", "_")

# Check existance of directories early for fast-fail
# Check existence of directories early for fast-fail
self.file_paths = self.get_module_dirs()

# Try to find a bioconda package for 'tool'
@@ -239,14 +239,14 @@ def create(self):
"Where applicable all sample-specific information e.g. 'id', 'single_end', 'read_group' "
"MUST be provided as an input via a Groovy Map called 'meta'. "
"This information may [italic]not[/] be required in some instances, for example "
"[link=https://github.com/nf-core/modules/blob/master/modules/bwa/index/main.nf]indexing reference genome files[/link]."
"[link=https://github.com/nf-core/modules/blob/master/modules/nf-core/bwa/index/main.nf]indexing reference genome files[/link]."
)
while self.has_meta is None:
self.has_meta = rich.prompt.Confirm.ask(
"[violet]Will the module require a meta map of sample information?", default=True
)

# Create module template with cokiecutter
# Create module template with jinja
self.render_template()

if self.repo_type == "modules":
26 changes: 17 additions & 9 deletions nf_core/modules/install.py
@@ -86,17 +86,25 @@ def install(self, module):

# Check that the module is not already installed
if (current_version is not None and os.path.exists(module_dir)) and not self.force:
log.info("Module is already installed.")
print(f"Module {module} is already installed.")

log.error("Module is already installed.")
repo_flag = (
"" if self.modules_repo.repo_path == NF_CORE_MODULES_NAME else f"-g {self.modules_repo.remote_url} "
)
branch_flag = "" if self.modules_repo.branch == "master" else f"-b {self.modules_repo.branch} "
self.force = questionary.confirm(
f"Module {module} is already installed. Do you want to force the reinstallation?",
style=nf_core.utils.nfcore_question_style,
default=False,
).unsafe_ask()

log.info(
f"To update '{module}' run 'nf-core modules {repo_flag}{branch_flag}update {module}'. To force reinstallation use '--force'"
)
return False
if not self.force:
repo_flag = (
"" if self.modules_repo.repo_path == NF_CORE_MODULES_NAME else f"-g {self.modules_repo.remote_url} "
)
branch_flag = "" if self.modules_repo.branch == "master" else f"-b {self.modules_repo.branch} "

log.info(
f"To update '{module}' run 'nf-core modules {repo_flag}{branch_flag}update {module}'. To force reinstallation use '--force'"
)
return False

if self.sha:
version = self.sha
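The hunk above changes `nf-core modules install` so that hitting an already-installed module asks whether to force a reinstall rather than stopping immediately. A minimal sketch of the confirmation pattern it relies on (the module name is a placeholder):

```python
# Hedged sketch of the questionary confirmation used by the new install flow.
import questionary

import nf_core.utils

force = questionary.confirm(
    "Module fastqc is already installed. Do you want to force the reinstallation?",
    style=nf_core.utils.nfcore_question_style,
    default=False,
).unsafe_ask()

if not force:
    print("To update 'fastqc' run 'nf-core modules update fastqc'. To force reinstallation use '--force'.")
```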
74 changes: 74 additions & 0 deletions nf_core/modules/modules_json.py
@@ -36,6 +36,7 @@ def __init__(self, pipeline_dir):
        self.modules_dir = Path(self.dir, "modules")
        self.modules_json = None
        self.pipeline_modules = None
        self.pipeline_subworkflows = None

    def create(self):
        """
@@ -587,6 +588,37 @@ def update(self, modules_repo, module_name, module_version, write_file=True):
        if write_file:
            self.dump()

    def update_subworkflow(self, modules_repo, subworkflow_name, subworkflow_version, write_file=True):
        """
        Updates the 'modules.json' file with new subworkflow info

        Args:
            modules_repo (ModulesRepo): A ModulesRepo object configured for the new subworkflow
            subworkflow_name (str): Name of the new subworkflow
            subworkflow_version (str): git SHA for the new subworkflow entry
            write_file (bool): whether to write the updated modules.json to a file.
        """
        if self.modules_json is None:
            self.load()
        repo_name = modules_repo.repo_path
        remote_url = modules_repo.remote_url
        branch = modules_repo.branch
        if remote_url not in self.modules_json["repos"]:
            self.modules_json["repos"][remote_url] = {"subworkflows": {repo_name: {}}}
        if "subworkflows" not in self.modules_json["repos"][remote_url]:
            # It's the first subworkflow installed in the pipeline!
            self.modules_json["repos"][remote_url]["subworkflows"] = {repo_name: {}}
        repo_subworkflows_entry = self.modules_json["repos"][remote_url]["subworkflows"][repo_name]
        if subworkflow_name not in repo_subworkflows_entry:
            repo_subworkflows_entry[subworkflow_name] = {}
        repo_subworkflows_entry[subworkflow_name]["git_sha"] = subworkflow_version
        repo_subworkflows_entry[subworkflow_name]["branch"] = branch

        # Sort the 'modules.json' repo entries
        self.modules_json["repos"] = nf_core.utils.sort_dictionary(self.modules_json["repos"])
        if write_file:
            self.dump()
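The nesting written by `update_subworkflow()` sits alongside the existing per-module entries, keyed by remote URL, then `"subworkflows"`, then the install directory. A hedged illustration of the resulting `"repos"` value after installing one subworkflow (URL, directory, name and SHA are placeholders):

```python
# Hedged illustration of the modules.json "repos" structure after
# update_subworkflow() has recorded one subworkflow (all values are placeholders).
repos = {
    "https://github.com/nf-core/modules.git": {
        "subworkflows": {
            "nf-core": {
                "bam_stats_samtools": {
                    "git_sha": "0123456789abcdef0123456789abcdef01234567",
                    "branch": "master",
                },
            },
        },
    },
}
```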
    def remove_entry(self, module_name, repo_url, install_dir):
        """
        Removes an entry from the 'modules.json' file.
@@ -752,6 +784,29 @@ def get_module_version(self, module_name, repo_url, install_dir):
.get("git_sha", None)
)

def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir):
"""
Returns the version of a subworkflow
Args:
subworkflow_name (str): Name of the module
repo_url (str): URL of the repository
install_dir (str): Name of the directory where subworkflows are installed
Returns:
(str): The git SHA of the subworkflow if it exists, None otherwise
"""
if self.modules_json is None:
self.load()
return (
self.modules_json.get("repos", {})
.get(repo_url, {})
.get("subworkflows", {})
.get(install_dir, {})
.get(subworkflow_name, {})
.get("git_sha", None)
)

def get_all_modules(self):
"""
Retrieves all pipeline modules that are reported in the modules.json
@@ -815,3 +870,22 @@ def __str__(self):

    def __repr__(self):
        return self.__str__()

    def get_installed_subworkflows(self):
        """
        Retrieves all pipeline subworkflows that are reported in the modules.json

        Returns:
            (dict[str, [(str, str)]]): Dictionary indexed with the repo urls, with a
                list of tuples (install_dir, subworkflow_name) as values
        """
        if self.modules_json is None:
            self.load()
        if self.pipeline_subworkflows is None:
            self.pipeline_subworkflows = {}
            for repo, repo_entry in self.modules_json.get("repos", {}).items():
                if "subworkflows" in repo_entry:
                    for dir, subworkflow in repo_entry["subworkflows"].items():
                        self.pipeline_subworkflows[repo] = [(dir, name) for name in subworkflow]

        return self.pipeline_subworkflows
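To round off the new accessors, a minimal sketch of reading installed subworkflows back out of `modules.json`; it assumes the class defined in this file is `ModulesJson` (as in nf-core/tools) and that `my-pipeline` is a pipeline checkout containing a `modules.json`:

```python
# Hedged sketch: list subworkflows recorded in a pipeline's modules.json.
from nf_core.modules.modules_json import ModulesJson

modules_json = ModulesJson("my-pipeline")  # path is a placeholder
for repo_url, entries in modules_json.get_installed_subworkflows().items():
    for install_dir, subworkflow_name in entries:
        print(f"{subworkflow_name} from {repo_url} (installed under {install_dir})")
```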