diff --git a/planemo/commands/cmd_autoupdate.py b/planemo/commands/cmd_autoupdate.py
index 2c562f9ae..b9c238359 100644
--- a/planemo/commands/cmd_autoupdate.py
+++ b/planemo/commands/cmd_autoupdate.py
@@ -93,7 +93,7 @@ def cli(ctx, paths, **kwds):  # noqa C901
 
     if any(r.type in {RunnableType.galaxy_tool, RunnableType.directory} for r in runnables):
         # update Galaxy tools
-        for (tool_path, tool_xml) in yield_tool_sources_on_paths(ctx, paths, recursive):
+        for tool_path, tool_xml in yield_tool_sources_on_paths(ctx, paths, recursive):
             if tool_path.split("/")[-1] in tools_to_skip:
                 info("Skipping tool %s" % tool_path)
                 continue
diff --git a/planemo/commands/cmd_ci_find_tools.py b/planemo/commands/cmd_ci_find_tools.py
index 5d174ccb1..217606737 100644
--- a/planemo/commands/cmd_ci_find_tools.py
+++ b/planemo/commands/cmd_ci_find_tools.py
@@ -27,7 +27,7 @@ def cli(ctx, paths, **kwds):
     operations over for continuous integration operations.
     """
     tool_paths = []
-    for (tool_path, tool_source) in yield_tool_sources_on_paths(ctx, paths, recursive=True):
+    for tool_path, tool_source in yield_tool_sources_on_paths(ctx, paths, recursive=True):
        if is_tool_load_error(tool_source):
            continue
        tool_paths.append(tool_path)
diff --git a/planemo/commands/cmd_shed_diff.py b/planemo/commands/cmd_shed_diff.py
index 2d41a2272..be2c7fd97 100644
--- a/planemo/commands/cmd_shed_diff.py
+++ b/planemo/commands/cmd_shed_diff.py
@@ -84,7 +84,6 @@ def cli(ctx, paths, **kwds):
     }
 
     def diff(realized_repository):
-
         # We create a temporary redirection from kwds's
         # output to our tempfile. This lets us capture the
         # diff and redirect it to their requested location as
diff --git a/planemo/conda.py b/planemo/conda.py
index f2052cc0a..aa6f5880b 100644
--- a/planemo/conda.py
+++ b/planemo/conda.py
@@ -87,7 +87,7 @@ def collect_conda_targets(ctx, paths, recursive=False, found_tool_callback=None)
         else:
             real_paths.append(path)
 
-    for (tool_path, tool_source) in yield_tool_sources_on_paths(
+    for tool_path, tool_source in yield_tool_sources_on_paths(
         ctx, real_paths, recursive=recursive, exclude_deprecated=True
     ):
         if found_tool_callback:
@@ -132,9 +132,7 @@ def collect_conda_target_lists_and_tool_paths(ctx, paths, recursive=False, found
     """
     conda_target_lists = set()
     tool_paths = collections.defaultdict(list)
-    for (tool_path, tool_source) in yield_tool_sources_on_paths(
-        ctx, paths, recursive=recursive, yield_load_errors=False
-    ):
+    for tool_path, tool_source in yield_tool_sources_on_paths(ctx, paths, recursive=recursive, yield_load_errors=False):
         try:
             if found_tool_callback:
                 found_tool_callback(tool_path)
@@ -153,7 +151,7 @@ def tool_source_conda_targets(tool_source):
     """Load CondaTarget object from supplied abstract tool source."""
-    requirements, _ = tool_source.parse_requirements_and_containers()
+    requirements, *_ = tool_source.parse_requirements_and_containers()
     return conda_util.requirements_to_conda_targets(requirements)
diff --git a/planemo/engine/interface.py b/planemo/engine/interface.py
index 88cbcae20..b52852c81 100644
--- a/planemo/engine/interface.py
+++ b/planemo/engine/interface.py
@@ -75,7 +75,7 @@ def test(self, runnables, test_timeout):
         test_cases = [t for tl in map(cases, runnables) for t in tl]
         test_results = self._collect_test_results(test_cases, test_timeout)
         tests = []
-        for (test_case, run_response) in test_results:
+        for test_case, run_response in test_results:
             test_case_data = test_case.structured_test_data(run_response)
             tests.append(test_case_data)
         test_data = {
diff --git a/planemo/galaxy/activity.py b/planemo/galaxy/activity.py
index 296e7c33e..758066fe1 100644
--- a/planemo/galaxy/activity.py
+++ b/planemo/galaxy/activity.py
@@ -414,7 +414,6 @@ def _get_metadata(self, history_content_type, content_id):
             raise Exception("Unknown history content type encountered [%s]" % history_content_type)
 
     def collect_outputs(self, ctx, output_directory):
-
         outputs_dict = {}
         # TODO: rather than creating a directory just use
         # Galaxy paths if they are available in this
diff --git a/planemo/galaxy/config.py b/planemo/galaxy/config.py
index b0bf7f44d..1a6545d2a 100644
--- a/planemo/galaxy/config.py
+++ b/planemo/galaxy/config.py
@@ -26,8 +26,8 @@
     TYPE_CHECKING,
 )
 
-from galaxy.containers.docker_model import DockerVolume
 from galaxy.tool_util.deps import docker_util
+from galaxy.tool_util.deps.container_volumes import DockerVolume
 from galaxy.util.commands import argv_to_str
 from packaging.version import parse as parse_version
 
diff --git a/planemo/galaxy/workflows.py b/planemo/galaxy/workflows.py
index ea389b062..fa968ffcc 100644
--- a/planemo/galaxy/workflows.py
+++ b/planemo/galaxy/workflows.py
@@ -164,7 +164,7 @@ def describe_outputs(runnable, gi=None):
 
     workflow = _raw_dict(runnable.path)
     outputs = []
-    for (order_index, step) in workflow["steps"].items():
+    for order_index, step in workflow["steps"].items():
         optional = False
         if not step.get("tool_id"):
             # One of the parameter types ... need eliminate this guesswork on the Galaxy side
@@ -370,7 +370,6 @@ def _elements_to_test_def(
 
 
 def _job_inputs_template_from_invocation(invocation_id, galaxy_url, galaxy_api_key):
-
     user_gi = gi(url=galaxy_url, key=galaxy_api_key)
     invocation = user_gi.invocations.show_invocation(invocation_id)
     template = {}
diff --git a/planemo/reports/allure.py b/planemo/reports/allure.py
index 73aa92e3d..a0210b0a3 100644
--- a/planemo/reports/allure.py
+++ b/planemo/reports/allure.py
@@ -52,7 +52,6 @@ def process(self, structured_data, file_modication_datetime=None):
         plugin_manager.unregister(plugin=self.logger)
 
     def process_test_case(self, test_case, file_modication_datetime=None):
-
         with self.lifecycle.schedule_test_case() as test_result:
             test_index = test_case["id"]
             test_data = test_case.get("data") or {}
diff --git a/planemo/shed/__init__.py b/planemo/shed/__init__.py
index f850103b5..b398728b6 100644
--- a/planemo/shed/__init__.py
+++ b/planemo/shed/__init__.py
@@ -963,7 +963,7 @@ def __init__(self, repo_pairs, description=None):
     def __str__(self):
         contents = '<repositories description="%s">' % self.description
         line_template = '  <repository owner="%s" name="%s" />\n'
-        for (owner, name) in self.repo_pairs:
+        for owner, name in self.repo_pairs:
             contents += line_template % (owner, name)
         contents += "</repositories>"
         return contents
@@ -1021,7 +1021,7 @@ def _realize_to(self, ctx, directory, name, multiple, **kwds):
                 continue
             realized_file.realize_to(directory)
 
-        for (name, contents) in config.get("_files", {}).items():
+        for name, contents in config.get("_files", {}).items():
             path = os.path.join(directory, name)
             with open(path, "w") as f:
                 f.write(contents)
diff --git a/planemo/shed_lint.py b/planemo/shed_lint.py
index c0dd51e06..38f4ffd4a 100644
--- a/planemo/shed_lint.py
+++ b/planemo/shed_lint.py
@@ -121,7 +121,7 @@ def lint_repository(ctx, realized_repository, **kwds):
 
 def lint_repository_tools(ctx, realized_repository, lint_ctx, lint_args):
     path = realized_repository.path
-    for (tool_path, tool_source) in yield_tool_sources(ctx, path, recursive=True):
+    for tool_path, tool_source in yield_tool_sources(ctx, path, recursive=True):
         original_path = tool_path.replace(path, realized_repository.real_path)
         info("+Linting tool %s" % original_path)
         if handle_tool_load_error(tool_path, tool_source):
diff --git a/planemo/tool_lint.py b/planemo/tool_lint.py
index f7d83243c..5fce052fb 100644
--- a/planemo/tool_lint.py
+++ b/planemo/tool_lint.py
@@ -36,7 +36,7 @@ def lint_tools_on_path(ctx, paths, lint_args, **kwds):
     assert_tools = kwds.get("assert_tools", True)
     recursive = kwds.get("recursive", False)
     exit_codes = []
-    for (tool_path, tool_xml) in yield_tool_sources_on_paths(ctx, paths, recursive):
+    for tool_path, tool_xml in yield_tool_sources_on_paths(ctx, paths, recursive):
         if handle_tool_load_error(tool_path, tool_xml):
             exit_codes.append(EXIT_CODE_GENERIC_FAILURE)
             continue
diff --git a/planemo/tools.py b/planemo/tools.py
index d57d8e413..0f0ec8f5b 100644
--- a/planemo/tools.py
+++ b/planemo/tools.py
@@ -56,7 +56,7 @@ def yield_tool_sources_on_paths(
 ) -> Iterator[Tuple[str, Union[ToolSource, object]]]:
     """Walk paths and yield ToolSource objects discovered."""
     for path in paths:
-        for (tool_path, tool_source) in yield_tool_sources(ctx, path, recursive, yield_load_errors):
+        for tool_path, tool_source in yield_tool_sources(ctx, path, recursive, yield_load_errors):
             if exclude_deprecated and "deprecated" in tool_path:
                 continue
             yield (tool_path, tool_source)
@@ -71,7 +71,7 @@ def yield_tool_sources(
         recursive,
         register_load_errors=True,
     )
-    for (tool_path, tool_source) in tools:
+    for tool_path, tool_source in tools:
         if is_tool_load_error(tool_source):
             if yield_load_errors:
                 yield (tool_path, tool_source)
diff --git a/requirements.txt b/requirements.txt
index 8779e1e41..b1f693fd4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,9 +3,8 @@ bioblend>=1.0.0
 click!=8.0.2
 cwltool>=1.0.20191225192155
 ephemeris>=0.10.3
-galaxy-containers<22.5
-galaxy-tool-util>=21.1.1,<22.5
-galaxy-util>=20.5.0,<22.5
+galaxy-tool-util>=22.5.dev2,<23.0
+galaxy-util[template]>=22.5.dev2,<23.0
 glob2
 gxformat2>=0.14.0
 h5py
diff --git a/tests/data/data_manager/data_manager_fetch_genome_dbkeys_all_fasta/data_manager/data_manager_fetch_genome_all_fasta_dbkeys.py b/tests/data/data_manager/data_manager_fetch_genome_dbkeys_all_fasta/data_manager/data_manager_fetch_genome_all_fasta_dbkeys.py
index bf543412a..3d41f1ff0 100644
--- a/tests/data/data_manager/data_manager_fetch_genome_dbkeys_all_fasta/data_manager/data_manager_fetch_genome_all_fasta_dbkeys.py
+++ b/tests/data/data_manager/data_manager_fetch_genome_dbkeys_all_fasta/data_manager/data_manager_fetch_genome_all_fasta_dbkeys.py
@@ -373,7 +373,6 @@ def _stream_fasta_to_file(fasta_stream, target_directory, sequence_id, close_str
     fasta_base_filename = "%s.fa" % sequence_id
     fasta_filename = os.path.join(target_directory, fasta_base_filename)
     with open(fasta_filename, "wb+") as fasta_writer:
-
         if isinstance(fasta_stream, list) and len(fasta_stream) == 1:
             fasta_stream = fasta_stream[0]
 
diff --git a/tox.ini b/tox.ini
index 084ea484b..0970b775c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -65,7 +65,7 @@ setenv =
     2101: PLANEMO_TEST_GALAXY_BRANCH=release_21.01
 skip_install =
     doc_test,lint,lint_docs,lint_docstrings,mypy,gxwf_test_test: True
-whitelist_externals =
+allowlist_externals =
     lint_docs: make
     gxwf_test_test: make
 
@@ -73,5 +73,5 @@
 [testenv:doc_test]
 commands = bash scripts/run_doc_test.sh
 skipsdist = True
-whitelist_externals = bash
+allowlist_externals = bash
 deps =