diff --git a/aiida/cmdline/commands/cmd_archive.py b/aiida/cmdline/commands/cmd_archive.py
index 82f9028371..3a0782c836 100644
--- a/aiida/cmdline/commands/cmd_archive.py
+++ b/aiida/cmdline/commands/cmd_archive.py
@@ -367,7 +367,7 @@ def import_archive(
     }
 
     for archive, web_based in all_archives:
-        _import_archive_and_migrate(archive, web_based, import_kwargs, migration)
+        _import_archive_and_migrate(ctx, archive, web_based, import_kwargs, migration)
 
 
 def _echo_exception(msg: str, exception, warn_only: bool = False):
@@ -421,7 +421,9 @@ def _gather_imports(archives, webpages) -> List[Tuple[str, bool]]:
     return final_archives
 
 
-def _import_archive_and_migrate(archive: str, web_based: bool, import_kwargs: dict, try_migration: bool):
+def _import_archive_and_migrate(
+    ctx: click.Context, archive: str, web_based: bool, import_kwargs: dict, try_migration: bool
+):
     """Perform the archive import.
 
     :param archive: the path or URL to the archive
@@ -435,8 +437,9 @@ def _import_archive_and_migrate(archive: str, web_based: bool, import_kwargs: di
     from aiida.tools.archive.imports import import_archive as _import_archive
 
     archive_format = get_format()
+    filepath = ctx.obj['config'].get_option('storage.sandbox') or None
 
-    with SandboxFolder() as temp_folder:
+    with SandboxFolder(filepath=filepath) as temp_folder:
 
         archive_path = archive
 
@@ -462,15 +465,15 @@ def _import_archive_and_migrate(archive: str, web_based: bool, import_kwargs: di
                 new_path = temp_folder.get_abs_path('migrated_archive.aiida')
                 archive_format.migrate(archive_path, new_path, archive_format.latest_version, compression=0)
                 archive_path = new_path
-            except Exception as exception:
-                _echo_exception(f'an exception occurred while migrating the archive {archive}', exception)
+            except Exception as sub_exception:
+                _echo_exception(f'an exception occurred while migrating the archive {archive}', sub_exception)
 
             echo.echo_report('proceeding with import of migrated archive')
             try:
                 _import_archive(archive_path, archive_format=archive_format, **import_kwargs)
-            except Exception as exception:
+            except Exception as sub_exception:
                 _echo_exception(
-                    f'an exception occurred while trying to import the migrated archive {archive}', exception
+                    f'an exception occurred while trying to import the migrated archive {archive}', sub_exception
                 )
         else:
             _echo_exception(f'an exception occurred while trying to import the archive {archive}', exception)
diff --git a/aiida/engine/daemon/execmanager.py b/aiida/engine/daemon/execmanager.py
index 456fb40d70..2a87c5e6e2 100644
--- a/aiida/engine/daemon/execmanager.py
+++ b/aiida/engine/daemon/execmanager.py
@@ -27,6 +27,7 @@
 from aiida.common.datastructures import CalcInfo
 from aiida.common.folders import SandboxFolder
 from aiida.common.links import LinkType
+from aiida.manage.configuration import get_config_option
 from aiida.orm import CalcJobNode, Code, FolderData, Node, RemoteData, load_node
 from aiida.orm.utils.log import get_dblogger_extra
 from aiida.repository.common import FileType
@@ -438,6 +439,7 @@ def retrieve_calculation(calculation: CalcJobNode, transport: Transport, retriev
     """
     logger_extra = get_dblogger_extra(calculation)
     workdir = calculation.get_remote_workdir()
+    filepath_sandbox = get_config_option('storage.sandbox') or None
 
     EXEC_LOGGER.debug(f'Retrieving calc {calculation.pk}', extra=logger_extra)
     EXEC_LOGGER.debug(f'[retrieval of calc {calculation.pk}] chdir {workdir}', extra=logger_extra)
@@ -462,7 +464,7 @@ def retrieve_calculation(calculation: CalcJobNode, transport: Transport, retriev
         retrieve_list = calculation.get_retrieve_list()
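Every call site in this patch follows the same pattern: read the new `storage.sandbox` option and fall back to the platform default temporary directory when it is unset (`get_config_option` returns a falsy value in that case, hence the `or None` guard). A minimal sketch of the pattern outside this diff; the printed path is a placeholder, and the option can equally be set with `verdi config set storage.sandbox <path>`:

```python
from aiida.common.folders import SandboxFolder
from aiida.manage.configuration import get_config_option

# Falls back to the default temporary directory when the option is unset.
filepath = get_config_option('storage.sandbox') or None

with SandboxFolder(filepath=filepath) as folder:
    print(folder.abspath)  # a scratch folder created under the configured directory
```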
         retrieve_temporary_list = calculation.get_retrieve_temporary_list()
 
-        with SandboxFolder() as folder:
+        with SandboxFolder(filepath_sandbox) as folder:
             retrieve_files_from_list(calculation, transport, folder.abspath, retrieve_list)
             # Here I retrieved everything; now I store them inside the calculation
             retrieved_files.base.repository.put_object_from_tree(folder.abspath)
diff --git a/aiida/engine/processes/calcjobs/calcjob.py b/aiida/engine/processes/calcjobs/calcjob.py
index 06426b3d07..9d71583374 100644
--- a/aiida/engine/processes/calcjobs/calcjob.py
+++ b/aiida/engine/processes/calcjobs/calcjob.py
@@ -429,11 +429,14 @@ def _perform_import(self):
         from aiida.common.datastructures import CalcJobState
         from aiida.common.folders import SandboxFolder
         from aiida.engine.daemon.execmanager import retrieve_calculation
+        from aiida.manage import get_config_option
         from aiida.transports.plugins.local import LocalTransport
 
+        filepath_sandbox = get_config_option('storage.sandbox') or None
+
         with LocalTransport() as transport:
-            with SandboxFolder() as folder:
-                with SandboxFolder() as retrieved_temporary_folder:
+            with SandboxFolder(filepath_sandbox) as folder:
+                with SandboxFolder(filepath_sandbox) as retrieved_temporary_folder:
                     self.presubmit(folder)
                     self.node.set_remote_workdir(
                         self.inputs.remote_folder.get_remote_path()  # type: ignore[union-attr]
diff --git a/aiida/engine/processes/calcjobs/tasks.py b/aiida/engine/processes/calcjobs/tasks.py
index daa294d9ff..fd5e22cea4 100644
--- a/aiida/engine/processes/calcjobs/tasks.py
+++ b/aiida/engine/processes/calcjobs/tasks.py
@@ -72,6 +72,7 @@ async def task_upload_job(process: 'CalcJob', transport_queue: TransportQueue, c
 
     initial_interval = get_config_option(RETRY_INTERVAL_OPTION)
     max_attempts = get_config_option(MAX_ATTEMPTS_OPTION)
+    filepath_sandbox = get_config_option('storage.sandbox') or None
 
     authinfo = node.get_authinfo()
 
@@ -79,7 +80,7 @@ async def do_upload():
         with transport_queue.request_transport(authinfo) as request:
             transport = await cancellable.with_interrupt(request)
 
-            with SandboxFolder() as folder:
+            with SandboxFolder(filepath_sandbox) as folder:
                 # Any exception thrown in `presubmit` call is not transient so we circumvent the exponential backoff
                 try:
                     calc_info = process.presubmit(folder)
diff --git a/aiida/manage/configuration/schema/config-v8.schema.json b/aiida/manage/configuration/schema/config-v8.schema.json
index fdc5714113..9ec70bea22 100644
--- a/aiida/manage/configuration/schema/config-v8.schema.json
+++ b/aiida/manage/configuration/schema/config-v8.schema.json
@@ -130,6 +130,10 @@
             "minimum": 1,
             "description": "Timeout in seconds for communications with RabbitMQ"
         },
+        "storage.sandbox": {
+            "type": "string",
+            "description": "Absolute path to the directory to store sandbox folders."
+        },
         "caching.default_enabled": {
             "type": "boolean",
             "default": false,
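The schema entry above only declares the option's type; actual validation happens when the configuration is loaded. As a hedged illustration using the generic `jsonschema` package (not how aiida-core wires this up internally), the pared-down excerpt below reproduces just the new property from the diff:

```python
import jsonschema

# Excerpt of the options schema containing only the new property.
schema = {
    'type': 'object',
    'properties': {
        'storage.sandbox': {
            'type': 'string',
            'description': 'Absolute path to the directory to store sandbox folders.',
        },
    },
}

jsonschema.validate({'storage.sandbox': '/scratch/aiida/sandbox'}, schema)  # passes
# jsonschema.validate({'storage.sandbox': 42}, schema)  # raises: 42 is not of type 'string'
```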
diff --git a/aiida/orm/nodes/repository.py b/aiida/orm/nodes/repository.py
index faaba4626d..7641c367e5 100644
--- a/aiida/orm/nodes/repository.py
+++ b/aiida/orm/nodes/repository.py
@@ -8,6 +8,7 @@
 from typing import TYPE_CHECKING, Any, BinaryIO, Dict, Iterable, Iterator, List, Optional, TextIO, Tuple, Union
 
 from aiida.common import exceptions
+from aiida.manage import get_config_option
 from aiida.repository import File, Repository
 from aiida.repository.backend import SandboxRepositoryBackend
 
@@ -80,7 +81,8 @@ def _repository(self) -> Repository:
             backend = self._node.backend.get_repository()
             self._repository_instance = Repository.from_serialized(backend=backend, serialized=self.metadata)
         else:
-            self._repository_instance = Repository(backend=SandboxRepositoryBackend())
+            filepath = get_config_option('storage.sandbox') or None
+            self._repository_instance = Repository(backend=SandboxRepositoryBackend(filepath))
 
         return self._repository_instance
 
diff --git a/aiida/repository/backend/sandbox.py b/aiida/repository/backend/sandbox.py
index 72c8be82aa..ced40bb914 100644
--- a/aiida/repository/backend/sandbox.py
+++ b/aiida/repository/backend/sandbox.py
@@ -1,9 +1,11 @@
 # -*- coding: utf-8 -*-
 """Implementation of the ``AbstractRepositoryBackend`` using a sandbox folder on disk as the backend."""
+from __future__ import annotations
+
 import contextlib
 import os
 import shutil
-from typing import BinaryIO, Iterable, Iterator, List, Optional, Tuple
+import typing as t
 import uuid
 
 from aiida.common.folders import SandboxFolder
@@ -16,8 +18,13 @@
 class SandboxRepositoryBackend(AbstractRepositoryBackend):
     """Implementation of the ``AbstractRepositoryBackend`` using a sandbox folder on disk as the backend."""
 
-    def __init__(self):
-        self._sandbox: Optional[SandboxFolder] = None
+    def __init__(self, filepath: str | None = None):
+        """Construct a new instance.
+
+        :param filepath: The path to the directory in which the sandbox folder should be created.
+        """
+        self._sandbox: SandboxFolder | None = None
+        self._filepath: str | None = filepath
 
     def __str__(self) -> str:
         """Return the string representation of this repository."""
@@ -30,7 +37,7 @@ def __del__(self):
         self.erase()
 
     @property
-    def uuid(self) -> Optional[str]:
+    def uuid(self) -> str | None:
         """Return the unique identifier of the repository.
 
         .. note:: A sandbox folder does not have the concept of a unique identifier and so always returns ``None``.
@@ -38,7 +45,7 @@ def uuid(self) -> Optional[str]:
         """
         return None
 
     @property
-    def key_format(self) -> Optional[str]:
+    def key_format(self) -> str | None:
         return 'uuid4'
 
     def initialise(self, **kwargs) -> None:
@@ -58,7 +65,7 @@ def is_initialised(self) -> bool:
     def sandbox(self):
         """Return the sandbox instance of this repository."""
         if self._sandbox is None:
-            self._sandbox = SandboxFolder()
+            self._sandbox = SandboxFolder(filepath=self._filepath)
 
         return self._sandbox
 
@@ -72,7 +79,7 @@ def erase(self):
             finally:
                 self._sandbox = None
 
-    def _put_object_from_filelike(self, handle: BinaryIO) -> str:
+    def _put_object_from_filelike(self, handle: t.BinaryIO) -> str:
         """Store the byte contents of a file in the repository.
 
         :param handle: filelike object with the byte content to be stored.
@@ -87,7 +94,7 @@ def _put_object_from_filelike(self, handle: BinaryIO) -> str:
 
         return key
 
-    def has_objects(self, keys: List[str]) -> List[bool]:
+    def has_objects(self, keys: list[str]) -> list[bool]:
         result = []
         dirlist = os.listdir(self.sandbox.abspath)
         for key in keys:
@@ -95,23 +102,23 @@ def has_objects(self, keys: List[str]) -> List[bool]:
         return result
 
     @contextlib.contextmanager
-    def open(self, key: str) -> Iterator[BinaryIO]:
+    def open(self, key: str) -> t.Iterator[t.BinaryIO]:
         super().open(key)
 
         with self.sandbox.open(key, mode='rb') as handle:
             yield handle
 
-    def iter_object_streams(self, keys: List[str]) -> Iterator[Tuple[str, BinaryIO]]:
+    def iter_object_streams(self, keys: list[str]) -> t.Iterator[tuple[str, t.BinaryIO]]:
         for key in keys:
             with self.open(key) as handle:  # pylint: disable=not-context-manager
                 yield key, handle
 
-    def delete_objects(self, keys: List[str]) -> None:
+    def delete_objects(self, keys: list[str]) -> None:
         super().delete_objects(keys)
         for key in keys:
             os.remove(os.path.join(self.sandbox.abspath, key))
 
-    def list_objects(self) -> Iterable[str]:
+    def list_objects(self) -> t.Iterable[str]:
         return self.sandbox.get_content_list()
 
     def maintain(self, dry_run: bool = False, live: bool = True, **kwargs) -> None:
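A short sketch of the new constructor argument in use; the base directory is an assumption for the example and must already exist:

```python
import io

from aiida.repository.backend.sandbox import SandboxRepositoryBackend

backend = SandboxRepositoryBackend(filepath='/tmp')  # sandbox folder is created inside /tmp
key = backend.put_object_from_filelike(io.BytesIO(b'content'))  # public wrapper of _put_object_from_filelike

with backend.open(key) as handle:
    assert handle.read() == b'content'

assert backend.has_objects([key]) == [True]
backend.erase()  # remove the sandbox folder and all of its contents
```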
diff --git a/aiida/storage/sqlite_temp/backend.py b/aiida/storage/sqlite_temp/backend.py
index 244c149553..d6f71249be 100644
--- a/aiida/storage/sqlite_temp/backend.py
+++ b/aiida/storage/sqlite_temp/backend.py
@@ -19,7 +19,7 @@
 from sqlalchemy.orm import Session
 
 from aiida.common.exceptions import ClosedStorage
-from aiida.manage import Profile
+from aiida.manage import Profile, get_config_option
 from aiida.manage.configuration.settings import AIIDA_CONFIG_FOLDER
 from aiida.orm.entities import EntityTypes
 from aiida.orm.implementation import BackendEntity, StorageBackend
@@ -133,7 +133,7 @@ def get_repository(self) -> SandboxRepositoryBackend:
             raise ClosedStorage(str(self))
         if self._repo is None:
             # to-do this does not seem to be removing the folder on garbage collection?
-            self._repo = SandboxRepositoryBackend()
+            self._repo = SandboxRepositoryBackend(filepath=get_config_option('storage.sandbox') or None)
         return self._repo
 
     @property
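The `to-do` comment carried along in this hunk points at a real caveat: cleanup that relies on `__del__` firing during garbage collection is not guaranteed by Python. A sketch of the deterministic alternative, under the assumption that the sandbox folder is created lazily on first access:

```python
import os

from aiida.repository.backend.sandbox import SandboxRepositoryBackend

backend = SandboxRepositoryBackend(filepath='/tmp')  # illustrative base directory
path = backend.sandbox.abspath  # accessing ``sandbox`` creates the folder on disk

backend.erase()  # explicit cleanup; ``__del__`` also calls ``erase()``, but GC timing is unreliable
assert not os.path.exists(path)
```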
diff --git a/tests/cmdline/commands/test_archive_import.py b/tests/cmdline/commands/test_archive_import.py
index 52f1fc813d..4918bba6e6 100644
--- a/tests/cmdline/commands/test_archive_import.py
+++ b/tests/cmdline/commands/test_archive_import.py
@@ -7,9 +7,9 @@
 # For further information on the license, see the LICENSE.txt file        #
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
-"""Tests for `verdi import`."""
+# pylint: disable=redefined-outer-name
+"""Tests for `verdi archive import`."""
 from click.exceptions import BadParameter
-from click.testing import CliRunner
 import pytest
 
 from aiida.cmdline.commands import cmd_archive
@@ -21,240 +21,199 @@
 ARCHIVE_PATH = 'export/migrate'
 
 
-class TestVerdiImport:
-    """Tests for `verdi import`."""
-
-    @pytest.fixture(autouse=True)
-    def init_cls(self, aiida_profile_clean):  # pylint: disable=unused-argument
-        """Setup for each method"""
-        # pylint: disable=attribute-defined-outside-init
-        self.cli_runner = CliRunner()
-        # Helper variables
-        self.url_path = 'https://raw.githubusercontent.com/aiidateam/aiida-core/' \
-            '0599dabf0887bee172a04f308307e99e3c3f3ff2/aiida/backends/tests/fixtures/export/migrate/'
-        self.newest_archive = f'export_{ArchiveFormatSqlZip().latest_version}_simple.aiida'
-
-    def test_import_no_archives(self):
-        """Test that passing no valid archives will lead to command failure."""
-        options = []
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-
-        assert result.exception is not None, result.output
-        assert 'Critical' in result.output
-        assert result.exit_code != 0, result.output
-
-    def test_import_non_existing_archives(self):
-        """Test that passing a non-existing archive will lead to command failure."""
-        options = ['non-existing-archive.aiida']
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-
-        assert result.exception is not None, result.output
-        assert result.exit_code != 0, result.output
-
-    def test_import_archive(self):
-        """
-        Test import for archive files from disk
-        """
-        archives = [
-            get_archive_file('arithmetic.add.aiida', filepath='calcjob'),
-            get_archive_file(self.newest_archive, filepath=ARCHIVE_PATH)
-        ]
-
-        options = [] + archives
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-
-        assert result.exception is None, result.output
-        assert result.exit_code == 0, result.output
-
-    def test_import_to_group(self):
-        """
-        Test import to existing Group and that Nodes are added correctly for multiple imports of the same,
-        as well as separate, archives.
-        """
-        archives = [
-            get_archive_file('arithmetic.add.aiida', filepath='calcjob'),
-            get_archive_file(self.newest_archive, filepath=ARCHIVE_PATH)
-        ]
-
-        group_label = 'import_madness'
-        group = Group(group_label).store()
-
-        assert group.is_empty, 'The Group should be empty.'
-
-        # Invoke `verdi import`, making sure there are no exceptions
-        options = ['-G', group.label] + [archives[0]]
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-        assert result.exception is None, result.output
-        assert result.exit_code == 0, result.output
-
-        assert not group.is_empty, 'The Group should no longer be empty.'
-
-        nodes_in_group = group.count()
-
-        # Invoke `verdi import` again, making sure Group count doesn't change
-        options = ['-G', group.label] + [archives[0]]
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-        assert result.exception is None, result.output
-        assert result.exit_code == 0, result.output
-
-        assert group.count() == \
-            nodes_in_group, \
-            f'The Group count should not have changed from {nodes_in_group}. Instead it is now {group.count()}'
-
-        # Invoke `verdi import` again with new archive, making sure Group count is upped
-        options = ['-G', group.label] + [archives[1]]
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-        assert result.exception is None, result.output
-        assert result.exit_code == 0, result.output
-
-        assert group.count() > \
-            nodes_in_group, \
-            'There should now be more than {} nodes in group {} , instead there are {}'.format(
-                nodes_in_group, group_label, group.count()
-            )
-
-    def test_import_make_new_group(self):
-        """Make sure imported entities are saved in new Group"""
-        # Initialization
-        group_label = 'new_group_for_verdi_import'
-        archives = [get_archive_file(self.newest_archive, filepath=ARCHIVE_PATH)]
-
-        # Check Group does not already exist
-        group_search = Group.collection.find(filters={'label': group_label})
-        assert len(group_search) == 0, f"A Group with label '{group_label}' already exists, this shouldn't be."
-
-        # Invoke `verdi import`, making sure there are no exceptions
-        options = ['-G', group_label] + archives
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-        assert result.exception is None, result.output
-        assert result.exit_code == 0, result.output
-
-        # Make sure new Group was created
-        (group, new_group) = Group.collection.get_or_create(group_label)
-        assert not new_group, 'The Group should not have been created now, but instead when it was imported.'
-        assert not group.is_empty, 'The Group should not be empty.'
-
-    def test_no_import_group(self):
-        """Test '--import-group/--no-import-group' options."""
-        archives = [get_archive_file(self.newest_archive, filepath=ARCHIVE_PATH)]
-
-        assert Group.collection.count() == 0, 'There should be no Groups.'
-
-        # Invoke `verdi import`
-        options = archives
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-        assert result.exception is None, result.output
-        assert result.exit_code == 0, result.output
-
-        assert Group.collection.count() == 5
-
-        # Invoke `verdi import` again, creating another import group
-        options = ['--import-group'] + archives
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-        assert result.exception is None, result.output
-        assert result.exit_code == 0, result.output
-
-        assert Group.collection.count() == 6
-
-        # Invoke `verdi import` again, but with no import group created
-        options = ['--no-import-group'] + archives
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-        assert result.exception is None, result.output
-        assert result.exit_code == 0, result.output
-
-        assert Group.collection.count() == 6
-
-    @pytest.mark.skip('Due to summary being logged, this can not be checked against `results.output`.')  # pylint: disable=not-callable
-    def test_comment_mode(self):
-        """Test toggling comment mode flag"""
-        archives = [get_archive_file(self.newest_archive, filepath=ARCHIVE_PATH)]
-        for mode in ['leave', 'newest', 'overwrite']:
-            options = ['--comment-mode', mode] + archives
-            result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-            assert result.exception is None, result.output
-            assert result.exit_code == 0, result.output
-
-    def test_import_old_url_archives(self):
-        """ Test import of old URL archives
-        Expected behavior: Automatically migrate to newest version and import correctly.
-        """
-        archive = 'export_v0.4_no_UPF.aiida'
-        version = '0.4'
-
-        options = [self.url_path + archive]
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-
-        assert result.exception is None, result.output
-        assert result.exit_code == 0, result.output
-        assert version in result.output, result.exception
-        assert f'Success: imported archive {options[0]}' in result.output, result.exception
-
-    def test_import_url_and_local_archives(self):
-        """Test import of both a remote and local archive"""
-        url_archive = 'export_v0.4_no_UPF.aiida'
-        local_archive = self.newest_archive
-
-        options = [
-            get_archive_file(local_archive, filepath=ARCHIVE_PATH), self.url_path + url_archive,
-            get_archive_file(local_archive, filepath=ARCHIVE_PATH)
-        ]
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-
-        assert result.exception is None, result.output
-        assert result.exit_code == 0, result.output
-
-    def test_import_url_timeout(self):  # pylint: disable=no-self-use
-        """Test a timeout to valid URL is correctly errored"""
-        from aiida.cmdline.params.types import PathOrUrl
+@pytest.fixture
+def newest_archive():
+    """Return the name of the export archive at the latest version."""
+    return f'export_{ArchiveFormatSqlZip().latest_version}_simple.aiida'
 
-        timeout_url = 'http://www.google.com:81'
-
-        test_timeout_path = PathOrUrl(exists=True, readable=True, timeout_seconds=0)
-        with pytest.raises(BadParameter, match=f'ath "{timeout_url}" could not be reached within 0 s.'):
-            test_timeout_path(timeout_url)
 
-    def test_raise_malformed_url(self):
-        """Test the correct error is raised when supplying a malformed URL"""
-        malformed_url = 'htp://www.aiida.net'
+def test_import_no_archives(run_cli_command):
+    """Test that passing no valid archives will lead to command failure."""
+    options = []
+    result = run_cli_command(cmd_archive.import_archive, options, raises=True)
+    assert 'Critical' in result.output
 
-        result = self.cli_runner.invoke(cmd_archive.import_archive, [malformed_url])
-        assert result.exception is not None, result.output
-        assert result.exit_code != 0, result.output
+
+def test_import_non_existing_archives(run_cli_command):
+    """Test that passing a non-existing archive will lead to command failure."""
+    options = ['non-existing-archive.aiida']
+    run_cli_command(cmd_archive.import_archive, options, raises=True)
 
-        error_message = 'could not be reached within'
-        assert error_message in result.output, result.exception
 
-    def test_migration(self):
-        """Test options `--migration`/`--no-migration`
+def test_import_archive(run_cli_command, newest_archive):
+    """
+    Test import for archive files from disk
+    """
+    archives = [
+        get_archive_file('arithmetic.add.aiida', filepath='calcjob'),
+        get_archive_file(newest_archive, filepath=ARCHIVE_PATH)
+    ]
+
+    options = [] + archives
+    run_cli_command(cmd_archive.import_archive, options)
+
+
+def test_import_to_group(run_cli_command, newest_archive):
+    """
+    Test import to existing Group and that Nodes are added correctly for multiple imports of the same,
+    as well as separate, archives.
+    """
+    archives = [
+        get_archive_file('arithmetic.add.aiida', filepath='calcjob'),
+        get_archive_file(newest_archive, filepath=ARCHIVE_PATH)
+    ]
+
+    group_label = 'import_madness'
+    group = Group(group_label).store()
+
+    assert group.is_empty, 'The Group should be empty.'
+
+    # Invoke `verdi import`, making sure there are no exceptions
+    options = ['-G', group.label] + [archives[0]]
+    run_cli_command(cmd_archive.import_archive, options)
+    assert not group.is_empty, 'The Group should no longer be empty.'
+
+    nodes_in_group = group.count()
+
+    # Invoke `verdi import` again, making sure Group count doesn't change
+    options = ['-G', group.label] + [archives[0]]
+    run_cli_command(cmd_archive.import_archive, options)
+    assert group.count() == \
+        nodes_in_group, \
+        f'The Group count should not have changed from {nodes_in_group}. Instead it is now {group.count()}'
+
+    # Invoke `verdi import` again with new archive, making sure Group count is upped
+    options = ['-G', group.label] + [archives[1]]
+    run_cli_command(cmd_archive.import_archive, options)
+    assert group.count() > \
+        nodes_in_group, \
+        'There should now be more than {} nodes in group {} , instead there are {}'.format(
+            nodes_in_group, group_label, group.count()
+        )
+
+
+def test_import_make_new_group(run_cli_command, newest_archive):
+    """Make sure imported entities are saved in new Group"""
+    # Initialization
+    group_label = 'new_group_for_verdi_import'
+    archives = [get_archive_file(newest_archive, filepath=ARCHIVE_PATH)]
+
+    # Check Group does not already exist
+    group_search = Group.collection.find(filters={'label': group_label})
+    assert len(group_search) == 0, f"A Group with label '{group_label}' already exists, this shouldn't be."
+
+    # Invoke `verdi import`, making sure there are no exceptions
+    options = ['-G', group_label] + archives
+    run_cli_command(cmd_archive.import_archive, options)
+
+    # Make sure new Group was created
+    (group, new_group) = Group.collection.get_or_create(group_label)
+    assert not new_group, 'The Group should not have been created now, but instead when it was imported.'
+    assert not group.is_empty, 'The Group should not be empty.'
+
+
+@pytest.mark.usefixtures('aiida_profile_clean')
+def test_no_import_group(run_cli_command, newest_archive):
+    """Test '--import-group/--no-import-group' options."""
+    archives = [get_archive_file(newest_archive, filepath=ARCHIVE_PATH)]
+
+    assert Group.collection.count() == 0, 'There should be no Groups.'
+
+    # Invoke `verdi import`
+    options = archives
+    run_cli_command(cmd_archive.import_archive, options)
+    assert Group.collection.count() == 5
-
-        `migration` = True (default), Expected: No query, migrate
-        `migration` = False, Expected: No query, no migrate
-        """
-        archive = get_archive_file('export_0.4_simple.aiida', filepath=ARCHIVE_PATH)
-        success_message = f'Success: imported archive {archive}'
+
+    # Invoke `verdi import` again, creating another import group
+    options = ['--import-group'] + archives
+    run_cli_command(cmd_archive.import_archive, options)
+    assert Group.collection.count() == 6
-
-        # Import "normally", but explicitly specifying `--migration`, make sure confirm message is present
-        # `migration` = True (default), `non_interactive` = False (default), Expected: Query user, migrate
-        options = ['--migration', archive]
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
+
+    # Invoke `verdi import` again, but with no import group created
+    options = ['--no-import-group'] + archives
+    run_cli_command(cmd_archive.import_archive, options)
+    assert Group.collection.count() == 6
-
-        assert result.exception is None, result.output
-        assert result.exit_code == 0, result.output
-        assert 'trying migration' in result.output, result.exception
-        assert success_message in result.output, result.exception
+
+
+@pytest.mark.skip('Due to summary being logged, this can not be checked against `results.output`.')  # pylint: disable=not-callable
+def test_comment_mode(run_cli_command, newest_archive):
+    """Test toggling comment mode flag"""
+    archives = [get_archive_file(newest_archive, filepath=ARCHIVE_PATH)]
+    for mode in ['leave', 'newest', 'overwrite']:
+        options = ['--comment-mode', mode] + archives
+        run_cli_command(cmd_archive.import_archive, options)
-
-        # Import using `--no-migration`, make sure confirm message has gone
-        # `migration` = False, `non_interactive` = False (default), Expected: No query, no migrate
-        options = ['--no-migration', archive]
-        result = self.cli_runner.invoke(cmd_archive.import_archive, options)
-        assert result.exception is not None, result.output
-        assert result.exit_code != 0, result.output
+
+
+def test_import_old_url_archives(run_cli_command):
+    """Test import of old URL archives
-
-        assert 'trying migration' not in result.output, result.exception
-        assert success_message not in result.output, result.exception
+
+    Expected behavior: Automatically migrate to newest version and import correctly.
+ """ + archive = 'export_v0.4_no_UPF.aiida' + version = '0.4' + url_path = 'https://raw.githubusercontent.com/aiidateam/aiida-core/' \ + '0599dabf0887bee172a04f308307e99e3c3f3ff2/aiida/backends/tests/fixtures/export/migrate/' + options = [url_path + archive] + result = run_cli_command(cmd_archive.import_archive, options) + assert version in result.output, result.exception + assert f'Success: imported archive {options[0]}' in result.output, result.exception + + +def test_import_url_and_local_archives(run_cli_command, newest_archive): + """Test import of both a remote and local archive""" + url_archive = 'export_v0.4_no_UPF.aiida' + local_archive = newest_archive + url_path = 'https://raw.githubusercontent.com/aiidateam/aiida-core/' \ + '0599dabf0887bee172a04f308307e99e3c3f3ff2/aiida/backends/tests/fixtures/export/migrate/' + + options = [ + get_archive_file(local_archive, filepath=ARCHIVE_PATH), url_path + url_archive, + get_archive_file(local_archive, filepath=ARCHIVE_PATH) + ] + run_cli_command(cmd_archive.import_archive, options) + + +def test_import_url_timeout(): + """Test a timeout to valid URL is correctly errored""" + from aiida.cmdline.params.types import PathOrUrl + + timeout_url = 'http://www.google.com:81' + + test_timeout_path = PathOrUrl(exists=True, readable=True, timeout_seconds=0) + with pytest.raises(BadParameter, match=f'ath "{timeout_url}" could not be reached within 0 s.'): + test_timeout_path(timeout_url) + + +def test_raise_malformed_url(run_cli_command): + """Test the correct error is raised when supplying a malformed URL""" + malformed_url = 'htp://www.aiida.net' + + result = run_cli_command(cmd_archive.import_archive, [malformed_url], raises=True) + assert 'could not be reached within' in result.output, result.exception + + +def test_migration(run_cli_command): + """Test options `--migration`/`--no-migration` + + `migration` = True (default), Expected: No query, migrate + `migration` = False, Expected: No query, no migrate + """ + archive = get_archive_file('export_0.4_simple.aiida', filepath=ARCHIVE_PATH) + success_message = f'Success: imported archive {archive}' + + # Import "normally", but explicitly specifying `--migration`, make sure confirm message is present + # `migration` = True (default), `non_interactive` = False (default), Expected: Query user, migrate + options = ['--migration', archive] + result = run_cli_command(cmd_archive.import_archive, options) + assert 'trying migration' in result.output, result.exception + assert success_message in result.output, result.exception + + # Import using `--no-migration`, make sure confirm message has gone + # `migration` = False, `non_interactive` = False (default), Expected: No query, no migrate + options = ['--no-migration', archive] + result = run_cli_command(cmd_archive.import_archive, options, raises=True) + assert 'trying migration' not in result.output, result.exception + assert success_message not in result.output, result.exception @pytest.mark.usefixtures('aiida_profile_clean') @@ -266,6 +225,5 @@ def test_import_old_local_archives(version, run_cli_command): archive, version = (f'export_{version}_simple.aiida', f'{version}') options = [get_archive_file(archive, filepath=ARCHIVE_PATH)] result = run_cli_command(cmd_archive.import_archive, options) - assert version in result.output, result.exception assert f'Success: imported archive {options[0]}' in result.output, result.exception diff --git a/tests/manage/configuration/test_options.py b/tests/manage/configuration/test_options.py index 418f9951dd..7cd19ade95 
--- a/tests/manage/configuration/test_options.py
+++ b/tests/manage/configuration/test_options.py
@@ -13,7 +13,7 @@
 
 from aiida import get_profile
 from aiida.common.exceptions import ConfigurationError
-from aiida.manage.configuration import ConfigValidationError, get_config, get_config_option
+from aiida.manage.configuration import ConfigValidationError, config_schema, get_config, get_config_option
 from aiida.manage.configuration.options import Option, get_option, get_option_names, parse_option
 
 
@@ -24,7 +24,7 @@ class TestConfigurationOptions:
     def test_get_option_names(self):
         """Test `get_option_names` function."""
         assert isinstance(get_option_names(), list)
-        assert len(get_option_names()) == 28
+        assert len(get_option_names()) == len(config_schema()['definitions']['options']['properties'])
 
     def test_get_option(self):
         """Test `get_option` function."""
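Nothing in the patch sets the option itself; that is left to the user, either on the command line with `verdi config set storage.sandbox <path>` or programmatically. A sketch of the latter; the path is a placeholder:

```python
from aiida.manage.configuration import get_config

config = get_config()
config.set_option('storage.sandbox', '/scratch/aiida/sandbox')  # placeholder path
config.store()  # persist the change to the configuration file
```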