From 4203f162df803946b2396ca820e6b6139a3ecc61 Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Wed, 25 Oct 2023 16:03:45 +0200 Subject: [PATCH] Config: Switch from `jsonschema` to `pydantic` (#6117) The configuration of an AiiDA instance is written in JSON format to the `config.json` file. The schema is defined using `jsonschema` to take care of validation; however, some validation, for example of the config options, was still happening manually. Other parts of the code want to start using `pydantic` for model definition and configuration purposes, as it has become the de facto standard for these use cases in the Python ecosystem. Before introducing another dependency, the existing `jsonschema` approach is first replaced by `pydantic` in the current code base. --- aiida/cmdline/commands/cmd_config.py | 7 +- aiida/common/log.py | 3 + aiida/manage/__init__.py | 2 - aiida/manage/configuration/__init__.py | 2 - aiida/manage/configuration/config.py | 208 +++++++++++++++--- aiida/manage/configuration/options.py | 96 +++----- aiida/manage/manager.py | 1 + docs/source/nitpick-exceptions | 13 +- environment.yml | 2 +- pyproject.toml | 3 +- requirements/requirements-py-3.10.txt | 8 +- requirements/requirements-py-3.11.txt | 8 +- requirements/requirements-py-3.9.txt | 8 +- .../configuration/test_configuration.py | 5 +- tests/manage/configuration/test_options.py | 9 +- 15 files changed, 246 insertions(+), 129 deletions(-) diff --git a/aiida/cmdline/commands/cmd_config.py b/aiida/cmdline/commands/cmd_config.py index c788cff1d4..f0a045d145 100644 --- a/aiida/cmdline/commands/cmd_config.py +++ b/aiida/cmdline/commands/cmd_config.py @@ -119,7 +119,8 @@ def verdi_config_set(ctx, option, value, globally, append, remove): List values are split by whitespace, e.g. "a b" becomes ["a", "b"]. 
""" - from aiida.manage.configuration import Config, ConfigValidationError, Profile + from aiida.common.exceptions import ConfigurationError + from aiida.manage.configuration import Config, Profile if append and remove: echo.echo_critical('Cannot flag both append and remove') @@ -137,7 +138,7 @@ def verdi_config_set(ctx, option, value, globally, append, remove): if append or remove: try: current = config.get_option(option.name, scope=scope) - except ConfigValidationError as error: + except ConfigurationError as error: echo.echo_critical(str(error)) if not isinstance(current, list): echo.echo_critical(f'cannot append/remove to value: {current}') @@ -149,7 +150,7 @@ def verdi_config_set(ctx, option, value, globally, append, remove): # Set the specified option try: value = config.set_option(option.name, value, scope=scope) - except ConfigValidationError as error: + except ConfigurationError as error: echo.echo_critical(str(error)) config.store() diff --git a/aiida/common/log.py b/aiida/common/log.py index 67a32cd7f7..7707053c63 100644 --- a/aiida/common/log.py +++ b/aiida/common/log.py @@ -12,6 +12,7 @@ import collections import contextlib +import enum import logging import types from typing import cast @@ -52,6 +53,8 @@ def report(self, msg: str, *args, **kwargs) -> None: logging.getLevelName(logging.CRITICAL): logging.CRITICAL, } +LogLevels = enum.Enum('LogLevels', {key: key for key in LOG_LEVELS}) # type: ignore[misc] + AIIDA_LOGGER = cast(AiidaLoggerType, logging.getLogger('aiida')) CLI_ACTIVE: bool | None = None diff --git a/aiida/manage/__init__.py b/aiida/manage/__init__.py index 857c19cbc6..a745def690 100644 --- a/aiida/manage/__init__.py +++ b/aiida/manage/__init__.py @@ -34,7 +34,6 @@ 'BROKER_DEFAULTS', 'CURRENT_CONFIG_VERSION', 'Config', - 'ConfigValidationError', 'MIGRATIONS', 'ManagementApiConnectionError', 'OLDEST_COMPATIBLE_CONFIG_VERSION', @@ -43,7 +42,6 @@ 'RabbitmqManagementClient', 'check_and_migrate_config', 'config_needs_migrating', - 'config_schema', 'disable_caching', 'downgrade_config', 'enable_caching', diff --git a/aiida/manage/configuration/__init__.py b/aiida/manage/configuration/__init__.py index ad43c056f8..fe92492bc1 100644 --- a/aiida/manage/configuration/__init__.py +++ b/aiida/manage/configuration/__init__.py @@ -22,14 +22,12 @@ __all__ = ( 'CURRENT_CONFIG_VERSION', 'Config', - 'ConfigValidationError', 'MIGRATIONS', 'OLDEST_COMPATIBLE_CONFIG_VERSION', 'Option', 'Profile', 'check_and_migrate_config', 'config_needs_migrating', - 'config_schema', 'downgrade_config', 'get_current_version', 'get_option', diff --git a/aiida/manage/configuration/config.py b/aiida/manage/configuration/config.py index c50439caa9..dd6811a510 100644 --- a/aiida/manage/configuration/config.py +++ b/aiida/manage/configuration/config.py @@ -7,49 +7,194 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""Module that defines the configuration file of an AiiDA instance and functions to create and load it.""" +"""Module that defines the configuration file of an AiiDA instance and functions to create and load it. + +Despite the import of the annotations backport below which enables postponed type annotation evaluation as implemented +with PEP 563 (https://peps.python.org/pep-0563/), this is not compatible with ``pydantic`` for Python 3.9 and older ( +See https://github.com/pydantic/pydantic/issues/2678 for details). 
+""" +from __future__ import annotations + import codecs -from functools import cache import json import os -from typing import Any, Dict, Optional, Sequence, Tuple +from typing import Any, Dict, List, Optional, Tuple +import uuid + +from pydantic import ( # pylint: disable=no-name-in-module + BaseModel, + ConfigDict, + Field, + ValidationError, + field_serializer, + field_validator, +) from aiida.common.exceptions import ConfigurationError +from aiida.common.log import LogLevels -from . import schema as schema_module from .options import Option, get_option, get_option_names, parse_option from .profile import Profile -__all__ = ('Config', 'config_schema', 'ConfigValidationError') +__all__ = ('Config',) + + +class ConfigVersionSchema(BaseModel, defer_build=True): + """Schema for the version configuration of an AiiDA instance.""" + + CURRENT: int + OLDEST_COMPATIBLE: int + + +class ProfileOptionsSchema(BaseModel, defer_build=True): + """Schema for the options of an AiiDA profile.""" + + model_config = ConfigDict(use_enum_values=True) + + runner__poll__interval: int = Field(60, description='Polling interval in seconds to be used by process runners.') + daemon__default_workers: int = Field( + 1, description='Default number of workers to be launched by `verdi daemon start`.' + ) + daemon__timeout: int = Field( + 2, + description= + 'Used to set default timeout in the :class:`aiida.engine.daemon.client.DaemonClient` for calls to the daemon.' + ) + daemon__worker_process_slots: int = Field( + 200, description='Maximum number of concurrent process tasks that each daemon worker can handle.' + ) + daemon__recursion_limit: int = Field(3000, description='Maximum recursion depth for the daemon workers.') + db__batch_size: int = Field( + 100000, + description='Batch size for bulk CREATE operations in the database. Avoids hitting MaxAllocSize of PostgreSQL ' + '(1GB) when creating large numbers of database records in one go.' + ) + verdi__shell__auto_import: str = Field( + ':', + description='Additional modules/functions/classes to be automatically loaded in `verdi shell`, split by `:`.' + ) + logging__aiida_loglevel: LogLevels = Field( + 'REPORT', description='Minimum level to log to daemon log and the `DbLog` table for the `aiida` logger.' + ) + logging__verdi_loglevel: LogLevels = Field( + 'REPORT', description='Minimum level to log to console when running a `verdi` command.' + ) + logging__db_loglevel: LogLevels = Field('REPORT', description='Minimum level to log to the DbLog table.') + logging__plumpy_loglevel: LogLevels = Field( + 'WARNING', description='Minimum level to log to daemon log and the `DbLog` table for the `plumpy` logger.' 
+ ) + logging__kiwipy_loglevel: LogLevels = Field( + 'WARNING', description='Minimum level to log to daemon log and the `DbLog` table for the `kiwipy` logger' + ) + logging__paramiko_loglevel: LogLevels = Field( + 'WARNING', description='Minimum level to log to daemon log and the `DbLog` table for the `paramiko` logger' + ) + logging__alembic_loglevel: LogLevels = Field( + 'WARNING', description='Minimum level to log to daemon log and the `DbLog` table for the `alembic` logger' + ) + logging__sqlalchemy_loglevel: LogLevels = Field( + 'WARNING', description='Minimum level to log to daemon log and the `DbLog` table for the `sqlalchemy` logger' + ) + logging__circus_loglevel: LogLevels = Field( + 'INFO', description='Minimum level to log to daemon log and the `DbLog` table for the `circus` logger' + ) + logging__aiopika_loglevel: LogLevels = Field( + 'WARNING', description='Minimum level to log to daemon log and the `DbLog` table for the `aiopika` logger' + ) + warnings__showdeprecations: bool = Field(True, description='Whether to print AiiDA deprecation warnings.') + warnings__rabbitmq_version: bool = Field( + True, description='Whether to print a warning when an incompatible version of RabbitMQ is configured.' + ) + transport__task_retry_initial_interval: int = Field( + 20, description='Initial time interval for the exponential backoff mechanism.' + ) + transport__task_maximum_attempts: int = Field( + 5, description='Maximum number of transport task attempts before a Process is Paused.' + ) + rmq__task_timeout: int = Field(10, description='Timeout in seconds for communications with RabbitMQ.') + storage__sandbox: Optional[str] = Field( + None, description='Absolute path to the directory to store sandbox folders.' + ) + caching__default_enabled: bool = Field(False, description='Enable calculation caching by default.') + caching__enabled_for: List[str] = Field([], description='Calculation entry points to enable caching on.') + caching__disabled_for: List[str] = Field([], description='Calculation entry points to disable caching on.') + + @field_validator('caching__enabled_for', 'caching__disabled_for') + @classmethod + def validate_caching_identifier_pattern(cls, value: List[str]) -> List[str]: + """Validate the caching identifier patterns.""" + from aiida.manage.caching import _validate_identifier_pattern + for identifier in value: + _validate_identifier_pattern(identifier=identifier) + + return value -SCHEMA_FILE = 'config-v9.schema.json' +class GlobalOptionsSchema(ProfileOptionsSchema): + """Schema for the global options of an AiiDA instance.""" + autofill__user__email: Optional[str] = Field( + None, description='Default user email to use when creating new profiles.' + ) + autofill__user__first_name: Optional[str] = Field( + None, description='Default user first name to use when creating new profiles.' + ) + autofill__user__last_name: Optional[str] = Field( + None, description='Default user last name to use when creating new profiles.' + ) + autofill__user__institution: Optional[str] = Field( + None, description='Default user institution to use when creating new profiles.' + ) + rest_api__profile_switching: bool = Field( + False, description='Toggle whether the profile can be specified in requests submitted to the REST API.' + ) + warnings__development_version: bool = Field( + True, + description='Whether to print a warning when a profile is loaded while a development version is installed.' 
+ ) -@cache -def config_schema() -> Dict[str, Any]: - """Return the configuration schema.""" - from importlib.resources import files - return json.loads(files(schema_module).joinpath(SCHEMA_FILE).read_text(encoding='utf8')) +class ProfileStorageConfig(BaseModel, defer_build=True): + """Schema for the storage backend configuration of an AiiDA profile.""" + backend: str + config: Dict[str, Any] -class ConfigValidationError(ConfigurationError): - """Configuration error raised when the file contents fails validation.""" - def __init__( - self, message: str, keypath: Sequence[Any] = (), schema: Optional[dict] = None, filepath: Optional[str] = None - ): - super().__init__(message) - self._message = message - self._keypath = keypath - self._filepath = filepath - self._schema = schema +class ProcessControlConfig(BaseModel, defer_build=True): + """Schema for the process control configuration of an AiiDA profile.""" + + broker_protocol: str = Field('amqp', description='Protocol for connecting to the message broker.') + broker_username: str = Field('guest', description='Username for message broker authentication.') + broker_password: str = Field('guest', description='Password for message broker.') + broker_host: str = Field('127.0.0.1', description='Hostname of the message broker.') + broker_port: int = Field(5432, description='Port of the message broker.') + broker_virtual_host: str = Field('', description='Virtual host to use for the message broker.') + broker_parameters: dict[ + str, Any] = Field(default_factory=dict, description='Arguments to be encoded as query parameters.') - def __str__(self) -> str: - prefix = f'{self._filepath}:' if self._filepath else '' - path = '/' + '/'.join(str(k) for k in self._keypath) + ': ' if self._keypath else '' - schema = f'\n schema:\n {self._schema}' if self._schema else '' - return f'Validation Error: {prefix}{path}{self._message}{schema}' + +class ProfileSchema(BaseModel, defer_build=True): + """Schema for the configuration of an AiiDA profile.""" + + uuid: str = Field(description='A UUID that uniquely identifies the profile.', default_factory=uuid.uuid4) + storage: ProfileStorageConfig + process_control: ProcessControlConfig + default_user_email: Optional[str] = None + test_profile: bool = False + options: Optional[ProfileOptionsSchema] = None + + @field_serializer('uuid') + def serialize_dt(self, value: uuid.UUID, _info): + return str(value) + + +class ConfigSchema(BaseModel, defer_build=True): + """Schema for the configuration of an AiiDA instance.""" + + CONFIG_VERSION: Optional[ConfigVersionSchema] = None + profiles: Optional[dict[str, ProfileSchema]] = None + options: Optional[GlobalOptionsSchema] = None + default_profile: Optional[str] = None class Config: # pylint: disable=too-many-public-methods @@ -125,13 +270,10 @@ def _backup(cls, filepath): @staticmethod def validate(config: dict, filepath: Optional[str] = None): """Validate a configuration dictionary.""" - import jsonschema try: - jsonschema.validate(instance=config, schema=config_schema()) - except jsonschema.ValidationError as error: - raise ConfigValidationError( - message=error.message, keypath=error.path, schema=error.schema, filepath=filepath - ) + ConfigSchema(**config) + except ValidationError as exception: + raise ConfigurationError(f'invalid config schema: {filepath}: {str(exception)}') def __init__(self, filepath: str, config: dict, validate: bool = True): """Instantiate a configuration object from a configuration dictionary and its filepath. 
@@ -470,7 +612,7 @@ def get_options(self, scope: Optional[str] = None) -> Dict[str, Tuple[Option, st elif name in self.options: value = self.options.get(name) source = 'global' - elif 'default' in option.schema: + elif option.default is not None: value = option.default source = 'default' else: diff --git a/aiida/manage/configuration/options.py b/aiida/manage/configuration/options.py index 0240609e7b..a0a467159e 100644 --- a/aiida/manage/configuration/options.py +++ b/aiida/manage/configuration/options.py @@ -18,9 +18,10 @@ class Option: """Represent a configuration option schema.""" - def __init__(self, name: str, schema: Dict[str, Any]): + def __init__(self, name: str, schema: Dict[str, Any], field): self._name = name self._schema = schema + self._field = field def __str__(self) -> str: return f'Option(name={self._name})' @@ -30,97 +31,66 @@ def name(self) -> str: return self._name @property - def schema(self) -> Dict[str, Any]: - return self._schema + def valid_type(self) -> Any: + return self._field.annotation @property - def valid_type(self) -> Any: - return self._schema.get('type', None) + def schema(self) -> Dict[str, Any]: + return self._schema @property def default(self) -> Any: - return self._schema.get('default', None) + return self._field.default @property def description(self) -> str: - return self._schema.get('description', '') + return self._field.description @property def global_only(self) -> bool: - return self._schema.get('global_only', False) + from .config import ProfileOptionsSchema + return self._name.replace('.', '__') not in ProfileOptionsSchema.model_fields - def validate(self, value: Any, cast: bool = True) -> Any: + def validate(self, value: Any) -> Any: """Validate a value :param value: The input value - :param cast: Attempt to cast the value to the required type - :return: The output value - :raise: ConfigValidationError - + :raise: ConfigurationError """ - # pylint: disable=too-many-branches - import jsonschema - - from aiida.manage.caching import _validate_identifier_pattern - - from .config import ConfigValidationError - - if cast: - try: - if self.valid_type == 'boolean': - if isinstance(value, str): - if value.strip().lower() in ['0', 'false', 'f']: - value = False - elif value.strip().lower() in ['1', 'true', 't']: - value = True - else: - value = bool(value) - elif self.valid_type == 'string': - value = str(value) - elif self.valid_type == 'integer': - value = int(value) - elif self.valid_type == 'number': - value = float(value) - elif self.valid_type == 'array' and isinstance(value, str): - value = value.split() - except ValueError: - pass + from pydantic import ValidationError - try: - jsonschema.validate(instance=value, schema=self.schema) - except jsonschema.ValidationError as exc: - raise ConfigValidationError(message=exc.message, keypath=[self.name, *(exc.path or [])], schema=exc.schema) + from .config import GlobalOptionsSchema - # special caching validation - if self.name in ('caching.enabled_for', 'caching.disabled_for'): - for i, identifier in enumerate(value): - try: - _validate_identifier_pattern(identifier=identifier) - except ValueError as exc: - raise ConfigValidationError(message=str(exc), keypath=[self.name, str(i)]) - - return value + attribute = self.name.replace('.', '__') + try: + # There is no straightforward way to validate a single field of a model in pydantic v2.0. 
The following + # approach is the current work around, see: https://github.com/pydantic/pydantic/discussions/7367 + result = GlobalOptionsSchema.__pydantic_validator__.validate_assignment( + GlobalOptionsSchema.model_construct(), attribute, value + ) + except ValidationError as exception: + raise ConfigurationError(str(exception)) from exception -def get_schema_options() -> Dict[str, Dict[str, Any]]: - """Return schema for options.""" - from .config import config_schema - schema = config_schema() - return schema['definitions']['options']['properties'] + # Return the value from the constructed model as this will have casted the value to the right type + return getattr(result, attribute) def get_option_names() -> List[str]: """Return a list of available option names.""" - return list(get_schema_options()) + from .config import GlobalOptionsSchema + return [key.replace('__', '.') for key in GlobalOptionsSchema.model_fields] def get_option(name: str) -> Option: """Return option.""" - options = get_schema_options() - if name not in options: + from .config import GlobalOptionsSchema + options = GlobalOptionsSchema.model_fields + option_name = name.replace('.', '__') + if option_name not in options: raise ConfigurationError(f'the option {name} does not exist') - return Option(name, options[name]) + return Option(name, GlobalOptionsSchema.model_json_schema()['properties'][option_name], options[option_name]) def parse_option(option_name: str, option_value: Any) -> Tuple[Option, Any]: @@ -132,6 +102,6 @@ def parse_option(option_name: str, option_value: Any) -> Tuple[Option, Any]: """ option = get_option(option_name) - value = option.validate(option_value, cast=True) + value = option.validate(option_value) return option, value diff --git a/aiida/manage/manager.py b/aiida/manage/manager.py index e63a6d3f87..3e6959d064 100644 --- a/aiida/manage/manager.py +++ b/aiida/manage/manager.py @@ -214,6 +214,7 @@ def get_option(self, option_name: str) -> Any: else: if option_name in config.options: return config.get_option(option_name) + # try the defaults (will raise ConfigurationError if not present) option = get_option(option_name) return option.default diff --git a/docs/source/nitpick-exceptions b/docs/source/nitpick-exceptions index 0c3a0eea15..f5a41225e7 100644 --- a/docs/source/nitpick-exceptions +++ b/docs/source/nitpick-exceptions @@ -148,6 +148,17 @@ py:class ndarray py:class paramiko.proxy.ProxyCommand +py:class pydantic.main.BaseModel +py:class ModelPrivateAttr +py:class CoreSchema +py:class _decorators.DecoratorInfos +py:class _generics.PydanticGenericMetadata +py:class SchemaSerializer +py:class SchemaValidator +py:class Signature +py:class ConfigDict +py:class FieldInfo + # These can be removed once they are properly included in the `__all__` in `plumpy` py:class plumpy.ports.PortNamespace py:class plumpy.utils.AttributesDict @@ -218,8 +229,6 @@ py:class CircusClient py:class pgsu.PGSU py:meth pgsu.PGSU.__init__ -py:class jsonschema.exceptions._Error - py:class Session py:class Query py:class importlib_metadata.EntryPoint diff --git a/environment.yml b/environment.yml index 80d67e7bbe..78374edb9e 100644 --- a/environment.yml +++ b/environment.yml @@ -18,7 +18,6 @@ dependencies: - python-graphviz~=0.19 - ipython>=7 - jinja2~=3.0 -- jsonschema~=3.0 - kiwipy[rmq]~=0.7.7 - importlib-metadata~=6.0 - numpy~=1.21 @@ -27,6 +26,7 @@ dependencies: - pgsu~=0.2.1 - psutil~=5.6 - psycopg2-binary~=2.8 +- pydantic~=2.4 - pytz~=2021.1 - pyyaml~=6.0 - requests~=2.0 diff --git a/pyproject.toml b/pyproject.toml 
index 1d969a6731..0d9fd964e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,6 @@ dependencies = [ "graphviz~=0.19", "ipython>=7", "jinja2~=3.0", - "jsonschema~=3.0", "kiwipy[rmq]~=0.7.7", "importlib-metadata~=6.0", "numpy~=1.21", @@ -45,6 +44,7 @@ dependencies = [ "pgsu~=0.2.1", "psutil~=5.6", "psycopg2-binary~=2.8", + "pydantic~=2.4", "pytz~=2021.1", "pyyaml~=6.0", "requests~=2.0", @@ -416,7 +416,6 @@ module = [ 'graphviz.*', 'importlib._bootstrap.*', 'IPython.*', - 'jsonschema.*', 'kiwipy.*', 'matplotlib.*', 'mayavi.*', diff --git a/requirements/requirements-py-3.10.txt b/requirements/requirements-py-3.10.txt index 6cef93eb6f..f10c1b7242 100644 --- a/requirements/requirements-py-3.10.txt +++ b/requirements/requirements-py-3.10.txt @@ -44,7 +44,6 @@ deprecation==2.1.0 disk-objectstore==1.0.0 docstring-parser==0.15 docutils==0.16 -emmet-core==0.57.1 exceptiongroup==1.1.1 executing==1.2.0 fastjsonschema==2.17.1 @@ -89,8 +88,7 @@ matplotlib-inline==0.1.6 mdit-py-plugins==0.3.5 mdurl==0.1.2 mistune==3.0.1 -monty==2023.5.8 -mp-api==0.33.3 +monty==2023.9.25 mpmath==1.3.0 msgpack==1.0.5 multidict==6.0.4 @@ -133,10 +131,10 @@ py-cpuinfo==9.0.0 pybtex==0.24.0 pycifrw==4.4.5 pycparser==2.21 -pydantic==1.10.9 +pydantic==2.4.0 pydata-sphinx-theme==0.13.3 pygments==2.15.1 -pymatgen==2023.5.31 +pymatgen==2023.9.25 pympler==0.9 pymysql==0.9.3 pynacl==1.5.0 diff --git a/requirements/requirements-py-3.11.txt b/requirements/requirements-py-3.11.txt index fbe1fb4cec..1948ce8008 100644 --- a/requirements/requirements-py-3.11.txt +++ b/requirements/requirements-py-3.11.txt @@ -44,7 +44,6 @@ deprecation==2.1.0 disk-objectstore==1.0.0 docstring-parser==0.15 docutils==0.16 -emmet-core==0.57.1 executing==1.2.0 fastjsonschema==2.17.1 flask==2.3.2 @@ -88,8 +87,7 @@ matplotlib-inline==0.1.6 mdit-py-plugins==0.3.5 mdurl==0.1.2 mistune==3.0.1 -monty==2023.5.8 -mp-api==0.33.3 +monty==2023.9.25 mpmath==1.3.0 msgpack==1.0.5 multidict==6.0.4 @@ -132,10 +130,10 @@ py-cpuinfo==9.0.0 pybtex==0.24.0 pycifrw==4.4.5 pycparser==2.21 -pydantic==1.10.9 +pydantic==2.4.0 pydata-sphinx-theme==0.13.3 pygments==2.15.1 -pymatgen==2023.9.2 +pymatgen==2023.9.25 pympler==0.9 pymysql==0.9.3 pynacl==1.5.0 diff --git a/requirements/requirements-py-3.9.txt b/requirements/requirements-py-3.9.txt index 39e5392cd3..f9996690bf 100644 --- a/requirements/requirements-py-3.9.txt +++ b/requirements/requirements-py-3.9.txt @@ -44,7 +44,6 @@ deprecation==2.1.0 disk-objectstore==1.0.0 docstring-parser==0.15 docutils==0.16 -emmet-core==0.57.1 exceptiongroup==1.1.1 executing==1.2.0 fastjsonschema==2.17.1 @@ -91,8 +90,7 @@ matplotlib-inline==0.1.6 mdit-py-plugins==0.3.5 mdurl==0.1.2 mistune==3.0.1 -monty==2023.5.8 -mp-api==0.33.3 +monty==2023.9.25 mpmath==1.3.0 msgpack==1.0.5 multidict==6.0.4 @@ -135,10 +133,10 @@ py-cpuinfo==9.0.0 pybtex==0.24.0 pycifrw==4.4.5 pycparser==2.21 -pydantic==1.10.9 +pydantic==2.4.0 pydata-sphinx-theme==0.13.3 pygments==2.15.1 -pymatgen==2023.5.31 +pymatgen==2023.9.25 pympler==0.9 pymysql==0.9.3 pynacl==1.5.0 diff --git a/tests/manage/configuration/test_configuration.py b/tests/manage/configuration/test_configuration.py index 344d933d6a..de9fd3a417 100644 --- a/tests/manage/configuration/test_configuration.py +++ b/tests/manage/configuration/test_configuration.py @@ -27,7 +27,8 @@ def test_check_version_release(monkeypatch, capsys, isolated_config): @pytest.mark.parametrize('suppress_warning', (True, False)) -def test_check_version_development(monkeypatch, capsys, isolated_config, suppress_warning): 
+@pytest.mark.usefixtures('isolated_config') +def test_check_version_development(monkeypatch, capsys, suppress_warning, aiida_profile): """Test that ``Manager.check_version`` prints a warning for a post release development version. The warning can be suppressed by setting the option ``warnings.development_version`` to ``False``. @@ -39,7 +40,7 @@ def test_check_version_development(monkeypatch, capsys, isolated_config, suppres version = '1.0.0.post0' monkeypatch.setattr(aiida, '__version__', version) - isolated_config.set_option('warnings.development_version', not suppress_warning) + aiida_profile.set_option('warnings.development_version', not suppress_warning) get_manager().check_version() captured = capsys.readouterr() diff --git a/tests/manage/configuration/test_options.py b/tests/manage/configuration/test_options.py index 466250c11b..fe9b417c55 100644 --- a/tests/manage/configuration/test_options.py +++ b/tests/manage/configuration/test_options.py @@ -12,7 +12,8 @@ from aiida import get_profile from aiida.common.exceptions import ConfigurationError -from aiida.manage.configuration import ConfigValidationError, config_schema, get_config, get_config_option +from aiida.manage.configuration import get_config, get_config_option +from aiida.manage.configuration.config import GlobalOptionsSchema from aiida.manage.configuration.options import Option, get_option, get_option_names, parse_option @@ -23,7 +24,7 @@ class TestConfigurationOptions: def test_get_option_names(self): """Test `get_option_names` function.""" assert isinstance(get_option_names(), list) - assert len(get_option_names()) == len(config_schema()['definitions']['options']['properties']) + assert len(get_option_names()) == len(GlobalOptionsSchema.model_fields) def test_get_option(self): """Test `get_option` function.""" @@ -38,10 +39,10 @@ def test_get_option(self): def test_parse_option(self): """Test `parse_option` function.""" - with pytest.raises(ConfigValidationError): + with pytest.raises(ConfigurationError): parse_option('logging.aiida_loglevel', 1) - with pytest.raises(ConfigValidationError): + with pytest.raises(ConfigurationError): parse_option('logging.aiida_loglevel', 'INVALID_LOG_LEVEL') def test_options(self):
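For reference, the sketch below illustrates the single-option validation technique that the patch introduces in `Option.validate`: pydantic v2 has no direct API for validating one field, so a value is validated by assigning it onto an unvalidated instance created with `model_construct()`. This is a reduced, assumption-laden approximation (a hypothetical two-field stand-in for `GlobalOptionsSchema` and an illustrative `validate_option` helper), not code from the patch itself.

from typing import Optional

from pydantic import BaseModel, Field, ValidationError


class GlobalOptionsSchema(BaseModel):
    # Reduced stand-in for aiida.manage.configuration.config.GlobalOptionsSchema defined in the patch.
    daemon__default_workers: int = Field(
        1, description='Default number of workers to be launched by `verdi daemon start`.'
    )
    autofill__user__email: Optional[str] = Field(
        None, description='Default user email to use when creating new profiles.'
    )


def validate_option(name: str, value):
    # Hypothetical helper mirroring Option.validate: option names use '.', model fields use '__'.
    attribute = name.replace('.', '__')
    try:
        # Validate the single value by assigning it onto an unvalidated (constructed) instance,
        # the workaround referenced in the patch (pydantic discussion #7367).
        result = GlobalOptionsSchema.__pydantic_validator__.validate_assignment(
            GlobalOptionsSchema.model_construct(), attribute, value
        )
    except ValidationError as exception:
        raise ValueError(f'invalid value for option {name}: {exception}') from exception
    # The attribute on the returned model has been coerced to the annotated type.
    return getattr(result, attribute)


print(validate_option('daemon.default_workers', '4'))  # prints 4, cast from str to int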