diff --git a/aiida/orm/implementation/django/querybuilder.py b/aiida/orm/implementation/django/querybuilder.py index adbd39ca15..1582e963dc 100644 --- a/aiida/orm/implementation/django/querybuilder.py +++ b/aiida/orm/implementation/django/querybuilder.py @@ -7,13 +7,14 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""Django query builder""" +"""Django query builder implementation""" from aldjemy import core # Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed # pylint: disable=no-name-in-module, import-error from sqlalchemy import and_, or_, not_, case from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.ext.compiler import compiles +from sqlalchemy.sql.compiler import TypeCompiler from sqlalchemy.sql.expression import FunctionElement from sqlalchemy.types import Float, Boolean @@ -26,11 +27,11 @@ class jsonb_array_length(FunctionElement): # pylint: disable=invalid-name @compiles(jsonb_array_length) -def compile(element, compiler, **_kw): # pylint: disable=function-redefined, redefined-builtin +def compile(element, compiler: TypeCompiler, **kwargs): # pylint: disable=function-redefined, redefined-builtin """ Get length of array defined in a JSONB column """ - return f'jsonb_array_length({compiler.process(element.clauses)})' + return f'jsonb_array_length({compiler.process(element.clauses, **kwargs)})' class array_length(FunctionElement): # pylint: disable=invalid-name @@ -38,11 +39,11 @@ class array_length(FunctionElement): # pylint: disable=invalid-name @compiles(array_length) -def compile(element, compiler, **_kw): # pylint: disable=function-redefined +def compile(element, compiler: TypeCompiler, **kwargs): # pylint: disable=function-redefined """ Get length of array defined in a JSONB column """ - return f'array_length({compiler.process(element.clauses)})' + return 
f'array_length({compiler.process(element.clauses, **kwargs)})' class jsonb_typeof(FunctionElement): # pylint: disable=invalid-name @@ -50,11 +51,11 @@ class jsonb_typeof(FunctionElement): # pylint: disable=invalid-name @compiles(jsonb_typeof) -def compile(element, compiler, **_kw): # pylint: disable=function-redefined +def compile(element, compiler: TypeCompiler, **kwargs): # pylint: disable=function-redefined """ Get length of array defined in a JSONB column """ - return f'jsonb_typeof({compiler.process(element.clauses)})' + return f'jsonb_typeof({compiler.process(element.clauses, **kwargs)})' class DjangoQueryBuilder(BackendQueryBuilder): @@ -62,9 +63,6 @@ class DjangoQueryBuilder(BackendQueryBuilder): # pylint: disable=too-many-public-methods,no-member - def __init__(self, backend): - BackendQueryBuilder.__init__(self, backend) - @property def Node(self): return models.DbNode.sa diff --git a/aiida/orm/implementation/sqlalchemy/querybuilder.py b/aiida/orm/implementation/sqlalchemy/querybuilder.py index 55cd8528f5..405f23ed60 100644 --- a/aiida/orm/implementation/sqlalchemy/querybuilder.py +++ b/aiida/orm/implementation/sqlalchemy/querybuilder.py @@ -13,6 +13,7 @@ from sqlalchemy.types import Float, Boolean from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.sql.expression import case, FunctionElement +from sqlalchemy.sql.compiler import TypeCompiler from sqlalchemy.ext.compiler import compiles from aiida.common.exceptions import NotExistent @@ -24,11 +25,11 @@ class jsonb_array_length(FunctionElement): # pylint: disable=invalid-name @compiles(jsonb_array_length) -def compile(element, compiler, **_kw): # pylint: disable=function-redefined, redefined-builtin +def compile(element, compiler: TypeCompiler, **kwargs): # pylint: disable=function-redefined, redefined-builtin """ Get length of array defined in a JSONB column """ - return f'jsonb_array_length({compiler.process(element.clauses)})' + return f'jsonb_array_length({compiler.process(element.clauses, 
**kwargs)})' class array_length(FunctionElement): # pylint: disable=invalid-name @@ -36,11 +37,11 @@ class array_length(FunctionElement): # pylint: disable=invalid-name @compiles(array_length) -def compile(element, compiler, **_kw): # pylint: disable=function-redefined +def compile(element, compiler: TypeCompiler, **kwargs): # pylint: disable=function-redefined """ Get length of array defined in a JSONB column """ - return f'array_length({compiler.process(element.clauses)})' + return f'array_length({compiler.process(element.clauses, **kwargs)})' class jsonb_typeof(FunctionElement): # pylint: disable=invalid-name @@ -48,11 +49,11 @@ class jsonb_typeof(FunctionElement): # pylint: disable=invalid-name @compiles(jsonb_typeof) -def compile(element, compiler, **_kw): # pylint: disable=function-redefined +def compile(element, compiler: TypeCompiler, **kwargs): # pylint: disable=function-redefined """ Get length of array defined in a JSONB column """ - return f'jsonb_typeof({compiler.process(element.clauses)})' + return f'jsonb_typeof({compiler.process(element.clauses, **kwargs)})' class SqlaQueryBuilder(BackendQueryBuilder): @@ -64,7 +65,7 @@ class SqlaQueryBuilder(BackendQueryBuilder): # pylint: disable=redefined-outer-name,too-many-public-methods def __init__(self, backend): - BackendQueryBuilder.__init__(self, backend) + super().__init__(backend) self.outer_to_inner_schema['db_dbcomputer'] = {'metadata': '_metadata'} self.outer_to_inner_schema['db_dblog'] = {'metadata': '_metadata'} diff --git a/aiida/orm/querybuilder.py b/aiida/orm/querybuilder.py index b7376dccf8..ed2e310ada 100644 --- a/aiida/orm/querybuilder.py +++ b/aiida/orm/querybuilder.py @@ -19,8 +19,9 @@ An instance of one of the implementation classes becomes a member of the :func:`QueryBuilder` instance when instantiated by the user. 
""" -from inspect import isclass as inspect_isclass import copy +from datetime import date, datetime, timedelta +from inspect import isclass as inspect_isclass import logging from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Sequence, Tuple, Type, Union, TYPE_CHECKING import warnings @@ -33,7 +34,6 @@ from aiida.common.links import LinkType from aiida.manage.manager import get_manager -from aiida.common.exceptions import ConfigurationError from . import authinfos from . import comments @@ -44,9 +44,18 @@ from . import entities from . import convert +try: + from typing import TypedDict # pylint: disable=ungrouped-imports +except ImportError: + # Python <3.8 backport + from typing_extensions import TypedDict + if TYPE_CHECKING: - from sqlalchemy.orm import Query # pylint: disable=ungrouped-imports - from aiida.orm.implementation import Backend # pylint: disable=ungrouped-imports + # pylint: disable=ungrouped-imports + from sqlalchemy.orm import Query + from sqlalchemy.sql.compiler import SQLCompiler + from sqlalchemy.sql.elements import BooleanClauseList + from aiida.orm.implementation import Backend __all__ = ('QueryBuilder',) @@ -66,21 +75,27 @@ FilterType = Dict[str, Any] # pylint: disable=invalid-name RowType = Any # pylint: disable=invalid-name -try: - # new in python 3.8 - from typing import TypedDict # pylint: disable=ungrouped-imports - class PathItemType(TypedDict): - """An item on the query path""" +class PathItemType(TypedDict): + """An item on the query path""" - entity_type: Any - tag: str - joining_keyword: str - joining_value: str - outerjoin: bool - edge_tag: str -except ImportError: - PathItemType = Dict[str, Any] # type: ignore + entity_type: Any + tag: str + joining_keyword: str + joining_value: str + outerjoin: bool + edge_tag: str + + +class QueryDict(TypedDict, total=False): + """A JSON serialisable representation of a ``QueryBuilder`` instance""" + + path: List[PathItemType] + filters: Dict[str, FilterType] + project: 
Dict[str, ProjectType] + order_by: List[dict] + offset: Optional[int] + limit: Optional[int] def get_querybuilder_classifiers_from_cls(cls, query): # pylint: disable=invalid-name @@ -225,8 +240,6 @@ def get_node_type_filter(classifiers: dict, subclassing: bool) -> dict: :param subclassing: if True, allow for subclasses of the ormclass :returns: dictionary in QueryBuilder filter language to pass into {"type": ... } - :rtype: dict - """ from aiida.orm.utils.node import get_query_type_from_type_string from aiida.common.escaping import escape_for_sql_like @@ -253,8 +266,6 @@ def get_process_type_filter(classifiers: dict, subclassing: bool) -> dict: :returns: dictionary in QueryBuilder filter language to pass into {"process_type": ... } - :rtype: dict - """ from aiida.common.escaping import escape_for_sql_like from aiida.common.warnings import AiidaEntryPointWarning @@ -316,7 +327,6 @@ def get_group_type_filter(classifiers: dict, subclassing: bool) -> dict: :param subclassing: if True, allow for subclasses of the ormclass :returns: dictionary in QueryBuilder filter language to pass into {'type_string': ... } - :rtype: dict """ from aiida.common.escaping import escape_for_sql_like @@ -439,7 +449,7 @@ def __init__( # is used twice. In that case, the user has to provide a tag! 
self._cls_to_tag_map: Dict[Any, str] = {} - # Hashing the internal queryhelp avoids rebuild a query + # Hashing the internal query representation avoids rebuilding a query self._hash: Optional[str] = None # The hash being None implies that the query will be build (Check the code in .get_query @@ -491,25 +501,85 @@ def __init__( if order_by: self.order_by(order_by) - def __str__(self) -> str: + def as_dict(self) -> QueryDict: + """Convert to a JSON serialisable dictionary representation of the query.""" + return copy.deepcopy({ # type: ignore[return-value] + 'path': self._path, + 'filters': self._filters, + 'project': self._projections, + 'order_by': self._order_by, + 'limit': self._limit, + 'offset': self._offset, + }) + + @property + def queryhelp(self) -> QueryDict: + """Legacy name for ``as_dict`` method.""" + return self.as_dict() + + @classmethod + def from_dict(cls, dct: QueryDict) -> 'QueryBuilder': + """Create an instance from a dictionary representation of the query.""" + return cls(**dct) # type: ignore[arg-type] + + @staticmethod + def _compile_query(query: 'Query', literal_binds: bool = False) -> 'SQLCompiler': + """Compile the query to the SQL executable. + + :param literal_binds: Inline bound parameters (this is normally handled by the Python DBAPI). """ - When somebody hits: print(QueryBuilder) or print(str(QueryBuilder)) - I want to print the SQL-query. Because it looks cool... + dialect = query.session.bind.dialect + + class _Compiler(dialect.statement_compiler): # type: ignore[name-defined] + """Override the compiler with additional literal value renderers.""" + + def render_literal_value(self, value, type_): + """Render the value of a bind parameter as a quoted literal. 
+ + See https://www.postgresql.org/docs/current/functions-json.html for serialisation specs + """ + try: + return super().render_literal_value(value, type_) + except NotImplementedError: + if isinstance(value, list): + values = ','.join(self.render_literal_value(item, type_) for item in value) + return f"'[{values}]'" + if isinstance(value, int): + return str(value) + if isinstance(value, (str, date, datetime, timedelta)): + escaped = str(value).replace('"', '\\"') + return f'"{escaped}"' + raise + + return _Compiler(dialect, query.statement, compile_kwargs=dict(literal_binds=literal_binds)) + + def as_sql(self, inline: bool = False) -> str: + """Convert the query to an SQL string representation. + + .. warning:: + + This method should be used for debugging purposes only, + since normally sqlalchemy will handle this process internally. + + :param inline: Inline bound parameters (this is normally handled by the Python DBAPI). """ - from aiida.manage.configuration import get_config + compiled = self._compile_query(self.get_query(), literal_binds=inline) + if inline: + return compiled.string + '\n' + return f'{compiled.string!r} % {compiled.params!r}\n' - config = get_config() - engine = config.current_profile.database_engine + def __repr__(self) -> str: + """Return an unambiguous string representation of the instance.""" + params = ', '.join(f'{key}={value!r}' for key, value in self.as_dict().items()) + return f'QueryBuilder({params})' - if engine.startswith('mysql'): - from sqlalchemy.dialects import mysql as mydialect - elif engine.startswith('postgre'): - from sqlalchemy.dialects import postgresql as mydialect - else: - raise ConfigurationError(f'Unknown DB engine: {engine}') + def __str__(self) -> str: + """Return a readable string representation of the instance.""" + return repr(self) - que = self.get_query() - return str(que.statement.compile(compile_kwargs={'literal_binds': True}, dialect=mydialect.dialect())) + def __deepcopy__(self, memo) -> 'QueryBuilder': 
+ """Create deep copy of the instance.""" + return type(self)(**self.as_dict()) # type: ignore[arg-type] def _get_ormclass(self, cls, ormclass_type_string): """ @@ -1338,14 +1408,14 @@ def offset(self, offset: Optional[int]) -> 'QueryBuilder': self._offset = offset return self - def _build_filters(self, alias, filter_spec): + def _build_filters(self, alias: str, filter_spec: FilterType) -> 'BooleanClauseList': """ Recurse through the filter specification and apply filter operations. :param alias: The alias of the ORM class the filter will be applied on - :param filter_spec: the specification as given by the queryhelp + :param filter_spec: the specification of the filter - :returns: an instance of *sqlalchemy.sql.elements.BinaryExpression*. + :returns: an sqlalchemy expression. """ expressions = [] for path_spec, filter_operation_dict in filter_spec.items(): @@ -1463,7 +1533,9 @@ def _join_inputs(self, joined_entity, entity_to_join, isouterjoin): ).join(entity_to_join, aliased_edge.input_id == entity_to_join.id, isouter=isouterjoin) return aliased_edge - def _join_descendants_recursive(self, joined_entity, entity_to_join, isouterjoin, filter_dict, expand_path=False): + def _join_descendants_recursive( + self, joined_entity, entity_to_join, isouterjoin, filter_dict: FilterType, expand_path=False + ): """ joining descendants using the recursive functionality :TODO: Move the filters to be done inside the recursive query (for example on depth) @@ -1522,7 +1594,9 @@ def _join_descendants_recursive(self, joined_entity, entity_to_join, isouterjoin ) return descendants_recursive.c - def _join_ancestors_recursive(self, joined_entity, entity_to_join, isouterjoin, filter_dict, expand_path=False): + def _join_ancestors_recursive( + self, joined_entity, entity_to_join, isouterjoin, filter_dict: FilterType, expand_path=False + ): """ joining ancestors using the recursive functionality :TODO: Move the filters to be done inside the recursive query (for example on depth) @@ -1768,9 
+1842,6 @@ def _get_function_map(self) -> Dict[str, Dict[str, Callable[[Any, Any, bool], No def _get_connecting_node(self, index: int, joining_keyword: str, joining_value: str, **_: Any): """ - :param querydict: - A dictionary specifying how the current node - is linked to other nodes. :param index: Index of this node within the path specification :param joining_keyword: the relation on which to join :param joining_value: the tag of the nodes to be joined @@ -1803,34 +1874,6 @@ def _get_connecting_node(self, index: int, joining_keyword: str, joining_value: f'Key {self._get_tag_from_specification(joining_value)} value is not a string:\n{joining_value}' ) - @property - def queryhelp(self): - """queryhelp dictionary correspondig to QueryBuilder instance. - - The queryhelp can be used to create a copy of the QueryBuilder instance like so:: - - qb = QueryBuilder(limit=3).append(StructureData, project='id').order_by({StructureData:'id'}) - qb2 = QueryBuilder(**qb.queryhelp) - - # The following is True if no change has been made to the database. 
- # Note that such a comparison can only be True if the order of results is enforced - qb.all() == qb2.all() - - :return: a queryhelp dictionary - """ - return copy.deepcopy({ - 'path': self._path, - 'filters': self._filters, - 'project': self._projections, - 'order_by': self._order_by, - 'limit': self._limit, - 'offset': self._offset, - }) - - def __deepcopy__(self, memo): - """Create deep copy of QueryBuilder instance.""" - return type(self)(**self.queryhelp) - def _build_order(self, alias, entitytag, entityspec): """ Build the order parameter of the query @@ -1864,7 +1907,7 @@ def _build(self): # JOINS ################################ for index, verticespec in enumerate(self._path[1:], start=1): alias = self.tag_to_alias_map[verticespec['tag']] - # looping through the queryhelp + # looping through the query path # ~ if index: # There is nothing to join if that is the first table toconnectwith, connection_func = self._get_connecting_node(index, **verticespec) @@ -2006,46 +2049,34 @@ def get_used_tags(self, vertices: bool = True, edges: bool = True) -> List[str]: given_tags.append(path['edge_tag']) return given_tags - def get_query(self): - """ - Instantiates and manipulates a sqlalchemy.orm.Query instance if this is needed. - First, I check if the query instance is still valid by hashing the queryhelp. - In this way, if a user asks for the same query twice, I am not recreating an instance. + def get_query(self) -> 'Query': + """Return the sqlalchemy.orm.Query instance for the current query specification. - :returns: an instance of sqlalchemy.orm.Query that is specific to the backend used. + To avoid unnecessary re-builds of the query, the hashed dictionary representation of this instance + is compared to the last query returned, which is cached by its hash. """ from aiida.common.hashing import make_hash - # Need_to_build is True by default. 
- # It describes whether the current query - # which is an attribute _query of this instance is still valid - # The queryhelp_hash is used to determine - # whether the query is still valid - - queryhelp_hash = make_hash(self.queryhelp) - # if self._hash (which is None if this function has not been invoked - # and is a string (hash) if it has) is the same as the queryhelp - # I can use the query again: - # If the query was injected I never build: + need_to_build = True + query_hash = make_hash(self.as_dict()) if self._hash is None: + # this is the first time the query has been built need_to_build = True elif self._injected: need_to_build = False - elif self._hash == queryhelp_hash: + elif self._hash == query_hash: need_to_build = False - else: - need_to_build = True if need_to_build: query = self._build() - self._hash = queryhelp_hash + self._hash = query_hash else: try: query = self._query except AttributeError: _LOGGER.warning('AttributeError thrown even though I should have _query as an attribute') query = self._build() - self._hash = queryhelp_hash + self._hash = query_hash return query @staticmethod @@ -2095,16 +2126,30 @@ def distinct(self) -> 'QueryBuilder': self._query = self.get_query().distinct() return self - def first(self) -> Optional[List[RowType]]: + def analyze_query(self, execute: bool = True, verbose: bool = False) -> str: + """Return the query plan, i.e. a list of SQL statements that will be executed. + + See: https://www.postgresql.org/docs/11/sql-explain.html + + :param execute: Carry out the command and show actual run times and other statistics. + :param verbose: Display additional information regarding the plan. """ - Executes query asking for one instance. 
- Use as follows:: + query = self.get_query() + if query.session.bind.dialect.name != 'postgresql': + raise NotImplementedError('Only PostgreSQL is supported for this method') + compiled = self._compile_query(query, literal_binds=True) + options = ', '.join((['ANALYZE'] if execute else []) + (['VERBOSE'] if verbose else [])) + options = f' ({options})' if options else '' + rows = self._impl.get_session().execute(f'EXPLAIN{options} {compiled.string}').fetchall() + return '\n'.join(row.values()[0] for row in rows) + + def first(self) -> Optional[List[RowType]]: + """Executes the query, asking for the first row of results. - qb = QueryBuilder(**queryhelp) - qb.first() + Note, this may change if several rows are valid for the query, + as persistent ordering is not guaranteed unless explicitly specified. - :returns: - One row of results as a list + :returns: One row of results as a list, or None if no result returned. """ query = self.get_query() result = self._impl.first(query) diff --git a/aiida/restapi/resources.py b/aiida/restapi/resources.py index b4f9083a57..b36c23b14c 100644 --- a/aiida/restapi/resources.py +++ b/aiida/restapi/resources.py @@ -209,15 +209,23 @@ def get(self, id=None, page=None): # pylint: disable=redefined-builtin,invalid- class QueryBuilder(BaseResource): """ - Representation of a QueryBuilder REST API resource (instantiated with a queryhelp JSON). + Representation of a QueryBuilder REST API resource (instantiated with a serialised QueryBuilder instance). - It supports POST requests taking in JSON :py:func:`~aiida.orm.querybuilder.QueryBuilder.queryhelp` + It supports POST requests taking in JSON :py:func:`~aiida.orm.querybuilder.QueryBuilder.as_dict` objects and returning the :py:class:`~aiida.orm.querybuilder.QueryBuilder` result accordingly. """ from aiida.restapi.translator.nodes.node import NodeTranslator _translator_class = NodeTranslator + GET_MESSAGE = ( + 'Method Not Allowed. Use HTTP POST requests to use the AiiDA QueryBuilder. 
' + 'POST JSON data, which MUST be a valid QueryBuilder.as_dict() dictionary as a JSON object. ' + 'See the documentation at ' + 'https://aiida.readthedocs.io/projects/aiida-core/en/latest/topics/database.html' + '#converting-the-querybuilder-to-from-a-dictionary for more information.' + ) + def __init__(self, **kwargs): super().__init__(**kwargs) @@ -227,15 +235,6 @@ def __init__(self, **kwargs): def get(self): # pylint: disable=arguments-differ """Static return to state information about this endpoint.""" - data = { - 'message': ( - 'Method Not Allowed. Use HTTP POST requests to use the AiiDA QueryBuilder. ' - 'POST JSON data, which MUST be a valid QueryBuilder.queryhelp dictionary as a JSON object. ' - 'See the documentation at https://aiida.readthedocs.io/projects/aiida-core/en/latest/topics/' - 'database.html?highlight=QueryBuilder#the-queryhelp for more information.' - ), - } - headers = self.utils.build_headers(url=request.url, total_count=1) return self.utils.build_response( status=405, # Method Not Allowed @@ -247,7 +246,7 @@ def get(self): # pylint: disable=arguments-differ 'path': unquote(request.path), 'query_string': request.query_string.decode('utf-8'), 'resource_type': self.__class__.__name__, - 'data': data, + 'data': {'message': self.GET_MESSAGE}, }, ) @@ -255,7 +254,8 @@ def post(self): # pylint: disable=too-many-branches """ POST method to pass query help JSON. - If the posted JSON is not a valid QueryBuilder queryhelp, the request will fail with an internal server error. + If the posted JSON is not a valid QueryBuilder serialisation, + the request will fail with an internal server error. This uses the NodeTranslator in order to best return Nodes according to the general AiiDA REST API data format, while still allowing the return of other AiiDA entities. 
@@ -265,9 +265,9 @@ def post(self): # pylint: disable=too-many-branches # pylint: disable=protected-access self.trans._query_help = request.get_json(force=True) # While the data may be correct JSON, it MUST be a single JSON Object, - # equivalent of a QuieryBuilder.queryhelp dictionary. + # equivalent of a QueryBuilder.as_dict() dictionary. assert isinstance(self.trans._query_help, dict), ( - 'POSTed data MUST be a valid QueryBuilder.queryhelp dictionary. ' + 'POSTed data MUST be a valid QueryBuilder.as_dict() dictionary. ' f'Got instead (type: {type(self.trans._query_help)}): {self.trans._query_help}' ) self.trans.__label__ = self.trans._result_type = self.trans._query_help['path'][-1]['tag'] @@ -287,7 +287,7 @@ def post(self): # pylint: disable=too-many-branches pass if empty_projections_counter == number_projections: - # No projections have been specified in the queryhelp. + # No projections have been specified in the dictionary. # To be true to the QueryBuilder response, the last entry in path # is the only entry to be returned, all without edges/links. 
self.trans._query_help['project'][self.trans.__label__] = self.trans._default diff --git a/aiida/tools/graph/age_rules.py b/aiida/tools/graph/age_rules.py index 0a4250fbdf..7e3eb945e0 100644 --- a/aiida/tools/graph/age_rules.py +++ b/aiida/tools/graph/age_rules.py @@ -75,26 +75,26 @@ def __init__(self, querybuilder, max_iterations=1, track_edges=False): """ super().__init__(max_iterations, track_edges=track_edges) - def get_spec_from_path(queryhelp, idx): + def get_spec_from_path(query_dict, idx): from aiida.orm.querybuilder import GROUP_ENTITY_TYPE_PREFIX if ( - queryhelp['path'][idx]['entity_type'].startswith('node') or - queryhelp['path'][idx]['entity_type'].startswith('data') or - queryhelp['path'][idx]['entity_type'].startswith('process') or - queryhelp['path'][idx]['entity_type'] == '' + query_dict['path'][idx]['entity_type'].startswith('node') or + query_dict['path'][idx]['entity_type'].startswith('data') or + query_dict['path'][idx]['entity_type'].startswith('process') or + query_dict['path'][idx]['entity_type'] == '' ): result = 'nodes' - elif queryhelp['path'][idx]['entity_type'].startswith(GROUP_ENTITY_TYPE_PREFIX): + elif query_dict['path'][idx]['entity_type'].startswith(GROUP_ENTITY_TYPE_PREFIX): result = 'groups' else: - raise Exception(f"not understood entity from ( {queryhelp['path'][idx]['entity_type']} )") + raise Exception(f"not understood entity from ( {query_dict['path'][idx]['entity_type']} )") return result - queryhelp = querybuilder.queryhelp + query_dict = querybuilder.as_dict() # Check if there is any projection: - query_projections = queryhelp['project'] + query_projections = query_dict['project'] for projection_key in query_projections: if query_projections[projection_key] != []: raise ValueError( @@ -103,13 +103,13 @@ def get_spec_from_path(queryhelp, idx): projection_key, query_projections[projection_key] ) ) - for pathspec in queryhelp['path']: + for pathspec in query_dict['path']: if not pathspec['entity_type']: 
pathspec['entity_type'] = 'node.Node.' - self._qbtemplate = orm.QueryBuilder(**queryhelp) - queryhelp = self._qbtemplate.queryhelp - self._first_tag = queryhelp['path'][0]['tag'] - self._last_tag = queryhelp['path'][-1]['tag'] + self._qbtemplate = orm.QueryBuilder(**query_dict) + query_dict = self._qbtemplate.as_dict() + self._first_tag = query_dict['path'][0]['tag'] + self._last_tag = query_dict['path'][-1]['tag'] self._querybuilder = None # All of these are set in _init_run: @@ -117,8 +117,8 @@ def get_spec_from_path(queryhelp, idx): self._edge_keys = None self._entity_to_identifier = None - self._entity_from = get_spec_from_path(queryhelp, 0) - self._entity_to = get_spec_from_path(queryhelp, -1) + self._entity_from = get_spec_from_path(query_dict, 0) + self._entity_to = get_spec_from_path(query_dict, -1) self._accumulator_set = None def set_edge_keys(self, edge_keys): @@ -161,8 +161,8 @@ def _init_run(self, operational_set): self._accumulator_set = operational_set.copy() # Copying qbtemplate so there's no problem if it is used again in a later run: - queryhelp = self._qbtemplate.queryhelp - self._querybuilder = orm.QueryBuilder(**queryhelp) + query_dict = self._qbtemplate.as_dict() + self._querybuilder = orm.QueryBuilder.from_dict(query_dict) self._entity_to_identifier = operational_set[self._entity_to].identifier @@ -175,7 +175,7 @@ def _init_run(self, operational_set): # that stores the information what I need to project as well, as in (tag, projection) projections = defaultdict(list) self._edge_keys = [] - self._edge_label = queryhelp['path'][-1]['edge_tag'] + self._edge_label = query_dict['path'][-1]['edge_tag'] # Need to get the edge_set: This is given by entity1_entity2. 
Here, the results needs to # be sorted somehow in order to ensure that the same key is used when entity_from and diff --git a/aiida/tools/importexport/dbexport/main.py b/aiida/tools/importexport/dbexport/main.py index c68a3c242a..e56c6fff49 100644 --- a/aiida/tools/importexport/dbexport/main.py +++ b/aiida/tools/importexport/dbexport/main.py @@ -326,7 +326,7 @@ def _get_starting_node_ids(entities: List[Any]) -> Tuple[DefaultDict[str, Set[st } }, tag='groups', - ).queryhelp + ).as_dict() ) node_query = orm.QueryBuilder(**qh_groups).append(orm.Node, project=['id', 'uuid'], with_group='groups') node_count = node_query.count() diff --git a/docs/source/howto/query.rst b/docs/source/howto/query.rst index f72e419d6b..3af5608485 100644 --- a/docs/source/howto/query.rst +++ b/docs/source/howto/query.rst @@ -21,11 +21,11 @@ Once you are clear about what you want and how you can get it, the :class:`~aiid There are two ways of using the :class:`~aiida.orm.querybuilder.QueryBuilder`: #. In the *appender* method, you construct your query step by step using the ``QueryBuilder.append()`` method. -#. In the *queryhelp* approach, you construct a dictionary that defines your query and pass it to the :class:`~aiida.orm.querybuilder.QueryBuilder`. +#. In the *dictionary* approach, you construct a dictionary that defines your query and pass it to the :class:`~aiida.orm.querybuilder.QueryBuilder`. -Both APIs provide the same functionality - the appender method may be more suitable for interactive use, e.g., in the ``verdi shell``, whereas the queryhelp method can be useful in scripting. +Both APIs provide the same functionality - the appender method may be more suitable for interactive use, e.g., in the ``verdi shell``, whereas the dictionary method can be useful in scripting. In this section we will focus on the basics of the appender method. -For more advanced queries or more details on the queryhelp, see the :ref:`topics section on advanced querying `. 
+For more advanced queries or more details on the query dictionary, see the :ref:`topics section on advanced querying `. .. _how-to:query:select: diff --git a/docs/source/nitpick-exceptions b/docs/source/nitpick-exceptions index bc0ae72d15..d888619e05 100644 --- a/docs/source/nitpick-exceptions +++ b/docs/source/nitpick-exceptions @@ -168,3 +168,8 @@ py:class Query py:class BackendQueryBuilder py:class importlib_metadata.EntryPoint py:class Command + +py:class BooleanClauseList +py:class SQLCompiler +py:class aiida.orm.querybuilder.QueryDict +py:class sqlalchemy.sql.compiler.TypeCompiler diff --git a/docs/source/topics/database.rst b/docs/source/topics/database.rst index d5a09f4b0b..4ba319015d 100644 --- a/docs/source/topics/database.rst +++ b/docs/source/topics/database.rst @@ -15,7 +15,7 @@ Advanced querying ================= The basics on using the :class:`~aiida.orm.querybuilder.QueryBuilder` to find the data you are interested in is explained in the :ref:`finding and querying how-to`. -This section explains some more advanced methods for querying your database and the :ref:`queryhelp dictionary`. +This section explains some more advanced methods for querying your database and the :ref:`QueryBuilder dictionary`. .. _topics:database:advancedquery:edges: @@ -222,12 +222,17 @@ List of all relationships: +------------------+---------------+--------------------+-------------------------------------------------+ .. _topics:database:advancedquery:queryhelp: +.. _topics:database:advancedquery:querydict: -The queryhelp -------------- +Converting the QueryBuilder to/from a dictionary +------------------------------------------------ -The ``queryhelp`` dictionary is a property of the :class:`~aiida.orm.querybuilder.QueryBuilder` class. 
-Once you have built your query using the appender method explained in the :ref:`finding and querying for data how-to` and the advanced sections above, you can easily store your query by saving the ``QueryBuilder.queryhelp`` dictionary as a JSON file for later use: +.. important:: + + In aiida-core version 1, this dictionary was accessed with ``QueryBuilder.queryhelp``, which is now deprecated. + +The :class:`~aiida.orm.querybuilder.QueryBuilder` class can be converted to a dictionary and also loaded from a dictionary, for easy serialisation and storage. +Once you have built your query using the appender method explained in the :ref:`finding and querying for data how-to` and the advanced sections above, you can easily store your query by saving the ``QueryBuilder.as_dict()`` dictionary as a JSON file for later use: .. code-block:: python @@ -237,26 +242,26 @@ Once you have built your query using the appender method explained in the :ref:` qb = QueryBuilder() qb.append(CalcJobNode) - with open("queryhelp.json", "w") as file: - file.write(json.dumps(qb.queryhelp, indent=4)) + with open("querydict.json", "w") as file: + file.write(json.dumps(qb.as_dict(), indent=4)) -To use the queryhelp to instantiate the :class:`~aiida.orm.querybuilder.QueryBuilder`, you can use `Python's automatic keyword expansion `_: +To use this dictionary to instantiate the :class:`~aiida.orm.querybuilder.QueryBuilder`, you can use the ``from_dict`` class method: .. code-block:: python - with open("queryhelp.json", "r") as file: - queryhelp = json.load(file) + with open("querydict.json", "r") as file: + query_dict = json.load(file) - qb = QueryBuilder(**queryhelp) + qb = QueryBuilder.from_dict(query_dict) -Alternatively, you can also use the ``queryhelp`` to set up your query by specifying the path, filters and projections and constructing the ``queryhelp`` dictionary by hand. 
+Alternatively, you can also use a dictionary to set up your query by specifying the path, filters and projections and constructing the dictionary by hand. To do this, you have to specify: * the ``path``: Here, the user specifies the path along which to join tables as a list of dictionaries, where each list item identifies a vertex in your path. You define the vertex class with the ``cls`` key:: - queryhelp = { + query_dict = { 'path':[ {'cls': Data} ] @@ -267,7 +272,7 @@ To do this, you have to specify: However, this will not work if you choose the same class twice in the query. In this case you have to provide the tag using the ``tag`` key:: - queryhelp = { + query_dict = { 'path':[ { 'cls':Node, @@ -287,7 +292,7 @@ To do this, you have to specify: That other node can be specified by an integer or the class or type. The following examples are all valid joining instructions, assuming there is a structure defined at index 2 of the path with tag "struc1":: - edge_specification = queryhelp['path'][3] + edge_specification = query_dict['path'][3] edge_specification['with_incoming'] = 2 edge_specification['with_incoming'] = StructureData edge_specification['with_incoming'] = 'struc1' @@ -295,7 +300,7 @@ To do this, you have to specify: edge_specification['with_outgoing'] = StructureData edge_specification['with_outgoing'] = 'struc1' - * queryhelp_item['direction'] = integer + * ``query_dict['path'][<n>]['direction'] = integer`` If any of the above specs ("with_outgoing", "with_incoming") were not specified, the key "direction" is looked for. Directions are defined as distances in the tree. @@ -305,12 +310,12 @@ To do this, you have to specify: A negative number reverses the direction of the link. The absolute value of the direction defines the table to join to with respect to your own position in the list. An absolute value of 1 joins one table above, a value of 2 to the table defined 2 indices above.
- The two following queryhelps yield the same query:: + The two following dictionaries yield the same query:: from aiida.orm import TrajectoryData from aiida_quantumespresso.calculations.pw import PwCalculation from aiida.orm import Dict - qh1 = { + query_dict_1 = { 'path': [ { 'cls':PwCalculation @@ -327,7 +332,7 @@ To do this, you have to specify: # returns same query as: - qh2 = { + query_dict_2 = { 'path':[ { 'cls':PwCalculation @@ -344,7 +349,7 @@ To do this, you have to specify: # Shorter version: - qh3 = { + query_dict_3 = { 'path':[ Dict, PwCalculation, @@ -352,9 +357,9 @@ To do this, you have to specify: ] } -* what to ``project``: Determing which columns the query will return:: +* what to ``project``: Determining which columns the query will return:: - queryhelp = { + query_dict = { 'path':[PwCalculation], 'project':{ PwCalculation:['user_id', 'id'], @@ -363,7 +368,7 @@ To do this, you have to specify: If you are using JSONB columns, you can also project a value stored inside the json:: - queryhelp = { + query_dict = { 'path':[ PwCalculation, StructureData, @@ -384,7 +389,7 @@ To do this, you have to specify: from aiida.common import timezone from datetime import timedelta - queryhelp = { + query_dict = { 'path':[ {'cls':PwCalculation}, # PwCalculation with structure as output {'cls':StructureData} @@ -397,10 +402,10 @@ To do this, you have to specify: } } -If you want to include filters and projections on links between nodes, you will have to add these to filters and projections in the queryhelp. +If you want to include filters and projections on links between nodes, you will have to add these to filters and projections in the query dictionary. 
Let's take an example from before and add a few filters on the link:: - queryhelp = { + query_dict = { 'path':[ {'cls':PwCalculation, 'tag':'relax'}, # PwCalculation with structure as output {'cls':StructureData, 'tag':'structure'} @@ -424,7 +429,7 @@ Notice that the tag for the link, by default, is the tag of the two connecting n Alternatively, you can choose the tag for the edge in the path when defining the entity to join using ``edge_tag``:: - queryhelp = { + query_dict = { 'path':[ {'cls':PwCalculation, 'tag':'relax'}, # Relaxation with structure as output { @@ -450,10 +455,10 @@ Alternatively, you can choose the tag for the edge in the path when defining the Limits and offset can be set directly like this:: - queryhelp = { + query_dict = { 'path':[Node], 'limit':10, 'offset':20 } -That queryhelp would tell the QueryBuilder to return 10 rows after the first 20 have been skipped. +That ``query_dict`` would tell the QueryBuilder to return 10 rows after the first 20 have been skipped. 
diff --git a/tests/orm/test_querybuilder.py b/tests/orm/test_querybuilder.py index a9f743d3da..37f69a020b 100644 --- a/tests/orm/test_querybuilder.py +++ b/tests/orm/test_querybuilder.py @@ -10,19 +10,20 @@ # pylint: disable=attribute-defined-outside-init,invalid-name,no-self-use,missing-docstring,too-many-lines,unused-argument """Tests for the QueryBuilder.""" from collections import defaultdict +import copy from datetime import date, datetime, timedelta from itertools import chain import warnings import pytest -from aiida import orm +from aiida import orm, plugins from aiida.common.links import LinkType from aiida.manage import configuration @pytest.mark.usefixtures('clear_database_before_test') -class TestQueryBuilder: +class TestBasic: def test_date_filters_support(self): """Verify that `datetime.date` is supported in filters.""" @@ -546,6 +547,19 @@ def test_append_validation(self): # So this should work now: qb.append(orm.StructureData, tag='s').limit(2).dict() + def test_tuples(self): + """Test appending ``cls`` tuples.""" + orm.Group(label='helloworld').store() + + qb = orm.QueryBuilder().append(orm.Group, filters={'label': 'helloworld'}) + assert qb.count() == 1 + + qb = orm.QueryBuilder().append((orm.Group,), filters={'label': 'helloworld'}) + assert qb.count() == 1 + + qb = orm.QueryBuilder().append(cls=(orm.Group,)) + assert qb.count() == 1 + def test_tags(self): qb = orm.QueryBuilder() qb.append(orm.Node, tag='n1') @@ -628,7 +642,7 @@ def test_direction_keyword(self): qb.append(orm.Data, with_incoming='c1', tag='d2or4') qb.append(orm.CalculationNode, tag='c2', with_incoming='d2or4') qb.append(orm.Data, tag='d3', with_incoming='c2', project='id') - qh = qb.queryhelp # saving query for later + qh = qb.as_dict() # saving query for later qb.append(orm.Data, direction=-4, project='id') res1 = {item[1] for item in qb.all()} assert res1 == {d1.id} @@ -681,16 +695,64 @@ def test_first_multiple_projections(self): assert isinstance(result[1], orm.Data) -class 
TestQueryHelp: +class TestRepresentations: + """Test representing the query in different formats.""" @pytest.fixture(autouse=True) - def init_db(self, clear_database_before_test, aiida_localhost): - self.computer = aiida_localhost + def init_db(self, clear_database_before_test, data_regression, file_regression): + self.regress_dict = data_regression.check + self.regress_str = file_regression.check + + def test_str(self): + """Test ``str(qb)`` returns the correct string.""" + qb = orm.QueryBuilder().append(orm.Data, project=['id', 'uuid']).order_by({orm.Data: 'id'}) + self.regress_str(str(qb)) + + def test_as_sql(self): + """Test ``qb.as_sql(inline=False)`` returns the correct string.""" + qb = orm.QueryBuilder() + qb.append(orm.Node, project=['uuid'], filters={'extras.tag4': 'appl_pecoal'}) + self.regress_str(qb.as_sql(inline=False)) - def test_queryhelp(self): + def test_as_sql_inline(self): + """Test ``qb.as_sql(inline=True)`` returns the correct string.""" + qb = orm.QueryBuilder() + qb.append(orm.Node, project=['uuid'], filters={'extras.tag4': 'appl_pecoal'}) + self.regress_str(qb.as_sql(inline=True)) + + def test_as_sql_literal_quote(self): + """Test that literal values can be rendered.""" + qb = orm.QueryBuilder() + qb.append(plugins.DataFactory('structure'), project=['uuid'], filters={'extras.elements': {'contains': ['Si']}}) + self.regress_str(qb.as_sql(inline=True)) + + def test_as_dict(self): + """Test ``qb.as_dict()`` returns the correct dict.""" + qb = orm.QueryBuilder() + qb.append(orm.Node, filters={'extras.tag4': 'appl_pecoal'}) + self.regress_dict(qb.as_dict()) + + def test_round_trip(self): + """Test recreating a QueryBuilder from the ``as_dict`` representation + + We test appending a Data node and a Process node for variety, as well + as a generic Node specifically because it translates to `entity_type` + as an empty string (which can potentially cause problems). 
""" - Here I test the queryhelp by seeing whether results are the same as using the append method. - I also check passing of tuples. + qb1 = orm.QueryBuilder() + qb1.append(orm.Node) + qb1.append(orm.Data) + qb1.append(orm.CalcJobNode) + + qb2 = orm.QueryBuilder.from_dict(qb1.as_dict()) + assert qb1.as_dict() == qb2.as_dict() + + qb3 = copy.deepcopy(qb1) + assert qb1.as_dict() == qb3.as_dict() + + def test_round_trip_append(self): + """Test the `as_dict` and `from_dict` methods, + by seeing whether results are the same as using the append method. """ g = orm.Group(label='helloworld').store() for cls in (orm.StructureData, orm.Dict, orm.Data): @@ -714,44 +776,20 @@ def test_queryhelp(self): qb.append(cls, filters={'attributes.foo-qh2': 'bar'}, subclassing=subclassing, project='uuid') assert qb.count() == expected_count - qh = qb.queryhelp - qb_new = orm.QueryBuilder(**qh) + dct = qb.as_dict() + qb_new = orm.QueryBuilder.from_dict(dct) assert qb_new.count() == expected_count assert sorted([uuid for uuid, in qb.all()]) == sorted([uuid for uuid, in qb_new.all()]) - qb = orm.QueryBuilder().append(orm.Group, filters={'label': 'helloworld'}) - assert qb.count() == 1 - - qb = orm.QueryBuilder().append((orm.Group,), filters={'label': 'helloworld'}) - assert qb.count() == 1 - - # populate computer - self.computer # pylint:disable=pointless-statement - qb = orm.QueryBuilder().append(orm.Computer,) - assert qb.count() == 1 - - qb = orm.QueryBuilder().append(cls=(orm.Computer,)) - assert qb.count() == 1 - def test_recreate_from_queryhelp(self): - """Test recreating a QueryBuilder from the Query Help - - We test appending a Data node and a Process node for variety, as well - as a generic Node specifically because it translates to `entity_type` - as an empty string (which can potentially cause problems). 
- """ - import copy - - qb1 = orm.QueryBuilder() - qb1.append(orm.Node) - qb1.append(orm.Data) - qb1.append(orm.CalcJobNode) - - qb2 = orm.QueryBuilder(**qb1.queryhelp) - assert qb1.queryhelp == qb2.queryhelp - - qb3 = copy.deepcopy(qb1) - assert qb1.queryhelp == qb3.queryhelp +def test_analyze_query(clear_database_before_test): + """Test the query plan is correctly generated.""" + qb = orm.QueryBuilder() + # include literal values in test + qb.append(orm.Data, filters={'extras.key': {'contains': ['x', 1]}}) + analysis_str = qb.analyze_query(verbose=True) + assert isinstance(analysis_str, str), analysis_str + assert 'uuid' in analysis_str, analysis_str @pytest.mark.usefixtures('clear_database_before_test') @@ -1434,7 +1472,7 @@ def test_statistics_default_class(self): res = list(qb.dict()[0].values())[0] assert res == expected_dict - # Ask the same query as above using queryhelp + # Ask the same query as above using QueryBuilder.as_dict() qh = {'project': {'computer': ['**']}, 'path': [{'tag': 'computer', 'cls': orm.Computer}]} qb = orm.QueryBuilder(**qh) # We expect one result diff --git a/tests/orm/test_querybuilder/test_as_dict.yml b/tests/orm/test_querybuilder/test_as_dict.yml new file mode 100644 index 0000000000..4add4c8a15 --- /dev/null +++ b/tests/orm/test_querybuilder/test_as_dict.yml @@ -0,0 +1,17 @@ +filters: + node_1: + extras.tag4: appl_pecoal + node_type: + like: '%' +limit: null +offset: null +order_by: [] +path: +- edge_tag: null + entity_type: '' + joining_keyword: null + joining_value: null + outerjoin: false + tag: node_1 +project: + node_1: [] diff --git a/tests/orm/test_querybuilder/test_as_sql.txt b/tests/orm/test_querybuilder/test_as_sql.txt new file mode 100644 index 0000000000..02f87c22b5 --- /dev/null +++ b/tests/orm/test_querybuilder/test_as_sql.txt @@ -0,0 +1 @@ +'SELECT db_dbnode_1.uuid \nFROM db_dbnode AS db_dbnode_1 \nWHERE CAST(db_dbnode_1.node_type AS VARCHAR) LIKE %(param_1)s AND CASE WHEN (jsonb_typeof((db_dbnode_1.extras #> 
%(extras_1)s)) = %(param_2)s) THEN (db_dbnode_1.extras #>> %(extras_1)s) = %(param_3)s ELSE %(param_4)s END' % {'param_1': '%', 'extras_1': ('tag4',), 'param_2': 'string', 'param_3': 'appl_pecoal', 'param_4': False} diff --git a/tests/orm/test_querybuilder/test_as_sql_inline.txt b/tests/orm/test_querybuilder/test_as_sql_inline.txt new file mode 100644 index 0000000000..4a7fce2b5f --- /dev/null +++ b/tests/orm/test_querybuilder/test_as_sql_inline.txt @@ -0,0 +1,3 @@ +SELECT db_dbnode_1.uuid +FROM db_dbnode AS db_dbnode_1 +WHERE CAST(db_dbnode_1.node_type AS VARCHAR) LIKE '%%' AND CASE WHEN (jsonb_typeof((db_dbnode_1.extras #> '{tag4}')) = 'string') THEN (db_dbnode_1.extras #>> '{tag4}') = 'appl_pecoal' ELSE false END diff --git a/tests/orm/test_querybuilder/test_as_sql_literal_quote.txt b/tests/orm/test_querybuilder/test_as_sql_literal_quote.txt new file mode 100644 index 0000000000..9aa14e798a --- /dev/null +++ b/tests/orm/test_querybuilder/test_as_sql_literal_quote.txt @@ -0,0 +1,3 @@ +SELECT db_dbnode_1.uuid +FROM db_dbnode AS db_dbnode_1 +WHERE CAST(db_dbnode_1.node_type AS VARCHAR) LIKE 'data.structure.%%' AND CAST((db_dbnode_1.extras #> '{elements}') AS JSONB) @> '["Si"]' diff --git a/tests/orm/test_querybuilder/test_str.txt b/tests/orm/test_querybuilder/test_str.txt new file mode 100644 index 0000000000..405e5075dc --- /dev/null +++ b/tests/orm/test_querybuilder/test_str.txt @@ -0,0 +1 @@ +QueryBuilder(path=[{'entity_type': 'data.Data.', 'tag': 'Data_1', 'joining_keyword': None, 'joining_value': None, 'edge_tag': None, 'outerjoin': False}], filters={'Data_1': {'node_type': {'like': 'data.%'}}}, project={'Data_1': [{'id': {}}, {'uuid': {}}]}, order_by=[{'Data_1': [{'id': {'order': 'asc'}}]}], limit=None, offset=None) \ No newline at end of file diff --git a/tests/restapi/test_routes.py b/tests/restapi/test_routes.py index d9876a245d..5bc85c38f7 100644 --- a/tests/restapi/test_routes.py +++ b/tests/restapi/test_routes.py @@ -1166,11 +1166,11 @@ def 
test_download_formats(self): ############### querybuilder ############### def test_querybuilder(self): - """Test POSTing a queryhelp dictionary as JSON to /querybuilder + """Test POSTing a QueryBuilder dictionary as JSON to /querybuilder This also checks that `full_type` is _not_ included in the result no matter the entity. """ - queryhelp = orm.QueryBuilder().append( + query_dict = orm.QueryBuilder().append( orm.CalculationNode, tag='calc', project=['id', 'uuid', 'user_id'], @@ -1180,7 +1180,7 @@ def test_querybuilder(self): 'order': 'desc' } }] - }).queryhelp + }).as_dict() expected_node_uuids = [] # dummy data already ordered 'desc' by 'id' @@ -1189,7 +1189,7 @@ def test_querybuilder(self): expected_node_uuids.append(calc['uuid']) with self.app.test_client() as client: - response = client.post(f'{self.get_url_prefix()}/querybuilder', json=queryhelp).json + response = client.post(f'{self.get_url_prefix()}/querybuilder', json=query_dict).json self.assertEqual('POST', response.get('method', '')) self.assertEqual('QueryBuilder', response.get('resource_type', '')) @@ -1215,6 +1215,8 @@ def test_get_querybuilder(self): This should return with 405 Method Not Allowed. Otherwise, a "conventional" JSON response should be returned with a helpful message. """ + from aiida.restapi.resources import QueryBuilder as qb_api + with self.app.test_client() as client: response_value = client.get(f'{self.get_url_prefix()}/querybuilder') response = response_value.json @@ -1224,21 +1226,14 @@ def test_get_querybuilder(self): self.assertEqual('GET', response.get('method', '')) self.assertEqual('QueryBuilder', response.get('resource_type', '')) - - message = ( - 'Method Not Allowed. Use HTTP POST requests to use the AiiDA QueryBuilder. ' - 'POST JSON data, which MUST be a valid QueryBuilder.queryhelp dictionary as a JSON object. 
' - 'See the documentation at https://aiida.readthedocs.io/projects/aiida-core/en/latest/topics/' - 'database.html?highlight=QueryBuilder#the-queryhelp for more information.' - ) - self.assertEqual(message, response.get('data', {}).get('message', '')) + self.assertEqual(qb_api.GET_MESSAGE, response.get('data', {}).get('message', '')) def test_querybuilder_user(self): """Retrieve a User through the use of the /querybuilder endpoint This also checks that `full_type` is _not_ included in the result no matter the entity. """ - queryhelp = orm.QueryBuilder().append( + query_dict = orm.QueryBuilder().append( orm.CalculationNode, tag='calc', project=['id', 'user_id'], @@ -1253,7 +1248,7 @@ def test_querybuilder_user(self): 'order': 'desc' } }] - }).queryhelp + }).as_dict() expected_user_ids = [] for calc in self.get_dummy_data()['calculations']: @@ -1261,7 +1256,7 @@ def test_querybuilder_user(self): expected_user_ids.append(calc['user_id']) with self.app.test_client() as client: - response = client.post(f'{self.get_url_prefix()}/querybuilder', json=queryhelp).json + response = client.post(f'{self.get_url_prefix()}/querybuilder', json=query_dict).json self.assertEqual('POST', response.get('method', '')) self.assertEqual('QueryBuilder', response.get('resource_type', '')) @@ -1290,7 +1285,7 @@ def test_querybuilder_project_explicit(self): Here "project" will use the wildcard (*). This should result in both CalculationNodes and Data to be returned. 
""" - queryhelp = orm.QueryBuilder().append( + builder = orm.QueryBuilder().append( orm.CalculationNode, tag='calc', project='*', @@ -1307,14 +1302,14 @@ def test_querybuilder_project_explicit(self): expected_calc_uuids = [] expected_data_uuids = [] - for calc, data in queryhelp.all(): + for calc, data in builder.all(): expected_calc_uuids.append(calc.uuid) expected_data_uuids.append(data.uuid) - queryhelp = queryhelp.queryhelp + query_dict = builder.as_dict() with self.app.test_client() as client: - response = client.post(f'{self.get_url_prefix()}/querybuilder', json=queryhelp).json + response = client.post(f'{self.get_url_prefix()}/querybuilder', json=query_dict).json self.assertEqual('POST', response.get('method', '')) self.assertEqual('QueryBuilder', response.get('resource_type', '')) @@ -1348,7 +1343,7 @@ def test_querybuilder_project_implicit(self): Here "project" will be an empty list, resulting in only the Data node being returned. """ - queryhelp = orm.QueryBuilder().append(orm.CalculationNode, tag='calc').append( + builder = orm.QueryBuilder().append(orm.CalculationNode, tag='calc').append( orm.Data, tag='data', with_incoming='calc', @@ -1359,13 +1354,13 @@ def test_querybuilder_project_implicit(self): }]}) expected_data_uuids = [] - for data in queryhelp.all(flat=True): + for data in builder.all(flat=True): expected_data_uuids.append(data.uuid) - queryhelp = queryhelp.queryhelp + query_dict = builder.as_dict() with self.app.test_client() as client: - response = client.post(f'{self.get_url_prefix()}/querybuilder', json=queryhelp).json + response = client.post(f'{self.get_url_prefix()}/querybuilder', json=query_dict).json self.assertEqual('POST', response.get('method', '')) self.assertEqual('QueryBuilder', response.get('resource_type', ''))