QueryBuilder: Remove implementation for has_key in SQLite storage
The SQLite-based storage plugins implemented the `has_key` operator for
the `QueryBuilder`, but the implementation is incorrect: at the very
least, the negation operator does not work. Since this can silently
return incorrect query results, it is best to remove the implementation
and raise a `NotImplementedError` instead. The same is done for
`contains`, which was not yet implemented but also did not raise an
explicit exception.
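
For illustration, a minimal sketch of the kind of query that is affected (the `Dict` node and the key name 'energy' are hypothetical, not taken from this commit). On the PostgreSQL-backed storage such a filter works as expected; on the SQLite-based storage plugins it now raises a `NotImplementedError` instead of silently returning wrong results:

    from aiida import orm

    # Nodes whose top-level attributes contain the key 'energy' (hypothetical key name).
    qb = orm.QueryBuilder()
    qb.append(orm.Dict, filters={'attributes': {'has_key': 'energy'}})
    qb.all()  # now raises NotImplementedError on SQLite-based storage

    # The negated form, which is the case the old implementation got wrong.
    qb = orm.QueryBuilder()
    qb.append(orm.Dict, filters={'attributes': {'!has_key': 'energy'}})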
sphuber committed Jun 29, 2024
1 parent 310ff1d commit 3dbed91
Showing 5 changed files with 14 additions and 18 deletions.
13 changes: 4 additions & 9 deletions src/aiida/storage/sqlite_zip/orm.py
@@ -284,17 +284,12 @@ def _cast_json_type(comparator: JSON.Comparator, value: Any) -> Tuple[ColumnElem
type_filter, casted_entity = _cast_json_type(database_entity, value)
return case((type_filter, casted_entity.ilike(value, escape='\\')), else_=False)

# if operator == 'contains':
# to-do, see: https://github.com/sqlalchemy/sqlalchemy/discussions/7836
if operator == 'contains':
# to-do, see: https://github.com/sqlalchemy/sqlalchemy/discussions/7836
raise NotImplementedError('The operator `contains` is not implemented for SQLite-based storage plugins.')

if operator == 'has_key':
return case(
(
func.json_type(database_entity) == 'object',
func.json_each(database_entity).table_valued('key', joins_implicitly=True).c.key == value,
),
else_=False,
)
raise NotImplementedError('The operator `has_key` is not implemented for SQLite-based storage plugins.')

if operator == 'in':
type_filter, casted_entity = _cast_json_type(database_entity, value[0])
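To illustrate the pitfall behind the removed `json_each`-based approach, here is a hypothetical, self-contained demonstration using plain `sqlite3` (not AiiDA; the table layout and key names are made up, and this is not necessarily the exact failure mode of the removed code). Because `json_each` expands a JSON object into one row per key, a per-row predicate like `key != 'a'` matches any node that has at least one other key, even when the key 'a' is present, which is the sort of silently wrong result a negated `has_key` can produce:

    import sqlite3  # requires a SQLite build with the JSON1 functions (the default in modern builds)

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE node (id INTEGER, attributes TEXT)')
    conn.execute('INSERT INTO node VALUES (?, ?)', (1, '{"a": 1, "b": 2}'))

    # Naive "does not have key a", written as a per-expanded-row predicate:
    rows = conn.execute(
        'SELECT DISTINCT node.id FROM node, json_each(node.attributes) '
        "WHERE json_each.key != 'a'"
    ).fetchall()
    print(rows)  # [(1,)] -- node 1 is returned although it *does* contain the key 'a'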
12 changes: 5 additions & 7 deletions tests/cmdline/commands/test_calcjob.py
@@ -241,7 +241,10 @@ def test_calcjob_outputcat(self):
retrieved.base.repository._repository.put_object_from_filelike(io.BytesIO(b'5\n'), 'aiida.out')
retrieved.base.repository._update_repository_metadata()

def test_calcjob_cleanworkdir_basic(self, pytestconfig):
# This currently fails with sqlite backend since the filtering relies on the `has_key` filter which is not
# implemented in SQLite, see https://github.com/aiidateam/aiida-core/pull/6497
@pytest.mark.requires_psql
def test_calcjob_cleanworkdir_basic(self):
"""Test verdi calcjob cleanworkdir"""
# Specifying no filtering options and no explicit calcjobs should exit with non-zero status
options = []
@@ -261,17 +264,12 @@ def test_calcjob_cleanworkdir_basic(self, pytestconfig):
# The flag should have been set
assert self.result_job.outputs.remote_folder.base.extras.get('cleaned') is True

# TODO: This currently fails with sqlite backend,
# since the filtering relies on the `has_key` filter which is not implemented in SQLite.
# https://github.com/aiidateam/aiida-core/issues/6256
marker_opt = pytestconfig.getoption('-m')
if 'not requires_psql' in marker_opt or 'presto' in marker_opt:
pytest.xfail('Known sqlite backend failure')
# Do it again should fail as the calcjob has been cleaned
options = ['-f', str(self.result_job.uuid)]
result = self.cli_runner.invoke(command.calcjob_cleanworkdir, options)
assert result.exception is not None, result.output

@pytest.mark.requires_psql
def test_calcjob_cleanworkdir_advanced(self):
# Check applying both p and o filters
for flag_p in ['-p', '--past-days']:
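The runtime `xfail` logic removed above is replaced by the `requires_psql` marker; a hedged sketch of how the pattern reads in general (the marker registration and the pytest invocation are assumptions, not shown in this commit):

    import pytest

    # Tests that depend on backend features such as the `has_key` filter are marked.
    @pytest.mark.requires_psql
    def test_something_using_has_key():
        ...

    # SQLite-only runs can then deselect them (assumed invocation):
    #   pytest -m "not requires_psql"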
1 change: 1 addition & 0 deletions tests/orm/test_querybuilder.py
@@ -1537,6 +1537,7 @@ def test_iterall_with_store_group(self):
for pk, pk_clone in zip(pks, [e[1] for e in sorted(pks_clone)]):
assert orm.load_node(pk) == orm.load_node(pk_clone)

@pytest.mark.requires_psql
@pytest.mark.usefixtures('aiida_profile_clean')
def test_iterall_persistence(self, manager):
"""Test that mutations made during ``QueryBuilder.iterall`` context are automatically committed and persisted.
5 changes: 3 additions & 2 deletions tests/storage/sqlite/test_orm.py
@@ -89,8 +89,9 @@
({'attributes.integer': {'in': [5, 6, 7]}}, 0),
({'attributes.integer': {'in': [1, 2, 3]}}, 1),
# object operators
({'attributes.dict': {'has_key': 'k'}}, 0),
({'attributes.dict': {'has_key': 'key1'}}, 1),
# Reenable when ``has_key`` operator is implemented, see https://github.com/aiidateam/aiida-core/issues/6498
# ({'attributes.dict': {'has_key': 'k'}}, 0),
# ({'attributes.dict': {'has_key': 'key1'}}, 1),
),
ids=json.dumps,
)
1 change: 1 addition & 0 deletions tests/test_nodes.py
@@ -162,6 +162,7 @@ def init_profile(self, aiida_localhost):
"""Initialize the profile."""
self.computer = aiida_localhost

@pytest.mark.requires_psql
def test_with_subclasses(self):
from aiida.plugins import DataFactory

