Skip to content

Commit

Permalink
Rename querybuilder join relationship names
Browse files Browse the repository at this point in the history
Renames the querybuilder relationship names used for joins to a new scheme.
Previously, complex names were used, but these were hard to remember. The new
scheme uses simple generic names, e.g. "with_computer" and gets the required
context from the class which is specified in the qb.append() method.

The main changes are:
1. - Change the relationship function map to a two-level dictionary, where the first
     level provides the context, indicating what class of node the relationship applies to.
2. - Add deprecation messages which are printed when the join functions are called
     using the old relationship names
3. - Remove the unimplemented "join_slaves/join_masters" methods

In addition, moving all deprecation warnings to the new system
(AiidaDeprecationWarning) and removing some very old deprecations
  • Loading branch information
ConradJohnston authored and sphuber committed Nov 21, 2018
1 parent fe93250 commit eaee1cc
Show file tree
Hide file tree
Showing 31 changed files with 381 additions and 347 deletions.
1 change: 0 additions & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,6 @@
aiida/orm/calculation/job/__init__.py|
aiida/orm/code.py|
aiida/orm/data/array/bands.py|
aiida/orm/data/array/__init__.py|
aiida/orm/data/array/kpoints.py|
aiida/orm/data/array/projection.py|
aiida/orm/data/array/xy.py|
Expand Down
4 changes: 2 additions & 2 deletions aiida/backends/general/abstractqueries.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ def get_statistics_dict(dataset):
q.append(Node, project=['id', 'ctime', 'type'], tag='node')

if user_pk is not None:
q.append(User, creator_of='node', project='email', filters={'pk': user_pk})
q.append(User, with_node='node', project='email', filters={'pk': user_pk})
qb_res = q.all()

# total count
Expand Down Expand Up @@ -156,7 +156,7 @@ def get_bands_and_parents_structure(self, args):
n_days_ago = now - datetime.timedelta(days=args.past_days)
bdata_filters.update({"ctime": {'>=': n_days_ago}})

qb.append(BandsData, tag="bdata", created_by="creator",
qb.append(BandsData, tag="bdata", with_user="creator",
filters=bdata_filters,
project=["id", "label", "ctime"]
)
Expand Down
14 changes: 7 additions & 7 deletions aiida/backends/tests/dataclasses.py
Original file line number Diff line number Diff line change
Expand Up @@ -2744,7 +2744,7 @@ def test_creation(self):
n.set_array('third', third)

# Check if the arrays are there
self.assertEquals(set(['first', 'second', 'third']), set(n.arraynames()))
self.assertEquals(set(['first', 'second', 'third']), set(n.get_arraynames()))
self.assertAlmostEquals(abs(first - n.get_array('first')).max(), 0.)
self.assertAlmostEquals(abs(second - n.get_array('second')).max(), 0.)
self.assertAlmostEquals(abs(third - n.get_array('third')).max(), 0.)
Expand All @@ -2765,7 +2765,7 @@ def test_creation(self):
n.set_array('first', first)

# Check if the arrays are there, and if I am getting the new one
self.assertEquals(set(['first', 'second']), set(n.arraynames()))
self.assertEquals(set(['first', 'second']), set(n.get_arraynames()))
self.assertAlmostEquals(abs(first - n.get_array('first')).max(), 0.)
self.assertAlmostEquals(abs(second - n.get_array('second')).max(), 0.)
self.assertEquals(first.shape, n.get_shape('first'))
Expand All @@ -2774,30 +2774,30 @@ def test_creation(self):
n.store()

# Same checks, after storing
self.assertEquals(set(['first', 'second']), set(n.arraynames()))
self.assertEquals(set(['first', 'second']), set(n.get_arraynames()))
self.assertAlmostEquals(abs(first - n.get_array('first')).max(), 0.)
self.assertAlmostEquals(abs(second - n.get_array('second')).max(), 0.)
self.assertEquals(first.shape, n.get_shape('first'))
self.assertEquals(second.shape, n.get_shape('second'))

# Same checks, again (this is checking the caching features)
self.assertEquals(set(['first', 'second']), set(n.arraynames()))
self.assertEquals(set(['first', 'second']), set(n.get_arraynames()))
self.assertAlmostEquals(abs(first - n.get_array('first')).max(), 0.)
self.assertAlmostEquals(abs(second - n.get_array('second')).max(), 0.)
self.assertEquals(first.shape, n.get_shape('first'))
self.assertEquals(second.shape, n.get_shape('second'))

# Same checks, after reloading
n2 = load_node(uuid=n.uuid)
self.assertEquals(set(['first', 'second']), set(n2.arraynames()))
self.assertEquals(set(['first', 'second']), set(n2.get_arraynames()))
self.assertAlmostEquals(abs(first - n2.get_array('first')).max(), 0.)
self.assertAlmostEquals(abs(second - n2.get_array('second')).max(), 0.)
self.assertEquals(first.shape, n2.get_shape('first'))
self.assertEquals(second.shape, n2.get_shape('second'))

# Same checks, after reloading with UUID
n2 = load_node(n.uuid, sub_classes=(ArrayData,))
self.assertEquals(set(['first', 'second']), set(n2.arraynames()))
self.assertEquals(set(['first', 'second']), set(n2.get_arraynames()))
self.assertAlmostEquals(abs(first - n2.get_array('first')).max(), 0.)
self.assertAlmostEquals(abs(second - n2.get_array('second')).max(), 0.)
self.assertEquals(first.shape, n2.get_shape('first'))
Expand All @@ -2811,7 +2811,7 @@ def test_creation(self):

# Again same checks, to verify that the attempts to delete/overwrite
# arrays did not damage the node content
self.assertEquals(set(['first', 'second']), set(n.arraynames()))
self.assertEquals(set(['first', 'second']), set(n.get_arraynames()))
self.assertAlmostEquals(abs(first - n.get_array('first')).max(), 0.)
self.assertAlmostEquals(abs(second - n.get_array('second')).max(), 0.)
self.assertEquals(first.shape, n.get_shape('first'))
Expand Down
14 changes: 7 additions & 7 deletions aiida/backends/tests/export_and_import.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,14 +168,14 @@ def test_cycle_structure_data(self):
# Check that there is a StructureData that is an output of a CalculationNode
qb = QueryBuilder()
qb.append(CalculationNode, project=['uuid'], tag='calculation')
qb.append(StructureData, output_of='calculation')
qb.append(StructureData, with_incoming='calculation')
self.assertGreater(len(qb.all()), 0)

# Check that there is a RemoteData that is a child and parent of a CalculationNode
qb = QueryBuilder()
qb.append(CalculationNode, tag='parent')
qb.append(RemoteData, project=['uuid'], output_of='parent', tag='remote')
qb.append(CalculationNode, output_of='remote')
qb.append(RemoteData, project=['uuid'], with_incoming='parent', tag='remote')
qb.append(CalculationNode, with_incoming='remote')
self.assertGreater(len(qb.all()), 0)


Expand Down Expand Up @@ -874,7 +874,7 @@ def get_hash_from_db_content(groupname):
qb.append(ParameterData, tag='p', project='*')
qb.append(CalculationNode, tag='c', project='*', edge_tag='p2c', edge_project=('label', 'type'))
qb.append(ArrayData, tag='a', project='*', edge_tag='c2a', edge_project=('label', 'type'))
qb.append(Group, filters={'name': groupname}, project='*', tag='g', group_of='a')
qb.append(Group, filters={'name': groupname}, project='*', tag='g', with_node='a')
# I want the query to contain something!
self.assertTrue(qb.count() > 0)
# The hash is given from the preservable entries in an export-import cycle,
Expand Down Expand Up @@ -1177,7 +1177,7 @@ def test_same_computer_import(self):
# computer.
qb = QueryBuilder()
qb.append(Computer, tag='comp')
qb.append(CalcJobNode, has_computer='comp', project=['label'])
qb.append(CalcJobNode, with_computer='comp', project=['label'])
self.assertEqual(qb.count(), 2, "Two calculations should be "
"found.")
ret_labels = set(_ for [_] in qb.all())
Expand Down Expand Up @@ -1401,7 +1401,7 @@ def test_different_computer_same_name_import(self):
qb = QueryBuilder()
qb.append(CalcJobNode, project=['label'], tag='jcalc')
qb.append(Computer, project=['name'],
computer_of='jcalc')
with_node='jcalc')
self.assertEqual(qb.count(), 3, "Three combinations expected.")
res = qb.all()
self.assertIn([calc1_label, comp1_name], res,
Expand Down Expand Up @@ -1538,7 +1538,7 @@ def get_all_node_links(self):
qb = QueryBuilder()
qb.append(Node, project='uuid', tag='input')
qb.append(Node, project='uuid', tag='output',
edge_project=['label', 'type'], output_of='input')
edge_project=['label', 'type'], with_incoming='input')
return qb.all()

def test_input_and_create_links(self):
Expand Down
20 changes: 10 additions & 10 deletions aiida/backends/tests/query.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,7 @@ def test_simple_query_2(self):
{
'cls': Node,
'tag': 'n2',
'output_of': 'n1'
'with_incoming': 'n1'
}
],
'filters': {
Expand Down Expand Up @@ -234,7 +234,7 @@ def test_simple_query_2(self):
{
'cls': Node,
'tag': 'n2',
'output_of': 'n1'
'with_incoming': 'n1'
}
],
'filters': {
Expand Down Expand Up @@ -412,15 +412,15 @@ def test_tags(self):
from aiida.orm.computers import Computer
qb = QueryBuilder()
qb.append(Node, tag='n1')
qb.append(Node, tag='n2', edge_tag='e1', output_of='n1')
qb.append(Node, tag='n3', edge_tag='e2', output_of='n2')
qb.append(Computer, computer_of='n3', tag='c1', edge_tag='nonsense')
qb.append(Node, tag='n2', edge_tag='e1', with_incoming='n1')
qb.append(Node, tag='n3', edge_tag='e2', with_incoming='n2')
qb.append(Computer, with_node='n3', tag='c1', edge_tag='nonsense')
self.assertEqual(qb.get_used_tags(), ['n1', 'n2', 'e1', 'n3', 'e2', 'c1', 'nonsense'])

# Now I am testing the default tags,
qb = QueryBuilder().append(StructureData).append(ProcessNode).append(
StructureData).append(
ParameterData, input_of=ProcessNode)
ParameterData, with_outgoing=ProcessNode)
self.assertEqual(qb.get_used_tags(), [
'StructureData_1', 'ProcessNode_1',
'StructureData_1--ProcessNode_1', 'StructureData_2',
Expand Down Expand Up @@ -520,15 +520,15 @@ def test_computer_json(self):
qb = QueryBuilder()
qb.append(ProcessNode, project=['id'], tag='calc')
qb.append(Computer, project=['id', 'transport_params'],
outerjoin=True, computer_of='calc')
outerjoin=True, with_node='calc')
qb.all()

# Checking the correct retrieval of _metadata which is
# a JSON field (in both backends).
qb = QueryBuilder()
qb.append(ProcessNode, project=['id'], tag='calc')
qb.append(Computer, project=['id', '_metadata'],
outerjoin=True, computer_of='calc')
outerjoin=True, with_node='calc')
qb.all()


Expand Down Expand Up @@ -797,15 +797,15 @@ def test_joins3_user_group(self):
# Search for the group of the user
qb = orm.QueryBuilder()
qb.append(orm.User, tag='user', filters={'id': {'==': user.id}})
qb.append(orm.Group, belongs_to='user',
qb.append(orm.Group, with_user='user',
filters={'id': {'==': group.id}})
self.assertEquals(qb.count(), 1, "The expected group that belongs to "
"the selected user was not found.")

# Search for the user that owns a group
qb = orm.QueryBuilder()
qb.append(orm.Group, tag='group', filters={'id': {'==': group.id}})
qb.append(orm.User, owner_of='group', filters={'id': {'==': user.id}})
qb.append(orm.User, with_group='group', filters={'id': {'==': user.id}})

self.assertEquals(qb.count(), 1, "The expected user that owns the "
"selected group was not found.")
Expand Down
10 changes: 5 additions & 5 deletions aiida/cmdline/commands/cmd_code.py
Original file line number Diff line number Diff line change
Expand Up @@ -289,12 +289,12 @@ def code_list(computer, input_plugin, all_entries, all_users, show_owner):
qb.append(Code, tag="code", filters=qb_code_filters, project=["id", "label"])
# We have a user assigned to the code so we can ask for the
# presence of a user even if there is no user filter
qb.append(orm.User, creator_of="code", project=["email"], filters=qb_user_filters)
qb.append(orm.User, with_node="code", project=["email"], filters=qb_user_filters)
# We also add the filter on computer. This will automatically
# return codes that have a computer (and of course satisfy the
# other filters). The codes that have a computer attached are the
# remote codes.
qb.append(orm.Computer, computer_of="code", project=["name"], filters=qb_computer_filters)
qb.append(orm.Computer, with_node="code", project=["name"], filters=qb_computer_filters)
qb.order_by({Code: {'id': 'asc'}})
print_list_res(qb, show_owner)

Expand All @@ -306,8 +306,8 @@ def code_list(computer, input_plugin, all_entries, all_users, show_owner):
qb.append(Code, tag="code", filters=qb_code_filters, project=["id", "label"])
# We have a user assigned to the code so we can ask for the
# presence of a user even if there is no user filter
qb.append(orm.User, creator_of="code", project=["email"], filters=qb_user_filters)
qb.append(orm.Computer, computer_of="code", project=["name"])
qb.append(orm.User, with_node="code", project=["email"], filters=qb_user_filters)
qb.append(orm.Computer, with_node="code", project=["name"])
qb.order_by({Code: {'id': 'asc'}})
print_list_res(qb, show_owner)

Expand All @@ -323,7 +323,7 @@ def code_list(computer, input_plugin, all_entries, all_users, show_owner):
qb.append(Code, tag="code", filters=qb_code_filters, project=["id", "label"])
# We have a user assigned to the code so we can ask for the
# presence of a user even if there is no user filter
qb.append(orm.User, creator_of="code", project=["email"], filters=qb_user_filters)
qb.append(orm.User, with_node="code", project=["email"], filters=qb_user_filters)
qb.order_by({Code: {'id': 'asc'}})
print_list_res(qb, show_owner)

Expand Down
2 changes: 1 addition & 1 deletion aiida/cmdline/commands/cmd_data/cmd_array.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,6 @@ def array_show(data):

for node in data:
the_dict = {}
for arrayname in node.arraynames():
for arrayname in node.get_arraynames():
the_dict[arrayname] = node.get_array(arrayname).tolist()
echo_dictionary(the_dict, 'json+date')
4 changes: 2 additions & 2 deletions aiida/cmdline/commands/cmd_data/cmd_list.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,13 +55,13 @@ def query(datatype, project, past_days, group_pks, all_users):
n_days_ago = now - datetime.timedelta(days=past_days)
data_filters.update({"ctime": {'>=': n_days_ago}})

qbl.append(datatype, tag="data", created_by="creator", filters=data_filters, project=project)
qbl.append(datatype, tag="data", with_user="creator", filters=data_filters, project=project)

# If there is a group restriction
if group_pks is not None:
group_filters = dict()
group_filters.update({"id": {"in": group_pks}})
qbl.append(orm.Group, tag="group", filters=group_filters, group_of="data")
qbl.append(orm.Group, tag="group", filters=group_filters, with_node="data")

qbl.order_by({datatype: {'ctime': 'asc'}})

Expand Down
4 changes: 2 additions & 2 deletions aiida/cmdline/commands/cmd_data/cmd_upf.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ def upf_listfamilies(elements, with_description):
query.add_filter(UpfData, {'attributes.element': {'in': elements}})
query.append(
orm.Group,
group_of='upfdata',
with_node='upfdata',
tag='group',
project=["name", "description"],
filters={"type": {
Expand All @@ -87,7 +87,7 @@ def upf_listfamilies(elements, with_description):
group_desc = res.get("group").get("description")
query = orm.QueryBuilder()
query.append(orm.Group, tag='thisgroup', filters={"name": {'like': group_name}})
query.append(UpfData, project=["id"], member_of='thisgroup')
query.append(UpfData, project=["id"], with_group='thisgroup')

if with_description:
description_string = ": {}".format(group_desc)
Expand Down
2 changes: 1 addition & 1 deletion aiida/cmdline/commands/cmd_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ def show(nodes, print_groups):
# pylint: disable=invalid-name
qb = QueryBuilder()
qb.append(Node, tag='node', filters={'id': {'==': node.pk}})
qb.append(Group, tag='groups', group_of='node', project=['id', 'name'])
qb.append(Group, tag='groups', with_node='node', project=['id', 'name'])

echo.echo("#### GROUPS:")

Expand Down
2 changes: 1 addition & 1 deletion aiida/cmdline/utils/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -287,7 +287,7 @@ def get_subtree(pk, level=0):
# In the future, we should specify here the type of link
# for now, CALL links are the only ones allowing calc-calc
# (we here really want instead to follow CALL links)
output_of='workcalculation',
with_incoming='workcalculation',
tag='subworkchains')
result = list(itertools.chain(*builder.distinct().all()))

Expand Down
8 changes: 4 additions & 4 deletions aiida/common/graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ def draw_link_settings(inp_id, out_id, link_label, link_type):
# This query gives me all the inputs of this node, and link labels and types!
input_query = QueryBuilder()
input_query.append(Node, filters={'id': node.pk}, tag='n')
input_query.append(Node, input_of='n', edge_project=('id', 'label', 'type'), project='*', tag='inp')
input_query.append(Node, with_outgoing='n', edge_project=('id', 'label', 'type'), project='*', tag='inp')
for inp, link_id, link_label, link_type in input_query.iterall():
# I removed this check, to me there is no way that this link was already referred to!
# if link_id not in links:
Expand All @@ -131,7 +131,7 @@ def draw_link_settings(inp_id, out_id, link_label, link_type):
# Query for the outputs, giving me also link labels and types:
output_query = QueryBuilder()
output_query.append(Node, filters={'id': node.pk}, tag='n')
output_query.append(Node, output_of='n', edge_project=('id', 'label', 'type'), project='*', tag='out')
output_query.append(Node, with_incoming='n', edge_project=('id', 'label', 'type'), project='*', tag='out')
# Iterate through results
for out, link_id, link_label, link_type in output_query.iterall():
# This link might have been drawn already, because the output is maybe
Expand Down Expand Up @@ -160,7 +160,7 @@ def draw_link_settings(inp_id, out_id, link_label, link_type):
# Query for the outputs:
output_query = QueryBuilder()
output_query.append(Node, filters={'id': node.pk}, tag='n')
output_query.append(Node, output_of='n', edge_project=('id', 'label', 'type'), project='*', tag='out')
output_query.append(Node, with_incoming='n', edge_project=('id', 'label', 'type'), project='*', tag='out')

for out, link_id, link_label, link_type in output_query.iterall():
# Draw the link
Expand All @@ -172,7 +172,7 @@ def draw_link_settings(inp_id, out_id, link_label, link_type):
if include_calculation_inputs and isinstance(node, ProcessNode):
input_query = QueryBuilder()
input_query.append(Node, filters={'id': node.pk}, tag='n')
input_query.append(Node, input_of='n', edge_project=('id', 'label', 'type'), project='*', tag='inp')
input_query.append(Node, with_outgoing='n', edge_project=('id', 'label', 'type'), project='*', tag='inp')
for inp, link_id, link_label, link_type in input_query.iterall():
# Also here, maybe it's just better not to check?
if link_id not in links:
Expand Down
2 changes: 1 addition & 1 deletion aiida/common/hashing.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ def is_password_usable(enc_pass):
using_sysrandom = True
except NotImplementedError:
import warnings
warnings.warn('A secure pseudo-random number generator is not available '
warnings.warn('A secure pseudo-random number generator is not available ' # pylint: disable=no-member
'on your system. Falling back to Mersenne Twister.')
using_sysrandom = False # pylint: disable=invalid-name

Expand Down
Loading

0 comments on commit eaee1cc

Please sign in to comment.