diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ecc4e09935..36b70314e2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -208,7 +208,6 @@ aiida/orm/calculation/job/__init__.py| aiida/orm/code.py| aiida/orm/data/array/bands.py| - aiida/orm/data/array/__init__.py| aiida/orm/data/array/kpoints.py| aiida/orm/data/array/projection.py| aiida/orm/data/array/xy.py| diff --git a/aiida/backends/general/abstractqueries.py b/aiida/backends/general/abstractqueries.py index 434b5d6ac3..ebadbcbd3c 100644 --- a/aiida/backends/general/abstractqueries.py +++ b/aiida/backends/general/abstractqueries.py @@ -116,7 +116,7 @@ def get_statistics_dict(dataset): q.append(Node, project=['id', 'ctime', 'type'], tag='node') if user_pk is not None: - q.append(User, creator_of='node', project='email', filters={'pk': user_pk}) + q.append(User, with_node='node', project='email', filters={'pk': user_pk}) qb_res = q.all() # total count @@ -156,7 +156,7 @@ def get_bands_and_parents_structure(self, args): n_days_ago = now - datetime.timedelta(days=args.past_days) bdata_filters.update({"ctime": {'>=': n_days_ago}}) - qb.append(BandsData, tag="bdata", created_by="creator", + qb.append(BandsData, tag="bdata", with_user="creator", filters=bdata_filters, project=["id", "label", "ctime"] ) diff --git a/aiida/backends/tests/dataclasses.py b/aiida/backends/tests/dataclasses.py index 62e3c6d3b8..888a2abac7 100644 --- a/aiida/backends/tests/dataclasses.py +++ b/aiida/backends/tests/dataclasses.py @@ -2744,7 +2744,7 @@ def test_creation(self): n.set_array('third', third) # Check if the arrays are there - self.assertEquals(set(['first', 'second', 'third']), set(n.arraynames())) + self.assertEquals(set(['first', 'second', 'third']), set(n.get_arraynames())) self.assertAlmostEquals(abs(first - n.get_array('first')).max(), 0.) self.assertAlmostEquals(abs(second - n.get_array('second')).max(), 0.) self.assertAlmostEquals(abs(third - n.get_array('third')).max(), 0.) 
@@ -2765,7 +2765,7 @@ def test_creation(self): n.set_array('first', first) # Check if the arrays are there, and if I am getting the new one - self.assertEquals(set(['first', 'second']), set(n.arraynames())) + self.assertEquals(set(['first', 'second']), set(n.get_arraynames())) self.assertAlmostEquals(abs(first - n.get_array('first')).max(), 0.) self.assertAlmostEquals(abs(second - n.get_array('second')).max(), 0.) self.assertEquals(first.shape, n.get_shape('first')) @@ -2774,14 +2774,14 @@ def test_creation(self): n.store() # Same checks, after storing - self.assertEquals(set(['first', 'second']), set(n.arraynames())) + self.assertEquals(set(['first', 'second']), set(n.get_arraynames())) self.assertAlmostEquals(abs(first - n.get_array('first')).max(), 0.) self.assertAlmostEquals(abs(second - n.get_array('second')).max(), 0.) self.assertEquals(first.shape, n.get_shape('first')) self.assertEquals(second.shape, n.get_shape('second')) # Same checks, again (this is checking the caching features) - self.assertEquals(set(['first', 'second']), set(n.arraynames())) + self.assertEquals(set(['first', 'second']), set(n.get_arraynames())) self.assertAlmostEquals(abs(first - n.get_array('first')).max(), 0.) self.assertAlmostEquals(abs(second - n.get_array('second')).max(), 0.) self.assertEquals(first.shape, n.get_shape('first')) @@ -2789,7 +2789,7 @@ def test_creation(self): # Same checks, after reloading n2 = load_node(uuid=n.uuid) - self.assertEquals(set(['first', 'second']), set(n2.arraynames())) + self.assertEquals(set(['first', 'second']), set(n2.get_arraynames())) self.assertAlmostEquals(abs(first - n2.get_array('first')).max(), 0.) self.assertAlmostEquals(abs(second - n2.get_array('second')).max(), 0.) 
self.assertEquals(first.shape, n2.get_shape('first')) @@ -2797,7 +2797,7 @@ def test_creation(self): # Same checks, after reloading with UUID n2 = load_node(n.uuid, sub_classes=(ArrayData,)) - self.assertEquals(set(['first', 'second']), set(n2.arraynames())) + self.assertEquals(set(['first', 'second']), set(n2.get_arraynames())) self.assertAlmostEquals(abs(first - n2.get_array('first')).max(), 0.) self.assertAlmostEquals(abs(second - n2.get_array('second')).max(), 0.) self.assertEquals(first.shape, n2.get_shape('first')) @@ -2811,7 +2811,7 @@ def test_creation(self): # Again same checks, to verify that the attempts to delete/overwrite # arrays did not damage the node content - self.assertEquals(set(['first', 'second']), set(n.arraynames())) + self.assertEquals(set(['first', 'second']), set(n.get_arraynames())) self.assertAlmostEquals(abs(first - n.get_array('first')).max(), 0.) self.assertAlmostEquals(abs(second - n.get_array('second')).max(), 0.) self.assertEquals(first.shape, n.get_shape('first')) diff --git a/aiida/backends/tests/export_and_import.py b/aiida/backends/tests/export_and_import.py index aaf54b9962..4442621467 100644 --- a/aiida/backends/tests/export_and_import.py +++ b/aiida/backends/tests/export_and_import.py @@ -168,14 +168,14 @@ def test_cycle_structure_data(self): # Check that there is a StructureData that is an output of a CalculationNode qb = QueryBuilder() qb.append(CalculationNode, project=['uuid'], tag='calculation') - qb.append(StructureData, output_of='calculation') + qb.append(StructureData, with_incoming='calculation') self.assertGreater(len(qb.all()), 0) # Check that there is a RemoteData that is a child and parent of a CalculationNode qb = QueryBuilder() qb.append(CalculationNode, tag='parent') - qb.append(RemoteData, project=['uuid'], output_of='parent', tag='remote') - qb.append(CalculationNode, output_of='remote') + qb.append(RemoteData, project=['uuid'], with_incoming='parent', tag='remote') + qb.append(CalculationNode, 
with_incoming='remote') self.assertGreater(len(qb.all()), 0) @@ -874,7 +874,7 @@ def get_hash_from_db_content(groupname): qb.append(ParameterData, tag='p', project='*') qb.append(CalculationNode, tag='c', project='*', edge_tag='p2c', edge_project=('label', 'type')) qb.append(ArrayData, tag='a', project='*', edge_tag='c2a', edge_project=('label', 'type')) - qb.append(Group, filters={'name': groupname}, project='*', tag='g', group_of='a') + qb.append(Group, filters={'name': groupname}, project='*', tag='g', with_node='a') # I want the query to contain something! self.assertTrue(qb.count() > 0) # The hash is given from the preservable entries in an export-import cycle, @@ -1177,7 +1177,7 @@ def test_same_computer_import(self): # computer. qb = QueryBuilder() qb.append(Computer, tag='comp') - qb.append(CalcJobNode, has_computer='comp', project=['label']) + qb.append(CalcJobNode, with_computer='comp', project=['label']) self.assertEqual(qb.count(), 2, "Two calculations should be " "found.") ret_labels = set(_ for [_] in qb.all()) @@ -1401,7 +1401,7 @@ def test_different_computer_same_name_import(self): qb = QueryBuilder() qb.append(CalcJobNode, project=['label'], tag='jcalc') qb.append(Computer, project=['name'], - computer_of='jcalc') + with_node='jcalc') self.assertEqual(qb.count(), 3, "Three combinations expected.") res = qb.all() self.assertIn([calc1_label, comp1_name], res, @@ -1538,7 +1538,7 @@ def get_all_node_links(self): qb = QueryBuilder() qb.append(Node, project='uuid', tag='input') qb.append(Node, project='uuid', tag='output', - edge_project=['label', 'type'], output_of='input') + edge_project=['label', 'type'], with_incoming='input') return qb.all() def test_input_and_create_links(self): diff --git a/aiida/backends/tests/query.py b/aiida/backends/tests/query.py index ccd5ac182e..299d28bd3a 100644 --- a/aiida/backends/tests/query.py +++ b/aiida/backends/tests/query.py @@ -199,7 +199,7 @@ def test_simple_query_2(self): { 'cls': Node, 'tag': 'n2', - 
'output_of': 'n1' + 'with_incoming': 'n1' } ], 'filters': { @@ -234,7 +234,7 @@ def test_simple_query_2(self): { 'cls': Node, 'tag': 'n2', - 'output_of': 'n1' + 'with_incoming': 'n1' } ], 'filters': { @@ -412,15 +412,15 @@ def test_tags(self): from aiida.orm.computers import Computer qb = QueryBuilder() qb.append(Node, tag='n1') - qb.append(Node, tag='n2', edge_tag='e1', output_of='n1') - qb.append(Node, tag='n3', edge_tag='e2', output_of='n2') - qb.append(Computer, computer_of='n3', tag='c1', edge_tag='nonsense') + qb.append(Node, tag='n2', edge_tag='e1', with_incoming='n1') + qb.append(Node, tag='n3', edge_tag='e2', with_incoming='n2') + qb.append(Computer, with_node='n3', tag='c1', edge_tag='nonsense') self.assertEqual(qb.get_used_tags(), ['n1', 'n2', 'e1', 'n3', 'e2', 'c1', 'nonsense']) # Now I am testing the default tags, qb = QueryBuilder().append(StructureData).append(ProcessNode).append( StructureData).append( - ParameterData, input_of=ProcessNode) + ParameterData, with_outgoing=ProcessNode) self.assertEqual(qb.get_used_tags(), [ 'StructureData_1', 'ProcessNode_1', 'StructureData_1--ProcessNode_1', 'StructureData_2', @@ -520,7 +520,7 @@ def test_computer_json(self): qb = QueryBuilder() qb.append(ProcessNode, project=['id'], tag='calc') qb.append(Computer, project=['id', 'transport_params'], - outerjoin=True, computer_of='calc') + outerjoin=True, with_node='calc') qb.all() # Checking the correct retrieval of _metadata which is @@ -528,7 +528,7 @@ def test_computer_json(self): qb = QueryBuilder() qb.append(ProcessNode, project=['id'], tag='calc') qb.append(Computer, project=['id', '_metadata'], - outerjoin=True, computer_of='calc') + outerjoin=True, with_node='calc') qb.all() @@ -797,7 +797,7 @@ def test_joins3_user_group(self): # Search for the group of the user qb = orm.QueryBuilder() qb.append(orm.User, tag='user', filters={'id': {'==': user.id}}) - qb.append(orm.Group, belongs_to='user', + qb.append(orm.Group, with_user='user', filters={'id': {'==': 
group.id}}) self.assertEquals(qb.count(), 1, "The expected group that belongs to " "the selected user was not found.") @@ -805,7 +805,7 @@ def test_joins3_user_group(self): # Search for the user that owns a group qb = orm.QueryBuilder() qb.append(orm.Group, tag='group', filters={'id': {'==': group.id}}) - qb.append(orm.User, owner_of='group', filters={'id': {'==': user.id}}) + qb.append(orm.User, with_group='group', filters={'id': {'==': user.id}}) self.assertEquals(qb.count(), 1, "The expected user that owns the " "selected group was not found.") diff --git a/aiida/cmdline/commands/cmd_code.py b/aiida/cmdline/commands/cmd_code.py index c66b9e7257..29612e5b59 100644 --- a/aiida/cmdline/commands/cmd_code.py +++ b/aiida/cmdline/commands/cmd_code.py @@ -289,12 +289,12 @@ def code_list(computer, input_plugin, all_entries, all_users, show_owner): qb.append(Code, tag="code", filters=qb_code_filters, project=["id", "label"]) # We have a user assigned to the code so we can ask for the # presence of a user even if there is no user filter - qb.append(orm.User, creator_of="code", project=["email"], filters=qb_user_filters) + qb.append(orm.User, with_node="code", project=["email"], filters=qb_user_filters) # We also add the filter on computer. This will automatically # return codes that have a computer (and of course satisfy the # other filters). The codes that have a computer attached are the # remote codes. 
- qb.append(orm.Computer, computer_of="code", project=["name"], filters=qb_computer_filters) + qb.append(orm.Computer, with_node="code", project=["name"], filters=qb_computer_filters) qb.order_by({Code: {'id': 'asc'}}) print_list_res(qb, show_owner) @@ -306,8 +306,8 @@ def code_list(computer, input_plugin, all_entries, all_users, show_owner): qb.append(Code, tag="code", filters=qb_code_filters, project=["id", "label"]) # We have a user assigned to the code so we can ask for the # presence of a user even if there is no user filter - qb.append(orm.User, creator_of="code", project=["email"], filters=qb_user_filters) - qb.append(orm.Computer, computer_of="code", project=["name"]) + qb.append(orm.User, with_node="code", project=["email"], filters=qb_user_filters) + qb.append(orm.Computer, with_node="code", project=["name"]) qb.order_by({Code: {'id': 'asc'}}) print_list_res(qb, show_owner) @@ -323,7 +323,7 @@ def code_list(computer, input_plugin, all_entries, all_users, show_owner): qb.append(Code, tag="code", filters=qb_code_filters, project=["id", "label"]) # We have a user assigned to the code so we can ask for the # presence of a user even if there is no user filter - qb.append(orm.User, creator_of="code", project=["email"], filters=qb_user_filters) + qb.append(orm.User, with_node="code", project=["email"], filters=qb_user_filters) qb.order_by({Code: {'id': 'asc'}}) print_list_res(qb, show_owner) diff --git a/aiida/cmdline/commands/cmd_data/cmd_array.py b/aiida/cmdline/commands/cmd_data/cmd_array.py index 17238b0146..0c90761f89 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_array.py +++ b/aiida/cmdline/commands/cmd_data/cmd_array.py @@ -30,6 +30,6 @@ def array_show(data): for node in data: the_dict = {} - for arrayname in node.arraynames(): + for arrayname in node.get_arraynames(): the_dict[arrayname] = node.get_array(arrayname).tolist() echo_dictionary(the_dict, 'json+date') diff --git a/aiida/cmdline/commands/cmd_data/cmd_list.py 
b/aiida/cmdline/commands/cmd_data/cmd_list.py index 7a6b45c8aa..474d3e16d9 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_list.py +++ b/aiida/cmdline/commands/cmd_data/cmd_list.py @@ -55,13 +55,13 @@ def query(datatype, project, past_days, group_pks, all_users): n_days_ago = now - datetime.timedelta(days=past_days) data_filters.update({"ctime": {'>=': n_days_ago}}) - qbl.append(datatype, tag="data", created_by="creator", filters=data_filters, project=project) + qbl.append(datatype, tag="data", with_user="creator", filters=data_filters, project=project) # If there is a group restriction if group_pks is not None: group_filters = dict() group_filters.update({"id": {"in": group_pks}}) - qbl.append(orm.Group, tag="group", filters=group_filters, group_of="data") + qbl.append(orm.Group, tag="group", filters=group_filters, with_node="data") qbl.order_by({datatype: {'ctime': 'asc'}}) diff --git a/aiida/cmdline/commands/cmd_data/cmd_upf.py b/aiida/cmdline/commands/cmd_data/cmd_upf.py index 583583e6fd..58afe91fde 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_upf.py +++ b/aiida/cmdline/commands/cmd_data/cmd_upf.py @@ -73,7 +73,7 @@ def upf_listfamilies(elements, with_description): query.add_filter(UpfData, {'attributes.element': {'in': elements}}) query.append( orm.Group, - group_of='upfdata', + with_node='upfdata', tag='group', project=["name", "description"], filters={"type": { @@ -87,7 +87,7 @@ def upf_listfamilies(elements, with_description): group_desc = res.get("group").get("description") query = orm.QueryBuilder() query.append(orm.Group, tag='thisgroup', filters={"name": {'like': group_name}}) - query.append(UpfData, project=["id"], member_of='thisgroup') + query.append(UpfData, project=["id"], with_group='thisgroup') if with_description: description_string = ": {}".format(group_desc) diff --git a/aiida/cmdline/commands/cmd_node.py b/aiida/cmdline/commands/cmd_node.py index e2011ea682..84046fe68e 100644 --- a/aiida/cmdline/commands/cmd_node.py +++ 
b/aiida/cmdline/commands/cmd_node.py @@ -159,7 +159,7 @@ def show(nodes, print_groups): # pylint: disable=invalid-name qb = QueryBuilder() qb.append(Node, tag='node', filters={'id': {'==': node.pk}}) - qb.append(Group, tag='groups', group_of='node', project=['id', 'name']) + qb.append(Group, tag='groups', with_node='node', project=['id', 'name']) echo.echo("#### GROUPS:") diff --git a/aiida/cmdline/utils/common.py b/aiida/cmdline/utils/common.py index edaf6bb620..c0718a461e 100644 --- a/aiida/cmdline/utils/common.py +++ b/aiida/cmdline/utils/common.py @@ -287,7 +287,7 @@ def get_subtree(pk, level=0): # In the future, we should specify here the type of link # for now, CALL links are the only ones allowing calc-calc # (we here really want instead to follow CALL links) - output_of='workcalculation', + with_incoming='workcalculation', tag='subworkchains') result = list(itertools.chain(*builder.distinct().all())) diff --git a/aiida/common/graph.py b/aiida/common/graph.py index 1cca1860b4..cee90712b7 100644 --- a/aiida/common/graph.py +++ b/aiida/common/graph.py @@ -116,7 +116,7 @@ def draw_link_settings(inp_id, out_id, link_label, link_type): # This query gives me all the inputs of this node, and link labels and types! input_query = QueryBuilder() input_query.append(Node, filters={'id': node.pk}, tag='n') - input_query.append(Node, input_of='n', edge_project=('id', 'label', 'type'), project='*', tag='inp') + input_query.append(Node, with_outgoing='n', edge_project=('id', 'label', 'type'), project='*', tag='inp') for inp, link_id, link_label, link_type in input_query.iterall(): # I removed this check, to me there is no way that this link was already referred to! 
# if link_id not in links: @@ -131,7 +131,7 @@ def draw_link_settings(inp_id, out_id, link_label, link_type): # Query for the outputs, giving me also link labels and types: output_query = QueryBuilder() output_query.append(Node, filters={'id': node.pk}, tag='n') - output_query.append(Node, output_of='n', edge_project=('id', 'label', 'type'), project='*', tag='out') + output_query.append(Node, with_incoming='n', edge_project=('id', 'label', 'type'), project='*', tag='out') # Iterate through results for out, link_id, link_label, link_type in output_query.iterall(): # This link might have been drawn already, because the output is maybe @@ -160,7 +160,7 @@ def draw_link_settings(inp_id, out_id, link_label, link_type): # Query for the outputs: output_query = QueryBuilder() output_query.append(Node, filters={'id': node.pk}, tag='n') - output_query.append(Node, output_of='n', edge_project=('id', 'label', 'type'), project='*', tag='out') + output_query.append(Node, with_incoming='n', edge_project=('id', 'label', 'type'), project='*', tag='out') for out, link_id, link_label, link_type in output_query.iterall(): # Draw the link @@ -172,7 +172,7 @@ def draw_link_settings(inp_id, out_id, link_label, link_type): if include_calculation_inputs and isinstance(node, ProcessNode): input_query = QueryBuilder() input_query.append(Node, filters={'id': node.pk}, tag='n') - input_query.append(Node, input_of='n', edge_project=('id', 'label', 'type'), project='*', tag='inp') + input_query.append(Node, with_outgoing='n', edge_project=('id', 'label', 'type'), project='*', tag='inp') for inp, link_id, link_label, link_type in input_query.iterall(): # Also here, maybe it's just better not to check? 
if link_id not in links: diff --git a/aiida/common/hashing.py b/aiida/common/hashing.py index 63abb89c0c..50ec95572e 100644 --- a/aiida/common/hashing.py +++ b/aiida/common/hashing.py @@ -105,7 +105,7 @@ def is_password_usable(enc_pass): using_sysrandom = True except NotImplementedError: import warnings - warnings.warn('A secure pseudo-random number generator is not available ' + warnings.warn('A secure pseudo-random number generator is not available ' # pylint: disable=no-member 'on your system. Falling back to Mersenne Twister.') using_sysrandom = False # pylint: disable=invalid-name diff --git a/aiida/orm/calculation/inline.py b/aiida/orm/calculation/inline.py index f89b0bcd65..5afd6b25b5 100644 --- a/aiida/orm/calculation/inline.py +++ b/aiida/orm/calculation/inline.py @@ -145,8 +145,11 @@ def copy_inline(source): are not catched. .. deprecated:: 1.0.0 + Use the ``@calcfunction`` decorator instead. """ import warnings + # If we call this DeprecationWarning, pycharm will properly strike out the function + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('this function has been deprecated, use `aiida.work.calcfunction` instead', DeprecationWarning) from aiida.work import workfunction @@ -196,8 +199,11 @@ def copy_inline(source=None): In any way the ``copy_inline`` will return the same results. .. 
deprecated:: 1.0.0 + Use the ``@calcfunction`` decorator instead """ import warnings + # If we call this DeprecationWarning, pycharm will properly strike out the function + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('this function has been deprecated, use `aiida.work.calcfunction` instead', DeprecationWarning) def wrapped_function(*args, **kwargs): diff --git a/aiida/orm/data/array/__init__.py b/aiida/orm/data/array/__init__.py index 5748579f53..f8d0908753 100644 --- a/aiida/orm/data/array/__init__.py +++ b/aiida/orm/data/array/__init__.py @@ -7,13 +7,15 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### +""" +AiiDA ORM data class storing (numpy) arrays +""" from __future__ import division from __future__ import print_function from __future__ import absolute_import from aiida.orm import Data - class ArrayData(Data): """ Store a set of arrays on disk (rather than on the database) in an efficient @@ -42,13 +44,9 @@ def delete_array(self, name): :param name: The name of the array to delete from the node. """ - import numpy - fname = '{}.npy'.format(name) if fname not in self.get_folder_list(): - raise KeyError( - "Array with name '{}' not found in node pk= {}".format( - name, self.pk)) + raise KeyError("Array with name '{}' not found in node pk= {}".format(name, self.pk)) # remove both file and attribute self.remove_path(fname) @@ -59,20 +57,6 @@ def delete_array(self, name): # property was not set. pass - def arraynames(self): - """ - Return a list of all arrays stored in the node, listing the files (and - not relying on the properties). - - .. deprecated:: 0.7 - Use :meth:`get_arraynames` instead. 
- """ - import warnings - - warnings.warn("arraynames is deprecated, use get_arraynames instead", - DeprecationWarning) - return self.get_arraynames() - def get_arraynames(self): """ Return a list of all arrays stored in the node, listing the files (and @@ -95,8 +79,7 @@ def _arraynames_from_properties(self): Return a list of all arrays stored in the node, listing the attributes starting with the correct prefix. """ - return [i[len(self.array_prefix):] for i in - self.attrs() if i.startswith(self.array_prefix)] + return [i[len(self.array_prefix):] for i in self.attrs() if i.startswith(self.array_prefix)] def get_shape(self, name): """ @@ -125,11 +108,12 @@ def get_array(self, name): # raw function used only internally def get_array_from_file(self, name): + """ + Return the array stored in a .npy file + """ fname = '{}.npy'.format(name) if fname not in self.get_folder_list(): - raise KeyError( - "Array with name '{}' not found in node pk= {}".format( - name, self.pk)) + raise KeyError("Array with name '{}' not found in node pk= {}".format(name, self.pk)) array = numpy.load(self.get_abs_path(fname)) return array @@ -168,9 +152,8 @@ def set_array(self, name, array): import numpy - if not (isinstance(array, numpy.ndarray)): - raise TypeError("ArrayData can only store numpy arrays. Convert " - "the object to an array first") + if not isinstance(array, numpy.ndarray): + raise TypeError("ArrayData can only store numpy arrays. 
Convert " "the object to an array first") # Check if the name is valid if not (name) or re.sub('[0-9a-zA-Z_]', '', name): @@ -179,17 +162,16 @@ def set_array(self, name, array): fname = "{}.npy".format(name) - with tempfile.NamedTemporaryFile() as f: + with tempfile.NamedTemporaryFile() as _file: # Store in a temporary file, and then add to the node - numpy.save(f, array) - f.flush() # Important to flush here, otherwise the next copy command + numpy.save(_file, array) + _file.flush() # Important to flush here, otherwise the next copy command # will just copy an empty file - self.add_path(f.name, fname) + self.add_path(_file.name, fname) # Mainly for convenience, for querying purposes (both stores the fact # that there is an array with that name, and its shape) - self._set_attr("{}{}".format(self.array_prefix, name), - list(array.shape)) + self._set_attr("{}{}".format(self.array_prefix, name), list(array.shape)) def _validate(self): """ @@ -204,8 +186,6 @@ def _validate(self): properties = self._arraynames_from_properties() if set(files) != set(properties): - raise ValidationError( - "Mismatch of files and properties for ArrayData" - " node (pk= {}): {} vs. {}".format(self.pk, - files, properties)) + raise ValidationError("Mismatch of files and properties for ArrayData" + " node (pk= {}): {} vs. 
{}".format(self.pk, files, properties)) super(ArrayData, self)._validate() diff --git a/aiida/orm/data/array/bands.py b/aiida/orm/data/array/bands.py index 24cdba9cbe..9573c00d3a 100644 --- a/aiida/orm/data/array/bands.py +++ b/aiida/orm/data/array/bands.py @@ -651,6 +651,7 @@ def _prepare_dat_1(self, *args, **kwargs): Use 'dat_multicolumn' format instead """ import warnings + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn( "dat_1 format is deprecated, use dat_multicolumn instead", DeprecationWarning) @@ -691,6 +692,7 @@ def _prepare_dat_2(self, *args, **kwargs): Use 'dat_block' format instead """ import warnings + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn( "dat_2 format is deprecated, use dat_blocks instead", DeprecationWarning) diff --git a/aiida/orm/data/array/kpoints.py b/aiida/orm/data/array/kpoints.py index 5ceb6b9e2d..a9305aef25 100644 --- a/aiida/orm/data/array/kpoints.py +++ b/aiida/orm/data/array/kpoints.py @@ -527,7 +527,8 @@ def bravais_lattice(self): .. deprecated:: 0.11 """ import warnings - warnings.warn('this method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin + warnings.warn('the bravais_lattice method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) return self.get_attr('bravais_lattice') @bravais_lattice.setter @@ -538,7 +539,8 @@ def bravais_lattice(self, value): .. 
deprecated:: 0.11 """ import warnings - warnings.warn('this method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin + warnings.warn('the bravais_lattice method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) self._set_bravais_lattice(value) def _set_bravais_lattice(self, value): @@ -548,7 +550,8 @@ def _set_bravais_lattice(self, value): .. deprecated:: 0.11 """ import warnings - warnings.warn('this method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin + warnings.warn('the _set_bravais_lattice method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) import copy if not isinstance(value, dict): @@ -592,7 +595,8 @@ def _get_or_create_bravais_lattice(self, :return bravais_lattice: the dictionary containing the symmetry info """ import warnings - warnings.warn('this method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin + warnings.warn('the _get_or_create_bravais_lattice method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) try: bravais_lattice = self.bravais_lattice @@ -640,7 +644,8 @@ def set_kpoints_path(self, value=None, kpoint_distance=None, cartesian=False, """ import warnings - warnings.warn('this method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin + warnings.warn('the set_kpoints_path method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) from 
aiida.tools.data.array.kpoints.legacy import get_explicit_kpoints_path @@ -680,7 +685,8 @@ def _find_bravais_info(self, epsilon_length=_default_epsilon_length, with extra parameters used by the get_special_points method) """ import warnings - warnings.warn('this method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin + warnings.warn('the _find_bravais_info method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) from aiida.tools.data.array.kpoints.legacy import find_bravais_info return find_bravais_info( @@ -713,7 +719,8 @@ def find_bravais_lattice(self, epsilon_length=_default_epsilon_length, eventual variation """ import warnings - warnings.warn('this method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin + warnings.warn('the find_bravais_lattice method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) if not self.is_stored: bravais_lattice = self._find_bravais_info(epsilon_length=epsilon_length, @@ -757,7 +764,8 @@ def get_special_points(self, cartesian=False, :note: We assume that the cell given by the cell property is the primitive unit cell """ import warnings - warnings.warn('this method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin + warnings.warn('the get_special_points method has been deprecated, see {}'.format(DEPRECATION_DOCS_URL), DeprecationWarning) from aiida.tools.data.array.kpoints.legacy import get_kpoints_path point_coords, path, bravais_info = get_kpoints_path( diff --git a/aiida/orm/data/code.py b/aiida/orm/data/code.py index 8a9370d53e..0997ec3f26 
100644 --- a/aiida/orm/data/code.py +++ b/aiida/orm/data/code.py @@ -152,7 +152,7 @@ def get_code_helper(cls, label, machinename=None): qb = QueryBuilder() qb.append(cls, filters={'label': {'==': label}}, project=['*'], tag='code') if machinename: - qb.append(Computer, filters={'name': {'==': machinename}}, computer_of='code') + qb.append(Computer, filters={'name': {'==': machinename}}, with_node='code') if qb.count() == 0: raise NotExistent("'{}' is not a valid code " "name.".format(label)) @@ -471,6 +471,7 @@ def new_calc(self, *args, **kwargs): :raise ValueError: if no default plugin was specified in the code. """ import warnings + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn( 'directly creating and submitting calculations is deprecated, use the {}\nSee:{}'.format( 'ProcessBuilder', DEPRECATION_DOCS_URL), DeprecationWarning) diff --git a/aiida/orm/groups.py b/aiida/orm/groups.py index 06f075049d..c51d2cb878 100644 --- a/aiida/orm/groups.py +++ b/aiida/orm/groups.py @@ -310,7 +310,8 @@ def get_or_create(cls, backend=None, **kwargs): in any case already stored) and created is a boolean saying """ import warnings - warnings.warn('this method has been deprecated use Group.objects.get_or_create() instead', DeprecationWarning) + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin + warnings.warn('this method has been deprecated use Group.objects.get_or_create() instead', DeprecationWarning) # pylint: disable=no-member return cls.objects(backend).get_or_create(**kwargs) diff --git a/aiida/orm/implementation/general/node.py b/aiida/orm/implementation/general/node.py index a6881c46b6..4c7aaf9e77 100644 --- a/aiida/orm/implementation/general/node.py +++ b/aiida/orm/implementation/general/node.py @@ -1709,7 +1709,7 @@ def store(self, with_transaction=True, use_cache=None): if current_autogroup.is_to_be_grouped(self): 
group_name = current_autogroup.get_group_name() if group_name is not None: - g = Group.get_or_create( + g = Group.objects.get_or_create( name=group_name, type_string=VERDIAUTOGROUP_TYPE)[0] g.add_nodes(self) diff --git a/aiida/orm/importexport.py b/aiida/orm/importexport.py index 3d1ea092fc..3485cec7ad 100644 --- a/aiida/orm/importexport.py +++ b/aiida/orm/importexport.py @@ -1603,19 +1603,19 @@ def fill_in_query(partial_query, originating_entity_str, current_entity_str, relationship_dic = { "Node": { - "Computer": "has_computer", - "Group": "member_of", - "User": "created_by" + "Computer": "with_computer", + "Group": "with_group", + "User": "with_user" }, "Group": { - "Node": "group_of" + "Node": "with_node" }, "Computer": { - "Node": "computer_of" + "Node": "with_node" }, "User": { - "Node": "creator_of", - "Group": "owner_of" + "Node": "with_node", + "Group": "with_group" } } @@ -1757,7 +1757,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, # INPUT(Data, ProcessNode) - Reversed qb = QueryBuilder() qb.append(Data, tag='predecessor', project=['id']) - qb.append(ProcessNode, output_of='predecessor', + qb.append(ProcessNode, with_incoming='predecessor', filters={'id': {'==': curr_node_id}}, edge_filters={ 'type': { @@ -1770,7 +1770,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, qb = QueryBuilder() qb.append(Data, tag='predecessor', project=['id'], filters={'id': {'==': curr_node_id}}) - qb.append(ProcessNode, output_of='predecessor', + qb.append(ProcessNode, with_incoming='predecessor', edge_filters={ 'type': { '==': LinkType.INPUT.value}}) @@ -1781,7 +1781,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, qb = QueryBuilder() qb.append(ProcessNode, tag='predecessor', filters={'id': {'==': curr_node_id}}) - qb.append(Data, output_of='predecessor', project=['id'], + qb.append(Data, with_incoming='predecessor', project=['id'], edge_filters={ 'type': { 'in': 
[LinkType.CREATE.value, @@ -1793,7 +1793,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, if create_reversed: qb = QueryBuilder() qb.append(ProcessNode, tag='predecessor') - qb.append(Data, output_of='predecessor', project=['id'], + qb.append(Data, with_incoming='predecessor', project=['id'], filters={'id': {'==': curr_node_id}}, edge_filters={ 'type': { @@ -1805,7 +1805,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, if return_reversed: qb = QueryBuilder() qb.append(ProcessNode, tag='predecessor') - qb.append(Data, output_of='predecessor', project=['id'], + qb.append(Data, with_incoming='predecessor', project=['id'], filters={'id': {'==': curr_node_id}}, edge_filters={ 'type': { @@ -1817,7 +1817,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, qb = QueryBuilder() qb.append(ProcessNode, tag='predecessor', filters={'id': {'==': curr_node_id}}) - qb.append(ProcessNode, output_of='predecessor', project=['id'], + qb.append(ProcessNode, with_incoming='predecessor', project=['id'], edge_filters={ 'type': { '==': LinkType.CALL.value}}) @@ -1828,7 +1828,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, if call_reversed: qb = QueryBuilder() qb.append(ProcessNode, tag='predecessor') - qb.append(ProcessNode, output_of='predecessor', project=['id'], + qb.append(ProcessNode, with_incoming='predecessor', project=['id'], filters={'id': {'==': curr_node_id}}, edge_filters={ 'type': { @@ -1852,7 +1852,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, if create_reversed: qb = QueryBuilder() qb.append(ProcessNode, tag='predecessor', project=['id']) - qb.append(Data, output_of='predecessor', + qb.append(Data, with_incoming='predecessor', filters={'id': {'==': curr_node_id}}, edge_filters={ 'type': { @@ -2002,7 +2002,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, 
links_qb.append(ProcessNode, project=['uuid'], tag='output', edge_filters={'type':{'==':LinkType.INPUT.value}}, - edge_project=['label', 'type'], output_of='input') + edge_project=['label', 'type'], with_incoming='input') for input_uuid, output_uuid, link_label, link_type in links_qb.iterall(): val = { 'input': str(input_uuid), @@ -2020,7 +2020,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, project=['uuid'], tag='output', filters={'id': {'in': all_nodes_pk}}, edge_filters={'type':{'==':LinkType.INPUT.value}}, - edge_project=['label', 'type'], output_of='input') + edge_project=['label', 'type'], with_incoming='input') for input_uuid, output_uuid, link_label, link_type in links_qb.iterall(): val = { 'input': str(input_uuid), @@ -2038,7 +2038,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, links_qb.append(Data, project=['uuid'], tag='output', edge_filters={'type': {'==': LinkType.CREATE.value}}, - edge_project=['label', 'type'], output_of='input') + edge_project=['label', 'type'], with_incoming='input') for input_uuid, output_uuid, link_label, link_type in links_qb.iterall(): val = { 'input': str(input_uuid), @@ -2057,7 +2057,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, links_qb.append(Data, project=['uuid'], tag='output', edge_filters={'type': {'==': LinkType.CREATE.value}}, - edge_project=['label', 'type'], output_of='input') + edge_project=['label', 'type'], with_incoming='input') for input_uuid, output_uuid, link_label, link_type in links_qb.iterall(): val = { 'input': str(input_uuid), @@ -2075,7 +2075,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, links_qb.append(Data, project=['uuid'], tag='output', edge_filters={'type': {'==': LinkType.RETURN.value}}, - edge_project=['label', 'type'], output_of='input') + edge_project=['label', 'type'], with_incoming='input') for input_uuid, output_uuid, link_label, link_type in 
links_qb.iterall(): val = { 'input': str(input_uuid), @@ -2094,7 +2094,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, project=['uuid'], tag='output', filters={'id': {'in': all_nodes_pk}}, edge_filters={'type': {'==': LinkType.RETURN.value}}, - edge_project=['label', 'type'], output_of='input') + edge_project=['label', 'type'], with_incoming='input') for input_uuid, output_uuid, link_label, link_type in links_qb.iterall(): val = { 'input': str(input_uuid), @@ -2113,7 +2113,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, links_qb.append(ProcessNode, project=['uuid'], tag='output', edge_filters={'type': {'==': LinkType.CALL.value}}, - edge_project=['label', 'type'], output_of='input') + edge_project=['label', 'type'], with_incoming='input') for input_uuid, output_uuid, link_label, link_type in links_qb.iterall(): val = { 'input': str(input_uuid), @@ -2133,7 +2133,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, project=['uuid'], tag='output', filters={'id': {'in': all_nodes_pk}}, edge_filters={'type': {'==': LinkType.CALL.value}}, - edge_project=['label', 'type'], output_of='input') + edge_project=['label', 'type'], with_incoming='input') for input_uuid, output_uuid, link_label, link_type in links_qb.iterall(): val = { 'input': str(input_uuid), @@ -2156,7 +2156,7 @@ def export_tree(what, folder,allowed_licenses=None, forbidden_licenses=None, filters={'id': {'==': curr_group}}, project=['uuid'], tag='group') group_uuid_qb.append(entity_names_to_entities[NODE_ENTITY_NAME], - project=['uuid'], member_of='group') + project=['uuid'], with_group='group') for res in group_uuid_qb.iterall(): if str(res[0]) in groups_uuid: groups_uuid[str(res[0])].append(str(res[1])) diff --git a/aiida/orm/node/process/calculation/calcjob.py b/aiida/orm/node/process/calculation/calcjob.py index 7e7e261a9b..a05ca4d5c4 100644 --- a/aiida/orm/node/process/calculation/calcjob.py +++ 
b/aiida/orm/node/process/calculation/calcjob.py @@ -659,6 +659,7 @@ def set_queue_name(self, val): :param str val: the queue name """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) if val is not None: @@ -670,6 +671,7 @@ def set_account(self, val): :param str val: the account name """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) if val is not None: @@ -681,6 +683,7 @@ def set_qos(self, val): :param str val: the quality of service """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) if val is not None: @@ -693,6 +696,7 @@ def set_import_sys_environment(self, val): :param bool val: load the environment if True """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) self._set_attr('import_sys_environment', bool(val)) @@ -704,6 +708,7 @@ def get_import_sys_environment(self): :return: a boolean. If True the system environment will be load. 
""" + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) return self.get_attr('import_sys_environment', True) @@ -717,6 +722,7 @@ def set_environment_variables(self, env_vars_dict): In the remote-computer submission script, it's going to export variables as ``export 'keys'='values'`` """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) if not isinstance(env_vars_dict, dict): @@ -738,6 +744,7 @@ def get_environment_variables(self): Return an empty dictionary if no special environment variables have to be set for this calculation. """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) return self.get_attr('custom_environment_variables', {}) @@ -748,6 +755,7 @@ def set_priority(self, val): :param val: the values of priority as accepted by the cluster scheduler. """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) self._set_attr('priority', six.text_type(val)) @@ -758,6 +766,7 @@ def set_max_memory_kb(self, val): :param val: an integer. 
Default=None """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) self._set_attr('max_memory_kb', int(val)) @@ -768,6 +777,7 @@ def get_max_memory_kb(self): :return: an integer """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) return self.get_attr('max_memory_kb', None) @@ -778,6 +788,7 @@ def set_max_wallclock_seconds(self, val): :param val: An integer. Default=None """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) self._set_attr('max_wallclock_seconds', int(val)) @@ -789,6 +800,7 @@ def get_max_wallclock_seconds(self): :return: an integer :rtype: int """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) return self.get_attr('max_wallclock_seconds', None) @@ -802,6 +814,7 @@ def set_resources(self, resources_dict): (scheduler type can be found with calc.get_computer().get_scheduler_type() ) """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) # Note: for the time being, resources are only validated during the @@ -816,6 +829,7 @@ def set_withmpi(self, val): :param val: A boolean. 
Default=True """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) self._set_attr('withmpi', val) @@ -826,6 +840,7 @@ def get_withmpi(self): :return: a boolean. Default=True. """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) return self.get_attr('withmpi', True) @@ -839,6 +854,7 @@ def get_resources(self, full=False): :return: a dictionary """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) resources_dict = self.get_attr('jobresource_params', {}) @@ -857,6 +873,7 @@ def get_queue_name(self): :return: a string or None. """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) return self.get_attr('queue_name', None) @@ -867,6 +884,7 @@ def get_account(self): :return: a string or None. """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) return self.get_attr('account', None) @@ -877,6 +895,7 @@ def get_qos(self): :return: a string or None. 
""" + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) return self.get_attr('qos', None) @@ -887,6 +906,7 @@ def get_priority(self): :return: a string or None """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) return self.get_attr('priority', None) @@ -897,6 +917,7 @@ def get_prepend_text(self): which is going to be prepended in the scheduler-job script, just before the code execution. """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) return self.get_attr("prepend_text", "") @@ -911,6 +932,7 @@ def set_prepend_text(self, val): :param val: a (possibly multiline) string """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) self._set_attr("prepend_text", six.text_type(val)) @@ -921,6 +943,7 @@ def get_append_text(self): which is going to be appended in the scheduler-job script, just after the code execution. 
""" + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) return self.get_attr("append_text", "") @@ -933,6 +956,7 @@ def set_append_text(self, val): :param val: a (possibly multiline) string """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) self._set_attr("append_text", six.text_type(val)) @@ -947,6 +971,7 @@ def set_custom_scheduler_commands(self, val): inserted: with this method, the string is inserted before any non-scheduler command. """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) self._set_attr("custom_scheduler_commands", six.text_type(val)) @@ -961,6 +986,7 @@ def get_custom_scheduler_commands(self): :return: the custom scheduler command, or an empty string if no custom command was defined. """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) return self.get_attr("custom_scheduler_commands", "") @@ -974,6 +1000,7 @@ def get_mpirun_extra_params(self): Return an empty list if no parameters have been defined. 
""" + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) return self.get_attr("mpirun_extra_params", []) @@ -988,6 +1015,7 @@ def set_mpirun_extra_params(self, extra_params): :param extra_params: must be a list of strings, one for each extra parameter """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) if extra_params is None: @@ -1014,6 +1042,7 @@ def set_parser_name(self, parser): :param parser: a string identifying the module of the parser. Such module must be located within the folder 'aiida/parsers/plugins' """ + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) self._set_attr('parser', parser) @@ -1026,6 +1055,7 @@ def get_parser_name(self): :return: a string. 
""" + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('explicit option getter/setter methods are deprecated, use get_option and set_option', DeprecationWarning) @@ -1487,10 +1517,10 @@ def _list_calculations(cls, qb.append(cls, filters=calculation_filters, tag='calculation') if group_filters is not None: - qb.append(type='group', filters=group_filters, group_of='calculation') + qb.append(type='group', filters=group_filters, with_node='calculation') - qb.append(type='computer', computer_of='calculation', tag='computer') - qb.append(type='user', creator_of="calculation", tag="user") + qb.append(type='computer', with_node='calculation', tag='computer') + qb.append(type='user', with_node="calculation", tag="user") projections_dict = {'calculation': [], 'user': [], 'computer': []} @@ -1698,8 +1728,8 @@ def _get_all_with_state(cls, qb = QueryBuilder() qb.append(type="computer", tag='computer', filters=computerfilter) - qb.append(cls, filters=calcfilter, tag='calc', has_computer='computer') - qb.append(type="user", tag='user', filters=userfilter, creator_of="calc") + qb.append(cls, filters=calcfilter, tag='calc', with_computer='computer') + qb.append(type="user", tag='user', filters=userfilter, with_node="calc") if only_computer_user_pairs: qb.add_projection("computer", "*") @@ -1759,6 +1789,7 @@ def submit(self): import warnings from aiida.work.job_processes import ContinueCalcJob from aiida.work.launch import submit + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn('directly creating and submitting calculations is deprecated, use the {}\nSee:{}'.format( 'ProcessBuilder', DEPRECATION_DOCS_URL), DeprecationWarning) diff --git a/aiida/orm/querybuilder.py b/aiida/orm/querybuilder.py index cbc9a63a90..713c31fd81 100644 --- a/aiida/orm/querybuilder.py +++ b/aiida/orm/querybuilder.py @@ -19,33 +19,29 @@ when 
instantiated by the user. """ -# Warnings are issued for deprecations: from __future__ import division from __future__ import absolute_import from __future__ import print_function -import warnings # Checking for correct input with the inspect module from inspect import isclass as inspect_isclass import copy import logging import six from six.moves import range, zip - -from aiida.orm.node import Node - -# The SQLAlchemy functionalities: from sqlalchemy import and_, or_, not_, func as sa_func, select, join from sqlalchemy.types import Integer from sqlalchemy.orm import aliased from sqlalchemy.sql.expression import cast from sqlalchemy.dialects.postgresql import array -## AIIDA modules: -# For exception handling -from aiida.common.exceptions import InputValidationError, ConfigurationError + +from aiida.common.exceptions import InputValidationError # The way I get column as a an attribute to the orm class from aiida.common.links import LinkType - -from . import backends +from aiida.orm.node import Node +from aiida.orm import backends +from aiida.orm import computers +from aiida.orm import users +from aiida.orm import authinfos from aiida.orm.utils import convert from . import authinfos @@ -183,8 +179,8 @@ def get_querybuilder_classifiers_from_type(ormclasstype, obj): class QueryBuilder(object): """ - The class to query the AiiDA database. - + The class to query the AiiDA database. 
+ Usage:: from aiida.orm.querybuilder import QueryBuilder @@ -259,7 +255,7 @@ def __init__(self, backend=None, **kwargs): # qb = QueryBuilder() # qb.append(PwCalculation) - # qb.append(StructureData, input_of=PwCalculation) + # qb.append(StructureData, with_outgoing=PwCalculation) # The cls_to_tag_map in this case would be: # {PwCalculation:'PwCalculation', StructureData:'StructureData'} @@ -510,7 +506,7 @@ def append(self, # the structure joined as an input qb.append( cls=PwCalculation, - output_of=StructureData + with_incoming=StructureData ) :return: self @@ -545,27 +541,11 @@ def append(self, if not isinstance(type, six.string_types): raise InputValidationError("{} was passed as type, but is not a string".format(type)) - if kwargs.pop('link_tag', None) is not None: - raise DeprecationWarning("link_tag is deprecated, use edge_tag instead") - if kwargs.pop('autotag', None) is not None: - raise DeprecationWarning("autotag=True is now the default behavior, this keyword is deprecated") ormclass, ormclasstype, query_type_string = self._get_ormclass(cls, type) # TAG ################################# # Let's get a tag - label = kwargs.pop('label', None) - if label is not None: - if tag is None: - warnings.warn( - "Use of the keyword 'label' will be deprecated soon\n" - "Please use 'tag' instead", - DeprecationWarning, - ) - tag = label - else: - raise InputValidationError("Both label and tag specified") - if tag: if self._EDGE_TAG_DELIM in tag: raise InputValidationError("tag cannot contain {}\n" @@ -662,7 +642,11 @@ def append(self, # JOINING ##################################### try: # Get the functions that are implemented: - spec_to_function_map = list(self._get_function_map().keys()) + spec_to_function_map = [] + for secondary_dict in self._get_function_map().values(): + for key in secondary_dict.keys(): + if key not in spec_to_function_map: + spec_to_function_map.append(key) joining_keyword = kwargs.pop('joining_keyword', None) joining_value = 
kwargs.pop('joining_value', None) @@ -681,9 +665,9 @@ def append(self, else: joining_keyword = key joining_value = self._get_tag_from_specification(val) - # the default is that this vertex is 'output_of' the previous one + # the default is that this vertice is 'with_incoming' as the previous one if joining_keyword is None and len(self._path) > 0: - joining_keyword = 'output_of' + joining_keyword = 'with_incoming' joining_value = self._path[-1]['tag'] if joining_keyword == 'direction': @@ -691,9 +675,9 @@ def append(self, raise InputValidationError("direction=n expects n to be an integer") try: if joining_value < 0: - joining_keyword = 'input_of' + joining_keyword = 'with_outgoing' elif joining_value > 0: - joining_keyword = 'output_of' + joining_keyword = 'with_incoming' else: raise InputValidationError("direction=0 is not valid") joining_value = self._path[-abs(joining_value)]['tag'] @@ -1216,32 +1200,14 @@ def _check_dbentities(entities_cls_joined, entities_cls_to_join, relationship): " - {} as entity joined (expected {})\n" " - {} as entity to join (expected {})\n" "\n".format( - entities_cls_joined[0], - relationship, - entities_cls_to_join[0], - entities_cls_joined[0]._sa_class_manager.class_, - entities_cls_joined[1], - entities_cls_to_join[0]._sa_class_manager.class_, - entities_cls_to_join[1], - )) - - def _join_slaves(self, joined_entity, entity_to_join): - raise NotImplementedError("Master - slave relationships are not implemented") - # ~ call = aliased(Call) - # ~ self._query = self._query.join(call, call.caller_id == joined_entity.id) - # ~ self._query = self._query.join( - # ~ entity_to_join, - # ~ call.called_id == entity_to_join.id - # ~ ) - - def _join_masters(self, joined_entity, entity_to_join): - raise NotImplementedError("Master - slave relationships are not implemented") - # ~ call = aliased(Call) - # ~ self._query = self._query.join(call, call.called_id == joined_entity.id) - # ~ self._query = self._query.join( - # ~ entity_to_join, - # ~ 
call.caller_id == entity_to_join.id - # ~ ) + entities_cls_joined[0].__name__, + relationship, + entities_cls_to_join[0].__name__, + entities_cls_joined[0]._sa_class_manager.class_.__name__, + entities_cls_joined[1].__name__, + entities_cls_to_join[0]._sa_class_manager.class_.__name__, + entities_cls_to_join[1].__name__, + )) def _join_outputs(self, joined_entity, entity_to_join, isouterjoin): """ @@ -1250,14 +1216,14 @@ def _join_outputs(self, joined_entity, entity_to_join, isouterjoin): **joined_entity** and **entity_to_join** are joined with a link from **joined_entity** as input to **enitity_to_join** as output - (**enitity_to_join** is an *output_of* **joined_entity**) + (**enitity_to_join** is *with_incoming* **joined_entity**) """ - self._check_dbentities((joined_entity, self._impl.Node), (entity_to_join, self._impl.Node), 'output_of') + self._check_dbentities((joined_entity, self._impl.Node), (entity_to_join, self._impl.Node), 'with_incoming') aliased_edge = aliased(self._impl.Link) self._query = self._query.join( aliased_edge, aliased_edge.input_id == joined_entity.id, isouter=isouterjoin).join( - entity_to_join, aliased_edge.output_id == entity_to_join.id, isouter=isouterjoin) + entity_to_join, aliased_edge.output_id == entity_to_join.id, isouter=isouterjoin) return aliased_edge def _join_inputs(self, joined_entity, entity_to_join, isouterjoin): @@ -1267,10 +1233,11 @@ def _join_inputs(self, joined_entity, entity_to_join, isouterjoin): **joined_entity** and **entity_to_join** are joined with a link from **joined_entity** as output to **enitity_to_join** as input - (**enitity_to_join** is an *input_of* **joined_entity**) + (**enitity_to_join** is *with_outgoing* **joined_entity**) + """ - self._check_dbentities((joined_entity, self._impl.Node), (entity_to_join, self._impl.Node), 'input_of') + self._check_dbentities((joined_entity, self._impl.Node), (entity_to_join, self._impl.Node), 'with_outgoing') aliased_edge = aliased(self._impl.Link) self._query = 
self._query.join( aliased_edge, @@ -1328,9 +1295,9 @@ def _join_descendants_recursive(self, joined_entity, entity_to_join, isouterjoin self._query = self._query.join(descendants_recursive, descendants_recursive.c.ancestor_id == joined_entity.id).join( - entity_to_join, - descendants_recursive.c.descendant_id == entity_to_join.id, - isouter=isouterjoin) + entity_to_join, + descendants_recursive.c.descendant_id == entity_to_join.id, + isouter=isouterjoin) return descendants_recursive.c def _join_ancestors_recursive(self, joined_entity, entity_to_join, isouterjoin, filter_dict, expand_path=False): @@ -1383,9 +1350,9 @@ def _join_ancestors_recursive(self, joined_entity, entity_to_join, isouterjoin, self._query = self._query.join(ancestors_recursive, ancestors_recursive.c.descendant_id == joined_entity.id).join( - entity_to_join, - ancestors_recursive.c.ancestor_id == entity_to_join.id, - isouter=isouterjoin) + entity_to_join, + ancestors_recursive.c.ancestor_id == entity_to_join.id, + isouter=isouterjoin) return ancestors_recursive.c def _join_group_members(self, joined_entity, entity_to_join, isouterjoin): @@ -1399,9 +1366,9 @@ def _join_group_members(self, joined_entity, entity_to_join, isouterjoin): **joined_entity** and **entity_to_join** are joined via the table_groups_nodes table. from **joined_entity** as group to **enitity_to_join** as node. 
- (**enitity_to_join** is an *member_of* **joined_entity**) + (**enitity_to_join** is *with_group* **joined_entity**) """ - self._check_dbentities((joined_entity, self._impl.Group), (entity_to_join, self._impl.Node), 'member_of') + self._check_dbentities((joined_entity, self._impl.Group), (entity_to_join, self._impl.Node), 'with_group') aliased_group_nodes = aliased(self._impl.table_groups_nodes) self._query = self._query.join(aliased_group_nodes, aliased_group_nodes.c.dbgroup_id == joined_entity.id).join( entity_to_join, entity_to_join.id == aliased_group_nodes.c.dbnode_id, isouter=isouterjoin) @@ -1415,9 +1382,9 @@ def _join_groups(self, joined_entity, entity_to_join, isouterjoin): **joined_entity** and **entity_to_join** are joined via the table_groups_nodes table. from **joined_entity** as node to **enitity_to_join** as group. - (**enitity_to_join** is an *group_of* **joined_entity**) + (**enitity_to_join** is a group *with_node* **joined_entity**) """ - self._check_dbentities((joined_entity, self._impl.Node), (entity_to_join, self._impl.Group), 'group_of') + self._check_dbentities((joined_entity, self._impl.Node), (entity_to_join, self._impl.Group), 'with_node') aliased_group_nodes = aliased(self._impl.table_groups_nodes) self._query = self._query.join(aliased_group_nodes, aliased_group_nodes.c.dbnode_id == joined_entity.id).join( entity_to_join, entity_to_join.id == aliased_group_nodes.c.dbgroup_id, isouter=isouterjoin) @@ -1428,7 +1395,7 @@ def _join_creator_of(self, joined_entity, entity_to_join, isouterjoin): :param joined_entity: the aliased node :param entity_to_join: the aliased user to join to that node """ - self._check_dbentities((joined_entity, self._impl.Node), (entity_to_join, self._impl.User), 'creator_of') + self._check_dbentities((joined_entity, self._impl.Node), (entity_to_join, self._impl.User), 'with_node') self._query = self._query.join(entity_to_join, entity_to_join.id == joined_entity.user_id, isouter=isouterjoin) def 
_join_created_by(self, joined_entity, entity_to_join, isouterjoin): @@ -1436,7 +1403,7 @@ def _join_created_by(self, joined_entity, entity_to_join, isouterjoin): :param joined_entity: the aliased user you want to join to :param entity_to_join: the (aliased) node or group in the DB to join with """ - self._check_dbentities((joined_entity, self._impl.User), (entity_to_join, self._impl.Node), 'created_by') + self._check_dbentities((joined_entity, self._impl.User), (entity_to_join, self._impl.Node), 'with_user') self._query = self._query.join(entity_to_join, entity_to_join.user_id == joined_entity.id, isouter=isouterjoin) def _join_to_computer_used(self, joined_entity, entity_to_join, isouterjoin): @@ -1445,7 +1412,7 @@ def _join_to_computer_used(self, joined_entity, entity_to_join, isouterjoin): :param entity_to_join: the (aliased) node entity """ - self._check_dbentities((joined_entity, self._impl.Computer), (entity_to_join, self._impl.Node), 'has_computer') + self._check_dbentities((joined_entity, self._impl.Computer), (entity_to_join, self._impl.Node), 'with_computer') self._query = self._query.join( entity_to_join, entity_to_join.dbcomputer_id == joined_entity.id, isouter=isouterjoin) @@ -1454,7 +1421,7 @@ def _join_computer(self, joined_entity, entity_to_join, isouterjoin): :param joined_entity: An entity that can use a computer (eg a node) :param entity_to_join: aliased dbcomputer entity """ - self._check_dbentities((joined_entity, self._impl.Node), (entity_to_join, self._impl.Computer), 'computer_of') + self._check_dbentities((joined_entity, self._impl.Node), (entity_to_join, self._impl.Computer), 'with_node') self._query = self._query.join( entity_to_join, joined_entity.dbcomputer_id == entity_to_join.id, isouter=isouterjoin) @@ -1463,7 +1430,7 @@ def _join_group_user(self, joined_entity, entity_to_join, isouterjoin): :param joined_entity: An aliased dbgroup :param entity_to_join: aliased dbuser """ - self._check_dbentities((joined_entity, self._impl.Group), 
(entity_to_join, self._impl.User), 'computer_of') + self._check_dbentities((joined_entity, self._impl.Group), (entity_to_join, self._impl.User), 'with_group') self._query = self._query.join(entity_to_join, joined_entity.user_id == entity_to_join.id, isouter=isouterjoin) def _join_user_group(self, joined_entity, entity_to_join, isouterjoin): @@ -1471,63 +1438,70 @@ def _join_user_group(self, joined_entity, entity_to_join, isouterjoin): :param joined_entity: An aliased user :param entity_to_join: aliased group """ - self._check_dbentities((joined_entity, self._impl.User), (entity_to_join, self._impl.Group), 'computer_of') + self._check_dbentities((joined_entity, self._impl.User), (entity_to_join, self._impl.Group), 'with_user') self._query = self._query.join(entity_to_join, joined_entity.id == entity_to_join.user_id, isouter=isouterjoin) def _get_function_map(self): - d = { - 'with_incoming': self._join_outputs, - 'input_of': deprecate(self._join_inputs, xxx), - 'output_of': self._join_outputs, - 'slave_of': self._join_slaves, # not implemented - 'master_of': self._join_masters, # not implemented - 'direction': None, - 'group_of': self._join_groups, - 'member_of': self._join_group_members, - 'has_computer': self._join_to_computer_used, - 'computer_of': self._join_computer, - 'created_by': self._join_created_by, - 'creator_of': self._join_creator_of, - 'owner_of': self._join_group_user, - 'belongs_to': self._join_user_group, - 'ancestor_of': self._join_ancestors_recursive, - 'descendant_of': self._join_descendants_recursive + """ + Map relationship type keywords to functions + The new mapping (since 1.0.0a5) is a two level dictionary. The first level defines the entity which has been passed to + the qb.append function, and the second defines the relationship with respect to a given tag.
+ """ + mapping = { + 'node': { + 'with_incoming': self._join_outputs, + 'with_outgoing': self._join_inputs, + 'ancestor_of': self._join_ancestors_recursive, + 'descendant_of': self._join_descendants_recursive, + 'with_computer': self._join_to_computer_used, + 'with_user': self._join_created_by, + 'with_group': self._join_group_members, + 'direction': None, + 'input_of': self._deprecate(self._join_inputs, 'input_of', 'with_outgoing'), + 'output_of': self._deprecate(self._join_outputs, 'output_of', 'with_incoming'), + 'has_computer': self._deprecate(self._join_to_computer_used, 'has_computer', 'with_computer'), + 'created_by': self._deprecate(self._join_created_by, 'created_by', 'with_user'), + 'member_of': self._deprecate(self._join_group_members, 'member_of', 'with_group') + }, + 'computer': { + 'with_node': self._join_computer, + 'direction': None, + 'computer_of': self._deprecate(self._join_computer, 'computer_of', 'with_node') + }, + 'user': { + 'with_node': self._join_creator_of, + 'with_group': self._join_group_user, + 'direction': None, + 'creator_of': self._deprecate(self._join_creator_of, 'creator_of', 'with_node'), + 'owner_of': self._deprecate(self._join_group_user, 'owner_of', 'with_group') + }, + 'group': { + 'with_node': self._join_groups, + 'with_user': self._join_user_group, + 'direction': None, + 'group_of': self._deprecate(self._join_groups, 'group_of', 'with_node'), + 'belongs_to': self._deprecate(self._join_user_group, 'belongs_to', 'with_user') + }, } - return d - def deprecate(f, xxx): - import warning - warning.warn() - return f - + return mapping + def _get_connecting_node(self, index, joining_keyword=None, joining_value=None, **kwargs): """ :param querydict: A dictionary specifying how the current node is linked to other nodes. 
:param index: Index of this node within the path specification - - Valid (currently implemented) keys are: - - * *input_of* - * *output_of* - * *descendant_of* - * *ancestor_of* - * *direction* - * *group_of* - * *member_of* - * *has_computer* - * *computer_of* - * *created_by* - * *creator_of* - * *owner_of* - * *belongs_to* - - Future: - - * *master_of* - * *slave_of* + :param joining_keyword: the relation on which to join + :param joining_value: the tag of the nodes to be joined """ + from aiida.cmdline.utils import echo + + # Set the calling entity - to allow for the correct join relation to be set + if self._path[index]['type'] not in ['computer', 'user', 'group']: + calling_entity = 'node' + else: + calling_entity = self._path[index]['type'] if joining_keyword == 'direction': if joining_value > 0: @@ -1537,7 +1511,11 @@ def _get_connecting_node(self, index, joining_keyword=None, joining_value=None, else: raise Exception("Direction 0 is not valid") else: - func = self._get_function_map()[joining_keyword] + try: + func = self._get_function_map()[calling_entity][joining_keyword] + except KeyError: + echo.echo_critical("'{}' is not a valid joining keyword for a '{}' type entity".format( + joining_keyword, calling_entity)) if isinstance(joining_value, int): returnval = (self._aliased_path[joining_value], func) @@ -1547,8 +1525,8 @@ def _get_connecting_node(self, index, joining_keyword=None, joining_value=None, except KeyError: raise InputValidationError('Key {} is unknown to the types I know about:\n' '{}'.format( - self._get_tag_from_specification(joining_value), - self._tag_to_alias_map.keys())) + self._get_tag_from_specification(joining_value), + self._tag_to_alias_map.keys())) return returnval def _get_json_compatible(self, inp): @@ -1608,6 +1586,9 @@ def get_json_compatible_queryhelp(self): }) def _build_order(self, alias, entitytag, entityspec): + """ + Build the order parameter of the query + """ column_name = entitytag.split('.')[0] attrpath = 
entitytag.split('.')[1:] if attrpath and 'cast' not in entityspec.keys(): @@ -1649,7 +1630,7 @@ def _build(self): isouterjoin = verticespec.get('outerjoin') edge_tag = verticespec['edge_tag'] - if (verticespec['joining_keyword'] in ('descendant_of', 'ancestor_of')): + if verticespec['joining_keyword'] in ('descendant_of', 'ancestor_of'): # I treat those two cases in a special way. # I give them a filter_dict, to help the recursive function find a good # starting point. TODO: document this! @@ -2079,16 +2060,6 @@ def dict(self, batch_size=None): """ return list(self.iterdict(batch_size=batch_size)) - def get_results_dict(self): - """ - Deprecated, use :meth:`.dict` instead - """ - warnings.warn( - "get_results_dict will be deprecated in the future" - "User iterdict for generator or dict for list", DeprecationWarning) - - return self.dict() - def inputs(self, **kwargs): """ Join to inputs of previous vertice in path. @@ -2097,7 +2068,7 @@ def inputs(self, **kwargs): """ join_to = self._path[-1]['tag'] cls = kwargs.pop('cls', Node) - self.append(cls=cls, input_of=join_to, autotag=True, **kwargs) + self.append(cls=cls, with_outgoing=join_to, autotag=True, **kwargs) return self def outputs(self, **kwargs): @@ -2108,7 +2079,7 @@ def outputs(self, **kwargs): """ join_to = self._path[-1]['tag'] cls = kwargs.pop('cls', Node) - self.append(cls=cls, output_of=join_to, autotag=True, **kwargs) + self.append(cls=cls, with_incoming=join_to, autotag=True, **kwargs) return self def children(self, **kwargs): @@ -2132,3 +2103,29 @@ def parents(self, **kwargs): cls = kwargs.pop('cls', Node) self.append(cls=cls, ancestor_of=join_to, autotag=True, **kwargs) return self + + def _deprecate(self, function, deprecated_name, preferred_name, version='1.0.0a5'): + """ + Wrapper to return a decorated function which will print a deprecation warning when + it is called. + Specifically for when an old relationship type is used.
+ Note that it is the way of calling the function which is deprecated, not the function itself + + :param function: a deprecated function to call + :param deprecated_name: the name which is deprecated + :param preferred_name: the new name which is preferred + :param version: aiida version for which this takes effect. + """ + def wrapper(*args, **kwargs): + """ + Decorator to print a deprecation warning + """ + import warnings + from aiida.common.warnings import AiidaDeprecationWarning + warnings.warn( + "The relationship name '{}' is deprecated from version {} onwards. Use '{}' instead.".format( + deprecated_name, version, preferred_name), + AiidaDeprecationWarning, + stacklevel=2) + return function(*args, **kwargs) + return wrapper diff --git a/aiida/orm/utils/loaders.py b/aiida/orm/utils/loaders.py index 40679d2153..b4a35bd95a 100644 --- a/aiida/orm/utils/loaders.py +++ b/aiida/orm/utils/loaders.py @@ -420,7 +420,7 @@ def _get_query_builder_label_identifier(cls, identifier, classes): qb.append(cls=classes, tag='code', project=['*'], filters={'label': {'==': label}}) if machinename: - qb.append(Computer, filters={'name': {'==': machinename}}, computer_of='code') + qb.append(Computer, filters={'name': {'==': machinename}}, with_node='code') return qb diff --git a/aiida/orm/utils/remote.py b/aiida/orm/utils/remote.py index 09e4316922..8ea14b233d 100644 --- a/aiida/orm/utils/remote.py +++ b/aiida/orm/utils/remote.py @@ -79,8 +79,8 @@ def get_calcjob_remote_paths(pks=None, past_days=None, older_than=None, computer qb = orm.QueryBuilder() qb.append(CalcJobNode, tag='calc', project=['attributes.remote_workdir'], filters=filters_calc) - qb.append(orm.Computer, computer_of='calc', tag='computer', project=['*'], filters=filters_computer) - qb.append(orm.User, creator_of='calc', filters={'email': user.email}) + qb.append(orm.Computer, with_node='calc', tag='computer', project=['*'], filters=filters_computer) + qb.append(orm.User, with_node='calc', filters={'email': 
user.email}) if qb.count() == 0: return None diff --git a/aiida/restapi/translator/base.py b/aiida/restapi/translator/base.py index 920d209e18..8fbb258e98 100644 --- a/aiida/restapi/translator/base.py +++ b/aiida/restapi/translator/base.py @@ -93,7 +93,7 @@ def __init__(self, Class=None, **kwargs): self._query_help = { "path": [{ "type": self._qb_type, - "label": self.__label__ + "tag": self.__label__ }], "filters": {}, "project": {}, @@ -462,9 +462,9 @@ def get_formatted_result(self, label): results = [res[label] for res in self.qbobj.dict()] # TODO think how to make it less hardcoded - if self._result_type == 'input_of': + if self._result_type == 'with_outgoing': return {'inputs': results} - elif self._result_type == 'output_of': + elif self._result_type == 'with_incoming': return {'outputs': results} return {self.__label__: results} diff --git a/aiida/restapi/translator/node.py b/aiida/restapi/translator/node.py index 310e571c10..77f88d4109 100644 --- a/aiida/restapi/translator/node.py +++ b/aiida/restapi/translator/node.py @@ -145,9 +145,9 @@ def set_query_type(self, if query_type == "default": pass elif query_type == "inputs": - self._result_type = 'input_of' + self._result_type = 'with_outgoing' elif query_type == "outputs": - self._result_type = "output_of" + self._result_type = "with_incoming" elif query_type == "attributes": self._content_type = "attributes" self._alist = alist @@ -174,10 +174,10 @@ def set_query_type(self, raise InputValidationError("invalid result/content value: {}".format(query_type)) ## Add input/output relation to the query help - if self._result_type is not self.__label__: + if self._result_type != self.__label__: self._query_help["path"].append({ "type": "node.Node.", - "label": self._result_type, + "tag": self._result_type, self._result_type: self.__label__ }) @@ -606,7 +606,7 @@ def get_node_shape(ntype): # get all inputs qb_obj = QueryBuilder() qb_obj.append(Node, tag="main", project=['*'], filters=self._id_filter) - 
qb_obj.append(Node, tag="in", project=['*'], edge_project=['label'], input_of='main') + qb_obj.append(Node, tag="in", project=['*'], edge_project=['label'], with_outgoing='main') if qb_obj.count() > 0: for node_input in qb_obj.iterdict(): @@ -645,7 +645,7 @@ def get_node_shape(ntype): # get all outputs qb_obj = QueryBuilder() qb_obj.append(Node, tag="main", project=['*'], filters=self._id_filter) - qb_obj.append(Node, tag="out", project=['*'], edge_project=['label'], output_of='main') + qb_obj.append(Node, tag="out", project=['*'], edge_project=['label'], with_incoming='main') if qb_obj.count() > 0: for output in qb_obj.iterdict(): node = output['out']['*'] diff --git a/docs/source/conf.py b/docs/source/conf.py index 912c36edd2..3b6b898e6d 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -390,7 +390,7 @@ def setup(app): # Warnings to ignore when using the -n (nitpicky) option # We should ignore any python built-in exception, for instance -nitpick_ignore = [] +nitpick_ignore = [('py:class','Warning'), ('py:class', 'exceptions.Warning')] for line in open('nitpick-exceptions'): if line.strip() == "" or line.startswith("#"): diff --git a/docs/source/developer_guide/developers.rst b/docs/source/developer_guide/developers.rst index 5c87098e47..eb1ec1aa2f 100644 --- a/docs/source/developer_guide/developers.rst +++ b/docs/source/developer_guide/developers.rst @@ -474,13 +474,23 @@ In case a method is renamed or removed, this is the procedure to follow: import warnings - warnings.warn( - "OLDMETHODNAME is deprecated, use NEWMETHODNAME instead", - DeprecationWarning) - - (of course, replace ``OLDMETHODNAME`` and ``NEWMETHODNAME`` with the - correct string, and adapt the strings to the correct content if you are - only removing a function, or just adding a new one). 
+ # If we call this DeprecationWarning, pycharm will properly strike out the function + from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin + warnings.warn("<OLDMETHODNAME> is deprecated, use <NEWMETHODNAME> instead", DeprecationWarning) + + # <implementation of the method> + + (of course replace the parts between ``< >`` symbols with the + correct strings). + + The advantage of the method above is: + + - pycharm will still show the method crossed out + - Our ``AiidaDeprecationWarning`` does not inherit from ``DeprecationWarning``, so it will not be "hidden" by python + - User can disable our warnings (and only those) by using AiiDA + properties with:: + + verdi devel setproperty warnings.showdeprecations False Changing the config.json structure ++++++++++++++++++++++++++++++++++ diff --git a/docs/source/link_types/index.rst b/docs/source/link_types/index.rst index 2391972eda..3077983e0f 100644 --- a/docs/source/link_types/index.rst +++ b/docs/source/link_types/index.rst @@ -65,7 +65,7 @@ The links can be followed in both possible directions (forward & reverse) using the QueryBuilder. This requires to define additional “names” for each direction of the link, and they are documented at the :doc:`QueryBuilder section <../querying/querybuilder/usage>`. For example, -if there is an **INPUT** link from data D to calculation C, D is the -“input_of” C, or equivalently D is the “output_of” C. Currently, in the -QueryBuilder, input_of and output_of refer to any link type, where C is the -head of the arrow and D is the tail. +if there is an **INPUT** link from data D to calculation C, D is “with_outgoing” C, +or equivalently C is “with_incoming” D. Currently, in the QueryBuilder, with_incoming +and with_outgoing refer to any link type, where C is the head of the arrow and +D is the tail. 
diff --git a/docs/source/querying/querybuilder/usage.rst b/docs/source/querying/querybuilder/usage.rst index 2973a501db..6ac2417b31 100644 --- a/docs/source/querying/querybuilder/usage.rst +++ b/docs/source/querying/querybuilder/usage.rst @@ -273,7 +273,7 @@ Let's join a node to its output, e.g. StructureData and CalcJobNode (as output): qb = QueryBuilder() qb.append(StructureData, tag='structure') - qb.append(CalcJobNode, output_of='structure') + qb.append(CalcJobNode, with_incoming='structure') In above example we are querying structures and calculations, with the predicate that the calculation is an output of the structure (the same as saying that the structure is an input to the calculation) @@ -285,33 +285,33 @@ to a previous entity by using one of the keywords in the above table and as a value the tag of the vertice that it has a relationship with. There are several relationships that entities in Aiida can have: -+------------------+---------------+------------------+-------------------------------------------------+ -| **Entity from** | **Entity to** | **Relationship** | **Explanation** | -+==================+===============+==================+=================================================+ -| Node | Node | *input_of* | One node as input of another node | -+------------------+---------------+------------------+-------------------------------------------------+ -| Node | Node | *output_of* | One node as output of another node | -+------------------+---------------+------------------+-------------------------------------------------+ -| Node | Node | *ancestor_of* | One node as the ancestor of another node (Path) | -+------------------+---------------+------------------+-------------------------------------------------+ -| Node | Node | *descendant_of* | One node as descendant of another node (Path) | -+------------------+---------------+------------------+-------------------------------------------------+ -| Node | Group | *group_of* | The group of a node 
| -+------------------+---------------+------------------+-------------------------------------------------+ -| Group | Node | *member_of* | The node is a member of a group | -+------------------+---------------+------------------+-------------------------------------------------+ -| Node | Computer | *computer_of* | The computer of a node | -+------------------+---------------+------------------+-------------------------------------------------+ -| Computer | Node | *has_computer* | The node of a computer | -+------------------+---------------+------------------+-------------------------------------------------+ -| Node | User | *creator_of* | The creator of a node is a user | -+------------------+---------------+------------------+-------------------------------------------------+ -| User | Node | *created_by* | The node was created by a user | -+------------------+---------------+------------------+-------------------------------------------------+ -| User | Group | *belongs_to* | The node was created by a user | -+------------------+---------------+------------------+-------------------------------------------------+ -| Group | User | *owner_of* | The node was created by a user | -+------------------+---------------+------------------+-------------------------------------------------+ ++------------------+---------------+------------------+-----------------------------+-------------------------------------------------+ +| **Entity from** | **Entity to** | **Relationship** | **Deprecated Relationship** | **Explanation** | ++==================+===============+==================+=============================+=================================================+ +| Node | Node | *with_outgoing* | *input_of* | One node as input of another node | ++------------------+---------------+------------------+-----------------------------+-------------------------------------------------+ +| Node | Node | *with_incoming* | *output_of* | One node as output of another node | 
++------------------+---------------+------------------+-----------------------------+-------------------------------------------------+ +| Node | Node | *ancestor_of* | | One node as the ancestor of another node (Path) | ++------------------+---------------+------------------+-----------------------------+-------------------------------------------------+ +| Node | Node | *descendant_of* | | One node as descendant of another node (Path) | ++------------------+---------------+------------------+-----------------------------+-------------------------------------------------+ +| Node | Group | *with_node* | *group_of* | The group of a node | ++------------------+---------------+------------------+-----------------------------+-------------------------------------------------+ +| Group | Node | *with_group* | *member_of* | The node is a member of a group | ++------------------+---------------+------------------+-----------------------------+-------------------------------------------------+ +| Node | Computer | *with_node* | *computer_of* | The computer of a node | ++------------------+---------------+------------------+-----------------------------+-------------------------------------------------+ +| Computer | Node | *with_computer* | *has_computer* | The node of a computer | ++------------------+---------------+------------------+-----------------------------+-------------------------------------------------+ +| Node | User | *with_node* | *creator_of* | The creator of a node is a user | ++------------------+---------------+------------------+-----------------------------+-------------------------------------------------+ +| User | Node | *with_user* | *created_by* | The node was created by a user | ++------------------+---------------+------------------+-----------------------------+-------------------------------------------------+ +| User | Group | *with_user* | *belongs_to* | The node was created by a user | 
++------------------+---------------+------------------+-----------------------------+-------------------------------------------------+ +| Group            | User          | *with_group*     | *owner_of*                  | The user that owns this group                   | ++------------------+---------------+------------------+-----------------------------+-------------------------------------------------+ Some more examples:: @@ -319,18 +319,18 @@ Some more examples:: # StructureData as an input of a job calculation qb = QueryBuilder() qb.append(CalcJobNode, tag='calc') - qb.append(StructureData, input_of='calc') + qb.append(StructureData, with_outgoing='calc') # StructureData and ParameterData as inputs to a calculation qb = QueryBuilder() qb.append(CalcJobNode, tag='calc') - qb.append(StructureData, input_of='calc') - qb.append(ParameterDataData, input_of='calc') + qb.append(StructureData, with_outgoing='calc') + qb.append(ParameterData, with_outgoing='calc') # Filtering the remote data instance by the computer it ran on (name) qb = QueryBuilder() qb.append(RemoteData, tag='remote') - qb.append(Computer, computer_of='remote', filters={'name':{'==':'mycomputer'}}) + qb.append(Computer, with_node='remote', filters={'name':{'==':'mycomputer'}}) # Find all descendants of a structure with a certain uuid qb = QueryBuilder() @@ -524,7 +524,7 @@ for the wavefunctions has a value above 30.0 Ry:: qb.append(PwCalculation, project=['*'], tag='calc') qb.append( ParameterData, - input_of='calc', + with_outgoing='calc', filters={'attributes.SYSTEM.ecutwfc':{'>':30.0}}, project=[ 'attributes.SYSTEM.ecutwfc', @@ -567,14 +567,14 @@ You need to tell the QueryBuilder that:: ) qb.append( PwCalculation, - output_of='strucure', + with_incoming='strucure', project=['*'], tag='calc' ) qb.append( ParameterData, filters={'attributes.SYSTEM.ecutwfc':{'>':30.0}}, - input_of='calc', + with_outgoing='calc', tag='params' ) @@ -587,7 +587,7 @@ Cheats A few cheats to save some typing: * The default edge specification, if no keyword is provided, is 
always - *output_of* the previous vertice. + *with_incoming* the previous vertice. * Equality filters ('==') can be shortened, as will be shown below. * Tags are not necessary, you can simply use the class as a label. This works as long as the same Aiida-class is not used again @@ -606,7 +606,7 @@ A shorter version of the previous example:: qb.append( ParameterData, filters={'attributes.SYSTEM.ecutwfc':{'>':30.0}}, - input_of=PwCalculation + with_outgoing=PwCalculation ) @@ -722,8 +722,7 @@ Working with edges Another feature that had to be added are projections, filters and labels on the edges of the graphs, that is to say links or paths between nodes. It works the same way, just that the keyword is preceeded by '*link*'. -Let's take the above example, but put a filter on the label of the link, -project the label and label:: +Let's take the above example, but put a filter on the label of the link and project the link label:: qb = QueryBuilder() qb.append( @@ -864,16 +863,16 @@ What do you have to specify: of the path with tag "struc1":: edge_specification = queryhelp['path'][3] - edge_specification['output_of'] = 2 - edge_specification['output_of'] = StructureData - edge_specification['output_of'] = 'struc1' - edge_specification['input_of'] = 2 - edge_specification['input_of'] = StructureData - edge_specification['input_of'] = 'struc1' + edge_specification['with_incoming'] = 2 + edge_specification['with_incoming'] = StructureData + edge_specification['with_incoming'] = 'struc1' + edge_specification['with_outgoing'] = 2 + edge_specification['with_outgoing'] = StructureData + edge_specification['with_outgoing'] = 'struc1' * queryhelp_item['direction'] = integer - If any of the above specs ("input_of", "output_of") + If any of the above specs ("with_outgoing", "with_incoming") were not specified, the key "direction" is looked for. Directions are defined as distances in the tree. 1 is defined as one step down the tree along a link. 
@@ -915,7 +914,7 @@ What do you have to specify: }, { 'cls':ParameterData, - 'input_of':PwCalculation + 'with_outgoing':PwCalculation } ] } @@ -1035,9 +1034,9 @@ What do you have to specify: .. ~ ParameterData, .. ~ {'cls':PwCalculation, 'tag':'md'}, .. ~ {'cls':Trajectory}, -.. ~ {'cls':StructureData, 'input_of':'md'}, -.. ~ {'cls':Relax, 'input_of':StructureData}, -.. ~ {'cls':StructureData,'tag':'struc2','input_of':Relax} +.. ~ {'cls':StructureData, 'with_outgoing':'md'}, +.. ~ {'cls':Relax, 'with_outgoing':StructureData}, +.. ~ {'cls':StructureData,'tag':'struc2','with_outgoing':Relax} .. ~ ], .. ~ 'project':{ .. ~ ParameterData:{'attributes.IONS.tempw':{'cast':'f'}}, @@ -1119,14 +1118,14 @@ Let's take an example that we had and add a few filters on the link:: } } -Notice that the label for the link, by default, is the labels of the two connecting +Notice that the tag for the link, by default, is the tag of the two connecting nodes delimited by two dashes '--'. The order does not matter, the following queryhelp would results in the same query:: queryhelp = { 'path':[ - {'cls':Relax, 'label':'relax'}, # Relaxation with structure as output - {'cls':StructureData, 'label':'structure'} + {'cls':Relax, 'tag':'relax'}, # Relaxation with structure as output + {'cls':StructureData, 'tag':'structure'} ], 'filters':{ 'structure':{ @@ -1145,8 +1144,8 @@ The order does not matter, the following queryhelp would results in the same que } } -If you dislike that way to label the link, you can choose the linklabel in the -path when definining the entity to join:: +If you dislike that way to tag the link, you can choose the tag for the edge in the +path when defining the entity to join using ``edge_tag``:: queryhelp = { 'path':[ { 'cls':StructureData, 'label':'structure', - 'edge_tag':'ThisIsMyLinkLabel' # Definining the linklabel + 'edge_tag':'ThisIsMyLinkTag' # Defining the link tag } ], 'filters':{ @@ 
-1162,13 +1161,13 @@ path when definining the entity to join:: 'time':{'>': t}, 'id':{'>': 50} }, - 'ThisIsMyLinkLabel':{ # Using this linklabel + 'ThisIsMyLinkTag':{ # Using this link tag 'time':{'>': t}, 'label':{'like':'output_%'}, } }, 'project':{ - 'ThisIsMyLinkLabel':['label'], + 'ThisIsMyLinkTag':['label'], 'structure':['label'], 'relax':['label', 'state'], }