Skip to content

Commit

Permalink
Fixing the problem and cleaning the test
Browse files Browse the repository at this point in the history
  • Loading branch information
szoupanos committed Nov 28, 2017
1 parent 151eda0 commit a01bcbe
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 41 deletions.
11 changes: 4 additions & 7 deletions aiida/backends/sqlalchemy/models/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,18 +88,14 @@ def set_script_md5(self, md5):
self.script_md5 = md5
self.save()

# def add_data(self, dict, d_type):
# for k in dict.keys():
# p, create = self.data.get_or_create(name=k, data_type=d_type)
# p.set_value(dict[k])

def add_data(self, dict, d_type):
    """Store every key/value pair of *dict* on this workflow.

    For each entry an existing data record of the given type is fetched
    (or a new one created) via ``_get_or_create_data``, and its value is
    set to the dictionary entry's value.

    :param dict: mapping of data names to the values to store
    :param d_type: the data type tag used when looking up / creating records
    """
    for key, value in dict.items():
        record, _created = self._get_or_create_data(name=key, data_type=d_type)
        record.set_value(value)

def _get_or_create_data(self, name, data_type):
match_data = {name: _ for _ in self.data if _.name == name}
match_data = {name: _ for _ in self.data if _.name == name
and _.data_type == data_type}

if not match_data: # create case
dbdata = DbWorkflowData(parent_id=self.id, name=name, data_type=data_type)
Expand Down Expand Up @@ -250,7 +246,8 @@ def set_value(self, arg):
try:
if isinstance(arg, Node) or issubclass(arg.__class__, Node):
if arg.pk is None:
raise ValueError("Cannot add an unstored node as an attribute of a Workflow!")
raise ValueError("Cannot add an unstored node as an "
"attribute of a Workflow!")
sess = get_scoped_session()
self.aiida_obj = sess.merge(arg.dbnode, load=True)
self.value_type = wf_data_value_types.AIIDA
Expand Down
47 changes: 13 additions & 34 deletions aiida/backends/tests/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,56 +242,35 @@ def test_failing_calc_in_wf(self):
handler.setLevel(original_level)

def test_result_parameter_name_colision(self):
    """
    Check that workflow parameters and results with the same name do not
    collide. This was a problem in SQLA (Issue #960) but a test for both
    backends is added (for completeness).
    """
    # Create a simple workflow & store it
    wf = WFTestEmpty()
    wf.store()

    # Set some parameters
    params = {'band_calculation_set': 2,
              'codename': 'pw-5.2.0',
              'pseudo_family': 'SSSP_v0.7_eff_PBE'}
    wf.set_params(params)

    # Add some results whose names collide with the parameter names
    wf.add_result('structure', 'test_string_1')
    wf.add_result('codename', 'test_string_2')

    # Check that we have the correct results
    self.assertDictEqual(
        {'structure': 'test_string_1', 'codename': 'test_string_2'},
        wf.get_results(), "The workflow results are not the expected "
                          "ones.")

    # Check that we have the correct parameters
    self.assertDictEqual(params, wf.get_parameters(),
                         "The workflow parameters are not the expected "
                         "ones.")

def tearDown(self):
"""
Expand Down

0 comments on commit a01bcbe

Please sign in to comment.