Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Removing the unnecessary inputs due to the split between the base and raw parsers and calculations #82

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 26 additions & 48 deletions aiida_lammps/calculations/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,12 +47,6 @@ class LammpsBaseCalculation(CalcJob):
@classmethod
def define(cls, spec):
super().define(spec)
spec.input(
"script",
valid_type=orm.SinglefileData,
required=False,
help="Complete input script to use. If specified, `structure`, `potential` and `parameters` are ignored.",
)
spec.input(
"structure",
valid_type=orm.StructureData,
Expand Down Expand Up @@ -126,7 +120,6 @@ def define(cls, spec):
default=cls._DEFAULT_RESTART_FILENAME,
)
spec.inputs["metadata"]["options"]["parser_name"].default = cls._DEFAULT_PARSER
spec.inputs.validator = cls.validate_inputs

spec.output(
"results",
Expand Down Expand Up @@ -213,17 +206,6 @@ def define(cls, spec):
message="error parsing the final variable file has failed.",
)

@classmethod
def validate_inputs(cls, value, ctx):
    """Validate the top-level inputs namespace.

    Either a complete input ``script`` must be given, or all three of
    ``structure``, ``potential`` and ``parameters`` must be present so the
    input file can be generated.

    :param value: mapping of the top-level inputs to validate.
    :param ctx: validation context (unused).
    :return: an error message string when validation fails, ``None`` otherwise.
    """
    required = ("structure", "potential", "parameters")
    have_all_required = all(key in value for key in required)
    if "script" not in value and not have_all_required:
        return (
            "Unless `script` is specified the inputs `structure`, `potential` and "
            "`parameters` have to be specified."
        )

@classmethod
def validate_settings(cls, value, ctx):
"""Validate the ``settings`` input."""
Expand Down Expand Up @@ -299,38 +281,34 @@ def prepare_for_submission(self, folder):
retrieve_list += restart_data.get("retrieve_list", [])
retrieve_temporary_list += restart_data.get("retrieve_temporary_list", [])

if "script" in self.inputs:
input_filecontent = self.inputs.script.get_content()
else:
# Generate the content of the structure file based on the input
# structure
structure_filecontent, _ = generate_lammps_structure(
self.inputs.structure,
self.inputs.potential.atom_style,
)

# Generate the content of the structure file based on the input
# structure
structure_filecontent, _ = generate_lammps_structure(
self.inputs.structure,
self.inputs.potential.atom_style,
)
# Get the name of the structure file and write it to the remote folder
_structure_filename = self.inputs.metadata.options.structure_filename

# Get the name of the structure file and write it to the remote folder
_structure_filename = self.inputs.metadata.options.structure_filename

with folder.open(_structure_filename, "w") as handle:
handle.write(structure_filecontent)

# Write the potential to the remote folder
with folder.open(self._DEFAULT_POTENTIAL_FILENAME, "w") as handle:
handle.write(self.inputs.potential.get_content())

# Write the input file content. This function will also check the
# sanity of the passed parameters when comparing it to a schema
input_filecontent = generate_input_file(
potential=self.inputs.potential,
structure=self.inputs.structure,
parameters=_parameters,
restart_filename=_restart_filename,
trajectory_filename=_trajectory_filename,
variables_filename=_variables_filename,
read_restart_filename=_read_restart_filename,
)
with folder.open(_structure_filename, "w") as handle:
handle.write(structure_filecontent)

# Write the potential to the remote folder
with folder.open(self._DEFAULT_POTENTIAL_FILENAME, "w") as handle:
handle.write(self.inputs.potential.get_content())

# Write the input file content. This function will also check the
# sanity of the passed parameters when comparing it to a schema
input_filecontent = generate_input_file(
potential=self.inputs.potential,
structure=self.inputs.structure,
parameters=_parameters,
restart_filename=_restart_filename,
trajectory_filename=_trajectory_filename,
variables_filename=_variables_filename,
read_restart_filename=_read_restart_filename,
)

# Get the name of the input file, and write it to the remote folder
_input_filename = self.inputs.metadata.options.input_filename
Expand Down
6 changes: 2 additions & 4 deletions aiida_lammps/parsers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,8 +92,7 @@ def parse(self, **kwargs):
final_variables = None
variables_filename = self.node.get_option("variables_filename")
if variables_filename not in list_of_files:
if "script" not in self.node.inputs:
return self.exit_codes.ERROR_FINAL_VARIABLE_FILE_MISSING
return self.exit_codes.ERROR_FINAL_VARIABLE_FILE_MISSING
else:
final_variables = parse_final_data(
file_contents=self.node.outputs.retrieved.base.repository.get_object_content(
Expand All @@ -120,8 +119,7 @@ def parse(self, **kwargs):
# check trajectory file
trajectory_filename = self.node.get_option("trajectory_filename")
if trajectory_filename not in list_of_files:
if "script" not in self.node.inputs:
return self.exit_codes.ERROR_TRAJECTORY_FILE_MISSING
return self.exit_codes.ERROR_TRAJECTORY_FILE_MISSING
else:
with self.node.outputs.retrieved.base.repository.open(
trajectory_filename
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,9 @@ Setting up Verlet run ...
Current step : 0
Time step : 0.005
Per MPI rank memory allocation (min/avg/max) = 2.039 | 2.065 | 2.082 Mbytes
Step Temp E_pair E_mol TotEng Press
0 1.44 -6.7733681 0 -4.6133724 -5.0196717
1000 0.70380068 -5.6761498 0 -4.6204508 0.70370101
Step Temp E_pair E_mol TotEng Press
0 1.44 -6.7733681 0 -4.6133724 -5.0196717
1000 0.70380068 -5.6761498 0 -4.6204508 0.70370101
Loop time of 3.07792 on 120 procs for 1000 steps with 500000 atoms

Performance: 140354.357 tau/day, 324.894 timesteps/s, 162.447 Matom-step/s
Expand Down
14 changes: 11 additions & 3 deletions tests/parsers/test_raw.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,7 @@
"""Tests for the :mod:`aiida_lammps.parsers.raw` module."""
# pylint: disable=redefined-outer-name
from aiida.orm import SinglefileData
from aiida.plugins import ParserFactory

from aiida_lammps.calculations.raw import LammpsRawCalculation


def test_default(generate_calc_job_node, data_regression):
"""Test parsing a default output case."""
Expand All @@ -15,3 +12,14 @@ def test_default(generate_calc_job_node, data_regression):
assert calcfunction.is_finished, calcfunction.exception
assert calcfunction.is_finished_ok, calcfunction.exit_message
data_regression.check({"results": results["results"].get_dict()})


def test_alt_timing_info(generate_calc_job_node, data_regression):
    """Check that the raw parser handles the alternative timing-info output."""
    raw_parser = ParserFactory("lammps.raw")
    calc_node = generate_calc_job_node("lammps.raw", "alt")
    results, calcfunction = raw_parser.parse_from_node(
        calc_node, store_provenance=False
    )

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_finished_ok, calcfunction.exit_message
    data_regression.check({"results": results["results"].get_dict()})
15 changes: 15 additions & 0 deletions tests/parsers/test_raw/test_alt_timing_info.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
results:
compute_variables:
bin: kk/device
bins:
- 60
- 60
- 60
binsize: 1.4
ghost_atom_cutoff: 2.8
master_list_distance_cutoff: 2.8
max_neighbors_atom: 2000
steps_per_second: 324.894
total_wall_time: 0:00:03
total_wall_time_seconds: 3
units_style: lattice
47 changes: 10 additions & 37 deletions tests/test_calculations.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,40 +137,6 @@ def test_lammps_base(
assert sub_value == _step_data[sub_key], _msg


def test_lammps_base_script(generate_calc_job, aiida_local_code_factory):
    """Test the ``LammpsBaseCalculation`` with the ``script`` input."""
    from aiida_lammps.calculations.base import LammpsBaseCalculation

    inputs = {
        "code": aiida_local_code_factory("lammps.base", "bash"),
        "metadata": {"options": {"resources": {"num_machines": 1}}},
    }

    # Without ``script`` the trio ``structure``/``potential``/``parameters``
    # is mandatory, so building the calculation must fail input validation.
    with pytest.raises(
        ValueError,
        match=r"Unless `script` is specified the inputs .* have to be specified.",
    ):
        generate_calc_job("lammps.base", inputs)

    content = textwrap.dedent(
        """
        "velocity all create 1.44 87287 loop geom
        "pair_style lj/cut 2.5
        "pair_coeff 1 1 1.0 1.0 2.5
        "neighbor 0.3 bin
        "neigh_modify delay 0 every 20 check no
        "fix 1 all nve
        "run 10000
        """
    )
    script = orm.SinglefileData(io.StringIO(content))

    # With a complete ``script`` the other inputs are not needed and the
    # script content is written verbatim as the input file.
    inputs["script"] = script
    tmp_path, calc_info = generate_calc_job("lammps.base", inputs)
    assert (tmp_path / LammpsBaseCalculation._INPUT_FILENAME).read_text() == content


@pytest.mark.lammps_call
@pytest.mark.parametrize(
"parameters,restart_parameters",
Expand Down Expand Up @@ -342,13 +308,20 @@ def test_lammps_base_settings_invalid(generate_calc_job, aiida_local_code_factor
generate_calc_job("lammps.base", inputs)


def test_lammps_base_settings(generate_calc_job, aiida_local_code_factory):
def test_lammps_base_settings(
generate_calc_job,
aiida_local_code_factory,
minimize_parameters,
get_potential_fe_eam,
generate_structure,
):
"""Test the ``LammpsBaseCalculation`` with the ``settings`` input."""
from aiida_lammps.calculations.base import LammpsBaseCalculation

inputs = {
"code": aiida_local_code_factory("lammps.base", "bash"),
"script": orm.SinglefileData(io.StringIO("")),
"parameters": orm.Dict(minimize_parameters),
"potential": get_potential_fe_eam,
"structure": generate_structure,
"settings": orm.Dict({"additional_cmdline_params": ["--option", "value"]}),
"metadata": {"options": {"resources": {"num_machines": 1}}},
}
Expand Down
70 changes: 0 additions & 70 deletions tests/test_parsers.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,76 +47,6 @@ def get_traj_force():
)


def test_lammps_base(db_test_app, data_regression):
    """Check if the log file is produced during calculation."""
    log_path = os.path.join(TEST_DIR, "input_files", "parsers", "log.lammps")

    # Build a retrieved folder containing the log plus empty scheduler files.
    retrieved = FolderData()
    retrieved.base.repository.put_object_from_file(log_path, "log.lammps")
    for scheduler_file in ("_scheduler-stdout.txt", "_scheduler-stderr.txt"):
        retrieved.base.repository.put_object_from_filelike(
            io.StringIO(""), scheduler_file
        )

    inputs = {"script": SinglefileData(io.StringIO(""))}
    calc_node = db_test_app.generate_calcjob_node(
        "lammps.base", retrieved, inputs=inputs
    )

    parser = ParserFactory("lammps.base")
    with db_test_app.sandbox_folder() as temp_path:
        _results, calcfunction = parser.parse_from_node(
            calc_node,
            retrieved_temporary_folder=temp_path.abspath,
        )

    assert calcfunction.is_finished_ok
    assert "results" in calcfunction.outputs
    data_regression.check({"results": calcfunction.outputs.results.get_dict()})


def test_lammps_base_timing_info(db_test_app, data_regression):
    """Test that the timing performance is properly parsed."""
    log_path = os.path.join(TEST_DIR, "input_files", "parsers", "log_alt.lammps")

    # The alternative log is stored under the default retrieved name so the
    # parser picks it up, alongside empty scheduler output files.
    retrieved = FolderData()
    retrieved.base.repository.put_object_from_file(log_path, "log.lammps")
    for scheduler_file in ("_scheduler-stdout.txt", "_scheduler-stderr.txt"):
        retrieved.base.repository.put_object_from_filelike(
            io.StringIO(""), scheduler_file
        )

    inputs = {"script": SinglefileData(io.StringIO(""))}
    calc_node = db_test_app.generate_calcjob_node(
        "lammps.base", retrieved, inputs=inputs
    )

    parser = ParserFactory("lammps.base")
    with db_test_app.sandbox_folder() as temp_path:
        _results, calcfunction = parser.parse_from_node(
            calc_node,
            retrieved_temporary_folder=temp_path.abspath,
        )

    assert calcfunction.is_finished_ok
    assert "results" in calcfunction.outputs
    data_regression.check({"results": calcfunction.outputs.results.get_dict()})


def test_parser_log(data_regression):
"""
Test the parser for the ``log.lammps`` file.
Expand Down