use nowf for delta calculations (#188)
* use nowf for delta calculations

* nowf also for cohesive energy workflow

* nowf also for pressure workflow

* Remove invalid caching modification

* clear caching for the ph calculation
unkcpz authored May 4, 2023
1 parent 1620eac commit 875e05c
Showing 9 changed files with 118 additions and 21 deletions.
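Across the files below the change is one knob: `disk_io = "nowf"` is injected into the pw.x CONTROL namelist so that Quantum ESPRESSO writes no wavefunction files to the remote working directory. A minimal sketch of the recurring pattern, where the `merge` helper and the sample `pw_parameters` values are illustrative stand-ins (the package's own `update_dict` is assumed to merge nested dictionaries in this way):

    # The dictionary each init_setup() below seeds into the workchain context.
    extra_pw_parameters = {
        "CONTROL": {
            "disk_io": "nowf",  # write no wavefunction files
        },
    }

    # Illustrative stand-in for aiida_sssp_workflow's update_dict: recursively
    # merge the extra parameters into the protocol-derived pw.x parameters.
    def merge(base: dict, extra: dict) -> dict:
        out = dict(base)
        for key, value in extra.items():
            if isinstance(value, dict) and isinstance(out.get(key), dict):
                out[key] = merge(out[key], value)
            else:
                out[key] = value
        return out

    pw_parameters = {"SYSTEM": {"degauss": 0.01, "occupations": "smearing"}}
    print(merge(pw_parameters, extra_pw_parameters))
    # {'SYSTEM': {'degauss': 0.01, 'occupations': 'smearing'},
    #  'CONTROL': {'disk_io': 'nowf'}}

In the per-workflow diffs below the same dictionary shows up verbatim; only the context attribute it is stored on (`extra_pw_parameters` vs `extra_pw_parameters_for_atom`) varies.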
4 changes: 2 additions & 2 deletions aiida_sssp_workflow/protocol/delta.yml
@@ -38,8 +38,8 @@ opsp:
   scale_count: 7
   scale_increment: 0.02
   configurations:
-    - XO2
-    - SC
+    - XO
+    - BCC

 opsp-full:
   name: opsp-full
18 changes: 11 additions & 7 deletions aiida_sssp_workflow/workflows/__init__.py
@@ -38,15 +38,19 @@ def on_terminated(self):
         cleaned_calcs = operate_calcjobs(
             self.node, operator=clean_workdir, all_same_nodes=False
         )
-        cache_invalid_calcs = operate_calcjobs(
-            self.node, operator=invalid_cache, all_same_nodes=False
-        )

         if cleaned_calcs:
             self.report(
                 f"cleaned remote folders of calculations: {' '.join(map(str, cleaned_calcs))}"
             )
-        if cache_invalid_calcs:
-            self.report(
-                f"Invalid cache of cached calculations: {' '.join(map(str, cache_invalid_calcs))}"
-            )
+
+        # This is not turned on, since it would make all finished workflows non-cacheable.
+        # I need to find a way to properly work around the empty parent_folder issue.
+        # cache_invalid_calcs = operate_calcjobs(
+        #     self.node, operator=invalid_cache, all_same_nodes=False
+        # )
+
+        # if cache_invalid_calcs:
+        #     self.report(
+        #         f"Invalid cache of cached calculations: {' '.join(map(str, cache_invalid_calcs))}"
+        #     )
8 changes: 8 additions & 0 deletions aiida_sssp_workflow/workflows/convergence/caching.py
@@ -34,6 +34,14 @@ def define(cls, spec):
             help="If `True`, work directories of all called calculation will be cleaned at the end of execution.",
         )

+    def init_setup(self):
+        super().init_setup()
+        self.ctx.extra_pw_parameters = {
+            "CONTROL": {
+                "disk_io": "nowf",  # no wavefunction file
+            },
+        }
+
     def inspect_wfc_convergence_test(self):
         """Override this step to do nothing to parse wavefunction
         cutoff test results but only run it."""
18 changes: 14 additions & 4 deletions aiida_sssp_workflow/workflows/convergence/cohesive_energy.py
@@ -48,13 +48,21 @@ class ConvergenceCohesiveEnergyWorkChain(_BaseConvergenceWorkChain):

     def init_setup(self):
         super().init_setup()
-        self.ctx.extra_pw_parameters = {}
-        self.ctx.extra_pw_parameters_for_atom = {}
+        self.ctx.extra_pw_parameters = {
+            "CONTROL": {
+                "disk_io": "nowf",  # no wavefunction file
+            },
+        }
+        self.ctx.extra_pw_parameters_for_atom = {
+            "CONTROL": {
+                "disk_io": "nowf",  # no wavefunction file
+            },
+        }

     def extra_setup_for_magnetic_element(self):
         """Extra setup for magnetic element, for atom especially"""
         super().extra_setup_for_magnetic_element()
-        self.ctx.extra_pw_parameters_for_atom = {
+        extra_pw_parameters_for_atom_magnetic_element = {
             self.ctx.element: {
                 "SYSTEM": {
                     "nspin": 2,
@@ -69,11 +77,12 @@ def extra_setup_for_magnetic_element(self):
                 },
             }
         }
+        self.ctx.extra_pw_parameters_for_atom = update_dict(extra_pw_parameters_for_atom_magnetic_element, self.ctx.extra_pw_parameters_for_atom)

     def extra_setup_for_rare_earth_element(self):
         """Extra setup for rare earth element, for atom especially"""
         super().extra_setup_for_rare_earth_element()
-        self.ctx.extra_pw_parameters_for_atom = {
+        extra_pw_parameters_for_atom_rare_earth_element = {
             self.ctx.element: {
                 "SYSTEM": {
                     "nspin": 2,
@@ -91,6 +100,7 @@ def extra_setup_for_rare_earth_element(self):
                 },
             },
         }
+        self.ctx.extra_pw_parameters_for_atom = update_dict(extra_pw_parameters_for_atom_rare_earth_element, self.ctx.extra_pw_parameters_for_atom)

     def setup_code_parameters_from_protocol(self):
         """Input validation"""
6 changes: 5 additions & 1 deletion aiida_sssp_workflow/workflows/convergence/delta.py
@@ -50,7 +50,11 @@ class ConvergenceDeltaWorkChain(_BaseConvergenceWorkChain):

     def init_setup(self):
         super().init_setup()
-        self.ctx.extra_pw_parameters = {}
+        self.ctx.extra_pw_parameters = {
+            "CONTROL": {
+                "disk_io": "nowf",  # no wavefunction file
+            },
+        }

     def extra_setup_for_rare_earth_element(self):
         super().extra_setup_for_rare_earth_element()
6 changes: 6 additions & 0 deletions aiida_sssp_workflow/workflows/convergence/pressure.py
@@ -98,6 +98,11 @@ class ConvergencePressureWorkChain(_BaseConvergenceWorkChain):
     def init_setup(self):
         super().init_setup()
         self.ctx.pw_parameters = {}
+        self.ctx.extra_pw_parameters = {
+            "CONTROL": {
+                "disk_io": "nowf",  # no wavefunction file
+            },
+        }

     def setup_code_parameters_from_protocol(self):
         """Input validation"""
@@ -200,6 +205,7 @@ def run_reference(self):
                 },
                 "parallelization": orm.Dict(dict=self.ctx.parallelization),
             },
+            "clean_workchain": self.inputs.clean_workchain,
         }

         running = self.submit(_EquationOfStateWorkChain, **inputs)
24 changes: 17 additions & 7 deletions aiida_sssp_workflow/workflows/evaluate/_phonon_frequencies.py
@@ -76,19 +76,29 @@ def inspect_scf(self):
         try:
             remote_folder = self.ctx.scf_remote_folder = workchain.outputs.remote_folder

-            if not remote_folder.is_empty:
-                # when the remote_folder is not empty we regard it as ready for ph,
-                # even if the subsequent ph was already successful
-                self.ctx.not_ready_for_ph = False
-            else:
-                # set all same nodes to caching off and rerun
+            if remote_folder.is_empty:
+                # set all same nodes to caching off and re-run the scf calculation
                 pw_node = [
                     c for c in workchain.called if isinstance(c, orm.CalcJobNode)
                 ][0]
-                all_same_nodes = pw_node.get_all_same_nodes()
+                all_same_nodes = pw_node.base.caching.get_all_same_nodes()
                 for node in all_same_nodes:
                     node.is_valid_cache = False

+                # Also set is_valid_cache=False for the source node. It should already
+                # be included in all_same_nodes, but the hash is not stable because of
+                # a bug in aiida-core, see: https://github.com/aiidateam/aiida-core/issues/5997
+                src_node = orm.load_node(pw_node.base.caching.get_cache_source())
+                src_node.is_valid_cache = False
+                all_same_nodes = src_node.base.caching.get_all_same_nodes()
+                for node in all_same_nodes:
+                    node.is_valid_cache = False
+            else:
+                # when the remote_folder is not empty, we regard it as ready for ph.
+                # A potential problem: even if the subsequent ph calculation finished,
+                # it will be re-run, since the remote_folder has changed.
+                self.ctx.not_ready_for_ph = False

         except NotExistentAttributeError:
             # set the condition to False to break the loop
             self.ctx.not_ready_for_ph = False
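The hunk above exists because invalidating the pw node's same-hash family is not guaranteed to reach the node the calculation was actually cached from: the node hash is not stable (aiidateam/aiida-core#5997). A standalone sketch of the invalidation step; the helper name and the None guard are illustrative additions, not part of the commit:

    from aiida import orm

    def invalidate_cache_family(calc_node: orm.CalcJobNode) -> None:
        """Illustrative helper: stop calc_node, its cache source, and every
        same-hash node from being used as a cache source again."""
        for node in calc_node.base.caching.get_all_same_nodes():
            node.is_valid_cache = False

        # The cache source should already be in get_all_same_nodes(), but the
        # unstable hash (aiida-core issue #5997) means it can be missed.
        source_uuid = calc_node.base.caching.get_cache_source()
        if source_uuid is not None:
            source = orm.load_node(source_uuid)
            source.is_valid_cache = False
            for node in source.base.caching.get_all_same_nodes():
                node.is_valid_cache = False

This mirrors the hunk's logic one-to-one; only the function wrapper and the None guard are added.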
6 changes: 6 additions & 0 deletions aiida_sssp_workflow/workflows/measure/delta.py
@@ -211,6 +211,9 @@ def setup_pw_parameters_from_protocol(self):
         self.ctx.scale_count = self._SCALE_COUNT = protocol["scale_count"]
         self.ctx.scale_increment = self._SCALE_INCREMENT = protocol["scale_increment"]

+        # Set the hardcoded parameters
+        _disk_io = "nowf"
+
         # narrow the configuration list by protocol
         # this is used for test protocol which only has limited configurations to be verified
         clist = protocol.get("configurations", self.ctx.configuration_list)
@@ -224,6 +227,9 @@
         self._ECUTWFC = cutoff_control["max_wfc"]

         parameters = {
+            "CONTROL": {
+                "disk_io": _disk_io,
+            },
             "SYSTEM": {
                 "degauss": self._DEGAUSS,
                 "occupations": self._OCCUPATIONS,
49 changes: 49 additions & 0 deletions examples/exam_pw.py
@@ -0,0 +1,49 @@
+# from aiida.engine import run, submit
+# from aiida import orm
+# from aiida_quantumespresso.workflows.pw.base import PwBaseWorkChain
+
+# inputs = {
+#     "metadata": {"call_link_label": "SCF_for_cache"},
+#     "pw": {
+#         "structure": orm.load_node(369401),
+#         "code": orm.load_node(1),
+#         "pseudos": {
+#             "H": orm.load_node(369335),
+#         },
+#         "parameters": orm.Dict(dict={
+#             "CONTROL": {
+#                 "calculation": "scf",
+#             },
+#             "ELECTRONS": {
+#                 "conv_thr": 1e-05
+#             },
+#             "SYSTEM": {
+#                 "degauss": 0.01,
+#                 "ecutrho": 120,
+#                 "ecutwfc": 30.1,
+#                 "occupations": "smearing",
+#                 "smearing": "cold"
+#             }
+#         }),
+#         "parallelization": orm.Dict(dict={
+#             "npool": 1,
+#         }),
+#         "metadata": {
+#             "options": {
+#                 "resources": {
+#                     "num_machines": 1,
+#                     "num_mpiprocs_per_machine": 2,
+#                 },
+#                 "max_wallclock_seconds": 1800,
+#                 "withmpi": True,
+#             },
+#         },
+#     },
+#     "kpoints_distance": orm.Float(0.1),
+# }
+
+# # submit(PwBaseWorkChain, **inputs)
+# run(PwBaseWorkChain, **inputs)
+
+n = load_node(370105)
+n._get_objects_to_hash()
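The two uncommented lines at the end assume an interactive `verdi shell` session, where `load_node` is in scope without an import. `_get_objects_to_hash()` returns the pieces of node data that enter the node hash, presumably used here to inspect why supposedly identical pw calculations hash differently (the instability referenced in the _phonon_frequencies.py hunk above).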
