From f97978a758dfff07829dfc6d216c9e47659c9852 Mon Sep 17 00:00:00 2001
From: Jusong Yu
Date: Wed, 26 Jun 2024 18:40:37 +0200
Subject: [PATCH] WIP full verification

---
 pyproject.toml                                |   2 +-
 src/aiida_sssp_workflow/utils/__init__.py     |  56 +++
 .../workflows/convergence/_base.py            |  16 +-
 .../workflows/convergence/bands.py            |   2 +-
 .../workflows/convergence/caching.py          |   2 +-
 .../workflows/convergence/cohesive_energy.py  |   2 +-
 .../workflows/convergence/eos.py              |   2 +-
 .../convergence/phonon_frequencies.py         |   2 +-
 .../workflows/convergence/pressure.py         |   4 +-
 .../workflows/verification.py                 | 344 +++++++-----------
 tests/conftest.py                             | 118 +-----
 tests/workflows/test_verification.py          |  26 +-
 .../test_default_builder.yml                  | 142 ++++++++
 13 files changed, 366 insertions(+), 352 deletions(-)
 create mode 100644 tests/workflows/test_verification/test_default_builder.yml

diff --git a/pyproject.toml b/pyproject.toml
index a29bd574..e384be9b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -51,7 +51,7 @@ aiida-sssp-workflow = "aiida_sssp_workflow.cli:cmd_root"
 "sssp_workflow.convergence.phonon_frequencies" = "aiida_sssp_workflow.workflows.convergence.phonon_frequencies:ConvergencePhononFrequenciesWorkChain"
 "sssp_workflow.convergence.pressure" = "aiida_sssp_workflow.workflows.convergence.pressure:ConvergencePressureWorkChain"
 "sssp_workflow.convergence.bands" = "aiida_sssp_workflow.workflows.convergence.bands:ConvergenceBandsWorkChain"
-"sssp_workflow.verification" = "aiida_sssp_workflow.workflows.verifications:FullVerificationWorkChain"
+"sssp_workflow.verification" = "aiida_sssp_workflow.workflows.verification:FullVerificationWorkChain"

 [project.urls]
 Documentation = "https://aiida-sssp-workflow.readthedocs.io/"
diff --git a/src/aiida_sssp_workflow/utils/__init__.py b/src/aiida_sssp_workflow/utils/__init__.py
index 2af9edb3..5d01846d 100644
--- a/src/aiida_sssp_workflow/utils/__init__.py
+++ b/src/aiida_sssp_workflow/utils/__init__.py
@@ -36,3 +36,59 @@ def get_default_mpi_options(
         "max_wallclock_seconds": int(max_wallclock_seconds),
         "withmpi": with_mpi,
     }
+
+
+def serialize_data(data):
+    from aiida.orm import (
+        AbstractCode,
+        BaseType,
+        Data,
+        Dict,
+        KpointsData,
+        List,
+        RemoteData,
+        SinglefileData,
+    )
+    from aiida.plugins import DataFactory
+
+    StructureData = DataFactory("core.structure")
+    UpfData = DataFactory("pseudo.upf")
+
+    if isinstance(data, dict):
+        return {key: serialize_data(value) for key, value in data.items()}
+
+    if isinstance(data, BaseType):
+        return data.value
+
+    if isinstance(data, AbstractCode):
+        return data.full_label
+
+    if isinstance(data, Dict):
+        return data.get_dict()
+
+    if isinstance(data, List):
+        return data.get_list()
+
+    if isinstance(data, StructureData):
+        return data.get_formula()
+
+    if isinstance(data, UpfData):
+        return f"{data.element}"
+
+    if isinstance(data, RemoteData):
+        # For `RemoteData` we compute the hash of the repository. The value returned by `Node._get_hash` is not
+        # useful since it includes the hash of the absolute filepath and the computer UUID, which vary between tests.
+        return data.base.repository.hash()
+
+    if isinstance(data, KpointsData):
+        try:
+            return data.get_kpoints().tolist()
+        except AttributeError:
+            return data.get_kpoints_mesh()
+
+    if isinstance(data, SinglefileData):
+        return data.get_content()
+
+    if isinstance(data, Data):
+        return data.base.caching._get_hash()
+
+    return data
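Note for reviewers: `serialize_data` is promoted from the test suite into the package so that the conftest fixtures and the new dry-run output of the verification workflow share a single implementation. A minimal sketch of what it does, assuming a loaded AiiDA profile (the example values are illustrative, not taken from the patch):

```python
from aiida import load_profile, orm
from aiida_sssp_workflow.utils import serialize_data

load_profile()  # constructing orm nodes requires a configured profile

# Nested plain dicts are walked recursively; AiiDA nodes collapse to plain values.
inputs = {
    "protocol": orm.Str("test"),               # BaseType -> "test"
    "parallelization": orm.Dict(dict={}),      # Dict -> {}
    "cutoff_list": orm.List(list=[[20, 80]]),  # List -> [[20, 80]]
}
assert serialize_data(inputs) == {
    "protocol": "test",
    "parallelization": {},
    "cutoff_list": [[20, 80]],
}
```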
diff --git a/src/aiida_sssp_workflow/workflows/convergence/_base.py b/src/aiida_sssp_workflow/workflows/convergence/_base.py
index 7572b8d3..235e401f 100644
--- a/src/aiida_sssp_workflow/workflows/convergence/_base.py
+++ b/src/aiida_sssp_workflow/workflows/convergence/_base.py
@@ -234,13 +234,22 @@ def get_builder(
         pseudo: Union[Path, UpfData],
         protocol: str,
         cutoff_list: list,
-        configuration: str,
+        configuration: str | None = None,
         clean_workdir: bool = True,
     ) -> ProcessBuilder:
         """Generate builder for the generic convergence workflow"""
         builder = super().get_builder()
         builder.protocol = orm.Str(protocol)

+        if configuration is not None:
+            if ret := is_valid_convergence_configuration(configuration):
+                raise ValueError(ret)
+            builder.configuration = orm.Str(configuration)
+            configuration_name = configuration
+        else:
+            configuration_name = "default"
+
         # Set the default label and description
         # The default label is set to be the base file name of PP
         # The description include which configuration and which protocol is using.
@@ -248,7 +257,7 @@ def get_builder(
             pseudo.filename if isinstance(pseudo, UpfData) else pseudo.name
         )
         builder.metadata.description = (
-            f"Run on protocol '{protocol}' and configuration '{configuration}'"
+            f"Run on protocol '{protocol}' and configuration '{configuration_name}'"
         )

         if isinstance(pseudo, Path):
@@ -259,11 +268,8 @@ def get_builder(
         if ret := is_valid_cutoff_list(cutoff_list):
             raise ValueError(ret)

-        if ret := is_valid_convergence_configuration(configuration):
-            raise ValueError(ret)

         builder.cutoff_list = orm.List(list=cutoff_list)
-        builder.configuration = orm.Str(configuration)
         builder.clean_workdir = orm.Bool(clean_workdir)

         return builder
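With `configuration` now optional, the validation only runs when a configuration is actually passed, and the metadata description falls back to the 'default' label otherwise. A rough usage sketch against one of the convergence workchains below (the code label, pseudo path, and cutoff pairs are illustrative):

```python
from pathlib import Path

from aiida.orm import load_code
from aiida.plugins import WorkflowFactory

ConvergenceEOSWorkChain = WorkflowFactory("sssp_workflow.convergence.eos")

builder = ConvergenceEOSWorkChain.get_builder(
    code=load_code("pw-docker@localhost"),           # illustrative code label
    pseudo=Path("Al.paw.pbe.z_3.ld1.psl.v0.1.upf"),  # illustrative pseudo path
    protocol="test",
    cutoff_list=[(20, 80), (30, 120)],               # (ecutwfc, ecutrho) pairs
    # `configuration` omitted -> no validation, description reads 'default'
)
```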
diff --git a/src/aiida_sssp_workflow/workflows/convergence/bands.py b/src/aiida_sssp_workflow/workflows/convergence/bands.py
index af342a0f..5b4eefe7 100644
--- a/src/aiida_sssp_workflow/workflows/convergence/bands.py
+++ b/src/aiida_sssp_workflow/workflows/convergence/bands.py
@@ -50,8 +50,8 @@ def get_builder(
         pseudo: Union[Path, UpfData],
         protocol: str,
         cutoff_list: list,
-        configuration: str,
         code: orm.AbstractCode,
+        configuration: str | None = None,
         parallelization: dict | None = None,
         mpi_options: dict | None = None,
         clean_workdir: bool = True,  # default to clean workdir
diff --git a/src/aiida_sssp_workflow/workflows/convergence/caching.py b/src/aiida_sssp_workflow/workflows/convergence/caching.py
index 29b285b9..4e092bc9 100644
--- a/src/aiida_sssp_workflow/workflows/convergence/caching.py
+++ b/src/aiida_sssp_workflow/workflows/convergence/caching.py
@@ -52,8 +52,8 @@ def get_builder(
         pseudo: Path,
         protocol: str,
         cutoff_list: list,
-        configuration: str,
         code: orm.AbstractCode,
+        configuration: str | None = None,
         parallelization: dict | None = None,
         mpi_options: dict | None = None,
         clean_workdir: bool = False,
diff --git a/src/aiida_sssp_workflow/workflows/convergence/cohesive_energy.py b/src/aiida_sssp_workflow/workflows/convergence/cohesive_energy.py
index 91dec93f..52ffb719 100644
--- a/src/aiida_sssp_workflow/workflows/convergence/cohesive_energy.py
+++ b/src/aiida_sssp_workflow/workflows/convergence/cohesive_energy.py
@@ -62,8 +62,8 @@ def get_builder(
         pseudo: Union[Path, UpfData],
         protocol: str,
         cutoff_list: list,
-        configuration: str,
         code: orm.AbstractCode,
+        configuration: str | None = None,
         bulk_parallelization: dict | None = None,
         bulk_mpi_options: dict | None = None,
         atom_parallelization: dict | None = None,
diff --git a/src/aiida_sssp_workflow/workflows/convergence/eos.py b/src/aiida_sssp_workflow/workflows/convergence/eos.py
index 02d280bb..84d5cee4 100644
--- a/src/aiida_sssp_workflow/workflows/convergence/eos.py
+++ b/src/aiida_sssp_workflow/workflows/convergence/eos.py
@@ -48,8 +48,8 @@ def get_builder(
         pseudo: Union[Path, UpfData],
         protocol: str,
         cutoff_list: list,
-        configuration: str,
         code: orm.AbstractCode,
+        configuration: str | None = None,
         parallelization: dict | None = None,
         mpi_options: dict | None = None,
         clean_workdir: bool = True,  # default to clean workdir
diff --git a/src/aiida_sssp_workflow/workflows/convergence/phonon_frequencies.py b/src/aiida_sssp_workflow/workflows/convergence/phonon_frequencies.py
index 23809367..aa3437f0 100644
--- a/src/aiida_sssp_workflow/workflows/convergence/phonon_frequencies.py
+++ b/src/aiida_sssp_workflow/workflows/convergence/phonon_frequencies.py
@@ -68,9 +68,9 @@ def get_builder(
         pseudo: Union[Path, UpfData],
         protocol: str,
         cutoff_list: list,
-        configuration: str,
         pw_code: orm.AbstractCode,
         ph_code: orm.AbstractCode,
+        configuration: str | None = None,
         pw_parallelization: dict | None = None,
         ph_settings: dict | None = None,
         pw_mpi_options: dict | None = None,
diff --git a/src/aiida_sssp_workflow/workflows/convergence/pressure.py b/src/aiida_sssp_workflow/workflows/convergence/pressure.py
index b257c7d0..9b01919d 100644
--- a/src/aiida_sssp_workflow/workflows/convergence/pressure.py
+++ b/src/aiida_sssp_workflow/workflows/convergence/pressure.py
@@ -46,11 +46,11 @@ def define(cls, spec):
     @classmethod
     def get_builder(
         cls,
+        code: orm.AbstractCode,
         pseudo: Union[Path, UpfData],
         protocol: str,
         cutoff_list: list,
-        configuration: str,
-        code: orm.AbstractCode,
+        configuration: str | None = None,
         parallelization: dict | None = None,
         mpi_options: dict | None = None,
         clean_workdir: bool = True,  # clean workdir by default
diff --git a/src/aiida_sssp_workflow/workflows/verification.py b/src/aiida_sssp_workflow/workflows/verification.py
index cc267d65..8375d1cb 100644
--- a/src/aiida_sssp_workflow/workflows/verification.py
+++ b/src/aiida_sssp_workflow/workflows/verification.py
@@ -13,6 +13,7 @@
 from aiida_pseudo.data.pseudo import UpfData

 from aiida_sssp_workflow.utils.protocol import get_protocol
+from aiida_sssp_workflow.utils import get_default_mpi_options, parse, serialize_data
 from aiida_sssp_workflow.workflows import SelfCleanWorkChain

 # TODO: simplipy me
@@ -23,10 +24,9 @@
 from aiida_sssp_workflow.workflows.convergence.pressure import ConvergencePressureWorkChain
 from aiida_sssp_workflow.workflows.convergence.report import ConvergenceReport
-from aiida_sssp_workflow.workflows.measure.bands import BandStructureWorkChain
-from aiida_sssp_workflow.workflows.measure.transferability import EOSTransferabilityWorkChain
+from aiida_sssp_workflow.workflows.transferability.bands import TransferabilityBandsWorkChain
+from aiida_sssp_workflow.workflows.transferability.eos import TransferabilityEOSWorkChain
-from aiida_sssp_workflow.utils.pseudo import extract_pseudo_info, get_proper_dual, parse


 @calcfunction
@@ -43,14 +43,14 @@ def parse_pseudo_info(pseudo):
 DEFAULT_CONVERGENCE_PROPERTIES_LIST = [
     "convergence.cohesive_energy",
     "convergence.pressure",
-    "convergence.delta",
+    "convergence.eos",
     "convergence.bands",
     "convergence.phonon_frequencies",
 ]

 DEFAULT_MEASURE_PROPERTIES_LIST = [
-    "measure.eos",
-    "measure.bands",
+    "transferability.eos",
+    "transferability.bands",
 ]

 DEFAULT_PROPERTIES_LIST = (
@@ -95,65 +95,52 @@ class FullVerificationWorkChain(SelfCleanWorkChain):
     # This two class attributes will control whether a WF flow is
     # run and results write to outputs ports.
-    _VALID_CONGENCENCE_WF = [
-        "convergence.cohesive_energy",
-        "convergence.phonon_frequencies",
-        "convergence.pressure",
-        "convergence.delta",
-        "convergence.bands",
-    ]
-    _VALID_MEASURE_WF = [
-        "measure.eos",
-        "measure.bands",
-    ]
-    _CRITERIA = 'standard'
+    _VALID_CONGENCENCE_WF = DEFAULT_CONVERGENCE_PROPERTIES_LIST
+    _VALID_MEASURE_WF = DEFAULT_MEASURE_PROPERTIES_LIST
+    _CRITERIA = 'v2024.1001'

     @classmethod
     def define(cls, spec):
         super().define(spec)
-        # Expose all for convergence workchains
-        # spec.expose_inputs(ConvergenceCohesiveEnergyWorkChain, namespace='convergence.cohesive_energy',
-        #                    exclude=['code', 'pseudo', 'clean_workdir'])
-        # spec.expose_inputs(ConvergencePhononFrequenciesWorkChain, namespace='convergence.phonon_frequencies',
-        #                    exclude=['pw_code', 'ph_code', 'pseudo', 'clean_workdir'])
-        # spec.expose_inputs(ConvergenceEOSWorkChain, namespace='convergence.eos',
-        #                    exclude=['code', 'pseudo', 'clean_workdir'])
-        # spec.expose_inputs(ConvergenceBandsWorkChain, namespace='convergence.bands',
-        #                    exclude=['code', 'pseudo', 'clean_workdir'])
-        # spec.expose_inputs(ConvergencePressureWorkChain, namespace='convergence.pressure',
-        #                    exclude=['code', 'pseudo', 'clean_workdir'])
-        #
-        # # Expose all for transferability workchains: band structure and EOS
-        # spec.expose_inputs(BandStructureWorkChain, namespace='transferability.bands',
-        #                    exclude=['code', 'pseudo', 'clean_workdir'])
-        # spec.expose_inputs(EOSTransferabilityWorkChain, namespace='transferability.eos',
-        #                    exclude=['code', 'pseudo', 'clean_workdir'])
-
         spec.input('pw_code', valid_type=orm.AbstractCode,
                    help='The `pw.x` code use for the `PwCalculation`.')
-        spec.input('ph_code', valid_type=orm.AbstractCode, required=False,
+        spec.input('ph_code', valid_type=orm.AbstractCode, required=True,
                    help='The `ph.x` code use for the `PhCalculation`.')
         spec.input('pseudo', valid_type=UpfData, required=True,
                    help='Pseudopotential to be verified')
         spec.input('protocol', valid_type=orm.Str,
-                   help='Verification protocol')
+                   help='Verification protocol')  # XXX: validate, can only be standard, quick, test
+        spec.input('curate_type', valid_type=orm.Str, required=True,
+                   help='sssp or nc, which oxygen to use')  # XXX: validation
+        spec.input('dry_run', valid_type=orm.Bool, default=lambda: orm.Bool(False))
+        spec.input(
+            "parallelization",
+            valid_type=orm.Dict,
+            required=False,
+            help="The parallelization settings for the `PwCalculation`.",
+        )
+        spec.input(
+            "mpi_options",
+            valid_type=orm.Dict,
+            required=False,
+            help="The MPI options for the `PwCalculation`.",
+        )

         spec.outline(
-            cls.prepare_subworkchain_builders,
-            # cls._setup_code,
-            # cls._parse_pseudo,
-            # cls._init_setup,
-            # if_(cls._do_run_convergence)(
-            #     cls._run_convergence,
-            #     cls._inspect_convergence,
-            # ),
-            # if_(cls._do_run_measure)(
-            #     cls._run_measure,
-            #     cls._inspect_measure,
-            # ),
+            cls._prepare_subworkchain_builders,
+            if_(cls._not_dry_run)(
+                cls._run_convergence_test,
+                cls._inspect_convergence_test,
+                cls._set_cutoffs,
+                cls._run_transferability_verification,
+                cls._inspect_transferability_verification,
+            ),
         )
+
         spec.output('pseudo_info', valid_type=orm.Dict, required=True,
-            help='pseudopotential info')
+                    help='pseudopotential info')
+        spec.output_namespace('builders', dynamic=True,
+                              help='Serialized inputs of every prepared property sub-workchain builder; only set in a dry run.')
         for wfname in cls._VALID_MEASURE_WF:
             spec.output_namespace(wfname, dynamic=True,
                                   help=f'results of {wfname} calculation.')
@@ -173,7 +160,10 @@ def get_builder(
         ph_code: orm.Code,
         pseudo: Path,
         protocol: str,
-        properties_list: list | None = None,
+        curate_type: str,
+        dry_run: bool = False,
+        parallelization: dict | None = None,
+        mpi_options: dict | None = None,
         clean_workdir: bool = True,
     ) -> ProcessBuilder:
         builder = super().get_builder()
@@ -182,194 +172,107 @@ def get_builder(
         builder.ph_code = ph_code
         builder.pseudo = UpfData.get_or_create(pseudo)
         builder.clean_workdir = orm.Bool(clean_workdir)
+        builder.curate_type = orm.Str(curate_type)
+        builder.dry_run = orm.Bool(dry_run)

-        if properties_list is None:
-            properties_list = DEFAULT_PROPERTIES_LIST

-        # convergence
-        for property in [p for p in properties_list if p.startwith('convergence')]:
-            builder.convergence[property].cutoff_list = get_cutoff_list(protocol)
-            builder.convergence[property].protocol = get_convergence_protocol(protocol)
+        if parallelization:
+            builder.parallelization = orm.Dict(parallelization)
+        else:
+            builder.parallelization = orm.Dict()
+
+        if mpi_options:
+            builder.mpi_options = orm.Dict(mpi_options)
+        else:
+            builder.mpi_options = orm.Dict(get_default_mpi_options())
+
         return builder

-    def prepare_subworkchain_builders(self):
+    def _prepare_subworkchain_builders(self):
         """Use input prepare builder for each property subworkchain

         It will builder as a dict ctx further called `builders` has properties name as key.
         """
         protocol = self.inputs.protocol.value
+        mapping_to_convergence = {
+            'standard': 'balanced',
+            'quick': 'balanced',
+            'test': 'test',
+        }

         builders = {}
-        for property in ['eos', 'bands']:
-            if f"convergence.{property}" in self.inputs:
-                _ConvergenceWorkChain = entry_point(f'{property}')
-                builder = self.exposed_inputs(_ConvergenceWorkChain, namespace="convergence.{property}")
-
-                if 'phonon_frequencies' in property:
-                    builder.convergence[property].pw_code = self.inputs.pw_code
-                    builder.convergence[property].ph_code = self.inputs.ph_code
-                else:
-                    builder.convergence[property].code = self.inputs.pw_code
-
-                builders[f"convergence.{property}"] = builder
+        for property in self._VALID_CONGENCENCE_WF:
+            _WorkChain = WorkflowFactory(f"sssp_workflow.{property}")
+            builder_inputs = {
+                "pseudo": self.inputs.pseudo,
+                "protocol": mapping_to_convergence[protocol],
+                "cutoff_list": [(20, 80)],
+                "clean_workdir": self.inputs.clean_workdir.value,
+            }
+            if "phonon_frequencies" in property:
+                builder_inputs['pw_code'] = self.inputs.pw_code
+                builder_inputs['ph_code'] = self.inputs.ph_code
+            else:
+                builder_inputs['code'] = self.inputs.pw_code
+
+            # XXX: parall & mpi

-        if "eos" in self.inputs.transferability:
-            _WorkChain = WorkflowFactory("sssp.workflows.transferability.eos")
             builder: ProcessBuilder = _WorkChain.get_builder(
-                code=self.inputs.pw_code,
-                pseudo=self.inputs.pseudo,
-                protocol=get_eos_protocol(protocol),
-                curate_type=self.inputs.transferability.eos.curate_type,
+                **builder_inputs,
             )
-            builder.parallelization = self.inputs.parallelization
-            builder.mpi_options = self.inputs.mpi_options
-            builders['transferability.eos'] = builder
+            builders[property] = builder
""" protocol = self.inputs.protocol.value + mapping_to_convergence = { + 'standard': 'balanced', + 'quick': 'balanced', + 'test': 'test', + } builders = {} - for property in ['eos', 'bands']: - if f"convergence.{property}" in self.inputs: - _ConvergenceWorkChain = entry_point(f'{property}') - builder = self.exposed_inputs(_ConvergenceWorkChain, namespace="convergence.{property}") - - if 'phonon_frequencies' in property: - builder.convergence[property].pw_code = self.inputs.pw_code - builder.convergence[property].ph_code = self.inputs.ph_code - else: - builder.convergence[property].code = self.inputs.pw_code - - builders[f"convergence.{property}"] = builder + for property in self._VALID_CONGENCENCE_WF: + _WorkChain = WorkflowFactory(f"sssp_workflow.{property}") + builder_inputs = { + "pseudo": self.inputs.pseudo, + "protocol": mapping_to_convergence[protocol], + "cutoff_list": [(20, 80)], + "clean_workdir": self.inputs.clean_workdir.value, + } + if "phonon_frequencies" in property: + builder_inputs['pw_code'] = self.inputs.pw_code + builder_inputs['ph_code'] = self.inputs.ph_code + else: + builder_inputs['code'] = self.inputs.pw_code + + # XXX: parall & mpi - if "eos" in self.inputs.transferability: - _WorkChain = WorkflowFactory("sssp.workflows.transferability.eos") builder: ProcessBuilder = _WorkChain.get_builder( - code=self.inputs.pw_code, - pseudo=self.inputs.pseudo, - protocol=get_eos_protocol(protocol), - curate_type=self.inputs.transferability.eos.curate_type, + **builder_inputs, ) - builder.parallelization = self.inputs.parallelization - builder.mpi_options = self.inputs.mpi_options - builders['transferability.eos'] = builder + builders[property] = builder - if "bands" in self.inputs.transferability: - _WorkChain = WorkflowFactory("sssp.workflows.transferability.bands") - builder: ProcessBuilder = _WorkChain.get_builder( - code=self.inputs.pw_code, - pseudo=self.inputs.pseudo, - protocol=get_eos_protocol(protocol), - ) - builder.parallelization = self.inputs.parallelization - builder.mpi_options = self.inputs.mpi_options - - builders['transferability.eos'] = builder - - self.ctx.builders = builders - - def _setup_code(self): - """ - setup resource options and parallelization for `PwCalculation` from inputs - """ - if "options" in self.inputs: - self.ctx.options = self.inputs.options.get_dict() - else: - from aiida_sssp_workflow.utils import get_default_options - - self.ctx.options = get_default_options( - with_mpi=True, - ) - - if "parallelization" in self.inputs: - self.ctx.parallelization = self.inputs.parallelization.get_dict() - else: - self.ctx.parallelization = {} - - def init_setup(self): - """prepare inputs for all verification process""" - - if "label" in self.inputs: - label = self.inputs.label.value - else: - label = self._label_from_pseudo_info(self.ctx.pseudo_info) - - self.node.base.extras.set("label", label) + mapping_to_eos = { + 'standard': 'standard', + 'quick': 'standard', + 'test': 'test', + } - # Properties list - valid_list = self._VALID_MEASURE_WF + self._VALID_CONGENCENCE_WF - self.ctx.properties_list = [ - p for p in self.inputs.properties_list.get_list() if p in valid_list - ] + mapping_to_bands = { + 'standard': 'balanced', + 'quick': 'balanced', + 'test': 'test', + } - # Measure workflow: bands measure and precision measure workflows inputs setting - measure_inputs = self.exposed_inputs(_BaseMeasureWorkChain, namespace="measure") - measure_inputs["pseudo"] = self.inputs.pseudo - measure_inputs["code"] = self.inputs.pw_code - measure_inputs["options"] = 
-        measure_inputs["parallelization"] = self.inputs.parallelization
+        _WorkChain = WorkflowFactory("sssp_workflow.transferability.eos")
+        builder: ProcessBuilder = _WorkChain.get_builder(
+            code=self.inputs.pw_code,
+            pseudo=self.inputs.pseudo,
+            protocol=mapping_to_eos[protocol],
+            curate_type=self.inputs.curate_type.value,
+            clean_workdir=self.inputs.clean_workdir.value,
+        )
+        builder.parallelization = self.inputs.parallelization
+        builder.mpi_options = self.inputs.mpi_options

-        measure_inputs["clean_workdir"] = self.inputs.clean_workdir
+        builders['transferability.eos'] = builder

-        self.ctx.measure_inputs = {
-            "precision": measure_inputs.copy(),
-            "bands": measure_inputs.copy(),
-        }
+        _WorkChain = WorkflowFactory("sssp_workflow.transferability.bands")
+        builder: ProcessBuilder = _WorkChain.get_builder(
+            code=self.inputs.pw_code,
+            pseudo=self.inputs.pseudo,
+            protocol=mapping_to_bands[protocol],
+            clean_workdir=self.inputs.clean_workdir.value,
+        )
+        builder.parallelization = self.inputs.parallelization
+        builder.mpi_options = self.inputs.mpi_options

-        # Convergence inputs setting, the properties of convergence test are:
-        # 1. cohesive energy
-        # 2. phonon frequencies
-        # 3. pressue
-        # 4. delta
-        # 5. bands distance
-        self.ctx.convergence_inputs = dict()
+        builders['transferability.bands'] = builder

-        convergence_inputs = self.exposed_inputs(
-            _BaseConvergenceWorkChain, namespace="convergence"
-        )
-        convergence_inputs["code"] = self.inputs.pw_code
-        convergence_inputs["pseudo"] = self.inputs.pseudo
-        convergence_inputs["options"] = self.inputs.options
-        convergence_inputs["parallelization"] = self.inputs.parallelization
-
-        convergence_inputs["clean_workdir"] = self.inputs.clean_workdir
-
-        for prop in ["delta", "pressure"]:
-            self.ctx.convergence_inputs[prop] = convergence_inputs.copy()
-
-        # The cohesive energy evaluation may hit the ran out of memory issue,
-        # so use the pw_code_large_memory if provided.
-        if "convergence.cohesive_energy" in self.ctx.properties_list:
-            inputs_cohesive_energy = convergence_inputs.copy()
-            if "pw_code_large_memory" in self.inputs:
-                inputs_cohesive_energy["pw_code_large_memory"] = (
-                    self.inputs.pw_code_large_memory
-                )
+        self.ctx.builders = builders

-            self.ctx.convergence_inputs["cohesive_energy"] = inputs_cohesive_energy
-
-        # Here, the shallow copy can be used since the type of convergence_inputs
-        # is AttributesDict.
-        # The deepcopy can't be used, since it will create new data node.
-        if "convergence.phonon_frequencies" in self.ctx.properties_list:
-            inputs_phonon_frequencies = convergence_inputs.copy()
-            inputs_phonon_frequencies.pop("code", None)
-            inputs_phonon_frequencies["pw_code"] = self.inputs.pw_code
-            inputs_phonon_frequencies["ph_code"] = self.inputs.ph_code
-            inputs_phonon_frequencies["clean_workdir"] = orm.Bool(
-                False
-            )  # For phonon frequencies convergence workflow, the clean dir is taken care by the the finalize step of the verification workflow.
-
-            self.ctx.convergence_inputs["phonon_frequencies"] = (
-                inputs_phonon_frequencies
-            )
-        if "convergence.bands" in self.ctx.properties_list:
-            inputs_bands = convergence_inputs.copy()
-            inputs_bands["clean_workdir"] = orm.Bool(
-                False
-            )  # For bands convergence workflow, the clean dir is taken care by the the finalize step of the verification workflow.
-
-            self.ctx.convergence_inputs["bands"] = inputs_bands
-
-        # Caching inputs setting
-        # The running strategy of caching is:
-        # 1. run phonon_frequencies/bands convergence workflow
-        # 2. run cleandir for workchains (which will be the finalize step of phonon_frequencies/bands convergence workflow)
-        # 3. run cohesive_energy/pressure/delta convergence workflow which will use the cached data and clean on the fly
-        # 4. get the recommended cutoffs
-        # 5. run measure workflow using the recommended cutoffs
-        self.ctx.caching_inputs = convergence_inputs.copy()
-        self.ctx.caching_inputs["clean_workdir"] = orm.Bool(
-            False
-        )  # shouldn't clean until last, default of _caching but do it here explicitly
-
-        # to collect workchains in a dict
-        self.ctx.workchains = dict()
-
-        # For store the finished_ok workflow
-        self.ctx.finished_ok_wf = dict()
-
-    def inspect_measure(self):
-        """Inspect delta measure results"""
-        return self._report_and_results(wname_list=self._VALID_MEASURE_WF)
+    def _not_dry_run(self):
+        dry_run = self.inputs.dry_run.value

-    def _do_run_convergence_test(self):
-        """Whether to run convergence test workflows"""
+        # On a dry run, write the serialized builders to the output namespace,
+        # which is helpful for tests and sanity checks.
+        if dry_run:
+            serialized_builders = {k: serialize_data(builder._inputs(prune=True)) for k, builder in self.ctx.builders.items()}

-        return len(self.ctx.convergence_properties_list) > 0
+            self.out("builders", serialized_builders)
+
+        return not dry_run

     def _run_convergence_test(self):
         for property in self.ctx.convergence_properties_list:
@@ -385,9 +288,6 @@ def _run_convergence_test(self):

     def _inspect_convergence_test(self):
         self._report_and_results(workchains=self.ctx.convergence_workchains)

-    def _do_run_transferability_verification(self):
-        return len(self.ctx.transferability_propertios_list) > 0
-
     def _set_cutoffs(self):
         """Set cutoffs for the transferability verification, if full convergence test are run,
         then use the maximum cutoff for the transferability run.
         """
@@ -397,15 +297,19 @@
         builder = builders.get(property)
         builder.wavefunction_cutoff, builder.change_density_cutoff = wavefunction_cutoff, charge_density_cutoff

-    def _run_transferability_verefication(self):
+    def _run_transferability_verification(self):
         """Run delta measure sub-workflow"""
-        for property in self.ctx.transferability_properties_list
+        for property in self.ctx.transferability_properties_list:
             running = self.submit(builders.get(property))
             self.report(f"Submit {property} measure workchain pk={running.pk}")
             self.to_context(_=running)

             self.ctx.transferability_workchains[f"{property}"] = running

+    def _inspect_transferability_verification(self):
+        """Inspect transferability verification results"""
+        return self._report_and_results(workchains=self.ctx.transferability_workchains)
+
     def _report_and_results(self, workchains):
         """result to respective output namespace"""

@@ -413,7 +317,7 @@ def _report_and_results(self, workchains):
         for wname, workchain in workchains.items():
             # dump all output as it is to verification workflow output
             self.ctx.finished_ok_wf[wname] = workchain.pk
-            self.out(f"{wname}", workchain.outputs)
+            self.out(wname, workchain.outputs)  # XXX:??? am I needed?
             # for label in workchain.outputs:
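Taken together, the rework makes the top-level workchain buildable from plain Python values, and with `dry_run=True` the outline stops after `_prepare_subworkchain_builders` and only publishes the serialized builders. A sketch of the intended call, mirroring the new test in tests/workflows/test_verification.py (code labels and the pseudo path are illustrative):

```python
from pathlib import Path

from aiida.engine import run_get_node
from aiida.orm import load_code
from aiida.plugins import WorkflowFactory

FullVerificationWorkChain = WorkflowFactory("sssp_workflow.verification")

builder = FullVerificationWorkChain.get_builder(
    pw_code=load_code("pw-docker@localhost"),
    ph_code=load_code("ph-docker@localhost"),
    pseudo=Path("Al.paw.pbe.z_3.ld1.psl.v0.1.upf"),
    protocol="test",      # maps to the 'test' convergence/EOS protocols
    curate_type="sssp",   # selects which oxygen pseudo is paired for oxides
    dry_run=True,         # prepare and serialize the builders, run nothing
)
result, node = run_get_node(builder)
```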
diff --git a/tests/conftest.py b/tests/conftest.py
index 7cd1d2a6..4bd13d70 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -10,6 +10,7 @@
 from aiida import orm
 from aiida.orm.utils.managers import NodeLinksManager
 from aiida.engine import ProcessBuilder
+from aiida_sssp_workflow.utils import serialize_data

 pytest_plugins = ["aiida.manage.tests.pytest_fixtures"]

@@ -75,63 +76,6 @@ def _pseudo_path(element="Al"):

     return _pseudo_path


-def _serialize_data(data):
-    from aiida.orm import (
-        AbstractCode,
-        BaseType,
-        Data,
-        Dict,
-        KpointsData,
-        List,
-        RemoteData,
-        SinglefileData,
-    )
-    from aiida.plugins import DataFactory
-
-    StructureData = DataFactory("core.structure")
-    UpfData = DataFactory("pseudo.upf")
-
-    if isinstance(data, dict):
-        return {key: _serialize_data(value) for key, value in data.items()}
-
-    if isinstance(data, BaseType):
-        return data.value
-
-    if isinstance(data, AbstractCode):
-        return data.full_label
-
-    if isinstance(data, Dict):
-        return data.get_dict()
-
-    if isinstance(data, List):
-        return data.get_list()
-
-    if isinstance(data, StructureData):
-        return data.get_formula()
-
-    if isinstance(data, UpfData):
-        return f"{data.element}"
-
-    if isinstance(data, RemoteData):
-        # For `RemoteData` we compute the hash of the repository. The value returned by `Node._get_hash` is not
-        # useful since it includes the hash of the absolute filepath and the computer UUID which vary between tests
-        return data.base.repository.hash()
-
-    if isinstance(data, KpointsData):
-        try:
-            return data.get_kpoints().tolist()
-        except AttributeError:
-            return data.get_kpoints_mesh()
-
-    if isinstance(data, SinglefileData):
-        return data.get_content()
-
-    if isinstance(data, Data):
-        return data.base.caching._get_hash()
-
-    return data
-
-
 @pytest.fixture
 def serialize_inputs():
     """Serialize the given process inputs into a dictionary with nodes turned into their value representation.
@@ -141,69 +85,13 @@ def serialize_inputs():
     :return: dictionary
     """

-    def _serialize_data(data):
-        from aiida.orm import (
-            AbstractCode,
-            BaseType,
-            Data,
-            Dict,
-            KpointsData,
-            List,
-            RemoteData,
-            SinglefileData,
-        )
-        from aiida.plugins import DataFactory
-
-        StructureData = DataFactory("core.structure")
-        UpfData = DataFactory("pseudo.upf")
-
-        if isinstance(data, dict):
-            return {key: _serialize_data(value) for key, value in data.items()}
-
-        if isinstance(data, BaseType):
-            return data.value
-
-        if isinstance(data, AbstractCode):
-            return data.full_label
-
-        if isinstance(data, Dict):
-            return data.get_dict()
-
-        if isinstance(data, List):
-            return data.get_list()
-
-        if isinstance(data, StructureData):
-            return data.get_formula()
-
-        if isinstance(data, UpfData):
-            return f"{data.element}"
-
-        if isinstance(data, RemoteData):
-            # For `RemoteData` we compute the hash of the repository. The value returned by `Node._get_hash` is not
-            # useful since it includes the hash of the absolute filepath and the computer UUID which vary between tests
-            return data.base.repository.hash()
-
-        if isinstance(data, KpointsData):
-            try:
-                return data.get_kpoints().tolist()
-            except AttributeError:
-                return data.get_kpoints_mesh()
-
-        if isinstance(data, SinglefileData):
-            return data.get_content()
-
-        if isinstance(data, Data):
-            return data.base.caching._get_hash()
-
-        return data
-
     def _serialize_inputs(inputs: NodeLinksManager):
         # NodeLinksManager -> dict
         _inputs = {}
         for key in inputs._get_keys():
             _inputs[key] = inputs[key]

-        return _serialize_data(_inputs)
+        return serialize_data(_inputs)

     return _serialize_inputs

@@ -218,6 +106,6 @@ def serialize_builder():
     """

     def _serialize_builder(builder: ProcessBuilder):
-        return _serialize_data(builder._inputs(prune=True))
+        return serialize_data(builder._inputs(prune=True))

     return _serialize_builder
diff --git a/tests/workflows/test_verification.py b/tests/workflows/test_verification.py
index 95c9cb84..21e1705e 100644
--- a/tests/workflows/test_verification.py
+++ b/tests/workflows/test_verification.py
@@ -1,8 +1,26 @@
+import pytest
+
+from aiida.engine import ProcessBuilder, run_get_node
 from aiida.plugins import WorkflowFactory


-def test_default_builder(code_generator):
+def test_default_builder(code_generator, pseudo_path, data_regression):
     """Check the builder is created from inputs"""
-    _WorkChain = WorkflowFactory("sssp.workflows.verification")
-    pw_code = code_generator("pw")
-    ph_code = code_generator("ph")
+    _WorkChain = WorkflowFactory('sssp_workflow.verification')
+
+    builder: ProcessBuilder = _WorkChain.get_builder(
+        pw_code=code_generator('pw'),
+        ph_code=code_generator('ph'),
+        pseudo=pseudo_path('Al'),
+        protocol='test',
+        curate_type='sssp',
+        dry_run=True,
+    )
+
+    result, node = run_get_node(builder)
+
+    data_regression.check(result['builders'])
diff --git a/tests/workflows/test_verification/test_default_builder.yml b/tests/workflows/test_verification/test_default_builder.yml
new file mode 100644
index 00000000..049bad77
--- /dev/null
+++ b/tests/workflows/test_verification/test_default_builder.yml
@@ -0,0 +1,142 @@
+convergence.bands:
+  clean_workdir: true
+  code: pw-docker@localhost
+  cutoff_list:
+  - - 20
+    - 80
+  metadata:
+    call_link_label: convergence_eos
+    description: Run on protocol 'test' and configuration 'default'
+    label: Al.paw.pbe.z_3.ld1.psl.v0.1.upf
+  mpi_options:
+    max_wallclock_seconds: 1800
+    resources:
+      num_machines: 1
+    withmpi: false
+  parallelization: {}
+  protocol: test
+  pseudo: Al
+convergence.cohesive_energy:
+  atom_mpi_options:
+    max_wallclock_seconds: 1800
+    resources:
+      num_machines: 1
+    withmpi: false
+  atom_parallelization: {}
+  bulk_mpi_options:
+    max_wallclock_seconds: 1800
+    resources:
+      num_machines: 1
+    withmpi: false
+  bulk_parallelization: {}
+  clean_workdir: true
+  code: pw-docker@localhost
+  cutoff_list:
+  - - 20
+    - 80
+  metadata:
+    call_link_label: convergence_cohesive_energy
+    description: Run on protocol 'test' and configuration 'default'
+    label: Al.paw.pbe.z_3.ld1.psl.v0.1.upf
+  protocol: test
+  pseudo: Al
+convergence.eos:
+  clean_workdir: true
+  code: pw-docker@localhost
+  cutoff_list:
+  - - 20
+    - 80
+  metadata:
+    call_link_label: convergence_eos
+    description: Run on protocol 'test' and configuration 'default'
+    label: Al.paw.pbe.z_3.ld1.psl.v0.1.upf
+  mpi_options:
+    max_wallclock_seconds: 1800
+    resources:
+      num_machines: 1
+    withmpi: false
+  parallelization: {}
+  protocol: test
+  pseudo: Al
+convergence.phonon_frequencies:
+  clean_workdir: true
+  cutoff_list:
+  - - 20
+    - 80
+  metadata:
+    call_link_label: convergence_phonon_frequencies
+    description: Run on protocol 'test' and configuration 'default'
+    label: Al.paw.pbe.z_3.ld1.psl.v0.1.upf
+  ph_code: ph-docker@localhost
+  ph_mpi_options:
+    max_wallclock_seconds: 1800
+    resources:
+      num_machines: 1
+    withmpi: false
+  ph_settings: {}
+  protocol: test
+  pseudo: Al
+  pw_code: pw-docker@localhost
+  pw_mpi_options:
+    max_wallclock_seconds: 1800
+    resources:
+      num_machines: 1
+    withmpi: false
+  pw_parallelization: {}
+convergence.pressure:
+  clean_workdir: true
+  code: pw-docker@localhost
+  cutoff_list:
+  - - 20
+    - 80
+  metadata:
+    call_link_label: convergence_pressure
+    description: Run on protocol 'test' and configuration 'default'
+    label: Al.paw.pbe.z_3.ld1.psl.v0.1.upf
+  mpi_options:
+    max_wallclock_seconds: 1800
+    resources:
+      num_machines: 1
+    withmpi: false
+  parallelization: {}
+  protocol: test
+  pseudo: Al
+transferability.bands:
+  charge_density_cutoff: 1600
+  clean_workdir: true
+  code: pw-docker@localhost
+  metadata:
+    call_link_label: band_structure_verification
+    description: Run on protocol 'test' | configuration 'default' | base (ecutwfc,
+      ecutrho) = (200, 1600)
+    label: Al.paw.pbe.z_3.ld1.psl.v0.1.upf
+  mpi_options:
+    max_wallclock_seconds: 1800
+    resources:
+      num_machines: 1
+    withmpi: false
+  parallelization: {}
+  protocol: test
+  pseudo: Al
+  wavefunction_cutoff: 200
+transferability.eos:
+  charge_density_cutoff: 1600
+  clean_workdir: true
+  code: pw-docker@localhost
+  metadata:
+    call_link_label: transferability_eos
+    description: Run on protocol 'test' | configurations 'all' | with oxygen_pseudo
+      'O.paw.pbe.z_6.ld1.psl.v0.1.upf' | base (ecutwfc, ecutrho) = (200, 1600)
+    label: Al.paw.pbe.z_3.ld1.psl.v0.1.upf
+  mpi_options:
+    max_wallclock_seconds: 1800
+    resources:
+      num_machines: 1
+    withmpi: false
+  oxygen_ecutrho: 560.0
+  oxygen_ecutwfc: 70.0
+  oxygen_pseudo: O
+  parallelization: {}
+  protocol: test
+  pseudo: Al
+  wavefunction_cutoff: 200
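The fixture records the serialized builders exactly as `result['builders']` returns them, so a quick sanity check against it could look like the following sketch (continuing from the dry-run call shown after the verification.py diff):

```python
serialized = result["builders"]

# The 'test' verification protocol maps every convergence property to the
# 'test' convergence protocol with a single (20, 80) cutoff pair.
assert serialized["convergence.pressure"]["protocol"] == "test"
assert serialized["convergence.pressure"]["cutoff_list"] == [[20, 80]]

# Transferability runs at the fixed base cutoffs recorded in the fixture.
assert serialized["transferability.eos"]["wavefunction_cutoff"] == 200
```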