Skip to content

Commit

Permalink
Merge branch 'main' into defect_patch
Browse files Browse the repository at this point in the history
  • Loading branch information
utf authored Jul 18, 2024
2 parents 55284b3 + 0291106 commit 8ad242c
Show file tree
Hide file tree
Showing 42 changed files with 347 additions and 137 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ default_language_version:
exclude: ^(.github/|tests/test_data/abinit/)
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.4.4
rev: v0.4.10
hooks:
- id: ruff
args: [--fix]
Expand Down Expand Up @@ -38,7 +38,7 @@ repos:
- tokenize-rt==4.1.0
- types-paramiko
- repo: https://github.com/codespell-project/codespell
rev: v2.2.6
rev: v2.3.0
hooks:
- id: codespell
stages: [commit, commit-msg]
Expand Down
4 changes: 2 additions & 2 deletions docs/dev/abinit_tests.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ be added to this pseudopotential table.
Note that information from the real pseudopotential files is used in the creation
of the jobs and flows, hence fake pseudopotentials are not an option here.


### File sizes

The files produced by ABINIT are generally large and would overwhelm the size of the
Expand Down Expand Up @@ -121,7 +120,7 @@ atm dev abinit-test-data TEST_NAME

You should change `TEST_NAME` to be a name for the workflow test. Note, `TEST_NAME` should not
contain spaces or punctuation. For example, the band structure workflow test data was
genenerated using `atm dev vasp-test-data Si_band_structure`.
generated using `atm dev vasp-test-data Si_band_structure`.

This will automatically detect whether the Maker is a Job Maker or a Flow Maker and
copy files in the corresponding `tests/test_data/abinit/jobs/NameOfMaker/TEST_NAME`
Expand All @@ -145,6 +144,7 @@ a unique name. For example, there cannot be two calculations called "relax".
Instead you should ensure they are named something like "relax 1" and "relax 2".

Each `REF_RUN_FOLDER` contains:

- A folder called "inputs" with the run.abi and abinit_input.json, as well as with the
indata, outdata and tmpdata directories. The indata directory potentially contains
the reference fake input files needed for the job to be executed (e.g. a fake link to a
Expand Down
36 changes: 19 additions & 17 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -61,12 +61,12 @@ docs = [
"FireWorks==2.0.3",
"autodoc_pydantic==2.1.0",
"furo==2024.5.6",
"ipython==8.25.0",
"ipython==8.26.0",
"jsonschema[format]",
"myst_parser==2.0.0",
"numpydoc==1.7.0",
"sphinx-copybutton==0.5.2",
"sphinx==7.2.6",
"sphinx==7.4.0",
"sphinx_design==0.6.0",
]
dev = ["pre-commit>=2.12.1"]
Expand All @@ -84,23 +84,23 @@ strict = [
"cclib==1.8.1",
"chgnet==0.3.8",
"click==8.1.7",
"custodian==2024.4.18",
"custodian==2024.6.24",
"dscribe==2.1.1",
"emmet-core==0.82.2",
"ijson==3.3.0",
"jobflow==0.1.17",
"lobsterpy==0.4.4",
"lobsterpy==0.4.5",
"mace-torch>=0.3.3",
"matgl==1.1.2",
"monty==2024.5.24",
"mp-api==0.41.2",
"numpy",
"phonopy==2.24.2",
"pydantic-settings==2.3.1",
"pydantic==2.7.3",
"phonopy==2.26.5",
"pydantic-settings==2.3.4",
"pydantic==2.8.2",
"pymatgen-analysis-defects==2024.5.11",
"pymatgen==2024.5.31",
"python-ulid==2.6.0",
"pymatgen==2024.6.10",
"python-ulid==2.7.0",
"quippy-ase==0.9.14",
"seekpath==2.1.0",
"torch==2.2.1",
Expand Down Expand Up @@ -165,15 +165,14 @@ select = ["ALL"]
ignore = [
"ANN002",
"ANN003",
"ANN101", # missing self type annotation
"ANN102",
"ANN101", # missing self type annotation
"ANN102", # missing cls annotation
"ANN401",
"ARG002", # unused method argument
"BLE001",
"ARG002", # unused method argument
# "BLE001",
"C408", # Unnecessary (dict/list/tuple) call - remove call
"C901", # function too complex
"COM812", # trailing comma missing
"DTZ", # datetime-tz-now
"EM", # exception message must not use f-string literal
"ERA001", # found commented out code
"FBT001",
Expand All @@ -183,15 +182,18 @@ ignore = [
"ISC001",
"PD011", # pandas-use-of-dot-values
"PERF203", # try-except-in-loop
"PLR", # pylint-refactor
"PLR0911", # too many returns
"PLR0912", # too many branches
"PLR0913", # too many arguments
"PLR0915", # too many local statements
"PLR2004",
"PT004", # pytest-missing-fixture-name-underscore
"PT006", # pytest-parametrize-names-wrong-type
"PT013", # pytest-incorrect-pytest-import
"PTH", # prefer Pathlib to os.path
"RUF013", # implicit-optional
"S324", # use of insecure hash function
"S507", # paramiko auto trust
"SLF", # private member accessed outside class
"TD", # TODOs
"TRY003", # long message outside exception class
]
Expand All @@ -204,7 +206,7 @@ docstring-code-format = true

[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"]
"**/tests/*" = ["ANN", "ARG001", "D", "INP001", "S101"]
"**/tests/*" = ["ANN", "ARG001", "D", "INP001", "PLR2004", "S101"]
# flake8-type-checking (TCH): things inside TYPE_CHECKING aren't available
# at runtime and so can't be used by pydantic models
# flake8-future-annotations (FA): pipe operator for type unions only work in pydantic models in python 3.10+
Expand Down
1 change: 0 additions & 1 deletion src/atomate2/abinit/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,4 +62,3 @@ def run_abinit(
process.terminate()

process.wait()
return
8 changes: 5 additions & 3 deletions src/atomate2/abinit/schemas/calculation.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import logging
import os
from datetime import datetime
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional, Union

Expand Down Expand Up @@ -236,7 +236,9 @@ def from_abinit_files(

abinit_gsr = GsrFile.from_file(abinit_gsr_file)

completed_at = str(datetime.fromtimestamp(os.stat(abinit_log_file).st_mtime))
completed_at = str(
datetime.fromtimestamp(os.stat(abinit_log_file).st_mtime, tz=timezone.utc)
)

output_doc = CalculationOutput.from_abinit_gsr(abinit_gsr)

Expand All @@ -255,7 +257,7 @@ def from_abinit_files(
if report.run_completed:
has_abinit_completed = TaskState.SUCCESS

except Exception as exc:
except (ValueError, RuntimeError, Exception) as exc:
msg = f"{cls} exception while parsing event_report:\n{exc}"
logger.critical(msg)

Expand Down
20 changes: 9 additions & 11 deletions src/atomate2/abinit/sets/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -356,7 +356,7 @@ def get_input_set(
else:
if prev_outputs is not None and not self.prev_outputs_deps:
raise RuntimeError(
f"Previous outputs not allowed for {self.__class__.__name__}."
f"Previous outputs not allowed for {type(self).__name__}."
)
abinit_input = self.get_abinit_input(
structure=structure,
Expand Down Expand Up @@ -573,7 +573,7 @@ def get_abinit_input(
if self.factory_prev_inputs_kwargs:
if not prev_outputs:
raise RuntimeError(
f"No previous_outputs. Required for {self.__class__.__name__}."
f"No previous_outputs. Required for {type(self).__name__}."
)

# TODO consider cases where structure might be defined even if
Expand All @@ -588,18 +588,16 @@ def get_abinit_input(
)
total_factory_kwargs.update(abinit_inputs)

else:
# TODO check if this should be removed or the check be improved
if structure is None:
msg = (
f"Structure is mandatory for {self.__class__.__name__} "
f"generation since no previous output is used."
)
raise RuntimeError(msg)
elif structure is None:
msg = (
f"Structure is mandatory for {type(self).__name__} "
f"generation since no previous output is used."
)
raise RuntimeError(msg)

if not self.prev_outputs_deps and prev_outputs:
msg = (
f"Previous outputs not allowed for {self.__class__.__name__} "
f"Previous outputs not allowed for {type(self).__name__} "
"Consider if restart_from argument of get_input_set method "
"can fit your needs instead."
)
Expand Down
2 changes: 1 addition & 1 deletion src/atomate2/abinit/utils/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,7 @@ def get_event_report(ofile: File, mpiabort_file: File) -> EventReport | None:
report.append(last_abort_event)
else:
report.append(last_abort_event)
except Exception as exc:
except (ValueError, RuntimeError, Exception) as exc:
# Return a report with an error entry with info on the exception.
logger.critical(f"{ofile}: Exception while parsing ABINIT events:\n {exc!s}")
return parser.report_exception(ofile.path, exc)
Expand Down
82 changes: 82 additions & 0 deletions src/atomate2/aims/flows/eos.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
"""Equation of state workflow for FHI-aims. Based on the common EOS workflow."""

from __future__ import annotations

from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any

from atomate2.aims.flows.core import DoubleRelaxMaker
from atomate2.aims.jobs.core import RelaxMaker
from atomate2.common.flows.eos import CommonEosMaker

if TYPE_CHECKING:
from jobflow import Maker


@dataclass
class AimsEosMaker(CommonEosMaker):
    """
    Generate equation of state data (based on common EOS maker).

    First relaxes a structure using initial_relax_maker, then performs a series of
    deformations on the relaxed structure, and evaluates single-point energies with
    static_maker.

    Parameters
    ----------
    name : str
        Name of the flows produced by this maker.
    initial_relax_maker : .Maker | None
        Maker to relax the input structure, defaults to double relaxation.
    eos_relax_maker : .Maker
        Maker to relax deformed structures for the EOS fit.
    static_maker : .Maker | None
        Maker to generate statics after each relaxation, defaults to None.
    strain : tuple[float]
        Percentage linear strain to apply as a deformation, default = -5% to 5%.
    number_of_frames : int
        Number of strain calculations to do for EOS fit, default = 6.
    postprocessor : .atomate2.common.jobs.EOSPostProcessor
        Optional postprocessing step, defaults to
        `atomate2.common.jobs.PostProcessEosEnergy`.
    _store_transformation_information : .bool = False
        Whether to store the information about transformations. Unfortunately
        needed at present to handle issues with emmet and pydantic validation.
    """

    name: str = "aims eos"
    initial_relax_maker: Maker | None = field(
        default_factory=lambda: DoubleRelaxMaker.from_parameters({})
    )
    eos_relax_maker: Maker | None = field(
        default_factory=lambda: RelaxMaker.fixed_cell_relaxation(
            user_params={"species_dir": "tight"}
        )
    )

    @classmethod
    def from_parameters(cls, parameters: dict[str, Any], **kwargs) -> AimsEosMaker:
        """Create an AimsEosMaker from a common parameter dictionary.

        Parameters
        ----------
        parameters : dict
            Dictionary of common parameters for both makers. The one exception is
            `species_dir` which can be either a string or a dict with keys
            [`initial`, `eos`]. If a string is given, it will be interpreted as the
            `species_dir` for the `eos` Maker; the initial double relaxation will
            then be done with the default `light` and `tight` species' defaults.
        kwargs
            Keyword arguments passed to `CommonEosMaker`.

        Returns
        -------
        AimsEosMaker
            A maker whose initial and EOS relaxation makers are configured from
            `parameters`.
        """
        # Work on a shallow copy so setdefault below does not mutate the
        # caller's dictionary (the original implementation leaked this
        # side effect back to the caller).
        parameters = dict(parameters)
        species_dir = parameters.setdefault("species_dir", "tight")
        initial_params = parameters.copy()
        eos_params = parameters.copy()
        if isinstance(species_dir, dict):
            # Split the per-stage species directories; the EOS stage falls
            # back to "tight" when not given explicitly.
            initial_params["species_dir"] = species_dir.get("initial")
            eos_params["species_dir"] = species_dir.get("eos", "tight")
        return cls(
            initial_relax_maker=DoubleRelaxMaker.from_parameters(initial_params),
            eos_relax_maker=RelaxMaker.fixed_cell_relaxation(user_params=eos_params),
            **kwargs,
        )
2 changes: 1 addition & 1 deletion src/atomate2/aims/jobs/convergence.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def make(
"convergence_field_values": [],
"epsilon": self.epsilon,
}
convergence_data.update({"idx": idx, "converged": converged})
convergence_data.update(idx=idx, converged=converged)

if prev_dir is not None:
split_prev_dir = str(prev_dir).split(":")[-1]
Expand Down
6 changes: 4 additions & 2 deletions src/atomate2/aims/schemas/calculation.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import os
from collections.abc import Sequence
from datetime import datetime
from datetime import datetime, timezone
from pathlib import Path
from typing import TYPE_CHECKING, Any, Optional, Union

Expand Down Expand Up @@ -291,7 +291,9 @@ def from_aims_files(
volumetric_files = [] if volumetric_files is None else volumetric_files
aims_output = AimsOutput.from_outfile(aims_output_file)

completed_at = str(datetime.fromtimestamp(os.stat(aims_output_file).st_mtime))
completed_at = str(
datetime.fromtimestamp(os.stat(aims_output_file).st_mtime, tz=timezone.utc)
)

output_file_paths = _get_output_file_paths(volumetric_files)
aims_objects: dict[AimsObject, Any] = _get_volumetric_data(
Expand Down
4 changes: 2 additions & 2 deletions src/atomate2/aims/utils/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
"""A collection of helper utils found in atomate2 package."""

from datetime import datetime
from datetime import datetime, timezone


def datetime_str() -> str:
Expand All @@ -12,4 +12,4 @@ def datetime_str() -> str:
str
The current time.
"""
return str(datetime.utcnow())
return str(datetime.now(tz=timezone.utc))
Loading

0 comments on commit 8ad242c

Please sign in to comment.