More Tests (#155)

marvinfriede authored Jun 30, 2024
1 parent 7e3d61d commit 56db401
Showing 37 changed files with 765 additions and 566 deletions.
69 changes: 35 additions & 34 deletions docs/source/01_quickstart/getting_started.rst
@@ -73,40 +73,6 @@ use corresponding getters :meth:`~dxtb.Calculator.get_energy`:
We recommend using the getters, as they provide the familiar ASE-like interface.
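
As a minimal sketch of the getter interface (reusing the LiH setup from the
example below; the exact keyword arguments of the getter are an assumption):

.. code-block:: python

    import torch
    import dxtb

    dd = {"dtype": torch.double, "device": torch.device("cpu")}
    numbers = torch.tensor([3, 1], device=dd["device"])
    positions = torch.tensor([[0.0, 0.0, 0.0], [0.0, 0.0, 1.0]], **dd)

    calc = dxtb.calculators.GFN1Calculator(numbers, **dd)

    # ASE-like getter for the total energy
    energy = calc.get_energy(positions)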

.. warning::

    If you supply the **same inputs** to the calculator multiple times with
    gradient tracking enabled, you have to reset the calculator in between with
    :meth:`~dxtb.Calculator.reset_all`. Otherwise, the gradients will be wrong.

.. admonition:: Example
    :class: toggle

    .. code-block:: python

        import torch
        import dxtb

        dd = {"dtype": torch.double, "device": torch.device("cpu")}
        numbers = torch.tensor([3, 1], device=dd["device"])
        positions = torch.tensor([[0.0, 0.0, 0.0], [0.0, 0.0, 1.0]], **dd)
        calc = dxtb.calculators.GFN1Calculator(numbers, **dd)

        pos = positions.clone().requires_grad_(True)
        energy = calc.energy(pos)
        (g1,) = torch.autograd.grad(energy, pos)

        # wrong gradients without reset here
        calc.reset_all()

        pos = positions.clone().requires_grad_(True)
        energy = calc.energy(pos)
        (g2,) = torch.autograd.grad(energy, pos)
        assert torch.allclose(g1, g2)

Gradients
---------
@@ -146,6 +112,41 @@ The equivalency of the two methods (except for the sign) can be verified by
the example `here <https://github.com/grimme-lab/dxtb/blob/main/examples/forces.py>`_.
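
A minimal sketch of the autograd route, assuming the LiH setup from the
example below (the force is the negative gradient of the energy):

.. code-block:: python

    import torch
    import dxtb

    dd = {"dtype": torch.double, "device": torch.device("cpu")}
    numbers = torch.tensor([3, 1], device=dd["device"])
    positions = torch.tensor([[0.0, 0.0, 0.0], [0.0, 0.0, 1.0]], **dd)
    pos = positions.clone().requires_grad_(True)

    calc = dxtb.calculators.GFN1Calculator(numbers, **dd)
    energy = calc.energy(pos)

    # differentiate the energy w.r.t. positions and flip the sign
    (grad,) = torch.autograd.grad(energy, pos)
    forces = -grad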


.. warning::

    If you supply the **same inputs** to the calculator multiple times with
    gradient tracking enabled, you have to reset the calculator in between with
    :meth:`~dxtb.Calculator.reset_all`. Otherwise, the gradients will be wrong.

.. admonition:: Example
    :class: toggle

    .. code-block:: python

        import torch
        import dxtb

        dd = {"dtype": torch.double, "device": torch.device("cpu")}
        numbers = torch.tensor([3, 1], device=dd["device"])
        positions = torch.tensor([[0.0, 0.0, 0.0], [0.0, 0.0, 1.0]], **dd)
        calc = dxtb.calculators.GFN1Calculator(numbers, **dd)

        pos = positions.clone().requires_grad_(True)
        energy = calc.energy(pos)
        (g1,) = torch.autograd.grad(energy, pos)

        # wrong gradients without reset here
        calc.reset_all()

        pos = positions.clone().requires_grad_(True)
        energy = calc.energy(pos)
        (g2,) = torch.autograd.grad(energy, pos)
        assert torch.allclose(g1, g2)

More Properties
---------------

Binary file added docs/source/_static/dxtb-favicon.png
Binary file added docs/source/_static/dxtb.png
7 changes: 5 additions & 2 deletions docs/source/conf.py
@@ -47,8 +47,8 @@

html_theme = "sphinx_book_theme"
html_title = project
html_logo = "_static/dxtb.svg"
html_favicon = "_static/dxtb-favicon.svg"
html_logo = "_static/dxtb.png"
html_favicon = "_static/dxtb-favicon.png"

html_theme_options = {
"navigation_with_keys": False,
@@ -81,6 +81,7 @@
"python": ("https://docs.python.org/3", None),
"tad_dftd3": ("https://tad-dftd3.readthedocs.io/en/latest/", None),
"tad_dftd4": ("https://tad-dftd4.readthedocs.io/en/latest/", None),
"tad_libcint": ("https://tad-libcint.readthedocs.io/en/latest/", None),
"tad_mctc": ("https://tad-mctc.readthedocs.io/en/latest/", None),
"tad_multicharge": ("https://tad-multicharge.readthedocs.io/en/latest/", None),
"torch": ("https://pytorch.org/docs/stable/", None),
@@ -113,7 +114,9 @@
exclude_patterns = [
# Sometimes sphinx reads its own outputs as inputs!
"build/html",
"_build/html",
"build/jupyter_execute",
"_build/jupyter_execute",
"notebooks/README.md",
"README.md",
"notebooks/*.md",
2 changes: 1 addition & 1 deletion examples/integrals.py
@@ -15,7 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Simple integral interface.
Simple integral interface. Can be helpful for testing.
"""
from pathlib import Path

2 changes: 1 addition & 1 deletion examples/limitation_xitorch.py
@@ -15,7 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Calculating forces for vancomycin via AD.
Example for xitorch's inability to be used together with functorch.
"""
from pathlib import Path

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -45,7 +45,7 @@ source = ["./src"]
omit = [
"./src/dxtb/_src/exlibs/xitorch/*",
"./src/dxtb/_src/exlibs/scipy/*",
"./src/dxtb/_src/typing.py",
"./src/dxtb/_src/typing/*",
"./src/dxtb/components/*",
]

2 changes: 1 addition & 1 deletion src/dxtb/__version__.py
@@ -22,5 +22,5 @@

__all__ = ["__version__", "__tversion__"]

__version__ = "0.0.0"
__version__ = "0.0.1"
"""Version of ``dxtb`` in semantic versioning."""
4 changes: 2 additions & 2 deletions src/dxtb/_src/cli/driver.py
@@ -108,7 +108,7 @@ def _set_attr(self, attr: str) -> int | list[int]:
for path in self.base:
# use charge (or spin) from file or set to zero
if Path(path, FILES[attr]).is_file():
vals.append(io.read_chrg(Path(path, FILES[attr])))
vals.append(read.read_chrg_from_path(Path(path, FILES[attr])))
else:
vals.append(0)

@@ -176,7 +176,7 @@ def singlepoint(self) -> Result | None:
numbers = pack(_n)
positions = pack(_p)
else:
_n, _p = io.read_structure_from_file(args.file[0], args.filetype)
_n, _p = read.read_from_path(args.file[0], args.filetype)
numbers = torch.tensor(_n, dtype=torch.long, device=dd["device"])
positions = torch.tensor(_p, **dd)

10 changes: 0 additions & 10 deletions src/dxtb/_src/io/__init__.py
@@ -18,15 +18,5 @@
Functions for reading and writing files.
"""

from . import read
from .handler import *
from .logutils import DEFAULT_LOG_CONFIG
from .output import *
from .read import (
read_chrg,
read_coord,
read_orca_engrad,
read_qcschema,
read_structure_from_file,
read_xyz,
)
2 changes: 1 addition & 1 deletion src/dxtb/_src/io/logutils.py
@@ -15,7 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Logging
Logging.
"""

from __future__ import annotations
2 changes: 1 addition & 1 deletion src/dxtb/_src/io/output/header.py
@@ -26,7 +26,7 @@
WIDTH = 70


def get_header() -> str:
def get_header() -> str: # pragma: no cover
logo = [
r" _ _ _ ",
r" | | | | | | ",
5 changes: 3 additions & 2 deletions src/dxtb/_src/io/output/info.py
@@ -26,6 +26,7 @@
import torch

from dxtb.__version__ import __tversion__
from dxtb._src.typing import Any

__all__ = [
"get_mkl_num_threads",
@@ -70,7 +71,7 @@ def get_system_info():
}


def get_pytorch_info():
def get_pytorch_info() -> dict[str, Any]: # pragma: no cover
is_cuda = torch.cuda.is_available()

backends = []
@@ -129,7 +130,7 @@ def get_pytorch_info():
}


def print_system_info(punit=print):
def print_system_info(punit=print) -> None: # pragma: no cover
system_info = get_system_info()["System Information"]
pytorch_info = get_pytorch_info()["PyTorch Information"]
sep = 17