Skip to content

Commit

Permalink
Black and mypy
Browse files Browse the repository at this point in the history
  • Loading branch information
JB Lovland committed Jan 8, 2024
1 parent 975c21c commit 32576f2
Show file tree
Hide file tree
Showing 16 changed files with 297 additions and 227 deletions.
1 change: 1 addition & 0 deletions .dmypy.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"pid": 29548, "connection_name": "/var/folders/yj/3l_bv6h5763871t8jl_m7xz40000gp/T/tmpyyp2m3jv/dmypy.sock"}
1 change: 0 additions & 1 deletion examples/s/d/nn/_project/aggregate_surfaces.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,6 @@ def main():
)

for operation in operations:

print(f"Running aggregation: {operation}")

# Call the aggregation machine and create an aggregated surface
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ def export_faultlines():
)

for hname in HORISONNAMES:

# RMS version for reading polygons from a project:
# poly = xtgeo.polygons_from_roxar(project, hname, RMS_POL_CATEGORY)

Expand Down
8 changes: 8 additions & 0 deletions mypy.ini
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
[mypy]
disallow_untyped_defs = True
extra_checks = True
ignore_missing_imports = True
strict_equality = True
warn_redundant_casts = True
warn_unused_configs = True
warn_unused_ignores = True
exclude = ^((tests|docs|examples|bin)/|conftest.py?)

[mypy-numpy.*]
# Applies to Python 3.6:
Expand Down
27 changes: 14 additions & 13 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -30,12 +30,12 @@ classifiers = [
]
dynamic = ["version"]
dependencies = [
"xtgeo>=2.16",
"PyYAML",
"pyarrow",
"fmu-config>=1.1.0",
"pandas",
"numpy",
"pandas",
"pyarrow",
"PyYAML",
"xtgeo>=2.16",
]

[project.urls]
Expand All @@ -49,27 +49,28 @@ dev = [
"black",
"coverage>=4.1",
"flake8",
"hypothesis",
"isort",
"jsonschema",
"hypothesis",
"mypy",
"pydocstyle",
"pylint",
"pytest",
"pytest-cov",
"pydocstyle",
"pytest-runner",
"pytest-mock",
"termcolor",
"pytest-runner",
"pytest",
"rstcheck",
"termcolor",
"types-PyYAML",
]
docs = [
"pydocstyle",
"Sphinx<7",
"autoapi",
"sphinx-rtd-theme",
"pydocstyle",
"sphinx-autodoc-typehints<1.23",
"sphinxcontrib-apidoc",
"sphinx-rtd-theme",
"sphinx-togglebutton",
"Sphinx<7",
"sphinxcontrib-apidoc",
"urllib3<1.27",
]

Expand Down
2 changes: 1 addition & 1 deletion src/fmu/dataio/_definitions.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ class _ValidFormats:
points: dict = field(default_factory=dict)
dictionary: dict = field(default_factory=dict)

def __post_init__(self):
def __post_init__(self) -> None:
self.surface = {"irap_binary": ".gri"}
self.grid = {"hdf": ".hdf", "roff": ".roff"}
self.cube = {"segy": ".segy"}
Expand Down
60 changes: 30 additions & 30 deletions src/fmu/dataio/_design_kw.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import logging
import shlex
import re
from typing import Iterable


_STATUS_FILE_NAME = "DESIGN_KW.OK"
Expand All @@ -17,20 +18,18 @@


def run(
template_file_name,
result_file_name,
log_level,
parameters_file_name="parameters.txt",
):
template_file_name: str,
result_file_name: str,
log_level: logging._Level,
parameters_file_name: str = "parameters.txt",
) -> None:
# Get all key, value pairs
# If FWL key is having multiple entries in the parameters file
# KeyError is raised. This will be logged, and no OK
# file is written

_logger.setLevel(log_level)

valid = True

with open(parameters_file_name) as parameters_file:
parameters = parameters_file.readlines()

Expand All @@ -41,24 +40,22 @@ def run(
with open(template_file_name, "r") as template_file:
template = template_file.readlines()

if valid:
with open(result_file_name, "w") as result_file:
for line in template:
if not is_comment(line):
for key, value in key_vals.items():
line = line.replace(f"<{key}>", str(value))
with open(result_file_name, "w") as result_file:
for line in template:
if not is_comment(line):
for key, value in key_vals.items():
line = line.replace(f"<{key}>", str(value))

if not all_matched(line, template_file_name, template):
valid = False
if not all_matched(line, template_file_name, template):
valid = False

result_file.write(line)
result_file.write(line)

if valid:
with open(_STATUS_FILE_NAME, "w") as status_file:
status_file.write("DESIGN_KW OK\n")
with open(_STATUS_FILE_NAME, "w") as status_file:
status_file.write("DESIGN_KW OK\n")


def all_matched(line, template_file_name, template):
def all_matched(line: str, template_file_name: str, template: list[str]) -> bool:
valid = True
for unmatched in unmatched_templates(line):
if is_perl(template_file_name, template):
Expand All @@ -76,25 +73,24 @@ def all_matched(line, template_file_name, template):
return valid


def is_perl(file_name: str, template: list[str]) -> bool:
    """Return True if the template looks like a Perl script.

    A template counts as Perl when the file name has a ``.pl`` extension
    or the first template line mentions ``perl`` (e.g. a shebang).
    """
    has_pl_extension = file_name.endswith(".pl")
    mentions_perl = "perl" in template[0]
    return bool(has_pl_extension or mentions_perl)


def unmatched_templates(line: str) -> list[str]:
    """Return every unreplaced ``<...>`` template key found in *line*.

    Uses a non-greedy match so adjacent keys on one line are returned
    separately. An empty list means the line is fully substituted.
    """
    # re.findall already yields [] when there is no match, so no
    # separate existence check is needed.
    return re.findall("<.+?>", line)


def is_comment(line: str) -> bool:
    """Return True if *line* is a comment.

    Recognizes Eclipse-style comments (leading ``--``) and
    shell/parameter-file comments (leading ``#``).
    """
    # Equivalent to searching for the anchored patterns ^-- and ^#.
    return line.startswith("--") or line.startswith("#")


def extract_key_value(parameters):
def extract_key_value(parameters: Iterable[str]) -> dict[str, str]:
"""Parses a list of strings, looking for key-value pairs pr. line
separated by whitespace, into a dictionary.
Expand Down Expand Up @@ -132,7 +128,10 @@ def extract_key_value(parameters):
return res


def rm_genkw_prefix(paramsdict, ignoreprefixes="LOG10_"):
def rm_genkw_prefix(
paramsdict: dict[str, object],
ignoreprefixes: str | list[str] | None = "LOG10_",
) -> dict[str, object]:
"""Strip prefixes from keys in a dictionary.
Prefix is any string before a colon. No colon means no prefix.
Expand All @@ -156,7 +155,8 @@ def rm_genkw_prefix(paramsdict, ignoreprefixes="LOG10_"):
ignoreprefixes = []
if isinstance(ignoreprefixes, str):
ignoreprefixes = [ignoreprefixes]
ignoreprefixes = filter(None, ignoreprefixes)

ignoreprefixes = list(filter(None, ignoreprefixes))

for ignore_str in ignoreprefixes:
paramsdict = {
Expand Down
22 changes: 12 additions & 10 deletions src/fmu/dataio/_filedata_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ class _FileDataProvider:
absolute_path_symlink: Optional[str] = field(default="", init=False)
checksum_md5: Optional[str] = field(default="", init=False)

def __post_init__(self):
def __post_init__(self) -> None:
logger.setLevel(level=self.verbosity)

if self.dataio.name:
Expand All @@ -63,10 +63,11 @@ def __post_init__(self):

self.fmu_context = self.dataio._usecontext # may be None!

logger.info("Initialize %s", __class__)
logger.info("Initialize %s", self.__class__)

def derive_filedata(self):
def derive_filedata(self) -> None:
relpath, symrelpath = self._get_path()
assert relpath is not None
relative, absolute = self._derive_filedata_generic(relpath)
self.relative_path = relative
self.absolute_path = absolute
Expand All @@ -78,7 +79,7 @@ def derive_filedata(self):

logger.info("Derived filedata")

def _derive_filedata_generic(self, inrelpath):
def _derive_filedata_generic(self, inrelpath: Path) -> tuple[str, str]:
"""This works with both normal data and symlinks."""
stem = self._get_filestem()

Expand Down Expand Up @@ -116,7 +117,7 @@ def _derive_filedata_generic(self, inrelpath):
logger.info("Derived filedata")
return str(relpath), str(abspath)

def _get_filestem(self):
def _get_filestem(self) -> str:
"""Construct the file"""

if not self.name:
Expand Down Expand Up @@ -153,15 +154,15 @@ def _get_filestem(self):
stem = stem.replace("__", "_")

# treat norwegian special letters
            # BUG(?): What about German letters like "Ü"?
stem = stem.replace("æ", "ae")
stem = stem.replace("ø", "oe")
stem = stem.replace("å", "aa")

return stem

def _get_path(self):
def _get_path(self) -> tuple[Path, Path | None]:
"""Construct and get the folder path(s)."""
dest = None
linkdest = None

dest = self._get_path_generic(mode=self.fmu_context, allow_forcefolder=True)
Expand All @@ -173,7 +174,9 @@ def _get_path(self):

return dest, linkdest

def _get_path_generic(self, mode="realization", allow_forcefolder=True, info=""):
def _get_path_generic(
self, mode: str = "realization", allow_forcefolder: bool = True, info: str = ""
) -> Path:
"""Generically construct and get the folder path and verify."""
dest = None

Expand Down Expand Up @@ -215,8 +218,7 @@ def _get_path_generic(self, mode="realization", allow_forcefolder=True, info="")
warn("Using absolute paths in forcefolder is not recommended!")

# absolute if starts with "/", otherwise relative to outroot
dest = Path(self.dataio.forcefolder)
dest = dest.absolute()
dest = Path(self.dataio.forcefolder).absolute()
self.forcefolder_is_absolute = True

if not allow_forcefolder:
Expand Down
15 changes: 9 additions & 6 deletions src/fmu/dataio/_fmu_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,16 +65,16 @@ class _FmuProvider:
metadata: dict = field(default_factory=dict, init=False)
rootpath: Optional[Path] = field(default=None, init=False)

def __post_init__(self):
def __post_init__(self) -> None:
logger.setLevel(level=self.verbosity)

self.rootpath = Path(self.dataio._rootpath.absolute())

self.rootpath_initial = self.rootpath

logger.info("Initialize %s", __class__)
logger.info("Initialize %s", self.__class__)

def detect_provider(self):
def detect_provider(self) -> None:
"""First order method to detect provider, ans also check fmu_context."""
if self._detect_ert2provider() or self._detect_ert2provider_case_only():
self.provider = "ERT2"
Expand Down Expand Up @@ -188,7 +188,7 @@ def _detect_ert2provider_case_only(self) -> bool:
return True
return False

def get_ert2_information(self):
def get_ert2_information(self) -> None:
"""Retrieve information from an ERT2 run."""
if not self.iter_path:
return
Expand All @@ -197,6 +197,8 @@ def get_ert2_information(self):
parameters_file = self.iter_path / "parameters.txt"
if parameters_file.is_file():
params = _utils.read_parameters_txt(parameters_file)
            # BUG(?): the parameter values can contain None; the loop in the
            # function below does a containment check and will fail on None.
nested_params = _utils.nested_parameters_dict(params)
self.ert2["params"] = nested_params
logger.debug("parameters.txt parsed.")
Expand Down Expand Up @@ -245,13 +247,14 @@ def get_ert2_information(self):

logger.debug("ERT files has been parsed.")

def get_ert2_case_metadata(self):
def get_ert2_case_metadata(self) -> None:
"""Check if metadatafile file for CASE exists, and if so parse metadata.
If file does not exist, still give a proposed file path, but the
self.case_metadata will be {} (empty) and the physical file will not be made.
"""

assert self.rootpath is not None
self.case_metafile = self.rootpath / ERT2_RELATIVE_CASE_METADATA_FILE
self.case_metafile = self.case_metafile.resolve()
if self.case_metafile.exists():
Expand All @@ -263,7 +266,7 @@ def get_ert2_case_metadata(self):
"Case metadata file does not exists as %s", str(self.case_metafile)
)

def generate_ert2_metadata(self):
def generate_ert2_metadata(self) -> None:
"""Construct the metadata FMU block for an ERT2 forward job."""
logger.info("Generate ERT2 metadata...")

Expand Down
Loading

0 comments on commit 32576f2

Please sign in to comment.