Skip to content

Commit

Permalink
Review comments
Browse files Browse the repository at this point in the history
  • Loading branch information
JB Lovland committed Jan 9, 2024
1 parent 2e1eb9b commit a4d0afc
Show file tree
Hide file tree
Showing 5 changed files with 36 additions and 35 deletions.
12 changes: 4 additions & 8 deletions .github/workflows/mypy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,23 +7,19 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.10"]
python-version: ["3.8", "3.10"]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Set up python
uses: actions/setup-python@v4
- name: Cache pip
uses: actions/cache@v3
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }}
restore-keys: |
${{ runner.os }}-pip-

- name: Install dev-env.
run: |
pip install -U pip
pip install ".[dev]"
- name: Mypy
run: mypy .
11 changes: 7 additions & 4 deletions src/fmu/dataio/_design_kw.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,9 @@ def run(
# If FWL key is having multiple entries in the parameters file
# KeyError is raised. This will be logged, and no OK
# file is written

_logger.setLevel(log_level)


with open(parameters_file_name) as parameters_file:
parameters = parameters_file.readlines()

Expand All @@ -40,19 +40,22 @@ def run(
with open(template_file_name) as template_file:
template = template_file.readlines()

valid = True
with open(result_file_name, "w") as result_file:
for line in template:
if not is_comment(line):
for key, value in key_vals.items():
line = line.replace(f"<{key}>", str(value))

if not all_matched(line, template_file_name, template):
pass
valid = False

result_file.write(line)

with open(_STATUS_FILE_NAME, "w") as status_file:
status_file.write("DESIGN_KW OK\n")
if valid:
with open(_STATUS_FILE_NAME, "w") as status_file:
status_file.write("DESIGN_KW OK\n")



def all_matched(line: str, template_file_name: str, template: list[str]) -> bool:
Expand Down
40 changes: 20 additions & 20 deletions src/fmu/dataio/_objectdata_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@
from dataclasses import dataclass, field
from datetime import datetime as dt
from pathlib import Path
from typing import Any, Dict, Final, Optional
from typing import Any, Final, Optional
from warnings import warn

import numpy as np
Expand Down Expand Up @@ -159,7 +159,7 @@ def _derive_name_stratigraphy(self) -> dict:
"""
logger.info("Evaluate data:name attribute and stratigraphy")
result: Dict[str, Any] = {}
result: dict[str, Any] = {}

name = self.dataio.name

Expand Down Expand Up @@ -209,7 +209,7 @@ def _validate_get_ext(
def _derive_objectdata(self) -> dict:
"""Derive object spesific data."""
logger.info("Evaluate data settings for object")
result: Dict[str, Any] = {}
result: dict[str, Any] = {}

if isinstance(self.obj, xtgeo.RegularSurface):
result["subtype"] = "RegularSurface"
Expand Down Expand Up @@ -393,14 +393,14 @@ def _derive_spec_bbox_polygons(self) -> tuple[dict, dict]:
bbox["zmax"] = float(zmax)
return specs, bbox

def _derive_spec_bbox_points(self) -> tuple[Dict[str, Any], Dict[str, Any]]:
def _derive_spec_bbox_points(self) -> tuple[dict[str, Any], dict[str, Any]]:
"""Process/collect the data.spec and data.bbox for Points"""
logger.info("Derive bbox and specs for Points")
pnts = self.obj

specs: Dict[str, Any] = {}
specs: dict[str, Any] = {}

bbox: Dict[str, Any] = {}
bbox: dict[str, Any] = {}

if len(pnts.dataframe.columns) > 3:
attrnames = pnts.dataframe.columns[3:]
Expand Down Expand Up @@ -482,8 +482,8 @@ def _derive_spec_bbox_cpgridproperty(self) -> tuple[dict, dict]:
logger.info("Derive bbox and specs for GridProperty")
gridprop = self.obj

specs: Dict[str, Any] = {}
bbox: Dict[str, Any] = {}
specs: dict[str, Any] = {}
bbox: dict[str, Any] = {}

specs["ncol"] = gridprop.ncol
specs["nrow"] = gridprop.nrow
Expand All @@ -493,15 +493,15 @@ def _derive_spec_bbox_cpgridproperty(self) -> tuple[dict, dict]:
def _derive_spec_bbox_dataframe(
self,
) -> tuple[
Dict[str, Any],
Dict[str, Any],
dict[str, Any],
dict[str, Any],
]:
"""Process/collect the data items for DataFrame."""
logger.info("Process data metadata for DataFrame (tables)")
dfr = self.obj

specs: Dict[str, Any] = {}
bbox: Dict[str, Any] = {}
specs: dict[str, Any] = {}
bbox: dict[str, Any] = {}

specs["columns"] = list(dfr.columns)
specs["size"] = int(dfr.size)
Expand All @@ -511,22 +511,22 @@ def _derive_spec_bbox_dataframe(
def _derive_spec_bbox_arrowtable(
self,
) -> tuple[
Dict[str, Any],
Dict[str, Any],
dict[str, Any],
dict[str, Any],
]:
"""Process/collect the data items for Arrow table."""
logger.info("Process data metadata for arrow (tables)")
table = self.obj

specs: Dict[str, Any] = {}
bbox: Dict[str, Any] = {}
specs: dict[str, Any] = {}
bbox: dict[str, Any] = {}

specs["columns"] = list(table.column_names)
specs["size"] = table.num_columns * table.num_rows

return specs, bbox

def _derive_spec_bbox_dict(self) -> tuple[Dict[str, Any], Dict[str, Any]]:
def _derive_spec_bbox_dict(self) -> tuple[dict[str, Any], dict[str, Any]]:
"""Process/collect the data items for dictionary."""
logger.info("Process data metadata for dictionary")
return {}, {}
Expand Down Expand Up @@ -591,12 +591,12 @@ def _derive_timedata(self) -> dict:
timedata = self._derive_timedata_newformat()
return timedata

def _derive_timedata_legacy(self) -> Dict[str, Any]:
def _derive_timedata_legacy(self) -> dict[str, Any]:
"""Format input timedata to metadata. legacy version."""
# TODO(JB): Convert tresult to TypedDict or Dataclass.
tdata = self.dataio.timedata

tresult: Dict[str, Any] = {}
tresult: dict[str, Any] = {}
tresult["time"] = []
if len(tdata) == 1:
elem = tdata[0]
Expand Down Expand Up @@ -640,7 +640,7 @@ def _derive_timedata_newformat(self) -> dict[str, Any]:
set for those who wants it turned around).
"""
tdata = self.dataio.timedata
tresult: Dict[str, Any] = {}
tresult: dict[str, Any] = {}

if len(tdata) == 1:
elem = tdata[0]
Expand Down
7 changes: 5 additions & 2 deletions src/fmu/dataio/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from copy import deepcopy
from datetime import datetime
from pathlib import Path
from typing import Any, Final
from typing import Any, Final, Literal

import pandas as pd
import yaml
Expand Down Expand Up @@ -83,7 +83,10 @@ def drop_nones(dinput: dict) -> dict:


def export_metadata_file(
yfile: Path, metadata: dict, savefmt: str = "yaml", verbosity: str = "WARNING"
yfile: Path,
metadata: dict,
savefmt: Literal["yaml", "json"] = "yaml",
verbosity: str = "WARNING",
) -> None:
"""Export genericly and ordered to the complementary metadata file."""
logger.setLevel(level=verbosity)
Expand Down
1 change: 0 additions & 1 deletion src/fmu/dataio/dataio.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,6 @@ def _validate_variable(key: str, value: type, legals: dict[str, str | type]) ->
validcheck = valid_type

if "typing." not in str(validcheck):
print(f"{value=}, {validcheck=}, {type(value)=}, {type(validcheck)=}")
if not isinstance(value, validcheck):
logger.warning("Wrong type of value, raise an error")
raise ValidationError(
Expand Down

0 comments on commit a4d0afc

Please sign in to comment.