pyupgrade --py3-plus
JB Lovland committed Jan 8, 2024
1 parent 32576f2 commit 5b0a2f3
Showing 21 changed files with 135 additions and 135 deletions.
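For context: pyupgrade rewrites syntax that is only kept around for Python 2 compatibility into the idiomatic Python 3 form, and --py3-plus sets Python 3 as the minimum supported version. The rewrites visible in this commit are redundant "r" modes in open(), the legacy IOError alias, redundant parentheses around implicitly concatenated f-strings, plus modernised type annotations. A minimal sketch of the first two on a hypothetical helper (not taken from the repository); the typing changes are illustrated further down:

from pathlib import Path

def read_text(path: Path) -> str:
    # before pyupgrade: with open(path, "r") as stream:   -- "r" is already the default mode
    with open(path) as stream:
        data = stream.read()
    if not data:
        # before pyupgrade: raise IOError(...)   -- IOError is only an alias of OSError
        raise OSError(f"empty file: {path}")
    return data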
1 change: 0 additions & 1 deletion .dmypy.json

This file was deleted.

3 changes: 3 additions & 0 deletions .gitignore
@@ -96,3 +96,6 @@ venv.bak/

# setuptools_scm version
src/fmu/dataio/version.py

# mypy
.dmypy.json
8 changes: 4 additions & 4 deletions examples/s/d/nn/_project/aggregate_surfaces.py
@@ -1,12 +1,12 @@
"""Use fmu-dataio for aggregated surfaces created by an aggregation service."""

from pathlib import Path
import logging
from pathlib import Path

import yaml
import numpy as np

import xtgeo
import yaml

import fmu.dataio


@@ -127,7 +127,7 @@ def _parse_yaml(fname):
dict
"""

with open(fname, "r") as stream:
with open(fname) as stream:
data = yaml.safe_load(stream)
return data

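The only functional change in _parse_yaml is dropping the explicit "r": text-mode read is already the default for the built-in open(). A small check of the equivalence against a scratch file (file name and content are made up for the example):

from pathlib import Path

import yaml

Path("scratch.yml").write_text("key: value\n")

with open("scratch.yml") as short_form:          # mode defaults to "r"
    a = yaml.safe_load(short_form)
with open("scratch.yml", "r") as explicit_form:  # identical behaviour
    b = yaml.safe_load(explicit_form)

assert a == b == {"key": "value"}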
@@ -6,10 +6,12 @@
For the file case, CSV files are read from disk. The dataio function is the same.
"""
import pathlib

import pandas as pd
import fmu.dataio
from fmu.config import utilities as ut

import fmu.dataio

CFG = ut.yaml_load("../../fmuconfig/output/global_variables.yml")

IN_ROXAR = False
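The import hunks in this script and in aggregate_surfaces.py regroup imports into the conventional standard-library / third-party / first-party blocks, with import fmu.dataio moved into a group of its own. The commit does not say which tool did the regrouping (pyupgrade itself does not reorder imports, so this part likely came from a separate formatting pass); the resulting layout is simply:

import pathlib                            # standard library

import pandas as pd                       # third-party
from fmu.config import utilities as ut

import fmu.dataio                         # the package this repository provides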
1 change: 0 additions & 1 deletion src/fmu/dataio/__init__.py
@@ -4,7 +4,6 @@
from fmu.dataio.dataio import AggregatedData # noqa # type: ignore
from fmu.dataio.dataio import ExportData # noqa # type: ignore
from fmu.dataio.dataio import InitializeCase # noqa # type: ignore

from fmu.dataio.dataio import read_metadata # noqa

try:
17 changes: 7 additions & 10 deletions src/fmu/dataio/_design_kw.py
@@ -7,10 +7,9 @@

# pylint: disable=logging-fstring-interpolation
import logging
import shlex
import re
from typing import Iterable

import shlex
from typing import Any, Iterable

_STATUS_FILE_NAME = "DESIGN_KW.OK"

@@ -37,7 +36,7 @@ def run(

key_vals.update(rm_genkw_prefix(key_vals))

with open(template_file_name, "r") as template_file:
with open(template_file_name) as template_file:
template = template_file.readlines()

with open(result_file_name, "w") as result_file:
@@ -60,10 +59,8 @@ def all_matched(line: str, template_file_name: str, template: list[str]) -> bool
for unmatched in unmatched_templates(line):
if is_perl(template_file_name, template):
_logger.warning( # pylint: disable=logging-fstring-interpolation
(
f"{unmatched} not found in design matrix, "
f"but this is probably a Perl file"
)
f"{unmatched} not found in design matrix, "
f"but this is probably a Perl file"
)
else:
_logger.error( # pylint: disable=logging-fstring-interpolation
@@ -129,9 +126,9 @@ def extract_key_value(parameters: Iterable[str]) -> dict[str, str]:


def rm_genkw_prefix(
paramsdict: dict[str, object],
paramsdict: dict[str, Any],
ignoreprefixes: str | list[str] | None = "LOG10_",
) -> dict[str, object]:
) -> dict[str, Any]:
"""Strip prefixes from keys in a dictionary.
Prefix is any string before a colon. No colon means no prefix.
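The _logger.warning change above only removes a redundant pair of parentheses: adjacent string literals, f-strings included, are concatenated at compile time, so the logger receives the same single string either way. A sketch of the before/after call shape (logger name and key are invented for the example):

import logging

logging.basicConfig(level=logging.WARNING)
_logger = logging.getLogger("design_kw_sketch")
unmatched = "<SOME_KEY>"

# before: an extra pair of parentheses wrapped the two adjacent f-strings
_logger.warning(
    (
        f"{unmatched} not found in design matrix, "
        f"but this is probably a Perl file"
    )
)

# after: the literals are joined at compile time anyway, so the parentheses go
_logger.warning(
    f"{unmatched} not found in design matrix, "
    f"but this is probably a Perl file"
)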
3 changes: 2 additions & 1 deletion src/fmu/dataio/_filedata_provider.py
@@ -3,6 +3,7 @@
Populate and verify stuff in the 'file' block in fmu (partial excpetion is checksum_md5
as this is convinient to populate later, on demand)
"""
from __future__ import annotations

import logging
from copy import deepcopy
@@ -234,6 +235,6 @@ def _get_path_generic(

# check that destination actually exists if verifyfolder is True
if self.dataio.verifyfolder and not dest.exists():
raise IOError(f"Folder {str(dest)} is not present.")
raise OSError(f"Folder {str(dest)} is not present.")

return dest
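The IOError → OSError swap in _get_path_generic is purely cosmetic: since Python 3.3 (PEP 3151) IOError is an alias of OSError, so both spellings raise and catch the same class, and pyupgrade normalises to the canonical name. A quick check with a made-up folder path:

from pathlib import Path

assert IOError is OSError              # alias since Python 3.3

dest = Path("no/such/folder")
try:
    if not dest.exists():
        raise OSError(f"Folder {dest} is not present.")
except IOError as err:                 # still caught: same class, older name
    print(f"caught {type(err).__name__}: {err}")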
2 changes: 1 addition & 1 deletion src/fmu/dataio/_fmu_provider.py
@@ -237,7 +237,7 @@ def get_ert2_information(self) -> None:
if self.dataio.include_ert2jobs:
jobs_file = self.iter_path / "jobs.json"
if jobs_file.is_file():
with open(jobs_file, "r") as stream:
with open(jobs_file) as stream:
self.ert2["jobs"] = json.load(stream)
logger.debug("jobs.json parsed.")
logger.debug("jobs.json was not found")
2 changes: 1 addition & 1 deletion src/fmu/dataio/_metadata.py
@@ -5,10 +5,10 @@
# https://realpython.com/python-data-classes/#basic-data-classes

import datetime
from datetime import timezone
import getpass
import logging
from dataclasses import dataclass, field
from datetime import timezone
from pathlib import Path
from typing import Any, Optional
from warnings import warn
42 changes: 21 additions & 21 deletions src/fmu/dataio/_objectdata_provider.py
@@ -128,7 +128,7 @@ class _ObjectDataProvider:
# input fields
obj: Any
dataio: Any
meta_existing: Optional[dict] = None
meta_existing: dict | None = None

# result properties; the most important is metadata which IS the 'data' part in
# the resulting metadata. But other variables needed later are also given
@@ -159,7 +159,7 @@ def _derive_name_stratigraphy(self) -> dict:
"""
logger.info("Evaluate data:name attribute and stratigraphy")
result = dict[str, object]()
result = dict[str, Any]()

name = self.dataio.name
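The dict[str, object]() → dict[str, Any]() edits throughout this file change nothing at runtime -- subscripting a builtin generic (Python 3.9+) still just builds an empty dict -- but they do change what a type checker accepts: values typed as object cannot be used without narrowing, while Any switches value checking off. A minimal illustration (mypy-oriented, with hypothetical keys):

from typing import Any

by_object = dict[str, object]()   # runtime: an ordinary empty dict (Python 3.9+)
by_any = dict[str, Any]()

by_object["ncol"] = 3             # storing values is fine either way
by_any["ncol"] = 3

# mypy rejects the first line below (no "+" support on "object" without narrowing),
# while Any disables that check entirely:
# ncol_plus_one = by_object["ncol"] + 1
ncol_plus_one = by_any["ncol"] + 1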

@@ -193,7 +193,7 @@ def _derive_name_stratigraphy(self) -> dict:

@staticmethod
def _validate_get_ext(
fmt: str, subtype: str, validator: dict[str, object]
fmt: str, subtype: str, validator: dict[str, Any]
) -> object | None:
"""Validate that fmt (file format) matches data and return legal extension."""
if fmt not in validator.keys():
@@ -207,7 +207,7 @@ def _validate_get_ext(
def _derive_objectdata(self) -> dict:
"""Derive object spesific data."""
logger.info("Evaluate data settings for object")
result = dict[str, object]()
result = dict[str, Any]()

if isinstance(self.obj, xtgeo.RegularSurface):
result["subtype"] = "RegularSurface"
@@ -391,13 +391,13 @@ def _derive_spec_bbox_polygons(self) -> tuple[dict, dict]:
bbox["zmax"] = float(zmax)
return specs, bbox

def _derive_spec_bbox_points(self) -> tuple[dict[str, object], dict[str, object]]:
def _derive_spec_bbox_points(self) -> tuple[dict[str, Any], dict[str, Any]]:
"""Process/collect the data.spec and data.bbox for Points"""
logger.info("Derive bbox and specs for Points")
pnts = self.obj

specs = dict[str, object]()
bbox = dict[str, object]()
specs = dict[str, Any]()
bbox = dict[str, Any]()

if len(pnts.dataframe.columns) > 3:
attrnames = pnts.dataframe.columns[3:]
@@ -479,8 +479,8 @@ def _derive_spec_bbox_cpgridproperty(self) -> tuple[dict, dict]:
logger.info("Derive bbox and specs for GridProperty")
gridprop = self.obj

specs = dict[str, object]()
bbox = dict[str, object]()
specs = dict[str, Any]()
bbox = dict[str, Any]()

specs["ncol"] = gridprop.ncol
specs["nrow"] = gridprop.nrow
@@ -489,13 +489,13 @@ def _derive_spec_bbox_dataframe(

def _derive_spec_bbox_dataframe(
self,
) -> tuple[dict[str, object], dict[str, object],]:
) -> tuple[dict[str, Any], dict[str, Any],]:
"""Process/collect the data items for DataFrame."""
logger.info("Process data metadata for DataFrame (tables)")
dfr = self.obj

specs = dict[str, object]()
bbox = dict[str, object]()
specs = dict[str, Any]()
bbox = dict[str, Any]()

specs["columns"] = list(dfr.columns)
specs["size"] = int(dfr.size)
@@ -504,20 +504,20 @@ def _derive_spec_bbox_dataframe(

def _derive_spec_bbox_arrowtable(
self,
) -> tuple[dict[str, object], dict[str, object],]:
) -> tuple[dict[str, Any], dict[str, Any],]:
"""Process/collect the data items for Arrow table."""
logger.info("Process data metadata for arrow (tables)")
table = self.obj

specs = dict[str, object]()
bbox = dict[str, object]()
specs = dict[str, Any]()
bbox = dict[str, Any]()

specs["columns"] = list(table.column_names)
specs["size"] = table.num_columns * table.num_rows

return specs, bbox

def _derive_spec_bbox_dict(self) -> tuple[dict[str, object], dict[str, object]]:
def _derive_spec_bbox_dict(self) -> tuple[dict[str, Any], dict[str, Any]]:
"""Process/collect the data items for dictionary."""
logger.info("Process data metadata for dictionary")
return {}, {}
@@ -582,12 +582,12 @@ def _derive_timedata(self) -> dict:
timedata = self._derive_timedata_newformat()
return timedata

def _derive_timedata_legacy(self) -> dict[str, object]:
def _derive_timedata_legacy(self) -> dict[str, Any]:
"""Format input timedata to metadata. legacy version."""
# TODO(JB): Covnert tresult to TypedDict or Dataclass.
tdata = self.dataio.timedata

tresult = dict[str, object]()
tresult = dict[str, Any]()
tresult["time"] = list()
if len(tdata) == 1:
elem = tdata[0]
@@ -621,7 +621,7 @@ def _derive_timedata_legacy(self) -> dict[str, object]:
logger.info("Timedata: time0 is %s while time1 is %s", self.time0, self.time1)
return tresult

def _derive_timedata_newformat(self) -> dict[str, object]:
def _derive_timedata_newformat(self) -> dict[str, Any]:
"""Format input timedata to metadata, new format.
When using two dates, input convention is [[newestdate, "monitor"], [oldestdate,
@@ -631,7 +631,7 @@ def _derive_timedata_newformat(self) -> dict[str, object]:
set for those who wants it turned around).
"""
tdata = self.dataio.timedata
tresult = dict[str, object]()
tresult = dict[str, Any]()

if len(tdata) == 1:
elem = tdata[0]
@@ -687,7 +687,7 @@ def _derive_from_existing(self) -> None:

self.time0, self.time1 = parse_timedata(self.meta_existing["data"])

def _process_content(self) -> Tuple[str, Optional[dict]]:
def _process_content(self) -> tuple[str, dict | None]:
"""Work with the `content` metadata"""

# content == "unset" is not wanted, but in case metadata has been produced while
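The remaining annotation changes in this file -- Optional[dict] → dict | None and Tuple[...] → tuple[...] -- follow PEP 604 and PEP 585. As a hedged note: the X | None spelling inside annotations needs Python 3.10 at runtime unless from __future__ import annotations is in effect (which this commit adds to _filedata_provider.py), and lowercase built-in generics in annotations are handled the same way. A small stand-alone sketch, not the real method:

from __future__ import annotations   # lets the annotations below run on Python 3.7+


def process_content_sketch(meta_existing: dict | None = None) -> tuple[str, dict | None]:
    """Hypothetical stand-in showing the modernised annotation style."""
    if meta_existing is None:
        return "unset", None
    return str(meta_existing.get("content", "unset")), meta_existing


print(process_content_sketch({"content": "depth"}))   # ('depth', {'content': 'depth'})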