From 1c2a72d97926d4c71dbbdd34c7416ab563cd800a Mon Sep 17 00:00:00 2001
From: JB Lovland
Date: Mon, 8 Jan 2024 18:45:55 +0100
Subject: [PATCH] Add ruff as linter

---
 .github/workflows/linting.yml                 |  14 +-
 .../s/d/nn/_project/aggregate_surfaces.py     |  27 ++--
 .../iter-0/any/bin/export_grid3d.py           |   3 +-
 .../iter-0/any/bin/export_volumetables.py     |   3 +-
 .../iter-0/rms/bin/export_a_surface.py        |   3 +-
 .../iter-0/rms/bin/export_faultpolygons.py    |   3 +-
 .../iter-0/rms/bin/export_propmaps.py         |   3 +-
 .../iter-0/rms/bin/export_a_surface.py        |   3 +-
 .../iter-0/rms/bin/export_a_surface.py        |   3 +-
 pyproject.toml                                | 129 +++++------------
 src/fmu/dataio/_design_kw.py                  |   3 +-
 src/fmu/dataio/_filedata_provider.py          |   7 +-
 src/fmu/dataio/_fmu_provider.py               |   2 +-
 src/fmu/dataio/_metadata.py                   |  22 ++-
 src/fmu/dataio/_objectdata_provider.py        |  59 ++++----
 src/fmu/dataio/_oyaml.py                      |  10 +-
 src/fmu/dataio/_utils.py                      |  36 +++--
 src/fmu/dataio/dataio.py                      |  49 +++----
 .../dataio/scripts/create_case_metadata.py    |   6 +-
 tests/conftest.py                             |  30 ++--
 tests/test_schema/test_schema_logic.py        |   1 -
 tests/test_units/test_aggregated_surfaces.py  |   8 +-
 tests/test_units/test_dataio.py               |   7 +-
 tests/test_units/test_dictionary.py           |   7 +-
 tests/test_units/test_ert2_context.py         |   3 +-
 .../test_units/test_filedataprovider_class.py |   1 -
 tests/test_units/test_fmuprovider_class.py    |   3 +-
 tests/test_units/test_initialize_case.py      |   1 -
 tests/test_units/test_metadata_class.py       |   3 +-
 .../test_objectdataprovider_class.py          |   1 -
 .../test_prerealization_surfaces.py           |   3 +-
 tests/test_units/test_rms_context.py          |   6 +-
 tests/test_units/test_table.py                |   6 +-
 tests/test_units/test_utils.py                |   1 -
 34 files changed, 181 insertions(+), 285 deletions(-)

diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml
index a51fa0a32..013c25922 100644
--- a/.github/workflows/linting.yml
+++ b/.github/workflows/linting.yml
@@ -14,10 +14,10 @@ jobs:
       - name: Set up python
         uses: actions/setup-python@v4
       - name: Check black style and linting
-        run: |
-          pip install black
-          black --check src tests
-          pip install flake8
-          flake8 src tests
-          pip install isort
-          isort --profile=black src tests
+        run: pip install ruff
+      - name: Ruff check
+        if: ${{ always() }}
+        run: ruff check .
+      - name: Ruff format
+        if: ${{ always() }}
+        run: ruff format . --check
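
Note on the replacement workflow: the two `if: ${{ always() }}` guards mean the "Ruff check" and "Ruff format" steps run even when a preceding step fails, so lint and format findings are reported together in a single CI run. Locally, the same two checks are `ruff check .` and `ruff format . --check`; ruff's standard `--fix` flag (not used in CI here) applies the safe autofixes that account for most of the mechanical rewrites in this patch.
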
diff --git a/examples/s/d/nn/_project/aggregate_surfaces.py b/examples/s/d/nn/_project/aggregate_surfaces.py
index 3f3ad0aa1..b04969bd7 100644
--- a/examples/s/d/nn/_project/aggregate_surfaces.py
+++ b/examples/s/d/nn/_project/aggregate_surfaces.py
@@ -1,14 +1,13 @@
 """Use fmu-dataio for aggregated surfaces created by an aggregation service."""
+from __future__ import annotations
 
-import logging
 from pathlib import Path
 
+import fmu.dataio
 import numpy as np
 import xtgeo
 import yaml
 
-import fmu.dataio
-
 
 def main():
     """Aggregate one surface across X realizations from the example case and store the
@@ -56,9 +55,6 @@ def main():
     # This is the ID we assign to this set of aggregations
     aggregation_id = "something_very_unique"  # IRL this will usually be a uuid
 
-    # We aggregate these source surfaces and collect results in list of dictionaries
-    aggregations = []
-
     # Initialize an AggregatedData object for this set of aggregations
     exp = fmu.dataio.AggregatedData(
         source_metadata=source_metadata,
@@ -81,8 +77,8 @@ def main():
 
     # ==============================================================================
     # Example 2: We only want the metadata (e.g. we are in a cloud service)
-    metadata = exp.generate_metadata(aggregated_surface, operation=operation)
-    print(f"Example 2: Metadata generated")
+    exp.generate_metadata(aggregated_surface, operation=operation)
+    print("Example 2: Metadata generated")
 
     # At this point, we have the surface, the operation and the metadata
     # These can be collected into e.g. a list or a dictionary for further usage,
@@ -128,8 +124,7 @@ def _parse_yaml(fname):
     """
 
     with open(fname) as stream:
-        data = yaml.safe_load(stream)
-    return data
+        return yaml.safe_load(stream)
 
 
 def _metadata_filename(fname):
@@ -152,7 +147,10 @@ def _get_realization_ids(casepath):
 
 
 def _get_source_surfaces_from_disk(
-    casepath: Path, iter_name: str, realization_ids: list, relative_path: Path
+    casepath: Path,
+    iter_name: str,
+    realization_ids: list,
+    relative_path: Path,
 ):
     """Collect surfaces and metadata from disk.
 
@@ -192,7 +190,10 @@ def _get_source_surfaces_from_disk(
 
 
 def _get_source_surfaces_from_sumo(
-    case_uuid: str, iter_name: str, realization_ids: list, relative_path: Path
+    case_uuid: str,
+    iter_name: str,
+    realization_ids: list,
+    relative_path: Path,
 ):
     """Collect surfaces and metadata from Sumo.
 
@@ -202,7 +203,7 @@ def _get_source_surfaces_from_sumo(
 
     Not implemented.
     """
-    raise NotImplementedError()
+    raise NotImplementedError
 
 
 if __name__ == "__main__":
diff --git a/examples/s/d/nn/xcase/realization-0/iter-0/any/bin/export_grid3d.py b/examples/s/d/nn/xcase/realization-0/iter-0/any/bin/export_grid3d.py
index 737be3f3f..89fdbe8c3 100644
--- a/examples/s/d/nn/xcase/realization-0/iter-0/any/bin/export_grid3d.py
+++ b/examples/s/d/nn/xcase/realization-0/iter-0/any/bin/export_grid3d.py
@@ -2,11 +2,10 @@
 
 import pathlib
 
+import fmu.dataio as dataio
 import xtgeo
 from fmu.config import utilities as ut
 
-import fmu.dataio as dataio
-
 CFG = ut.yaml_load("../../fmuconfig/output/global_variables.yml")
 
 FOLDER = pathlib.Path("../output/grids")
diff --git a/examples/s/d/nn/xcase/realization-0/iter-0/any/bin/export_volumetables.py b/examples/s/d/nn/xcase/realization-0/iter-0/any/bin/export_volumetables.py
index 9ee01f2a1..61f4bc9cc 100644
--- a/examples/s/d/nn/xcase/realization-0/iter-0/any/bin/export_volumetables.py
+++ b/examples/s/d/nn/xcase/realization-0/iter-0/any/bin/export_volumetables.py
@@ -7,11 +7,10 @@
 """
 import pathlib
 
+import fmu.dataio
 import pandas as pd
 from fmu.config import utilities as ut
 
-import fmu.dataio
-
 CFG = ut.yaml_load("../../fmuconfig/output/global_variables.yml")
 
 IN_ROXAR = False
diff --git a/examples/s/d/nn/xcase/realization-0/iter-0/rms/bin/export_a_surface.py b/examples/s/d/nn/xcase/realization-0/iter-0/rms/bin/export_a_surface.py
index 9f83c8ca0..774ff62ca 100644
--- a/examples/s/d/nn/xcase/realization-0/iter-0/rms/bin/export_a_surface.py
+++ b/examples/s/d/nn/xcase/realization-0/iter-0/rms/bin/export_a_surface.py
@@ -6,11 +6,10 @@
 """
 from pathlib import Path
 
+import fmu.dataio as dataio
 import xtgeo
 from fmu.config import utilities as ut
 
-import fmu.dataio as dataio
-
 CFG = ut.yaml_load("../../fmuconfig/output/global_variables.yml")
 
 FILES = {
diff --git a/examples/s/d/nn/xcase/realization-0/iter-0/rms/bin/export_faultpolygons.py b/examples/s/d/nn/xcase/realization-0/iter-0/rms/bin/export_faultpolygons.py
index 8fc91266e..7da78d95c 100644
--- a/examples/s/d/nn/xcase/realization-0/iter-0/rms/bin/export_faultpolygons.py
+++ b/examples/s/d/nn/xcase/realization-0/iter-0/rms/bin/export_faultpolygons.py
@@ -1,11 +1,10 @@
 """Export faultpolygons via dataio with metadata."""
 from pathlib import Path
 
+import fmu.dataio as dataio
 import xtgeo
 from fmu.config import utilities as utils
 
-import fmu.dataio as dataio
-
 CFG = utils.yaml_load("../../fmuconfig/output/global_variables.yml")
 
 HORISONNAMES = CFG["rms"]["horizons"]["TOP_RES"]
diff --git a/examples/s/d/nn/xcase/realization-0/iter-0/rms/bin/export_propmaps.py b/examples/s/d/nn/xcase/realization-0/iter-0/rms/bin/export_propmaps.py
index 948dec2b2..469235e62 100644
--- a/examples/s/d/nn/xcase/realization-0/iter-0/rms/bin/export_propmaps.py
+++ b/examples/s/d/nn/xcase/realization-0/iter-0/rms/bin/export_propmaps.py
@@ -12,11 +12,10 @@
 """
 from pathlib import Path
 
+import fmu.dataio as dataio
 import xtgeo
 from fmu.config import utilities as ut
 
-import fmu.dataio as dataio
-
 CFG = ut.yaml_load("../../fmuconfig/output/global_variables.yml")
 
 # property attributes, the key is "pattern" and the value is generic name to be used:
diff --git a/examples/s/d/nn/xcase/realization-1/iter-0/rms/bin/export_a_surface.py b/examples/s/d/nn/xcase/realization-1/iter-0/rms/bin/export_a_surface.py
index 9f83c8ca0..774ff62ca 100644
--- a/examples/s/d/nn/xcase/realization-1/iter-0/rms/bin/export_a_surface.py
+++ b/examples/s/d/nn/xcase/realization-1/iter-0/rms/bin/export_a_surface.py
@@ -6,11 +6,10 @@
 """
 from pathlib import Path
 
+import fmu.dataio as dataio
 import xtgeo
 from fmu.config import utilities as ut
 
-import fmu.dataio as dataio
-
 CFG = ut.yaml_load("../../fmuconfig/output/global_variables.yml")
 
 FILES = {
diff --git a/examples/s/d/nn/xcase/realization-9/iter-0/rms/bin/export_a_surface.py b/examples/s/d/nn/xcase/realization-9/iter-0/rms/bin/export_a_surface.py
index 9f83c8ca0..774ff62ca 100644
--- a/examples/s/d/nn/xcase/realization-9/iter-0/rms/bin/export_a_surface.py
+++ b/examples/s/d/nn/xcase/realization-9/iter-0/rms/bin/export_a_surface.py
@@ -6,11 +6,10 @@
 """
 from pathlib import Path
 
+import fmu.dataio as dataio
 import xtgeo
 from fmu.config import utilities as ut
 
-import fmu.dataio as dataio
-
 CFG = ut.yaml_load("../../fmuconfig/output/global_variables.yml")
 
 FILES = {
diff --git a/pyproject.toml b/pyproject.toml
index 24f7c566a..211d9a1f5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -30,12 +30,12 @@ classifiers = [
 ]
 dynamic = ["version"]
 dependencies = [
-    "xtgeo>=2.16",
-    "PyYAML",
-    "pyarrow",
     "fmu-config>=1.1.0",
-    "pandas",
     "numpy",
+    "pandas",
+    "pyarrow",
+    "PyYAML",
+    "xtgeo>=2.16",
 ]
 
 [project.urls]
@@ -46,64 +46,33 @@ Documentation = "https://fmu-dataio.readthedocs.io"
 
 [project.optional-dependencies]
 dev = [
-    "black",
     "coverage>=4.1",
-    "flake8",
-    "isort",
-    "jsonschema",
     "hypothesis",
+    "jsonschema",
     "mypy",
-    "pylint",
-    "pytest",
-    "pytest-cov",
     "pydocstyle",
-    "pytest-runner",
+    "pytest-cov",
     "pytest-mock",
-    "termcolor",
+    "pytest-runner",
+    "pytest",
     "rstcheck",
+    "ruff",
+    "termcolor",
 ]
 docs = [
-    "pydocstyle",
-    "Sphinx<7",
     "autoapi",
-    "sphinx-rtd-theme",
+    "pydocstyle",
     "sphinx-autodoc-typehints<1.23",
-    "sphinxcontrib-apidoc",
+    "sphinx-rtd-theme",
     "sphinx-togglebutton",
+    "Sphinx<7",
+    "sphinxcontrib-apidoc",
     "urllib3<1.27",
 ]
 
 [project.entry-points.ert]
 dataio_case_metadata = "fmu.dataio.scripts.create_case_metadata"
-
-[tool.black]
-line-length = 88
-target-version = ['py38', 'py39', 'py310', 'py311']
-include = '\.pyi?$'
-exclude = '''
-/(
-    \.eggs
-  | \.git
-  | \.hg
-  | \.mypy_cache
-  | \.tox
-  | \.venv
-  | _build
-  | buck-out
-  | build
-  | dist
-  # The following are specific to Black, you probably don't want those.
-  | blib2to3
-  | tests/data
-  | profiling
-)/
-'''
-
-[tool.isort]
-profile = "black"
-
-
 [tool.setuptools_scm]
 write_to = "src/fmu/dataio/version.py"
 
@@ -117,54 +86,30 @@ testpaths = "tests"
 markers = ["integration: marks a test as an integration test"]
 xfail_strict = true
 
-[tool.mypy]
-ignore_missing_imports = true
-
 [tool.pydocstyle]
 convention = "google"
 match = '(?!(test_|_)).*\.py'
 
-[tool.pylint.MASTER]
-ignore = ["version.py", "conf.py"]
-
-[tool.pylint.BASIC]
-additional-builtins = ["_x", "_y", "_z", "_tmp1", "_tmp2"]
-good-names = ["logger", "version", "i", "j", "k", "x", "y", "z", "_"]
-variable-rgx = "^[a-z_][_a-z0-9]+((_[a-z0-9]+)*)?$"
-argument-rgx = "^[a-z_][_a-z0-9]+((_[a-z0-9]+)*)?$"
-dummy-variables-rgx = "^_+[a-z0-9]*?$|dummy"
-
-[tool.pylint.TYPECHECK]
-generated-members = ["np.*", "numpy.*", "pd.*", "pandas.*"]
-
-[tool.pylint.FORMAT]
-max-line-length = 88
-max-module-lines = 3000
-
-[tool.pylint.DESIGN]
-max-attributes = 30
-max-args = 20
-max-locals = 30
-max-branches = 15
-max-bool-expr = 8
-
-[tool.pylint.SIMILARITIES]
-min-similarity-lines = 40
-ignore-comments = "yes"
-ignore-docstrings = "no"
-ignore-imports = "yes"
-
-[tool.pylint."MESSAGES CONTROL"]
-disable = """
-    fixme,
-    missing-docstring,
-    protected-access,
-    C0330,
-    useless-object-inheritance,
-    import-outside-toplevel,
-    import-error,
-    no-name-in-module,
-    raise-missing-from,
-    unspecified-encoding,
-    wrong-import-order
-    """
+[tool.ruff]
+ignore = [
+    "C901",
+]
+select = [
+    "C",
+    "E",
+    "F",
+    "I",
+    "PIE",
+    "Q",
+    "RET",
+    "RSE",
+    "SIM",
+    "W",
+    # "C90",
+    # "NPY",
+    # "PD",
+    # "PL",
+]
+line-length = 88
+
+[tool.ruff.lint.isort]
+combine-as-imports = true
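
For readers who don't know ruff's rule codes, the `select` list above enables: C (flake8-comprehensions, plus mccabe, which is why C901 mccabe-complexity must be ignored explicitly), E/W (pycodestyle errors/warnings), F (pyflakes), I (isort), PIE (flake8-pie), Q (flake8-quotes), RET (flake8-return), RSE (flake8-raise) and SIM (flake8-simplify). The commented-out entries — C90, NPY, PD, PL (mccabe, NumPy-specific, pandas-vet, pylint) — read as candidates to enable later. RSE102, for instance, is the rule behind the `raise NotImplementedError()` → `raise NotImplementedError` change in aggregate_surfaces.py above.
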
diff --git a/src/fmu/dataio/_design_kw.py b/src/fmu/dataio/_design_kw.py
index f82f23406..0b83df652 100644
--- a/src/fmu/dataio/_design_kw.py
+++ b/src/fmu/dataio/_design_kw.py
@@ -81,8 +81,7 @@ def unmatched_templates(line):
     bracketpattern = re.compile("<.+?>")
     if bracketpattern.search(line):
         return bracketpattern.findall(line)
-    else:
-        return []
+    return []
 
 
 def is_comment(line):
diff --git a/src/fmu/dataio/_filedata_provider.py b/src/fmu/dataio/_filedata_provider.py
index 8d53dde5b..33c8d81e0 100644
--- a/src/fmu/dataio/_filedata_provider.py
+++ b/src/fmu/dataio/_filedata_provider.py
@@ -155,9 +155,7 @@ def _get_filestem(self):
         # treat norwegian special letters
         stem = stem.replace("æ", "ae")
         stem = stem.replace("ø", "oe")
-        stem = stem.replace("å", "aa")
-
-        return stem
+        return stem.replace("å", "aa")
 
     def _get_path(self):
         """Construct and get the folder path(s)."""
@@ -211,8 +209,7 @@ def _get_path_generic(self, mode="realization", allow_forcefolder=True, info=""):
                     "starting with '/'. This is strongly discouraged and is only "
                     "allowed if classvariable allow_forcefolder_absolute is set to True"
                 )
-            else:
-                warn("Using absolute paths in forcefolder is not recommended!")
+            warn("Using absolute paths in forcefolder is not recommended!")
 
             # absolute if starts with "/", otherwise relative to outroot
             dest = Path(self.dataio.forcefolder)
diff --git a/src/fmu/dataio/_fmu_provider.py b/src/fmu/dataio/_fmu_provider.py
index 1440afc49..f2fad2586 100644
--- a/src/fmu/dataio/_fmu_provider.py
+++ b/src/fmu/dataio/_fmu_provider.py
@@ -325,7 +325,7 @@ def generate_ert2_metadata(self):
             "Generate ERT2 metadata continues, and real ID %s", self.real_id
         )
 
-        mreal = meta["realization"] = dict()
+        mreal = meta["realization"] = {}
         mreal["id"] = self.real_id
         mreal["uuid"] = real_uuid
         mreal["name"] = self.real_name
diff --git a/src/fmu/dataio/_metadata.py b/src/fmu/dataio/_metadata.py
index 3ecf90fbb..988b87839 100644
--- a/src/fmu/dataio/_metadata.py
+++ b/src/fmu/dataio/_metadata.py
@@ -35,7 +35,7 @@ class ConfigurationError(ValueError):
 
 
 def default_meta_dollars() -> dict:
-    dollars = dict()
+    dollars = {}
     dollars["$schema"] = SCHEMA
     dollars["version"] = VERSION
     dollars["source"] = SOURCE
@@ -44,7 +44,7 @@ def default_meta_dollars() -> dict:
 
 def generate_meta_tracklog() -> list:
     """Create the tracklog metadata, which here assumes 'created' only."""
-    meta = list()
+    meta = []
 
     dtime = datetime.datetime.now(timezone.utc).isoformat()
     user = getpass.getuser()
@@ -63,7 +63,8 @@ def generate_meta_masterdata(config: dict) -> Optional[dict]:
             UserWarning,
         )
         return None
-    elif "masterdata" not in config.keys():
+
+    if "masterdata" not in config:
         raise ValueError("A config exists, but 'masterdata' are not present.")
 
     return config["masterdata"]
@@ -107,7 +108,7 @@ def generate_meta_access(config: dict) -> Optional[dict]:
         raise ConfigurationError("The 'access.asset' field not found in the config")
 
     # initialize and populate with defaults from config
-    a_meta = dict()  # shortform
+    a_meta = {}  # shortform
 
     # if there is a config, the 'asset' tag shall be present
     a_meta["asset"] = a_cfg["asset"]
@@ -366,11 +367,10 @@ def _reuse_existing_metadata(self, meta):
             newmeta = meta.copy()
             if self.dataio.reuse_metadata_rule == "preprocessed":
                 return glue_metadata_preprocessed(oldmeta, newmeta)
-            else:
-                raise ValueError(
-                    f"The reuse_metadata_rule {self.dataio.reuse_metadata_rule} is not "
-                    "supported."
-                )
+            raise ValueError(
+                f"The reuse_metadata_rule {self.dataio.reuse_metadata_rule} is not "
+                "supported."
+            )
         return meta
 
     def generate_export_metadata(self, skip_null=True) -> dict:  # TODO! -> skip_null?
@@ -410,6 +410,4 @@ def generate_export_metadata(self, skip_null=True) -> dict:  # TODO! -> skip_nul
         if skip_null:
            meta = drop_nones(meta)
 
-        meta = self._reuse_existing_metadata(meta)
-
-        return meta
+        return self._reuse_existing_metadata(meta)
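
Most of the Python rewrites in the four files above follow two RET (flake8-return) rules: RET504 (no temporary assignment directly before its return) and RET505 (no `else` after a branch that returns). A minimal standalone sketch of both, with illustrative names that are not from this codebase:

    import yaml

    # RET504 flags the throwaway temporary...
    def load_a(stream):
        data = yaml.safe_load(stream)
        return data

    # ...and RET505 flags the superfluous else:
    def load_b(stream):
        if stream is None:
            return {}
        else:
            return yaml.safe_load(stream)

    # Both reduce to the direct form seen throughout this patch:
    def load_c(stream):
        if stream is None:
            return {}
        return yaml.safe_load(stream)
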
diff --git a/src/fmu/dataio/_objectdata_provider.py b/src/fmu/dataio/_objectdata_provider.py
index 36ea6e51f..e40405996 100644
--- a/src/fmu/dataio/_objectdata_provider.py
+++ b/src/fmu/dataio/_objectdata_provider.py
@@ -157,7 +157,7 @@ def _derive_name_stratigraphy(self) -> dict:
         """
         logger.info("Evaluate data:name attribute and stratigraphy")
 
-        result = dict()  # shorter form
+        result = {}  # shorter form
 
         name = self.dataio.name
 
@@ -177,7 +177,7 @@ def _derive_name_stratigraphy(self) -> dict:
         else:
             logger.info("The name in strat...")
             result["name"] = strat[name].get("name", name)
-            result["alias"] = strat[name].get("alias", list())
+            result["alias"] = strat[name].get("alias", [])
             if result["name"] != "name":
                 result["alias"].append(name)  # type: ignore
             result["stratigraphic"] = strat[name].get("stratigraphic", False)
@@ -192,19 +192,18 @@ def _derive_name_stratigraphy(self) -> dict:
     @staticmethod
     def _validate_get_ext(fmt, subtype, validator):
         """Validate that fmt (file format) matches data and return legal extension."""
-        if fmt not in validator.keys():
+        if fmt not in validator:
             raise ConfigurationError(
                 f"The file format {fmt} is not supported.",
                 f"Valid {subtype} formats are: {list(validator.keys())}",
             )
 
-        ext = validator.get(fmt, None)
-        return ext
+        return validator.get(fmt, None)
 
     def _derive_objectdata(self):
         """Derive object spesific data."""
         logger.info("Evaluate data settings for object")
 
-        result = dict()
+        result = {}
 
         if isinstance(self.obj, xtgeo.RegularSurface):
             result["subtype"] = "RegularSurface"
@@ -332,8 +331,8 @@ def _derive_spec_bbox_regularsurface(self):
         logger.info("Derive bbox and specs for RegularSurface")
         regsurf = self.obj
 
-        specs = dict()
-        bbox = dict()
+        specs = {}
+        bbox = {}
 
         xtgeo_specs = regsurf.metadata.required
         for spec, val in xtgeo_specs.items():
@@ -356,8 +355,8 @@ def _derive_spec_bbox_polygons(self):
         logger.info("Derive bbox and specs for Polygons")
         poly = self.obj
 
-        specs = dict()
-        bbox = dict()
+        specs = {}
+        bbox = {}
         # number of polygons:
         specs["npolys"] = np.unique(poly.dataframe[poly.pname].values).size
         xmin, xmax, ymin, ymax, zmin, zmax = poly.get_boundary()
@@ -375,8 +374,8 @@ def _derive_spec_bbox_points(self):
         logger.info("Derive bbox and specs for Points")
         pnts = self.obj
 
-        specs = dict()
-        bbox = dict()
+        specs = {}
+        bbox = {}
 
         if len(pnts.dataframe.columns) > 3:
             attrnames = pnts.dataframe.columns[3:]
@@ -397,8 +396,8 @@ def _derive_spec_bbox_cube(self):
         logger.info("Derive bbox and specs for Cube")
         cube = self.obj
 
-        specs = dict()
-        bbox = dict()
+        specs = {}
+        bbox = {}
 
         xtgeo_specs = cube.metadata.required
         for spec, val in xtgeo_specs.items():
@@ -434,8 +433,8 @@ def _derive_spec_bbox_cpgrid(self):
         logger.info("Derive bbox and specs for Gride (geometry)")
         grid = self.obj
 
-        specs = dict()
-        bbox = dict()
+        specs = {}
+        bbox = {}
 
         xtgeo_specs = grid.metadata.required
         for spec, val in xtgeo_specs.items():
@@ -458,8 +457,8 @@ def _derive_spec_bbox_cpgridproperty(self):
         logger.info("Derive bbox and specs for GridProperty")
         gridprop = self.obj
 
-        specs = dict()
-        bbox = dict()
+        specs = {}
+        bbox = {}
 
         specs["ncol"] = gridprop.ncol
         specs["nrow"] = gridprop.nrow
@@ -471,8 +470,8 @@ def _derive_spec_bbox_dataframe(self):
         logger.info("Process data metadata for DataFrame (tables)")
         dfr = self.obj
 
-        specs = dict()
-        bbox = dict()
+        specs = {}
+        bbox = {}
 
         specs["columns"] = list(dfr.columns)
         specs["size"] = int(dfr.size)
@@ -484,8 +483,8 @@ def _derive_spec_bbox_arrowtable(self):
         logger.info("Process data metadata for arrow (tables)")
         table = self.obj
 
-        specs = dict()
-        bbox = dict()
+        specs = {}
+        bbox = {}
 
         specs["columns"] = list(table.column_names)
         specs["size"] = table.num_columns * table.num_rows
@@ -496,8 +495,8 @@ def _derive_spec_bbox_dict(self):
         """Process/collect the data items for dictionary."""
         logger.info("Process data metadata for dictionary")
 
-        specs = dict()
-        bbox = dict()
+        specs = {}
+        bbox = {}
 
         return specs, bbox
 
@@ -565,11 +564,11 @@ def _derive_timedata_legacy(self):
         """Format input timedata to metadata. legacy version."""
         tdata = self.dataio.timedata
-        tresult = dict()
-        tresult["time"] = list()
+        tresult = {}
+        tresult["time"] = []
 
         if len(tdata) == 1:
             elem = tdata[0]
-            tresult["time"] = list()
+            tresult["time"] = []
             xfield = {"value": dt.strptime(str(elem[0]), "%Y%m%d").isoformat()}
             self.time0 = str(elem[0])
             if len(elem) == 2:
@@ -609,11 +608,11 @@ def _derive_timedata_newformat(self):
         set for those who wants it turned around).
         """
         tdata = self.dataio.timedata
-        tresult = dict()
+        tresult = {}
 
         if len(tdata) == 1:
             elem = tdata[0]
-            tresult["t0"] = dict()
+            tresult["t0"] = {}
             xfield = {"value": dt.strptime(str(elem[0]), "%Y%m%d").isoformat()}
             self.time0 = str(elem[0])
             if len(elem) == 2:
@@ -737,7 +736,7 @@ def derive_metadata(self):
             for key, val in tresult.items():
                 meta[key] = val
         else:
-            meta["time"] = dict()
+            meta["time"] = {}
             for key, val in tresult.items():
                 meta["time"][key] = val
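
The recurring `dict()` → `{}` and `list()` → `[]` substitutions in this file come from C408 (flake8-comprehensions, "unnecessary collection call"): the literal is equivalent and skips a global name lookup at runtime. An illustrative one-liner, not code from this module:

    specs = dict()  # flagged by C408: unnecessary `dict()` call
    specs = {}      # the literal form ruff autofixes to
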
specs["size"] = int(dfr.size) @@ -484,8 +483,8 @@ def _derive_spec_bbox_arrowtable(self): logger.info("Process data metadata for arrow (tables)") table = self.obj - specs = dict() - bbox = dict() + specs = {} + bbox = {} specs["columns"] = list(table.column_names) specs["size"] = table.num_columns * table.num_rows @@ -496,8 +495,8 @@ def _derive_spec_bbox_dict(self): """Process/collect the data items for dictionary.""" logger.info("Process data metadata for dictionary") - specs = dict() - bbox = dict() + specs = {} + bbox = {} return specs, bbox @@ -565,11 +564,11 @@ def _derive_timedata_legacy(self): """Format input timedata to metadata. legacy version.""" tdata = self.dataio.timedata - tresult = dict() - tresult["time"] = list() + tresult = {} + tresult["time"] = [] if len(tdata) == 1: elem = tdata[0] - tresult["time"] = list() + tresult["time"] = [] xfield = {"value": dt.strptime(str(elem[0]), "%Y%m%d").isoformat()} self.time0 = str(elem[0]) if len(elem) == 2: @@ -609,11 +608,11 @@ def _derive_timedata_newformat(self): set for those who wants it turned around). """ tdata = self.dataio.timedata - tresult = dict() + tresult = {} if len(tdata) == 1: elem = tdata[0] - tresult["t0"] = dict() + tresult["t0"] = {} xfield = {"value": dt.strptime(str(elem[0]), "%Y%m%d").isoformat()} self.time0 = str(elem[0]) if len(elem) == 2: @@ -737,7 +736,7 @@ def derive_metadata(self): for key, val in tresult.items(): meta[key] = val else: - meta["time"] = dict() + meta["time"] = {} for key, val in tresult.items(): meta["time"][key] = val diff --git a/src/fmu/dataio/_oyaml.py b/src/fmu/dataio/_oyaml.py index be12debee..90b64c130 100644 --- a/src/fmu/dataio/_oyaml.py +++ b/src/fmu/dataio/_oyaml.py @@ -34,11 +34,11 @@ def map_constructor(loader, node): del map_constructor, map_representer # cf. stackoverflow.com/questions/21695705/dump-an-python-object-as-yaml-file/51261042 -pyyaml.SafeDumper.yaml_representers[ - None -] = lambda self, data: pyyaml.representer.SafeRepresenter.represent_str( - self, - str(data), +pyyaml.SafeDumper.yaml_representers[None] = ( + lambda self, data: pyyaml.representer.SafeRepresenter.represent_str( + self, + str(data), + ) ) # Merge PyYAML namespace into ours. diff --git a/src/fmu/dataio/_utils.py b/src/fmu/dataio/_utils.py index 22c68f87a..7faabb430 100644 --- a/src/fmu/dataio/_utils.py +++ b/src/fmu/dataio/_utils.py @@ -14,6 +14,7 @@ import pandas as pd # type: ignore import yaml + from fmu.config import utilities as ut try: @@ -24,10 +25,11 @@ HAS_PYARROW = True from pyarrow import feather +import contextlib + import xtgeo # type: ignore -from . import _design_kw -from . import _oyaml as oyaml +from . 
diff --git a/src/fmu/dataio/_utils.py b/src/fmu/dataio/_utils.py
index 22c68f87a..7faabb430 100644
--- a/src/fmu/dataio/_utils.py
+++ b/src/fmu/dataio/_utils.py
@@ -14,6 +14,7 @@
 
 import pandas as pd  # type: ignore
 import yaml
+
 from fmu.config import utilities as ut
 
 try:
@@ -24,10 +25,11 @@
     HAS_PYARROW = True
     from pyarrow import feather
 
+import contextlib
+
 import xtgeo  # type: ignore
 
-from . import _design_kw
-from . import _oyaml as oyaml
+from . import _design_kw, _oyaml as oyaml
 
 logger = logging.getLogger(__name__)
 
@@ -138,7 +140,7 @@ def export_file(obj, filename, extension, flag=None):
     elif extension == ".roff" and isinstance(obj, (xtgeo.Grid, xtgeo.GridProperty)):
         obj.to_file(filename, fformat="roff")
     elif extension == ".csv" and isinstance(obj, pd.DataFrame):
-        includeindex = True if flag == "include_index" else False
+        includeindex = flag == "include_index"
         obj.to_csv(filename, index=includeindex)
     elif extension == ".arrow" and HAS_PYARROW and isinstance(obj, pa.Table):
         # comment taken from equinor/webviz_subsurface/smry2arrow.py
@@ -253,7 +255,7 @@ def read_parameters_txt(pfile: Union[Path, str]) -> Dict[str, Union[str, float,
 
 
 def nested_parameters_dict(
-    paramdict: Dict[str, Union[str, int, float]]
+    paramdict: Dict[str, Union[str, int, float]],
 ) -> Dict[str, Union[str, int, float, Dict[str, Union[str, int, float]]]]:
     """Interpret a flat parameters dictionary into a nested dictionary, based on
     presence of colons in keys.
@@ -287,16 +289,14 @@ def check_if_number(value):
     """Check if value (str) looks like a number and return the converted value."""
 
     if value is None:
-        return
+        return None
 
     res = None
     try:
         res = int(value)
     except ValueError:
-        try:
+        with contextlib.suppress(ValueError):
             res = float(value)
-        except ValueError:
-            pass
 
     if res is not None:
         return res
@@ -320,19 +320,19 @@ def get_object_name(obj):
         name = obj.name
     except AttributeError:
         logger.info("display.name could not be set")
-        return
+        return None
 
     if isinstance(obj, xtgeo.RegularSurface) and name == "unknown":
         logger.debug("Got 'unknown' as name from a surface object, returning None")
-        return
+        return None
 
     if isinstance(obj, xtgeo.Polygons) and name == "poly":
         logger.debug("Got 'poly' as name from a polygons object, returning None")
-        return
+        return None
 
     if isinstance(obj, xtgeo.Grid) and name == "noname":
         logger.debug("Got 'noname' as name from grids object, returning None")
-        return
+        return None
 
     return name
 
@@ -387,7 +387,7 @@ def filter_validate_metadata(metadata_in: dict) -> dict:
 
     metadata = deepcopy(metadata_in)
 
-    for key in metadata_in.keys():
+    for key in metadata_in:
         if key not in valids:
             del metadata[key]
 
@@ -401,10 +401,10 @@ def generate_description(desc: Optional[Union[str, list]] = None) -> Union[list,
 
     if isinstance(desc, str):
         return [desc]
-    elif isinstance(desc, list):
+    if isinstance(desc, list):
         return desc
-    else:
-        raise ValueError("Description of wrong type, must be list of strings or string")
+
+    raise ValueError("Description of wrong type, must be list of strings or string")
 
 
 def read_metadata(filename: Union[str, Path]) -> dict:
@@ -428,9 +428,7 @@ def read_metadata(filename: Union[str, Path]) -> dict:
     if not metafilepath.exists():
         raise OSError(f"Cannot find requested metafile: {metafile}")
     with open(metafilepath) as stream:
-        metacfg = yaml.safe_load(stream)
-
-    return metacfg
+        return yaml.safe_load(stream)
 
 
 def glue_metadata_preprocessed(oldmeta, newmeta):
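
The `check_if_number` change above is SIM105 (flake8-simplify): a `try`/`except`/`pass` that only silences one exception type can be written as a `contextlib.suppress` block. A standalone sketch of the equivalence, assuming only ValueError should be swallowed:

    import contextlib

    res = None
    try:                      # pre-SIM105 form
        res = float("x")
    except ValueError:
        pass

    res = None
    with contextlib.suppress(ValueError):  # form adopted in the diff
        res = float("x")
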
diff --git a/src/fmu/dataio/dataio.py b/src/fmu/dataio/dataio.py
index e9ed6d8d3..aa426add3 100644
--- a/src/fmu/dataio/dataio.py
+++ b/src/fmu/dataio/dataio.py
@@ -30,9 +30,10 @@
     filter_validate_metadata,
     generate_description,
     prettyprint_dict,
+    read_metadata as _utils_read_metadata,
+    some_config_from_env,
+    uuid_from_string,
 )
-from ._utils import read_metadata as _utils_read_metadata
-from ._utils import some_config_from_env, uuid_from_string
 
 INSIDE_RMS = detect_inside_rms()
 
@@ -59,10 +60,7 @@ def _validate_variable(key, value, legals) -> bool:
         logger.warning("Unsupported key, raise an error")
         raise ValidationError(f"The input key '{key}' is not supported")
 
-    if isinstance(legals[key], str):
-        valid_type = eval(legals[key])  # pylint: disable=eval-used
-    else:
-        valid_type = legals[key]
+    valid_type = eval(legals[key]) if isinstance(legals[key], str) else legals[key]
 
     try:
         validcheck = valid_type.__args__
@@ -169,13 +167,12 @@ def _check_global_config(
     if msg:
         if "err" in action:
             raise ValueError(msg)
-        else:
-            msg += (
-                "The metadata may become invalid; hence no metadata file will be made, "
-                "but the data item may still be exported. Note: allowing these keys to "
-                "be missing is a temporary solution that may change in future versions!"
-            )
-            warnings.warn(msg, PendingDeprecationWarning)
+        msg += (
+            "The metadata may become invalid; hence no metadata file will be made, "
+            "but the data item may still be exported. Note: allowing these keys to "
+            "be missing is a temporary solution that may change in future versions!"
+        )
+        warnings.warn(msg, PendingDeprecationWarning)
 
     return False
 
@@ -244,7 +241,7 @@ def _content_validate(name, fields):
     replace_deprecated = {}
 
     for key, dtype in fields.items():
-        if key in valid.keys():
+        if key in valid:
             wanted_type = valid[key]
             if not isinstance(dtype, wanted_type):
                 raise ValidationError(
@@ -280,10 +277,10 @@ def _content_validate(name, fields):
     logger.info("rlist is %s", rlist)
     logger.info("fields is %s", fields)
     rkey, status = rlist.pop()
-    logger.info("rkey not in fields.keys(): %s", str(rkey not in fields.keys()))
+    logger.info("rkey not in fields.keys(): %s", str(rkey not in fields))
     logger.info("rkey: %s", rkey)
     logger.info("fields.keys(): %s", str(fields.keys()))
-    if rkey not in fields.keys() and status is True:
+    if rkey not in fields and status is True:
         raise ValidationError(
             f"The subkey <{rkey}> is required for content <{name}> ",
             "but is not found",
@@ -698,10 +695,10 @@ def _update_check_settings(self, newsettings: dict) -> None:
         # derive legal input from dataclass signature
         annots = getattr(self, "__annotations__", {})
         legals = {key: val for key, val in annots.items() if not key.startswith("_")}
-        if "config" in legals.keys():
+        if "config" in legals:
             del legals["config"]  # config cannot be updated
 
-        if "config" in newsettings.keys():
+        if "config" in newsettings:
             raise ValueError("Cannot have 'config' outside instance initialization")
 
         for setting, value in newsettings.items():
@@ -722,7 +719,7 @@ def _update_globalconfig_from_settings(self):
 
         if self.access_ssdl:
             if "ssdl" not in self.config["access"]:
-                newglobals["access"]["ssdl"] = dict()
+                newglobals["access"]["ssdl"] = {}
 
             newglobals["access"]["ssdl"] = deepcopy(self.access_ssdl)
 
@@ -923,8 +920,7 @@ def export(self, obj, return_symlink=False, **kwargs) -> str:
 
         if return_symlink and outfile_target:
             return str(outfile_target)
-        else:
-            return str(outfile)
+        return str(outfile)
 
 
 # ######################################################################################
@@ -1091,13 +1087,13 @@ def generate_metadata(
         # only asset, not ssdl
         access = _metadata.generate_meta_access(self.config)
 
-        meta["access"] = dict()
+        meta["access"] = {}
         meta["access"]["asset"] = access["asset"]
 
-        meta["fmu"] = dict()
+        meta["fmu"] = {}
         meta["fmu"]["model"] = self.config["model"]
 
-        mcase = meta["fmu"]["case"] = dict()
+        mcase = meta["fmu"]["case"] = {}
         mcase["name"] = self.casename
         mcase["uuid"] = str(uuid.uuid4())
@@ -1295,8 +1291,7 @@ def _construct_filename(self, template: dict) -> Tuple[Path, Path]:
                     f"The given casepath {casepath} does not exist. "
                     "It must exist in advance!"
                 )
-            else:
-                abspath = str(casepath / relpath)
+            abspath = str(casepath / relpath)
 
         relpath = relpath.replace(realiname + "/", "")
         relpath = Path(relpath)
@@ -1353,7 +1348,7 @@ def _generate_aggrd_metadata(
         # fmu.realization shall not be used
         del template["fmu"]["realization"]
 
-        template["fmu"]["aggregation"] = dict()
+        template["fmu"]["aggregation"] = {}
         template["fmu"]["aggregation"]["operation"] = self.operation
         template["fmu"]["aggregation"]["realization_ids"] = real_ids
         template["fmu"]["aggregation"]["id"] = self.aggregation_id
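
The import rewrite at the top of dataio.py ties back to `combine-as-imports = true` under `[tool.ruff.lint.isort]`: without it, ruff's isort rules keep `as` aliases such as `read_metadata as _utils_read_metadata` in their own statement; with it, they fold into the single parenthesized `from ._utils import (...)` list.
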
exist. " "It must exist in advance!" ) - else: - abspath = str(casepath / relpath) + abspath = str(casepath / relpath) relpath = relpath.replace(realiname + "/", "") relpath = Path(relpath) @@ -1353,7 +1348,7 @@ def _generate_aggrd_metadata( # fmu.realization shall not be used del template["fmu"]["realization"] - template["fmu"]["aggregation"] = dict() + template["fmu"]["aggregation"] = {} template["fmu"]["aggregation"]["operation"] = self.operation template["fmu"]["aggregation"]["realization_ids"] = real_ids template["fmu"]["aggregation"]["id"] = self.aggregation_id diff --git a/src/fmu/dataio/scripts/create_case_metadata.py b/src/fmu/dataio/scripts/create_case_metadata.py index 63af3b955..38cce4bab 100644 --- a/src/fmu/dataio/scripts/create_case_metadata.py +++ b/src/fmu/dataio/scripts/create_case_metadata.py @@ -122,7 +122,7 @@ def register_on_sumo(args, case_metadata_path) -> str: logger.info("Registering case on Sumo (%s)", env) else: logger.info("Sumo registration has been deactivated through arguments") - return + return None # lazy loading of Sumo dependencies from fmu.sumo.uploader import CaseOnDisk, SumoConnection @@ -146,9 +146,7 @@ def _parse_yaml(path): """Parse the global variables, return as dict""" with open(path) as stream: - data = yaml.safe_load(stream) - - return data + return yaml.safe_load(stream) def check_arguments(args): diff --git a/tests/conftest.py b/tests/conftest.py index e02f9d8ec..161462577 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -192,7 +192,7 @@ def fixture_casesetup(tmp_path_factory): @pytest.fixture(name="caseglobalconfig", scope="module", autouse=True) def fixture_caseglobalconfig(): """Create as global config for case testing.""" - gconfig = dict() + gconfig = {} gconfig["model"] = {"name": "Test", "revision": "21.0.0"} gconfig["masterdata"] = { "smda": { @@ -213,9 +213,9 @@ def fixture_caseglobalconfig(): def fixture_globalconfig1(): """Minimalistic global config variables no. 
diff --git a/tests/conftest.py b/tests/conftest.py
index e02f9d8ec..161462577 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -192,7 +192,7 @@ def fixture_casesetup(tmp_path_factory):
 @pytest.fixture(name="caseglobalconfig", scope="module", autouse=True)
 def fixture_caseglobalconfig():
     """Create as global config for case testing."""
-    gconfig = dict()
+    gconfig = {}
     gconfig["model"] = {"name": "Test", "revision": "21.0.0"}
     gconfig["masterdata"] = {
         "smda": {
@@ -213,9 +213,9 @@ def fixture_caseglobalconfig():
 def fixture_globalconfig1():
     """Minimalistic global config variables no. 1 in ExportData class."""
-    cfg = dict()
+    cfg = {}
 
-    cfg = dict()
+    cfg = {}
     cfg["model"] = {"name": "Test", "revision": "AUTO"}
     cfg["stratigraphy"] = {
         "TopWhatever": {
@@ -343,13 +343,11 @@ def fixture_metadata_examples():
     """
 
     # hard code 0.8.0 for now
-    examples = {
+    return {
         path.name: _isoformat_all_datetimes(_parse_yaml(str(path)))
        for path in ROOTPWD.glob("schema/definitions/0.8.0/examples/*.yml")
     }
 
-    return examples
-
 
 # ======================================================================================
 # Various objects
@@ -509,12 +507,10 @@ def fixture_edataobj3(globalconfig1):
     """Combined globalconfig and settings to instance, for internal testing"""
     # logger.info("Establish edataobj1")
 
-    eobj = ExportData(
+    return ExportData(
         config=globalconfig1, name="summary", content="timeseries", tagname=""
     )
 
-    return eobj
-
 
 @pytest.fixture(name="mock_summary")
 def fixture_summary():
@@ -534,8 +530,7 @@ def fixture_drogon_sum():
         pa.Table: table with summary data
     """
     path = ROOTPWD / "tests/data/drogon/tabular/summary.arrow"
-    table = pa.feather.read_table(path)
-    return table
+    return pa.feather.read_table(path)
 
 
 @pytest.fixture(name="mock_volumes")
@@ -563,8 +558,7 @@ def fixture_drogon_volumes():
         pa.Table: table with summary data
     """
     path = ROOTPWD / "tests/data/drogon/tabular/geogrid--vol.csv"
-    table = pa.Table.from_pandas(pd.read_csv(path))
-    return table
+    return pa.Table.from_pandas(pd.read_csv(path))
 
 
 # ======================================================================================
@@ -575,9 +569,7 @@
 def _parse_json(schema_path):
     """Parse the schema, return JSON"""
     with open(schema_path, encoding="utf-8") as stream:
-        data = json.load(stream)
-
-    return data
+        return json.load(stream)
 
 
 def _parse_yaml(yaml_path):
@@ -585,9 +577,7 @@ def _parse_yaml(yaml_path):
     with open(yaml_path, encoding="utf-8") as stream:
         data = yaml.safe_load(stream)
 
-    data = _isoformat_all_datetimes(data)
-
-    return data
+    return _isoformat_all_datetimes(data)
 
 
 def _isoformat_all_datetimes(indate):
@@ -597,7 +587,7 @@ def _isoformat_all_datetimes(indate):
         return [_isoformat_all_datetimes(i) for i in indate]
 
     if isinstance(indate, dict):
-        return {key: _isoformat_all_datetimes(indate[key]) for key in indate.keys()}
+        return {key: _isoformat_all_datetimes(indate[key]) for key in indate}
 
     if isinstance(indate, (datetime.datetime, datetime.date)):
         return indate.isoformat()
diff --git a/tests/test_schema/test_schema_logic.py b/tests/test_schema/test_schema_logic.py
index e4fe91159..8625a666b 100644
--- a/tests/test_schema/test_schema_logic.py
+++ b/tests/test_schema/test_schema_logic.py
@@ -4,7 +4,6 @@
 
 import jsonschema
 import pytest
-
 from fmu.dataio._definitions import ALLOWED_CONTENTS
 
 # pylint: disable=no-member
diff --git a/tests/test_units/test_aggregated_surfaces.py b/tests/test_units/test_aggregated_surfaces.py
index a4bac6942..421f6401d 100644
--- a/tests/test_units/test_aggregated_surfaces.py
+++ b/tests/test_units/test_aggregated_surfaces.py
@@ -2,11 +2,10 @@
 import logging
 import os
 
-import pytest
-import xtgeo
-
 import fmu.dataio._utils as utils
 import fmu.dataio.dataio as dataio
+import pytest
+import xtgeo
 
 logger = logging.getLogger(__name__)
 
@@ -318,7 +317,8 @@ def test_regsurf_aggregated_diffdata(fmurun_w_casemetadata, rmsglobalconfig, reg
     os.chdir(fmurun_w_casemetadata)
 
     edata = dataio.ExportData(
-        config=rmsglobalconfig, content="depth"  # read from global config
+        config=rmsglobalconfig,
+        content="depth",  # read from global config
     )
 
     aggs = []
diff --git a/tests/test_units/test_dataio.py b/tests/test_units/test_dataio.py
index 9283cc0ed..91f9af812 100644
--- a/tests/test_units/test_dataio.py
+++ b/tests/test_units/test_dataio.py
@@ -7,7 +7,6 @@
 
 import pytest
 import yaml
-
 from fmu.dataio._utils import prettyprint_dict
 from fmu.dataio.dataio import ExportData, ValidationError, read_metadata
 
@@ -338,7 +337,7 @@ def test_global_config_from_env(globalconfig_asfile):
 def test_settings_config_from_env(tmp_path, rmsglobalconfig, regsurf):
     """Testing getting user settings config from a file via env variable."""
 
-    settings = dict()
+    settings = {}
     settings["name"] = "MyFancyName"
     settings["tagname"] = "MyFancyTag"
     settings["workflow"] = "Some work flow"
@@ -362,7 +361,7 @@ def test_settings_and_global_config_from_env(tmp_path, rmsglobalconfig, regsurf):
     """Testing getting user settings config ands global from a env -> file."""
 
-    settings = dict()
+    settings = {}
     settings["name"] = "MyFancyName"
     settings["tagname"] = "MyFancyTag"
     settings["workflow"] = "Some work flow"
@@ -392,7 +391,7 @@ def test_settings_config_from_env_invalid(tmp_path, rmsglobalconfig):
     """Testing getting user settings config from a file but some invalid stuff."""
 
-    settings = dict()
+    settings = {}
     settings["invalid"] = "MyFancyName"
     settings["workflow"] = "Some work flow"
     settings["config"] = rmsglobalconfig
diff --git a/tests/test_units/test_dictionary.py b/tests/test_units/test_dictionary.py
index c37b99934..b1db88045 100644
--- a/tests/test_units/test_dictionary.py
+++ b/tests/test_units/test_dictionary.py
@@ -5,7 +5,6 @@
 
 import pytest
 import yaml
-
 from fmu.dataio import ExportData
 from fmu.dataio._utils import nested_parameters_dict, read_parameters_txt
 
@@ -17,8 +16,7 @@ def _fixture_simple():
     Returns:
         dict: the dictionary created
     """
-    simple = {"this": "is a test"}
-    return simple
+    return {"this": "is a test"}
 
 
 @pytest.fixture(name="json_dict", scope="function")
@@ -34,8 +32,7 @@ def _fixture_json(fmurun_w_casemetadata):
     os.chdir(fmurun_w_casemetadata)
     print(fmurun_w_casemetadata)
     with open(fmurun_w_casemetadata / "parameters.json", encoding="utf-8") as stream:
-        json_dict = json.load(stream)
-    return json_dict
+        return json.load(stream)
 
 
 @pytest.fixture(name="simple_parameters", scope="session")
diff --git a/tests/test_units/test_ert2_context.py b/tests/test_units/test_ert2_context.py
index 1091d2f3f..37d080335 100644
--- a/tests/test_units/test_ert2_context.py
+++ b/tests/test_units/test_ert2_context.py
@@ -6,10 +6,9 @@
 import os
 import sys
 
+import fmu.dataio.dataio as dataio
 import pandas as pd
 import pytest
-
-import fmu.dataio.dataio as dataio
 from fmu.dataio._utils import prettyprint_dict
 
 logger = logging.getLogger(__name__)
diff --git a/tests/test_units/test_filedataprovider_class.py b/tests/test_units/test_filedataprovider_class.py
index 39263bfe6..f252e52f6 100644
--- a/tests/test_units/test_filedataprovider_class.py
+++ b/tests/test_units/test_filedataprovider_class.py
@@ -3,7 +3,6 @@
 from pathlib import Path
 
 import pytest
-
 from fmu.dataio._filedata_provider import _FileDataProvider
 from fmu.dataio._objectdata_provider import _ObjectDataProvider
 
diff --git a/tests/test_units/test_fmuprovider_class.py b/tests/test_units/test_fmuprovider_class.py
index 4f8fb1304..b450c966c 100644
--- a/tests/test_units/test_fmuprovider_class.py
+++ b/tests/test_units/test_fmuprovider_class.py
@@ -1,9 +1,8 @@
 """Test the _MetaData class from the _metadata.py module"""
 import os
 
-import pytest
-
 import fmu.dataio as dio
+import pytest
 from fmu.dataio._fmu_provider import RESTART_PATH_ENVNAME, _FmuProvider, _get_folderlist
 
 FOLDERTREE = "scratch/myfield/case/realization-13/iter-2"
diff --git a/tests/test_units/test_initialize_case.py b/tests/test_units/test_initialize_case.py
index a74a2a1d5..f3c15656e 100644
--- a/tests/test_units/test_initialize_case.py
+++ b/tests/test_units/test_initialize_case.py
@@ -8,7 +8,6 @@
 
 import pytest
 import yaml
-
 from fmu.dataio import InitializeCase
 from fmu.dataio._utils import prettyprint_dict
 
diff --git a/tests/test_units/test_metadata_class.py b/tests/test_units/test_metadata_class.py
index 25da3f702..2da091a38 100644
--- a/tests/test_units/test_metadata_class.py
+++ b/tests/test_units/test_metadata_class.py
@@ -2,10 +2,9 @@
 import logging
 from copy import deepcopy
 
+import fmu.dataio as dio
 import pytest
 from dateutil.parser import isoparse
-
-import fmu.dataio as dio
 from fmu.dataio._metadata import SCHEMA, SOURCE, VERSION, ConfigurationError, _MetaData
 from fmu.dataio._utils import prettyprint_dict
 
diff --git a/tests/test_units/test_objectdataprovider_class.py b/tests/test_units/test_objectdataprovider_class.py
index 82cec78d0..bcd0eed05 100644
--- a/tests/test_units/test_objectdataprovider_class.py
+++ b/tests/test_units/test_objectdataprovider_class.py
@@ -1,6 +1,5 @@
 """Test the _ObjectData class from the _objectdata.py module"""
 import pytest
-
 from fmu.dataio._definitions import _ValidFormats
 from fmu.dataio._objectdata_provider import ConfigurationError, _ObjectDataProvider
 
diff --git a/tests/test_units/test_prerealization_surfaces.py b/tests/test_units/test_prerealization_surfaces.py
index 5ddbc6679..8c429ff90 100644
--- a/tests/test_units/test_prerealization_surfaces.py
+++ b/tests/test_units/test_prerealization_surfaces.py
@@ -13,10 +13,9 @@
 import os
 from pathlib import Path
 
+import fmu.dataio.dataio as dataio
 import pytest
 from conftest import inside_rms
-
-import fmu.dataio.dataio as dataio
 from fmu.dataio import _utils as utils
 
 logger = logging.getLogger(__name__)
diff --git a/tests/test_units/test_rms_context.py b/tests/test_units/test_rms_context.py
index cc977f666..708f3ad46 100644
--- a/tests/test_units/test_rms_context.py
+++ b/tests/test_units/test_rms_context.py
@@ -6,11 +6,10 @@
 import logging
 import os
 
+import fmu.dataio.dataio as dataio
 import pandas as pd
 import pytest
 from conftest import inside_rms
-
-import fmu.dataio.dataio as dataio
 from fmu.dataio._utils import prettyprint_dict
 from fmu.dataio.dataio import ValidationError
 
@@ -28,7 +27,8 @@ def test_regsurf_generate_metadata(rmssetup, rmsglobalconfig, regsurf):
     logger.debug(prettyprint_dict(rmsglobalconfig["access"]))
 
     edata = dataio.ExportData(
-        config=rmsglobalconfig, content="depth"  # read from global config
+        config=rmsglobalconfig,
+        content="depth",  # read from global config
     )
 
     logger.info("Inside RMS status now %s", dataio.ExportData._inside_rms)
diff --git a/tests/test_units/test_table.py b/tests/test_units/test_table.py
index 691ed0809..5d4f05bdb 100644
--- a/tests/test_units/test_table.py
+++ b/tests/test_units/test_table.py
@@ -5,7 +5,6 @@
 import pyarrow as pa
 import pytest
 from fmu.config.utilities import yaml_load
-
 from fmu.dataio import ExportData
 from fmu.dataio._objectdata_provider import _ObjectDataProvider
 
@@ -45,10 +44,7 @@ def assert_correct_table_index(dict_input, answer):
         answer (list): expected answer
     """
     index_name = "table_index"
-    if isinstance(dict_input, dict):
-        meta = dict_input
-    else:
-        meta = _read_dict(dict_input)
+    meta = dict_input if isinstance(dict_input, dict) else _read_dict(dict_input)
 
     index = meta["data"][index_name]
     assert_list_and_answer(index, answer, index)
diff --git a/tests/test_units/test_utils.py b/tests/test_units/test_utils.py
index bb77702e0..b5fa80302 100644
--- a/tests/test_units/test_utils.py
+++ b/tests/test_units/test_utils.py
@@ -1,7 +1,6 @@
 """Test the utils module"""
 import pytest
-
 from fmu.dataio import _utils as utils