CLN: Deprecate reuse_metadata_rule argument (#568)
tnatt authored Apr 3, 2024
1 parent f26948d commit d0fcdbb
Showing 3 changed files with 20 additions and 32 deletions.
20 changes: 7 additions & 13 deletions src/fmu/dataio/_metadata.py
@@ -139,10 +139,9 @@ class MetaData:
     def __post_init__(self) -> None:
         logger.info("Initialize _MetaData instance.")

-        # one special case is that obj is a file path, and dataio.reuse_metadata_rule is
-        # active. In this case we read the existing metadata here and reuse parts
-        # according to rule described in string self.reuse_metadata_rule!
-        if isinstance(self.obj, (str, Path)) and self.dataio.reuse_metadata_rule:
+        # one special case is that obj is a file path.
+        # In this case we read the existing metadata here and reuse parts
+        if isinstance(self.obj, (str, Path)) and self.dataio._reuse_metadata:
             logger.info("Partially reuse existing metadata from %s", self.obj)
             self.meta_existing = read_metadata_from_file(self.obj)

@@ -300,15 +299,10 @@ def _populate_meta_xpreprocessed(self) -> None:
             self.meta_xpreprocessed["subfolder"] = self.dataio.subfolder

     def _reuse_existing_metadata(self, meta: dict) -> dict:
-        """Perform a merge procedure if the key `reuse_metadata_rule` is active."""
-        if self.dataio and self.dataio.reuse_metadata_rule:
-            oldmeta = self.meta_existing
-            newmeta = meta.copy()
-            if self.dataio.reuse_metadata_rule == "preprocessed":
-                return glue_metadata_preprocessed(oldmeta, newmeta)
-            raise ValueError(
-                f"The reuse_metadata_rule {self.dataio.reuse_metadata_rule} is not "
-                "supported."
+        """Perform a merge procedure if input is a file i.e. `_reuse_metadata=True`"""
+        if self.dataio._reuse_metadata:
+            return glue_metadata_preprocessed(
+                oldmeta=self.meta_existing, newmeta=meta.copy()
             )
         return meta

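The change above collapses the string-valued rule and its ValueError branch into a single internal boolean. A minimal, runnable sketch of that control flow is shown below; the stand-in merge rule is an assumption for illustration only and does not reproduce the real glue_metadata_preprocessed behaviour.

# Conceptual stand-in for the simplified reuse flow (not fmu-dataio itself).
def reuse_existing_metadata(newmeta: dict, oldmeta: dict, reuse_metadata: bool) -> dict:
    """Merge previously stored metadata into the new metadata when the flag is set."""
    if not reuse_metadata:
        return newmeta
    merged = newmeta.copy()
    for key, value in oldmeta.items():
        merged.setdefault(key, value)  # keep existing keys the new metadata lacks
    return merged


existing = {"_preprocessed": True, "tracklog": ["first export"]}
fresh = {"tracklog": ["second export"], "data": {"content": "depth"}}
print(reuse_existing_metadata(fresh, existing, reuse_metadata=True))
# {'tracklog': ['second export'], 'data': {'content': 'depth'}, '_preprocessed': True}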
27 changes: 12 additions & 15 deletions src/fmu/dataio/dataio.py
@@ -279,8 +279,7 @@ class ExportData:
             "preprocessed" folder instead, and metadata will be partially re-used in
             an ERT model run. If a non-FMU run is detected (e.g. you run from project),
             fmu-dataio will detect that and set actual context to None as fall-back
-            (unless preprocessed is specified). If value is "preprocessed", see also
-            ``reuse_metadata`` key.
+            (unless preprocessed is specified).

         description: A multiline description of the data either as a string or a list
             of strings.
@@ -323,11 +322,6 @@ class ExportData:
             detected automatically from the FMU run. Can be used to override in rare
             cases. If so, numbers must be >= 0

-        reuse_metadata_rule: This input is None or a string describing rule for reusing
-            metadata. Default is None, but if the input is a file string or object with
-            already valid metadata, then it is assumed to be "preprocessed", which
-            merges the metadata after predefined rules.
-
         runpath: TODO! Optional and deprecated. The relative location of the current run
             root. Optional and will in most cases be auto-detected, assuming that FMU
             folder conventions are followed. For an ERT run e.g.
@@ -427,7 +421,7 @@ class ExportData:
     undef_is_zero: bool = False
     parent: str = ""
     realization: int = -999
-    reuse_metadata_rule: Optional[str] = None
+    reuse_metadata_rule: Optional[str] = None  # deprecated
     runpath: Optional[Union[str, Path]] = None
     subfolder: str = ""
     tagname: str = ""
@@ -447,11 +441,18 @@
     _pwd: Path = field(default_factory=Path, init=False)
     _config_is_valid: bool = field(default=True, init=False)
     _fmurun: bool = field(default=False, init=False)
+    _reuse_metadata: bool = field(default=False, init=False)

     # << NB! storing ACTUAL casepath:
     _rootpath: Path = field(default_factory=Path, init=False)

     def __post_init__(self) -> None:
+        if self.reuse_metadata_rule:
+            warn(
+                "The 'reuse_metadata_rule' key is deprecated and has no effect. "
+                "Please remove it from the argument list.",
+                UserWarning,
+            )
         if self.verbosity != "DEPRECATED":
             warn(
                 "Using the 'verbosity' key is now deprecated and will have no "
@@ -627,12 +628,11 @@ def _check_obj_if_file(self, obj: types.Inferrable) -> types.Inferrable:
         """

         if isinstance(obj, (str, Path)):
-            if isinstance(obj, str):
-                obj = Path(obj)
+            obj = Path(obj)
             if not obj.exists():
                 raise ValidationError(f"The file {obj} does not exist.")
-            if not self.reuse_metadata_rule:
-                self.reuse_metadata_rule = "preprocessed"
+
+            self._reuse_metadata = True

             currentmeta = read_metadata(obj)
             if "_preprocessed" not in currentmeta:
@@ -671,9 +671,6 @@ def generate_metadata(
         Examples of such known types are XTGeo objects (e.g. a RegularSurface),
         a Pandas Dataframe, a PyArrow table, etc.

-        If the key ``reuse_metadata_rule`` is applied with legal value, the object may
-        also be a reference to a file with existing metadata which then will be re-used.
-
         Args:
             obj: XTGeo instance, a Pandas Dataframe instance or other supported object.
             compute_md5: If True, compute a MD5 checksum for the exported file.
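Taken together, the dataio.py changes keep the old argument so existing call sites do not break, emit a warning when it is set, and flip a private flag when the exported object turns out to be an existing file. A self-contained sketch of that pattern (a stand-in dataclass, not the real ExportData) looks roughly like this:

# Sketch of the deprecation pattern used above (stand-in class, not ExportData).
import warnings
from dataclasses import dataclass, field
from pathlib import Path
from typing import Optional, Union


@dataclass
class Exporter:
    reuse_metadata_rule: Optional[str] = None  # deprecated, kept for compatibility
    _reuse_metadata: bool = field(default=False, init=False)

    def __post_init__(self) -> None:
        if self.reuse_metadata_rule:
            warnings.warn(
                "The 'reuse_metadata_rule' key is deprecated and has no effect. "
                "Please remove it from the argument list.",
                UserWarning,
            )

    def _check_obj_if_file(self, obj: Union[str, Path]) -> Path:
        """A path-like input implies that existing metadata should be reused."""
        obj = Path(obj)
        if not obj.exists():
            # the real class raises its own ValidationError here
            raise FileNotFoundError(f"The file {obj} does not exist.")
        self._reuse_metadata = True
        return obj


# Passing the deprecated argument still constructs the object but emits a UserWarning:
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    Exporter(reuse_metadata_rule="preprocessed")
assert any(issubclass(w.category, UserWarning) for w in caught)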
5 changes: 1 addition & 4 deletions src/fmu/dataio/providers/objectdata/_base.py
@@ -194,10 +194,7 @@ def _process_content(self) -> tuple[str | dict, dict | None]:
         # content == "unset" is not wanted, but in case metadata has been produced while
         # doing a preprocessing step first, and this step is re-using metadata, the
         # check is not done.
-        if self.dataio._usecontent == "unset" and (
-            self.dataio.reuse_metadata_rule is None
-            or self.dataio.reuse_metadata_rule != "preprocessed"
-        ):
+        if self.dataio._usecontent == "unset" and not self.dataio._reuse_metadata:
             allowed_fields = ", ".join(AllowedContent.model_fields.keys())
             warn(
                 "The <content> is not provided which defaults to 'unset'. "
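The provider change reduces the two-clause rule comparison to a single boolean test: the "unset" content warning is skipped whenever metadata is being reused. A small runnable sketch with stand-in names and a shortened warning text:

# Sketch of the simplified content check (stand-in names, not the real provider).
import warnings


def warn_if_content_unset(usecontent: str, reuse_metadata: bool) -> None:
    """Warn about a missing content only when metadata is not being reused."""
    if usecontent == "unset" and not reuse_metadata:
        warnings.warn(
            "The <content> is not provided which defaults to 'unset'.",
            UserWarning,
        )


warn_if_content_unset("unset", reuse_metadata=True)   # silent: metadata is reused
warn_if_content_unset("unset", reuse_metadata=False)  # emits the UserWarning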
