Skip to content

Commit

Permalink
Fix for avoiding serialization error when having datetimes larger than 2263
Browse files Browse the repository at this point in the history
  • Loading branch information
frode-aarstad committed Nov 24, 2023
1 parent 799b6bb commit 7fa7d4c
Show file tree
Hide file tree
Showing 4 changed files with 43 additions and 1 deletion.
1 change: 1 addition & 0 deletions src/ert/config/summary_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ def read_from_file(self, run_path: str, iens: int) -> xr.Dataset:
summary_data.sort(key=lambda x: x[0])
data = [d for _, d in summary_data]
keys = [k for k, _ in summary_data]
time_map = [datetime.isoformat(t) for t in time_map]
ds = xr.Dataset(
{"values": (["name", "time"], data)},
coords={"time": time_map, "name": keys},
Expand Down
4 changes: 3 additions & 1 deletion src/ert/libres_facade.py
Original file line number Diff line number Diff line change
Expand Up @@ -411,7 +411,9 @@ def load_all_summary_data(
summary_keys = ensemble.get_summary_keyset()

try:
df = ensemble.load_responses("summary", tuple(realizations)).to_dataframe()
df = ensemble.load_responses("summary", tuple(realizations)).to_dataframe(
{"time", "name", "realization"}

Check failure on line 415 in src/ert/libres_facade.py

View workflow job for this annotation

GitHub Actions / type-checking (3.11)

Argument 1 to "to_dataframe" of "Dataset" has incompatible type "set[str]"; expected "Sequence[Hashable] | None"
)
except (ValueError, KeyError):
return pd.DataFrame()
df = df.unstack(level="name")
Expand Down
7 changes: 7 additions & 0 deletions src/ert/storage/local_ensemble.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,8 +217,15 @@ def load_responses(
if not input_path.exists():
raise KeyError(f"No response for key {key}, realization: {realization}")
ds = xr.open_dataset(input_path, engine="scipy")
if "time" in ds.coords:
ds.coords["time"] = [
datetime.fromisoformat(str(e.values).split(".", maxsplit=1)[0])
for e in ds.coords["time"]
]

loaded.append(ds)
response = xr.combine_nested(loaded, concat_dim="realization")

assert isinstance(response, xr.Dataset)
return response

Expand Down
32 changes: 32 additions & 0 deletions tests/unit_tests/test_load_forward_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,38 @@ def test_load_inconsistent_time_map_summary(caplog):
assert loaded == 1


@pytest.mark.usefixtures("copy_snake_oil_case_storage")
def test_datetime_2500():
    """
    Test that we are able to work with dates past year 2263 in summary files.

    2262/2263 is the upper bound of pandas/numpy ``datetime64[ns]``, so
    summary timestamps beyond it previously failed during serialization.
    """
    cwd = os.getcwd()

    # Get rid of GEN_DATA as we are only interested in SUMMARY
    with fileinput.input("snake_oil.ert", inplace=True) as fin:
        for line in fin:
            if line.startswith("GEN_DATA"):
                continue
            print(line, end="")

    facade = LibresFacade.from_config_file("snake_oil.ert")
    realisation_number = 0
    # Use a context manager so the storage handle is released even if
    # anything below raises (the original leaked the open storage).
    with open_storage(facade.enspath, mode="w") as storage:
        ensemble = storage.get_ensemble_by_name("default_0")

        # Create a result that is incompatible with the refcase
        run_path = (
            Path("storage") / "snake_oil" / "runpath" / "realization-0" / "iter-0"
        )
        os.chdir(run_path)
        try:
            ecl_sum = run_simulator(100, datetime(2500, 1, 1))
            ecl_sum.fwrite()
        finally:
            # Always restore the working directory so a failure here does
            # not corrupt the CWD for subsequent tests.
            os.chdir(cwd)

        realizations = [False] * facade.get_ensemble_size()
        realizations[realisation_number] = True
        # Assert on the number of loaded realizations so a silent load
        # failure cannot make this test pass vacuously (mirrors the
        # `assert loaded == 1` pattern used by the sibling tests).
        loaded = facade.load_from_forward_model(ensemble, realizations, 0)
        assert loaded == 1


@pytest.mark.usefixtures("copy_snake_oil_case_storage")
def test_load_forward_model(snake_oil_default_storage):
"""
Expand Down

0 comments on commit 7fa7d4c

Please sign in to comment.