From 2f421a9c0e81d282ce57bcda5e56c70a6d85f5a6 Mon Sep 17 00:00:00 2001 From: Gediminas Kirsanskas Date: Wed, 13 Dec 2023 09:28:30 +0100 Subject: [PATCH 1/5] Initial implementation of merging LTS event statistics results. --- mikeio1d/res1d.py | 16 ++ mikeio1d/result_reader_writer/__init__.py | 2 + .../result_reader_writer/result_merger.py | 43 +++ tests/test_res1d_lts_events.py | 46 +++- .../DHI.Mike1D.MikeIO.csproj | 9 + util/DHI.Mike1D.MikeIO/DataEntry.cs | 62 ++++- util/DHI.Mike1D.MikeIO/LTSResultEvents.cs | 45 ++++ util/DHI.Mike1D.MikeIO/LTSResultMerger.cs | 250 ++++++++++++++++++ util/DHI.Mike1D.MikeIO/ResultDataCopier.cs | 14 +- .../DHI.Mike1D.MikeIO/ResultDataExtensions.cs | 25 ++ 10 files changed, 499 insertions(+), 13 deletions(-) create mode 100644 mikeio1d/result_reader_writer/result_merger.py create mode 100644 util/DHI.Mike1D.MikeIO/LTSResultEvents.cs create mode 100644 util/DHI.Mike1D.MikeIO/LTSResultMerger.cs create mode 100644 util/DHI.Mike1D.MikeIO/ResultDataExtensions.cs diff --git a/mikeio1d/res1d.py b/mikeio1d/res1d.py index a0fb5f5b..b9d7c14b 100644 --- a/mikeio1d/res1d.py +++ b/mikeio1d/res1d.py @@ -8,6 +8,7 @@ from .result_extractor import ExtractorOutputFileType from .result_network import ResultNetwork from .result_network import ResultCatchment +from .result_reader_writer import ResultMerger from .result_reader_writer import ResultReaderCreator from .result_reader_writer import ResultReaderType from .result_reader_writer import ResultWriter @@ -347,3 +348,18 @@ def to_dfs0(self, file_path, queries=None, time_step_skipping_number=1): def to_txt(self, file_path, queries=None, time_step_skipping_number=1): """Extract to txt file.""" self.extract(file_path, queries, time_step_skipping_number, ExtractorOutputFileType.TXT) + + @staticmethod + def merge(file_names, merged_file_name): + """ + Merges res1d files. + + Parameters + ---------- + file_names : list of str + List of res1d file names to merge. + merged_file_name : str + File name of the res1d file to store the merged data. + """ + result_merger = ResultMerger(file_names) + result_merger.merge(merged_file_name) diff --git a/mikeio1d/result_reader_writer/__init__.py b/mikeio1d/result_reader_writer/__init__.py index 2b03e08c..05b41de5 100644 --- a/mikeio1d/result_reader_writer/__init__.py +++ b/mikeio1d/result_reader_writer/__init__.py @@ -5,3 +5,5 @@ from .result_reader_copier import ResultReaderCopier from .result_reader_query import ResultReaderQuery from .result_writer import ResultWriter + +from .result_merger import ResultMerger diff --git a/mikeio1d/result_reader_writer/result_merger.py b/mikeio1d/result_reader_writer/result_merger.py new file mode 100644 index 00000000..4878d10e --- /dev/null +++ b/mikeio1d/result_reader_writer/result_merger.py @@ -0,0 +1,43 @@ +from System.Collections.Generic import List +from System import String + +from DHI.Mike1D.MikeIO import LTSResultMerger + + +class ResultMerger: + """ + Wrapper class for merging res1d result files. + + Parameters + ---------- + file_names : list of str + List of res1d file names to merge. + + """ + + def __init__(self, file_names): + self.file_names = file_names + self.result_data_merged = None + + def merge(self, merged_file_name): + """ + Merges the data from in file_names to a file + specified by merged_file_name. + + Parameters + ---------- + merged_file_name : str + File name of the res1d file to store the merged data. 
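+
+        Examples
+        --------
+        >>> # Hypothetical file names, for illustration only.
+        >>> merger = ResultMerger(["result_a.res1d", "result_b.res1d"])
+        >>> merger.merge("result_merged.res1d")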
+ """ + if self.result_data_merged is None: + file_names_dotnet = self._get_file_name_dotnet() + self.result_data_merged = LTSResultMerger.Merge(file_names_dotnet) + + self.result_data_merged.Connection.FilePath.Path = merged_file_name + self.result_data_merged.Save() + + def _get_file_name_dotnet(self): + file_names_dotnet = List[String]() + for file_name in self.file_names: + file_names_dotnet.Add(file_name) + return file_names_dotnet diff --git a/tests/test_res1d_lts_events.py b/tests/test_res1d_lts_events.py index 79b1a7cf..40555266 100644 --- a/tests/test_res1d_lts_events.py +++ b/tests/test_res1d_lts_events.py @@ -138,7 +138,10 @@ def test_read_reach(test_file, quantity, reach_id, chainage, expected_max): @pytest.mark.parametrize( "quantity,node_id,expected_max", - [("WaterLevelMaximum", "B4.1320", 17.511), ("WaterLevelMaximum", "B4.1480", 16.957)], + [ + ("WaterLevelMaximum", "B4.1320", 17.511), + ("WaterLevelMaximum", "B4.1480", 16.957), + ], ) def test_read_node(test_file, quantity, node_id, expected_max): data = test_file.query.GetNodeValues(node_id, quantity) @@ -211,3 +214,44 @@ def test_res1d_filter_readall(test_file_path, helpers): # Release the .NET object res1d = None + + +def test_res1d_merging(test_file_path): + # Use the same file twice to create a merged LTS statistics file + file_names = [test_file_path, test_file_path] + merged_file_name = test_file_path.replace(".res1d", ".merged.res1d") + Res1D.merge(file_names, merged_file_name) + + # Read the merged file + res1d = Res1D(merged_file_name) + + # Test one node location for particular values + df_node = res1d.nodes.B4_1320.WaterLevelMaximum.read() + b4_1320_event1 = float(df_node.iloc[0]) + b4_1320_event2 = float(df_node.iloc[1]) + assert b4_1320_event1 == b4_1320_event2 + assert pytest.approx(np.round(b4_1320_event1, 3)) == 17.511 + + df_node_time = res1d.nodes.B4_1320.WaterLevelMaximumTime.read() + b4_1320_time1 = df_node_time.iloc[0] + b4_1320_time2 = df_node_time.iloc[1] + assert (b4_1320_time1 == b4_1320_time2).values[0] + + # Test one reach location for particular values + df_reach = res1d.reaches.B4_1491l1.m_216.DischargeMaximum.read() + b4_1491l1_event1 = float(df_reach.iloc[0]) + b4_1491l1_event2 = float(df_reach.iloc[1]) + assert b4_1491l1_event1 == b4_1491l1_event2 + assert pytest.approx(np.round(b4_1491l1_event1, 3)) == 0.151 + + df_reach_time = res1d.reaches.B4_1491l1.m_216.DischargeMaximumTime.read() + b4_1491l1_time1 = df_reach_time.iloc[0] + b4_1491l1_time2 = df_reach_time.iloc[1] + assert (b4_1491l1_time1 == b4_1491l1_time2).values[0] + + # Validate all merged events. Every event now needs to appear twice. + df = res1d.read_all() + # TODO: Maybe it is possible to vectorize this check. 
+ for col in df: + for i in range(0, len(df[col]), 2): + assert df[col][i] == df[col][i + 1] diff --git a/util/DHI.Mike1D.MikeIO/DHI.Mike1D.MikeIO.csproj b/util/DHI.Mike1D.MikeIO/DHI.Mike1D.MikeIO.csproj index 094ad7ff..21af6e9e 100644 --- a/util/DHI.Mike1D.MikeIO/DHI.Mike1D.MikeIO.csproj +++ b/util/DHI.Mike1D.MikeIO/DHI.Mike1D.MikeIO.csproj @@ -5,6 +5,15 @@ + + ..\..\mikeio1d\bin\DHI.Generic.MikeZero.EUM.dll + + + ..\..\mikeio1d\bin\DHI.corlib.dll + + + ..\..\mikeio1d\bin\DHI.Mike1D.Generic.dll + ..\..\mikeio1d\bin\DHI.Mike1D.ResultDataAccess.dll diff --git a/util/DHI.Mike1D.MikeIO/DataEntry.cs b/util/DHI.Mike1D.MikeIO/DataEntry.cs index 9b61af5d..f2d87fb1 100644 --- a/util/DHI.Mike1D.MikeIO/DataEntry.cs +++ b/util/DHI.Mike1D.MikeIO/DataEntry.cs @@ -1,3 +1,5 @@ +using System; +using DHI.Mike1D.Generic; using DHI.Mike1D.ResultDataAccess; namespace DHI.Mike1D.MikeIO @@ -7,6 +9,8 @@ namespace DHI.Mike1D.MikeIO /// public class DataEntry { + public DataEntryId EntryId { get; set; } + /// public IDataItem DataItem { get; set; } @@ -15,11 +19,67 @@ public class DataEntry /// public int ElementIndex { get; set; } - /// + /// public DataEntry(IDataItem dataItem, int elementIndex) { DataItem = dataItem; ElementIndex = elementIndex; + EntryId = new DataEntryId( + dataItem.Quantity.Id, + dataItem.ItemTypeGroup, + dataItem.NumberWithinGroup, + ElementIndex); + } + + /// + /// Sets value for the data entry at a given time step index. + /// + /// The data item time data is expanded if the index larger than + /// the number of time steps. + /// + /// + public void SetValue(int timeStepIndex, double value) + { + int numberOfTimeSteps = DataItem.TimeData.NumberOfTimeSteps; + if (numberOfTimeSteps <= timeStepIndex) + ExpandTimeData(timeStepIndex - numberOfTimeSteps + 1); + + DataItem.TimeData.SetValue(timeStepIndex, ElementIndex, (float) value); + } + + /// + /// Expands time data by given expansion size. + /// + public void ExpandTimeData(int expansionSize = 1) + { + var elementDeleteValues = new float[DataItem.NumberOfElements]; + for (int i = 0; i < DataItem.NumberOfElements; i++) + elementDeleteValues[i] = (float) Constants.DOUBLE_DELETE_VALUE; + + for (int i = 0; i < expansionSize; i++) + DataItem.TimeData.Add(elementDeleteValues); + } + } + + /// + /// Tuple ID for a DataEntry + /// + public class DataEntryId : Tuple + { + /// + public DataEntryId( + string quantityId, + ItemTypeGroup itemTypeGroup, + int numberWithinGroup, + int elementIndex) : base(quantityId, itemTypeGroup, numberWithinGroup, elementIndex) + { + } + + /// + public DataEntryId( + string quantityId, + DataEntryId entryId) : base(quantityId, entryId.Item2, entryId.Item3, entryId.Item4) + { } } } diff --git a/util/DHI.Mike1D.MikeIO/LTSResultEvents.cs b/util/DHI.Mike1D.MikeIO/LTSResultEvents.cs new file mode 100644 index 00000000..0739af03 --- /dev/null +++ b/util/DHI.Mike1D.MikeIO/LTSResultEvents.cs @@ -0,0 +1,45 @@ +using System.Collections.Generic; + +namespace DHI.Mike1D.MikeIO +{ + /// + /// List of LTS events stored in result files. + /// + public class LTSResultEvents : List + { + /// + /// Sort the event list on the first value of each event + /// + public void SortOnValue() + { + Sort(CompareValue); + } + + /// + /// Sort on first value, and if the same, then on time. + /// + public int CompareValue(LTSResultEvent e1, LTSResultEvent e2) + { + int cvalue = e2.Value.CompareTo(e1.Value); + if (cvalue == 0) + cvalue = e1.Time.CompareTo(e2.Time); + return cvalue; + } + } + + /// + /// LTS event. 
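+  /// An event pairs the value of a quantity (e.g. a water level maximum)
+  /// with the time at which it occurred.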
+ /// + public class LTSResultEvent + { + /// + /// Value of the LTS event. + /// + public double Value; + + /// + /// Time of the LTS event. + /// + public double Time; + } +} diff --git a/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs b/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs new file mode 100644 index 00000000..d42c226d --- /dev/null +++ b/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs @@ -0,0 +1,250 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using DHI.Generic.MikeZero; +using DHI.Mike1D.Generic; +using DHI.Mike1D.ResultDataAccess; + +namespace DHI.Mike1D.MikeIO +{ + /// + /// Class for merging Long Term Statistics (LTS) result files. + /// + public class LTSResultMerger + { + /// + /// Instances of ResultData which will be merged. + /// + private IList _resultDataCollection; + + /// + /// Result data, where the merged results will be stored. + /// + private ResultData _resultData; + + /// + /// Data entries corresponding to the . + /// + private List _dataEntries; + + /// + /// Map from data entry ID to actual data entry. + /// + private Dictionary _mapIdToDataEntry; + + /// + /// Map from data entry ID to list of LTS result events. + /// + private Dictionary _mapIdToResultEvents; + + /// + public LTSResultMerger(IList resultDataCollection) + { + _resultDataCollection = resultDataCollection; + _resultData = _resultDataCollection.First(); + _dataEntries = _resultData.GetAllDataEntries(); + } + + #region Static Merge methods + + /// + /// Merge result files given by their file names + /// + public static ResultData Merge(IList resultFileNames) + { + var resultFilePaths = resultFileNames.Select(name => new FilePath(name)).ToList(); + return Merge(resultFilePaths); + } + + /// + /// Merge result files given by their FilePath specification. + /// + public static ResultData Merge(IList resultFilePaths) + { + var resultData = resultFilePaths.Select(path => LoadFile(path.FullFilePath)).ToList(); + return Merge(resultData); + } + + /// + /// Merge result files given by their ResultData specification. + /// + public static ResultData Merge(IList resultDataCollection) + { + var merger = new LTSResultMerger(resultDataCollection); + return merger.Merge(); + } + + /// + /// Loads a file based on the filename. + /// + private static ResultData LoadFile(string fileName) + { + var res = new ResultData(); + res.Connection = Connection.Create(fileName); + + var diagnostics = new Diagnostics("LTS result merging"); + res.Load(diagnostics); + + return res; + } + + #endregion Static Merge methods + + /// + /// Performs the actual merging of result files. 
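+    /// The merge builds lookup maps, collects the events from all files,
+    /// sorts them, rebuilds the times list and writes the merged events
+    /// back into the result data.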
+ /// + public ResultData Merge() + { + CreateMaps(); + MergeDataEntries(); + SortResultEvents(); + UpdateTimesList(); + UpdateResultData(); + + return _resultData; + } + + #region CreateMaps + + private void CreateMaps() + { + _mapIdToDataEntry = CreateMapIdToDataEntry(_dataEntries); + _mapIdToResultEvents = CreateMapIdToResultEvents(_dataEntries); + } + + private static Dictionary CreateMapIdToDataEntry(List dataEntries) + { + var mapIdToDataEntry = dataEntries.ToDictionary(dataEntry => dataEntry.EntryId); + return mapIdToDataEntry; + } + + private static Dictionary CreateMapIdToResultEvents(List dataEntries) + { + var mapIdToResultEvents = dataEntries.ToDictionary(dataEntry => dataEntry.EntryId, dataEntry => new LTSResultEvents()); + return mapIdToResultEvents; + } + + #endregion CreateMaps + + #region MergeDataEntries + + /// + /// Merges data entries, which means that the full LTS event result list + /// is created from all the specified res1d files. + /// + private void MergeDataEntries() + { + foreach (var resultData in _resultDataCollection) + { + var dataEntries = resultData.GetAllDataEntries(); + var mapIdToDataEntry = CreateMapIdToDataEntry(dataEntries); + foreach (var dataEntry in dataEntries) + MergeDataEntry(dataEntry, mapIdToDataEntry); + } + } + + private void MergeDataEntry(DataEntry dataEntry, Dictionary mapIdToDataEntry) + { + var dataItem = dataEntry.DataItem; + bool isTimeQuantity = dataItem.Quantity.Description.Contains(", Time"); + if (isTimeQuantity) + return; + + var dataEntryTime = GetDataEntryForTimeQuantity(dataEntry, mapIdToDataEntry); + var dataItemTime = dataEntryTime.DataItem; + // TODO: Consider if the case of no Time quantity should be included + if (dataItemTime == null) + return; + + var ltsResultEvents = _mapIdToResultEvents[dataEntry.EntryId]; + int elementIndex = dataEntry.ElementIndex; + for (int j = 0; j < dataItem.NumberOfTimeSteps; j++) + { + + var ltsResultEvent = new LTSResultEvent + { + Value = dataItem.GetValue(j, elementIndex), + Time = dataItemTime.GetValue(j, elementIndex) + }; + ltsResultEvents.Add(ltsResultEvent); + } + } + + private static DataEntry GetDataEntryForTimeQuantity(DataEntry dataEntry, Dictionary mapIdToDataEntry) + { + var quantity = dataEntry.DataItem.Quantity; + var quantityTime = Create(quantity, "Time"); + var entryId = dataEntry.EntryId; + var entryIdTime = new DataEntryId(quantityTime.Id, entryId); + var dataEntryTime = mapIdToDataEntry[entryIdTime]; + return dataEntryTime; + } + + /// + /// Create a quantity with "extra" string added to Id and description. + /// + private static IQuantity Create(IQuantity quantity, string extra, eumItem item = eumItem.eumITimeScale, eumUnit? unit = null) + { + var ex = new ExtraForQuantities(extra); + string id = quantity.Id + ex.ExtraForId; + string description = quantity.Description + ex.ExtraForDescription; + + var extraQuantity = unit == null + ? 
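+          // No explicit unit given: construct the quantity from the EUM item alone.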
new Quantity(id, description, item) + : new Quantity(id, description, item, unit.Value); + + return extraQuantity; + } + + #endregion + + private void SortResultEvents() + { + _mapIdToResultEvents.Values.ToList().ForEach(x => x.SortOnValue()); + } + + #region UpdateTimesList + + private void UpdateTimesList() + { + var numberOfEventsEnumerable = _mapIdToResultEvents.Values.ToList().Select(x => x.Count); + int largestNumberOfEvents = numberOfEventsEnumerable.Max(); + _resultData.TimesList = GetTimesListForEventResults(largestNumberOfEvents); + } + + private static IListDateTimes GetTimesListForEventResults(int largestNumberOfEvents) + { + var timesList = new ListDateTimes(); + var startLabel = new DateTime(100, 1, 1); + for (int i = 0; i < largestNumberOfEvents; i++) + { + var eventLabel = startLabel.AddSeconds(i); + timesList.Add(eventLabel); + } + return timesList; + } + + #endregion UpdateTimesList + + private void UpdateResultData() + { + foreach (var mapIdToResultEvent in _mapIdToResultEvents) + { + var entryId = mapIdToResultEvent.Key; + var ltsResultEvents = mapIdToResultEvent.Value; + if (ltsResultEvents.Count == 0) + continue; + + var dataEntry = _mapIdToDataEntry[entryId]; + var dataEntryTime = GetDataEntryForTimeQuantity(dataEntry, _mapIdToDataEntry); + + for (int i = 0; i < ltsResultEvents.Count; i++) + { + var ltsResultEvent = ltsResultEvents[i]; + dataEntry.SetValue(i, ltsResultEvent.Value); + dataEntryTime.SetValue(i, ltsResultEvent.Time); + } + } + } + } +} diff --git a/util/DHI.Mike1D.MikeIO/ResultDataCopier.cs b/util/DHI.Mike1D.MikeIO/ResultDataCopier.cs index dcbaadc2..687c0069 100644 --- a/util/DHI.Mike1D.MikeIO/ResultDataCopier.cs +++ b/util/DHI.Mike1D.MikeIO/ResultDataCopier.cs @@ -1,6 +1,5 @@ using System; using System.Collections.Generic; -using System.Linq; using System.Runtime.InteropServices; using DHI.Mike1D.ResultDataAccess; @@ -18,7 +17,7 @@ public class ResultDataCopier public ResultData ResultData { get => _resultData; set => _resultData = value; } private ResultData _resultData; - /// + /// public ResultDataCopier(ResultData resultData) { _resultData = resultData; @@ -41,17 +40,10 @@ public void CopyData(IntPtr intPointer) CopyData(intPointer, dataEntries); } - /// - /// Creates a list of all data entries corresponding to all data items. - /// + /// public List GetAllDataEntries() { - var dataEntries = _resultData.DataSets - .SelectMany(dataSet => dataSet.DataItems) - .SelectMany(dataItem => Enumerable.Range(0, dataItem.NumberOfElements) - .Select(elementIndex => new DataEntry(dataItem, elementIndex))) - .ToList(); - return dataEntries; + return _resultData.GetAllDataEntries(); } /// diff --git a/util/DHI.Mike1D.MikeIO/ResultDataExtensions.cs b/util/DHI.Mike1D.MikeIO/ResultDataExtensions.cs new file mode 100644 index 00000000..a7bbe0b7 --- /dev/null +++ b/util/DHI.Mike1D.MikeIO/ResultDataExtensions.cs @@ -0,0 +1,25 @@ +using System.Collections.Generic; +using System.Linq; +using DHI.Mike1D.ResultDataAccess; + +namespace DHI.Mike1D.MikeIO +{ + /// + /// Extension methods for ResultData. + /// + public static class ResultDataExtensions + { + /// + /// Creates a list of all data entries corresponding to all data items. 
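+    /// A data item spanning several elements yields one entry per element index.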
+ /// + public static List GetAllDataEntries(this ResultData resultData) + { + var dataEntries = resultData.DataSets + .SelectMany(dataSet => dataSet.DataItems) + .SelectMany(dataItem => Enumerable.Range(0, dataItem.NumberOfElements) + .Select(elementIndex => new DataEntry(dataItem, elementIndex))) + .ToList(); + return dataEntries; + } + } +} From f88ca035cf25e231d4aa043a48e7170593ddfdec Mon Sep 17 00:00:00 2001 From: Gediminas Kirsanskas Date: Thu, 18 Jan 2024 12:06:44 +0100 Subject: [PATCH 2/5] Initial implementation of merging LTS periodic statistics results. --- tests/test_res1d_lts_chronological.py | 25 +++ tests/test_res1d_lts_events.py | 10 +- tests/test_res1d_network_river.py | 2 +- util/DHI.Mike1D.MikeIO/LTSResultEvents.cs | 30 +++ util/DHI.Mike1D.MikeIO/LTSResultMerger.cs | 171 +++++++++--------- .../LTSResultMergerExtreme.cs | 120 ++++++++++++ .../LTSResultMergerPeriodic.cs | 131 ++++++++++++++ 7 files changed, 397 insertions(+), 92 deletions(-) create mode 100644 util/DHI.Mike1D.MikeIO/LTSResultMergerExtreme.cs create mode 100644 util/DHI.Mike1D.MikeIO/LTSResultMergerPeriodic.cs diff --git a/tests/test_res1d_lts_chronological.py b/tests/test_res1d_lts_chronological.py index fcf0f722..0667b99f 100644 --- a/tests/test_res1d_lts_chronological.py +++ b/tests/test_res1d_lts_chronological.py @@ -249,3 +249,28 @@ def test_global_data_attributes(test_file): actual_max = round(df["DischargeIntegratedMonthlyTotalOutflow"].max(), 3) assert pytest.approx(actual_max) == 5971.352 + + +def test_res1d_merging_same_file(test_file_path): + # Use the same file twice to create a merged LTS statistics file + file_names = [test_file_path, test_file_path] + merged_file_name = test_file_path.replace(".res1d", ".merged.res1d") + Res1D.merge(file_names, merged_file_name) + + # Read the merged file + res1d = Res1D(merged_file_name) + + # Test one reach location for particular values + df_reach = res1d.reaches.B4_1320l1.m_101_251.DischargeIntegratedMonthly.read() + assert pytest.approx(np.round(df_reach.max(), 3)) == 2 * 1215.915 + + df_reach_count = res1d.reaches.B4_1320l1.m_101_251.DischargeIntegratedMonthlyCount.read() + assert pytest.approx(np.round(df_reach_count.max(), 3)) == 2 * 3 + + df_reach_duration = res1d.reaches.B4_1320l1.m_101_251.DischargeIntegratedMonthlyDuration.read() + assert pytest.approx(np.round(df_reach_duration.max(), 3)) == 2 * 10.703 + + res1d_ori = Res1D(test_file_path) + df_ori = res1d_ori.read() + df_merged = res1d.read() + pd.testing.assert_frame_equal(2 * df_ori, df_merged) diff --git a/tests/test_res1d_lts_events.py b/tests/test_res1d_lts_events.py index 40555266..12ec3baa 100644 --- a/tests/test_res1d_lts_events.py +++ b/tests/test_res1d_lts_events.py @@ -216,7 +216,7 @@ def test_res1d_filter_readall(test_file_path, helpers): res1d = None -def test_res1d_merging(test_file_path): +def test_res1d_merging_same_file(test_file_path): # Use the same file twice to create a merged LTS statistics file file_names = [test_file_path, test_file_path] merged_file_name = test_file_path.replace(".res1d", ".merged.res1d") @@ -227,8 +227,8 @@ def test_res1d_merging(test_file_path): # Test one node location for particular values df_node = res1d.nodes.B4_1320.WaterLevelMaximum.read() - b4_1320_event1 = float(df_node.iloc[0]) - b4_1320_event2 = float(df_node.iloc[1]) + b4_1320_event1 = df_node.iloc[0].iloc[0] + b4_1320_event2 = df_node.iloc[1].iloc[0] assert b4_1320_event1 == b4_1320_event2 assert pytest.approx(np.round(b4_1320_event1, 3)) == 17.511 @@ -239,8 +239,8 @@ def 
test_res1d_merging(test_file_path): # Test one reach location for particular values df_reach = res1d.reaches.B4_1491l1.m_216.DischargeMaximum.read() - b4_1491l1_event1 = float(df_reach.iloc[0]) - b4_1491l1_event2 = float(df_reach.iloc[1]) + b4_1491l1_event1 = df_reach.iloc[0].iloc[0] + b4_1491l1_event2 = df_reach.iloc[1].iloc[0] assert b4_1491l1_event1 == b4_1491l1_event2 assert pytest.approx(np.round(b4_1491l1_event1, 3)) == 0.151 diff --git a/tests/test_res1d_network_river.py b/tests/test_res1d_network_river.py index 3374c45d..b491dd66 100644 --- a/tests/test_res1d_network_river.py +++ b/tests/test_res1d_network_river.py @@ -307,7 +307,7 @@ def test_result_quantity_methods(test_file): discharge_in_structure = res1d.structures.W_right.DischargeInStructure df = discharge_in_structure.to_dataframe() - max_value = round(df.max()[0], 3) + max_value = round(df.max().iloc[0], 3) assert pytest.approx(max_value) == 11.018 # Test the calling of methods diff --git a/util/DHI.Mike1D.MikeIO/LTSResultEvents.cs b/util/DHI.Mike1D.MikeIO/LTSResultEvents.cs index 0739af03..25bd0868 100644 --- a/util/DHI.Mike1D.MikeIO/LTSResultEvents.cs +++ b/util/DHI.Mike1D.MikeIO/LTSResultEvents.cs @@ -1,3 +1,4 @@ +using System; using System.Collections.Generic; namespace DHI.Mike1D.MikeIO @@ -25,6 +26,14 @@ public int CompareValue(LTSResultEvent e1, LTSResultEvent e2) cvalue = e1.Time.CompareTo(e2.Time); return cvalue; } + + /// + /// Sort the event list on time stamps. + /// + public void SortOnTimePeriod() + { + Sort((e1, e2) => ((LTSResultEventPeriodic)e1).TimePeriod.CompareTo(((LTSResultEventPeriodic)e2).TimePeriod)); + } } /// @@ -42,4 +51,25 @@ public class LTSResultEvent /// public double Time; } + + /// + /// An LTS periodic event. + /// + public class LTSResultEventPeriodic : LTSResultEvent + { + /// + /// Number of events (Count) in a period. + /// + public int Count; + + /// + /// Duration of events in a period. + /// + public double Duration; + + /// + /// Time period (year or month) represented as DateTime. + /// + public DateTime TimePeriod; + } } diff --git a/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs b/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs index d42c226d..cd14888c 100644 --- a/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs +++ b/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs @@ -10,32 +10,32 @@ namespace DHI.Mike1D.MikeIO /// /// Class for merging Long Term Statistics (LTS) result files. /// - public class LTSResultMerger + public abstract class LTSResultMerger { /// /// Instances of ResultData which will be merged. /// - private IList _resultDataCollection; + protected IList _resultDataCollection; /// /// Result data, where the merged results will be stored. /// - private ResultData _resultData; + protected ResultData _resultData; /// /// Data entries corresponding to the . /// - private List _dataEntries; + protected List _dataEntries; /// /// Map from data entry ID to actual data entry. /// - private Dictionary _mapIdToDataEntry; + protected Dictionary _mapIdToDataEntry; /// /// Map from data entry ID to list of LTS result events. /// - private Dictionary _mapIdToResultEvents; + protected Dictionary _mapIdToResultEvents; /// public LTSResultMerger(IList resultDataCollection) @@ -45,6 +45,29 @@ public LTSResultMerger(IList resultDataCollection) _dataEntries = _resultData.GetAllDataEntries(); } + /// + /// Create particular LTSResultMerger class depending on the result type. 
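+    /// Extreme statistics (LTSEvents) and periodic statistics (LTSAnnual,
+    /// LTSMonthly) require different merging strategies.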
+ /// + public static LTSResultMerger Create(IList resultDataCollection) + { + var resultData = resultDataCollection.FirstOrDefault(); + if (resultData == null) + throw new Exception("Empty result data list provided."); + + var resultType = resultData.ResultType; + switch (resultType) + { + case ResultTypes.LTSEvents: + return new LTSResultMergerExtreme(resultDataCollection); + + case ResultTypes.LTSAnnual: + case ResultTypes.LTSMonthly: + return new LTSResultMergerPeriodic(resultDataCollection); + default: + throw new NotSupportedException($"Not supported result type {resultType}"); + } + } + #region Static Merge methods /// @@ -70,7 +93,7 @@ public static ResultData Merge(IList resultFilePaths) /// public static ResultData Merge(IList resultDataCollection) { - var merger = new LTSResultMerger(resultDataCollection); + var merger = LTSResultMerger.Create(resultDataCollection); return merger.Merge(); } @@ -97,7 +120,8 @@ public ResultData Merge() { CreateMaps(); MergeDataEntries(); - SortResultEvents(); + SortResults(); + ProcessResults(); UpdateTimesList(); UpdateResultData(); @@ -132,58 +156,62 @@ private static Dictionary CreateMapIdToResultEvent /// Merges data entries, which means that the full LTS event result list /// is created from all the specified res1d files. /// - private void MergeDataEntries() + protected void MergeDataEntries() { foreach (var resultData in _resultDataCollection) { var dataEntries = resultData.GetAllDataEntries(); var mapIdToDataEntry = CreateMapIdToDataEntry(dataEntries); foreach (var dataEntry in dataEntries) - MergeDataEntry(dataEntry, mapIdToDataEntry); + MergeDataEntry(dataEntry, mapIdToDataEntry, resultData); } } - private void MergeDataEntry(DataEntry dataEntry, Dictionary mapIdToDataEntry) - { - var dataItem = dataEntry.DataItem; - bool isTimeQuantity = dataItem.Quantity.Description.Contains(", Time"); - if (isTimeQuantity) - return; - - var dataEntryTime = GetDataEntryForTimeQuantity(dataEntry, mapIdToDataEntry); - var dataItemTime = dataEntryTime.DataItem; - // TODO: Consider if the case of no Time quantity should be included - if (dataItemTime == null) - return; - - var ltsResultEvents = _mapIdToResultEvents[dataEntry.EntryId]; - int elementIndex = dataEntry.ElementIndex; - for (int j = 0; j < dataItem.NumberOfTimeSteps; j++) - { + /// + /// Merge in a particular DataEntry. + /// + /// The DataEntry to merge in + /// A map from DataEntryId to DataEntry used for finding DataEntry for derived quantity + /// ResultData where DataEntry comes from + protected abstract void MergeDataEntry( + DataEntry dataEntry, + Dictionary mapIdToDataEntry, + ResultData resultData); - var ltsResultEvent = new LTSResultEvent - { - Value = dataItem.GetValue(j, elementIndex), - Time = dataItemTime.GetValue(j, elementIndex) - }; - ltsResultEvents.Add(ltsResultEvent); - } - } + /// + /// Check if the quantity is a derived LTS quantity. + /// + /// For example, derived LTS quantity is the time of the event + /// and for extreme statistics we have (derived quantity ID on the right): + /// DischargeMaximum - DischargeMaximumTime + /// For periodic statistics we have as an example: + /// DischargeIntegratedMonthly - DischargeIntegratedMonthlyCount + /// DischargeIntegratedMonthly - DischargeIntegratedMonthlyDuration + /// + /// + protected abstract bool IsDerivedQuantity(IQuantity quantity); - private static DataEntry GetDataEntryForTimeQuantity(DataEntry dataEntry, Dictionary mapIdToDataEntry) + /// + /// Get the DataEntry of a derived quantity. 
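+    /// For example, with extra ID "Time" the entry for DischargeMaximum
+    /// resolves to the entry for DischargeMaximumTime.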
+ /// + /// Extra ID string defining the derived quantity + /// DataEntry corresponding to original LTS quantity + /// A map from DataEntryId to DataEntry + /// DataEntry for derived quantity + public static DataEntry GetDataEntryForDerivedQuantity(string extraId, DataEntry dataEntry, Dictionary mapIdToDataEntry) { var quantity = dataEntry.DataItem.Quantity; - var quantityTime = Create(quantity, "Time"); + var quantityTime = Create(quantity, extraId); var entryId = dataEntry.EntryId; - var entryIdTime = new DataEntryId(quantityTime.Id, entryId); - var dataEntryTime = mapIdToDataEntry[entryIdTime]; - return dataEntryTime; + var entryIdDerived = new DataEntryId(quantityTime.Id, entryId); + var dataEntryDerived = mapIdToDataEntry[entryIdDerived]; + return dataEntryDerived; } /// /// Create a quantity with "extra" string added to Id and description. /// - private static IQuantity Create(IQuantity quantity, string extra, eumItem item = eumItem.eumITimeScale, eumUnit? unit = null) + public static IQuantity Create(IQuantity quantity, string extra, eumItem item = eumItem.eumITimeScale, eumUnit? unit = null) { var ex = new ExtraForQuantities(extra); string id = quantity.Id + ex.ExtraForId; @@ -198,53 +226,24 @@ private static IQuantity Create(IQuantity quantity, string extra, eumItem item = #endregion - private void SortResultEvents() - { - _mapIdToResultEvents.Values.ToList().ForEach(x => x.SortOnValue()); - } - - #region UpdateTimesList - - private void UpdateTimesList() - { - var numberOfEventsEnumerable = _mapIdToResultEvents.Values.ToList().Select(x => x.Count); - int largestNumberOfEvents = numberOfEventsEnumerable.Max(); - _resultData.TimesList = GetTimesListForEventResults(largestNumberOfEvents); - } + /// + /// Sort LTSResultEvents on value or time inside + /// + protected abstract void SortResults(); - private static IListDateTimes GetTimesListForEventResults(int largestNumberOfEvents) - { - var timesList = new ListDateTimes(); - var startLabel = new DateTime(100, 1, 1); - for (int i = 0; i < largestNumberOfEvents; i++) - { - var eventLabel = startLabel.AddSeconds(i); - timesList.Add(eventLabel); - } - return timesList; - } + /// + /// Apply processing on LTSResultEvents inside + /// + protected abstract void ProcessResults(); - #endregion UpdateTimesList + /// + /// Create a new ResultData.TimesList for merged + /// + protected abstract void UpdateTimesList(); - private void UpdateResultData() - { - foreach (var mapIdToResultEvent in _mapIdToResultEvents) - { - var entryId = mapIdToResultEvent.Key; - var ltsResultEvents = mapIdToResultEvent.Value; - if (ltsResultEvents.Count == 0) - continue; - - var dataEntry = _mapIdToDataEntry[entryId]; - var dataEntryTime = GetDataEntryForTimeQuantity(dataEntry, _mapIdToDataEntry); - - for (int i = 0; i < ltsResultEvents.Count; i++) - { - var ltsResultEvent = ltsResultEvents[i]; - dataEntry.SetValue(i, ltsResultEvent.Value); - dataEntryTime.SetValue(i, ltsResultEvent.Time); - } - } - } + /// + /// Update with actual merged LTS data. 
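+    /// Implementations write the sorted and processed event lists back into
+    /// the corresponding data items.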
+ /// + protected abstract void UpdateResultData(); } } diff --git a/util/DHI.Mike1D.MikeIO/LTSResultMergerExtreme.cs b/util/DHI.Mike1D.MikeIO/LTSResultMergerExtreme.cs new file mode 100644 index 00000000..0001bb7d --- /dev/null +++ b/util/DHI.Mike1D.MikeIO/LTSResultMergerExtreme.cs @@ -0,0 +1,120 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using DHI.Mike1D.Generic; +using DHI.Mike1D.ResultDataAccess; + +namespace DHI.Mike1D.MikeIO +{ + /// + /// Class for merging Long Term Statistics (LTS) extreme result files. + /// + public class LTSResultMergerExtreme : LTSResultMerger + { + /// + public LTSResultMergerExtreme(IList resultDataCollection) : base(resultDataCollection) + { + } + + #region MergeDataEntries + + /// + protected override void MergeDataEntry( + DataEntry dataEntry, + Dictionary mapIdToDataEntry, + ResultData resultData) + { + var dataItem = dataEntry.DataItem; + bool isDerivedQuantity = IsDerivedQuantity(dataItem.Quantity); + if (isDerivedQuantity) + return; + + var dataEntryTime = GetDataEntryForDerivedQuantity("Time", dataEntry, mapIdToDataEntry); + var dataItemTime = dataEntryTime.DataItem; + // TODO: Consider if the case of no Time quantity should be included + if (dataItemTime == null) + return; + + var ltsResultEvents = _mapIdToResultEvents[dataEntry.EntryId]; + int elementIndex = dataEntry.ElementIndex; + for (int j = 0; j < dataItem.NumberOfTimeSteps; j++) + { + var ltsResultEvent = new LTSResultEvent + { + Value = dataItem.GetValue(j, elementIndex), + Time = dataItemTime.GetValue(j, elementIndex) + }; + ltsResultEvents.Add(ltsResultEvent); + } + } + + /// + protected override bool IsDerivedQuantity(IQuantity quantity) + { + bool isTimeQuantity = quantity.Description.Contains(", Time"); + if (isTimeQuantity) + return true; + + return false; + } + + #endregion + + /// + protected override void SortResults() + { + _mapIdToResultEvents.Values.ToList().ForEach(x => x.SortOnValue()); + } + + /// + protected override void ProcessResults() + { + } + + #region UpdateTimesList + + /// + protected override void UpdateTimesList() + { + var numberOfEventsEnumerable = _mapIdToResultEvents.Values.ToList().Select(x => x.Count); + int largestNumberOfEvents = numberOfEventsEnumerable.Max(); + _resultData.TimesList = GetTimesListForEventResults(largestNumberOfEvents); + } + + private static IListDateTimes GetTimesListForEventResults(int largestNumberOfEvents) + { + var timesList = new ListDateTimes(); + var startLabel = new DateTime(100, 1, 1); + for (int i = 0; i < largestNumberOfEvents; i++) + { + var eventLabel = startLabel.AddSeconds(i); + timesList.Add(eventLabel); + } + return timesList; + } + + #endregion UpdateTimesList + + /// + protected override void UpdateResultData() + { + foreach (var mapIdToResultEvent in _mapIdToResultEvents) + { + var entryId = mapIdToResultEvent.Key; + var ltsResultEvents = mapIdToResultEvent.Value; + if (ltsResultEvents.Count == 0) + continue; + + var dataEntry = _mapIdToDataEntry[entryId]; + var dataEntryTime = GetDataEntryForDerivedQuantity("Time", dataEntry, _mapIdToDataEntry); + + for (int i = 0; i < ltsResultEvents.Count; i++) + { + var ltsResultEvent = ltsResultEvents[i]; + dataEntry.SetValue(i, ltsResultEvent.Value); + dataEntryTime.SetValue(i, ltsResultEvent.Time); + } + } + } + } +} diff --git a/util/DHI.Mike1D.MikeIO/LTSResultMergerPeriodic.cs b/util/DHI.Mike1D.MikeIO/LTSResultMergerPeriodic.cs new file mode 100644 index 00000000..96c76b44 --- /dev/null +++ b/util/DHI.Mike1D.MikeIO/LTSResultMergerPeriodic.cs @@ 
-0,0 +1,131 @@ +using System.Collections.Generic; +using System.Linq; +using DHI.Mike1D.Generic; +using DHI.Mike1D.ResultDataAccess; + +namespace DHI.Mike1D.MikeIO +{ + /// + /// Class for merging Long Term Statistics (LTS) periodic result files. + /// + public class LTSResultMergerPeriodic : LTSResultMerger + { + /// + public LTSResultMergerPeriodic(IList resultDataCollection) : base(resultDataCollection) + { + } + + /// + protected override void MergeDataEntry( + DataEntry dataEntry, + Dictionary mapIdToDataEntry, + ResultData resultData) + { + var dataItem = dataEntry.DataItem; + bool isDerivedQuantity = IsDerivedQuantity(dataItem.Quantity); + if (isDerivedQuantity) + return; + + bool isGlobal = dataItem.ItemTypeGroup == ItemTypeGroup.GlobalItem; + var dataItemCount = isGlobal ? null : GetDataEntryForDerivedQuantity("Count", dataEntry, mapIdToDataEntry).DataItem; + var dataItemDuration = isGlobal ? null : GetDataEntryForDerivedQuantity("Duration", dataEntry, mapIdToDataEntry).DataItem; + if ((dataItemCount == null || dataItemDuration == null) && !isGlobal) + return; + + var ltsResultEventsPeriodic = _mapIdToResultEvents[dataEntry.EntryId]; + int elementIndex = dataEntry.ElementIndex; + for (int j = 0; j < dataItem.NumberOfTimeSteps; j++) + { + var ltsResultEvent = new LTSResultEventPeriodic + { + Value = dataItem.GetValue(j, elementIndex), + Count = (int)(dataItemCount?.GetValue(j, elementIndex) ?? 0), + Duration = dataItemDuration?.GetValue(j, elementIndex) ?? 0.0, + TimePeriod = resultData.TimesList[j] + }; + ltsResultEventsPeriodic.Add(ltsResultEvent); + } + } + + /// + protected override bool IsDerivedQuantity(IQuantity quantity) + { + if (quantity.Description.Contains(", Count")) + return true; + + if (quantity.Description.Contains(", Duration")) + return true; + + return false; + } + + /// + protected override void SortResults() + { + _mapIdToResultEvents.Values.ToList().ForEach(x => x.SortOnTimePeriod()); + } + + /// + protected override void ProcessResults() + { + foreach (var mapIdToResultEvent in _mapIdToResultEvents) + { + var ltsResultEvents = mapIdToResultEvent.Value; + for (int i = ltsResultEvents.Count - 1; i >= 1; i--) + { + var ltsResultEventAfter = (LTSResultEventPeriodic)ltsResultEvents[i]; + var ltsResultEventBefore = (LTSResultEventPeriodic)ltsResultEvents[i-1]; + if (ltsResultEventAfter.TimePeriod == ltsResultEventBefore.TimePeriod) + { + ltsResultEventBefore.Value += ltsResultEventAfter.Value; + ltsResultEventBefore.Count += ltsResultEventAfter.Count; + ltsResultEventBefore.Duration += ltsResultEventAfter.Duration; + ltsResultEvents.RemoveAt(i); + } + } + } + } + + /// + protected override void UpdateTimesList() + { + var ltsResultEventsPeriodic = _mapIdToResultEvents.Values.ToList().FirstOrDefault(x => x.Count > 0); + if (ltsResultEventsPeriodic == null) + return; + + var timesList = new ListDateTimes(); + foreach (var ltsResultEvent in ltsResultEventsPeriodic) + { + var ltsResultEventPeriodic = (LTSResultEventPeriodic)ltsResultEvent; + timesList.Add(ltsResultEventPeriodic.TimePeriod); + } + + _resultData.TimesList = timesList; + } + + /// + protected override void UpdateResultData() + { + foreach (var mapIdToResultEvent in _mapIdToResultEvents) + { + var entryId = mapIdToResultEvent.Key; + var ltsResultEvents = mapIdToResultEvent.Value; + if (ltsResultEvents.Count == 0) + continue; + + var dataEntry = _mapIdToDataEntry[entryId]; + bool isGlobal = dataEntry.DataItem.ItemTypeGroup == ItemTypeGroup.GlobalItem; + var dataEntryCount = isGlobal ? 
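+          // Global items have no Count/Duration companion quantities.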
null : GetDataEntryForDerivedQuantity("Count", dataEntry, _mapIdToDataEntry); + var dataEntryDuration = isGlobal ? null : GetDataEntryForDerivedQuantity("Duration", dataEntry, _mapIdToDataEntry); + + for (int i = 0; i < ltsResultEvents.Count; i++) + { + var ltsResultEventPeriodic = (LTSResultEventPeriodic)ltsResultEvents[i]; + dataEntry.SetValue(i, ltsResultEventPeriodic.Value); + dataEntryCount?.SetValue(i, ltsResultEventPeriodic.Count); + dataEntryDuration?.SetValue(i, ltsResultEventPeriodic.Duration); + } + } + } + } +} From b3283d8947d13ba038a71833e73aeb3646251488 Mon Sep 17 00:00:00 2001 From: Gediminas Kirsanskas Date: Thu, 18 Jan 2024 15:26:49 +0100 Subject: [PATCH 3/5] Initial implementation of merging regular res1d files. --- .gitignore | 3 + CHANGELOG.md | 2 + .../result_reader_writer/result_merger.py | 12 +- tests/test_res1d_catchments.py | 23 +++ tests/testdata/catchment_merge_a.res1d | Bin 0 -> 6055 bytes tests/testdata/catchment_merge_b.res1d | Bin 0 -> 6214 bytes .../DHI.Mike1D.MikeIO.csproj | 3 + util/DHI.Mike1D.MikeIO/LTSResultMerger.cs | 108 +++------- util/DHI.Mike1D.MikeIO/ResultMerger.cs | 187 ++++++++++++++++++ 9 files changed, 245 insertions(+), 93 deletions(-) create mode 100644 tests/testdata/catchment_merge_a.res1d create mode 100644 tests/testdata/catchment_merge_b.res1d create mode 100644 util/DHI.Mike1D.MikeIO/ResultMerger.cs diff --git a/.gitignore b/.gitignore index 94b2e34c..929e2bd4 100644 --- a/.gitignore +++ b/.gitignore @@ -157,3 +157,6 @@ tests/testdata/discharge_in_structure.extract.txt tests/testdata/w_right_discharge_in_structure.extract.csv tests/testdata/w_right_discharge_in_structure.extract.dfs0 tests/testdata/w_right_discharge_in_structure.extract.txt +tests/testdata/lts_event_statistics.merged.res1d +tests/testdata/lts_monthly_statistics.merged.res1d +tests/testdata/catchment_merge_c.res1d diff --git a/CHANGELOG.md b/CHANGELOG.md index 1d6668b4..fc048815 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,8 @@ ### Added +- Merging of regular and LTS extreme/periodic res1d files + ### Fixed ### Changed diff --git a/mikeio1d/result_reader_writer/result_merger.py b/mikeio1d/result_reader_writer/result_merger.py index 4878d10e..0ef9eaa8 100644 --- a/mikeio1d/result_reader_writer/result_merger.py +++ b/mikeio1d/result_reader_writer/result_merger.py @@ -1,7 +1,7 @@ from System.Collections.Generic import List from System import String -from DHI.Mike1D.MikeIO import LTSResultMerger +from DHI.Mike1D.MikeIO import ResultMerger as Res1DResultMerger class ResultMerger: @@ -12,12 +12,10 @@ class ResultMerger: ---------- file_names : list of str List of res1d file names to merge. - """ def __init__(self, file_names): self.file_names = file_names - self.result_data_merged = None def merge(self, merged_file_name): """ @@ -29,12 +27,8 @@ def merge(self, merged_file_name): merged_file_name : str File name of the res1d file to store the merged data. 
""" - if self.result_data_merged is None: - file_names_dotnet = self._get_file_name_dotnet() - self.result_data_merged = LTSResultMerger.Merge(file_names_dotnet) - - self.result_data_merged.Connection.FilePath.Path = merged_file_name - self.result_data_merged.Save() + file_names_dotnet = self._get_file_name_dotnet() + Res1DResultMerger.Merge(file_names_dotnet, merged_file_name) def _get_file_name_dotnet(self): file_names_dotnet = List[String]() diff --git a/tests/test_res1d_catchments.py b/tests/test_res1d_catchments.py index 983171b6..d4d9faff 100644 --- a/tests/test_res1d_catchments.py +++ b/tests/test_res1d_catchments.py @@ -1,6 +1,7 @@ import os import pytest import numpy as np +import pandas as pd from mikeio1d.custom_exceptions import NoDataForQuery, InvalidQuantity from mikeio1d.res1d import Res1D, QueryDataCatchment @@ -15,6 +16,13 @@ def test_file_path(): return os.path.join(test_folder_path, "testdata", "catchments.res1d") +@pytest.fixture +def test_file_path_for_merging(): + test_folder_path = os.path.dirname(os.path.abspath(__file__)) + # File taken from TestSuite: RainfallRunoff\SWQ\DemoSWQ1BaseMixedRRAD.res1d + return os.path.join(test_folder_path, "testdata", "catchment_merge.res1d") + + @pytest.fixture(params=[True, False]) def test_file(test_file_path, request): return Res1D(test_file_path, lazy_load=request.param) @@ -212,3 +220,18 @@ def test_catchment_static_attributes(res1d_catchments): catchment.id catchment.area catchment.type + + +def test_res1d_merging(test_file_path_for_merging): + file_a = test_file_path_for_merging.replace(".res1d", "_a.res1d") + file_b = test_file_path_for_merging.replace(".res1d", "_b.res1d") + file_c = test_file_path_for_merging.replace(".res1d", "_c.res1d") + + Res1D.merge([file_a, file_b], file_c) + + df_a = Res1D(file_a).read() + df_b = Res1D(file_b).read().tail(-1) + df_c = Res1D(file_c).read() + + df_merged = pd.concat([df_a, df_b]) + pd.testing.assert_frame_equal(df_merged, df_c) diff --git a/tests/testdata/catchment_merge_a.res1d b/tests/testdata/catchment_merge_a.res1d new file mode 100644 index 0000000000000000000000000000000000000000..5642f1c3232d50382b49383559c7783c12984a67 GIT binary patch literal 6055 zcmc(jZA=_R7{?b5=!?`!>5E{A8=}1ui#<|{wP+`qA{$d#F#p&h=aOj$f}kS52>1* z%!os>A#V|fbWzg{Q5jMVQEclwz@%!**35~|445D}9~Dm`jY7`V137+A)xLAx?og0c{N5p$!}Cx#v!o|i+= z=>6z}gFDwBkAiCMiY_ET@<} zI%)x3&P66XTw1mZIvY6hL+2ORGZqenGc1~HHFV!RXrv)%`NL5t=Zjv*Cb}OffPXn_F;hAXeu0{F6^d6wk} z4&^1q#V6^Jj3&p`!Kk6cQw68>g^3nl`X3u|9m)(q9iM3mHEQcd70^poP=QLwm1 zpmQ#+0kqcA{Z51pPmjfFn`&W`5uHSarj^+_EUtX2^}4>^>-wIgo=)^;5{lnp+TO-r_vP=!Swx;qoCm zg@Mg>1}`jUVyX@G!9>b^+K zeHWcydC(>h`qGHCJ5;BpPDbj}&cu5cp_yk(1~f864X^sl1sw{6kF$hN#i;dgA2Tvug@J~O8+JXYcGLNm6g Wc=ca-PcHnrYi%q{#3IcqIMP3g1B~$i literal 0 HcmV?d00001 diff --git a/tests/testdata/catchment_merge_b.res1d b/tests/testdata/catchment_merge_b.res1d new file mode 100644 index 0000000000000000000000000000000000000000..4bb8d2f1ea2276788644dea3c660f79f3c882f59 GIT binary patch literal 6214 zcmc&&U2GIp6rS#uw$y4Vzf!50^numHN-GwSMw!C4wzQp=QZP0~7}{BPw4K@P<^q zY*Z>rBT%6)l%zfg#+3Mzn8<@>6G99j8lo@0kTe2nA`iTfm>6d~=g!P!@7%?X4O?%r zn|sd8ckg$;d(NFZ6Yc2S6K&tQN9^hBY7+<5q#-tlW?V~&F>OQ@hYa0RG(9Em({v@7 z5r-91c|sgEMBOk&byzb+vAs8;>g{mzwxp_G8D*z$)$_={)dW2Az3{d#bwnW*scFW-h;CjG4bF6kv#st7VTT3|UUHXzb_-?ptH(@M&3;Iy zm@+cr5V~53S@x?xZ#)Ns?LG8U&$l=-HjW7Am^ZU(7`}DLOoM2_$q`cCAIF7r%!gdx z5m?Zrlw8|>C1E5LQ#15`5f4Q}@tDTZ;T->opEO06_BTivQVqM+WQsDw$+?7M7WEvX+Im#Ha zF}$$pT?v#pkxS7Vw6S3zQa1WkWhlM?ffU+8kvGffLBeU9Vh+VeRoyIpIuxQs&aF&! 
z;qjW1!hKCvMmf!jT4DHj<`$nEk?RMvQB`b(R|(&qj9OCCol(D*MWu^#^K{W$Pbuwb z@)5>8j9y)Eh37?iMLuPc806vM*ZTf%V}U`Zgl+-pTb~V{&Xnf7z|2*|+M_yk`D8Cm`xSD)^4mnr4-+>R24v(!xJIY|t zT>X&Qu1(zPIk9RoS37azOw}7>65I&wwoUny;{cFr#e1IsA1a`C1e{85RA^=_>+~(m(MQZx^G*#%vwT-O>Dl#RKd zP<7e?eY0_F13}XcKa=i)&c1ghFesOFK?B{FJ$mPynPx7z9d4%VeoF67V32gs1^paf zvegx0&Y6TaUC_GiYXgI3v-6lK_m&H)ySzN`Ow;eUpx#f)9S|%}v$sx8hgWaR1!d%0 zbe-k`?LFdx=sE?MNqXM}t=oP!@J!;k3##mRJ1|K2zy)17Dg_439&N^rE8V!z9 zZ}zwgx>oh01N!T<{v|={1gR^Z}GyOONi{G3(D7OE)aZt_HCBD ..\..\mikeio1d\bin\DHI.Generic.MikeZero.EUM.dll + + ..\..\mikeio1d\bin\DHI.Generic.MikeZero.DFS.dll + ..\..\mikeio1d\bin\DHI.corlib.dll diff --git a/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs b/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs index cd14888c..e7b2f4df 100644 --- a/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs +++ b/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs @@ -1,5 +1,4 @@ -using System; -using System.Collections.Generic; +using System.Collections.Generic; using System.Linq; using DHI.Generic.MikeZero; using DHI.Mike1D.Generic; @@ -10,18 +9,8 @@ namespace DHI.Mike1D.MikeIO /// /// Class for merging Long Term Statistics (LTS) result files. /// - public abstract class LTSResultMerger + public abstract class LTSResultMerger : ResultMerger { - /// - /// Instances of ResultData which will be merged. - /// - protected IList _resultDataCollection; - - /// - /// Result data, where the merged results will be stored. - /// - protected ResultData _resultData; - /// /// Data entries corresponding to the . /// @@ -38,85 +27,22 @@ public abstract class LTSResultMerger protected Dictionary _mapIdToResultEvents; /// - public LTSResultMerger(IList resultDataCollection) + public LTSResultMerger(IList resultDataCollection) : base(resultDataCollection) { - _resultDataCollection = resultDataCollection; - _resultData = _resultDataCollection.First(); + LoadData(); _dataEntries = _resultData.GetAllDataEntries(); } - /// - /// Create particular LTSResultMerger class depending on the result type. - /// - public static LTSResultMerger Create(IList resultDataCollection) - { - var resultData = resultDataCollection.FirstOrDefault(); - if (resultData == null) - throw new Exception("Empty result data list provided."); - - var resultType = resultData.ResultType; - switch (resultType) - { - case ResultTypes.LTSEvents: - return new LTSResultMergerExtreme(resultDataCollection); - - case ResultTypes.LTSAnnual: - case ResultTypes.LTSMonthly: - return new LTSResultMergerPeriodic(resultDataCollection); - default: - throw new NotSupportedException($"Not supported result type {resultType}"); - } - } - - #region Static Merge methods - - /// - /// Merge result files given by their file names - /// - public static ResultData Merge(IList resultFileNames) - { - var resultFilePaths = resultFileNames.Select(name => new FilePath(name)).ToList(); - return Merge(resultFilePaths); - } - - /// - /// Merge result files given by their FilePath specification. - /// - public static ResultData Merge(IList resultFilePaths) - { - var resultData = resultFilePaths.Select(path => LoadFile(path.FullFilePath)).ToList(); - return Merge(resultData); - } - - /// - /// Merge result files given by their ResultData specification. - /// - public static ResultData Merge(IList resultDataCollection) + private void LoadData() { - var merger = LTSResultMerger.Create(resultDataCollection); - return merger.Merge(); - } - - /// - /// Loads a file based on the filename. 
- /// - private static ResultData LoadFile(string fileName) - { - var res = new ResultData(); - res.Connection = Connection.Create(fileName); - var diagnostics = new Diagnostics("LTS result merging"); - res.Load(diagnostics); - - return res; + foreach (var resultData in _resultDataCollection) + if (resultData.LoadStatus == LoadStatus.Header) + resultData.LoadData(diagnostics); } - #endregion Static Merge methods - - /// - /// Performs the actual merging of result files. - /// - public ResultData Merge() + /// + public override ResultData Merge(string mergedFileName = null) { CreateMaps(); MergeDataEntries(); @@ -124,6 +50,7 @@ public ResultData Merge() ProcessResults(); UpdateTimesList(); UpdateResultData(); + SaveToFile(mergedFileName); return _resultData; } @@ -245,5 +172,18 @@ public static IQuantity Create(IQuantity quantity, string extra, eumItem item = /// Update with actual merged LTS data. /// protected abstract void UpdateResultData(); + + /// + /// Save the current to a given file. + /// + /// File name to save to. + public virtual void SaveToFile(string mergedFileName = null) + { + if (string.IsNullOrWhiteSpace(mergedFileName)) + return; + + _resultData.Connection.FilePath.Path = mergedFileName; + _resultData.Save(); + } } } diff --git a/util/DHI.Mike1D.MikeIO/ResultMerger.cs b/util/DHI.Mike1D.MikeIO/ResultMerger.cs new file mode 100644 index 00000000..dd644b28 --- /dev/null +++ b/util/DHI.Mike1D.MikeIO/ResultMerger.cs @@ -0,0 +1,187 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using DHI.Generic.MikeZero.DFS; +using DHI.Mike1D.Generic; +using DHI.Mike1D.ResultDataAccess; + +namespace DHI.Mike1D.MikeIO +{ + /// + /// Class for merging MIKE 1D res1d result files. + /// + public class ResultMerger + { + /// + /// Instances of ResultData which will be merged. + /// + protected IList _resultDataCollection; + + /// + /// Result data, where the merged results will be stored. + /// + protected ResultData _resultData; + + /// + public ResultMerger(IList resultDataCollection) + { + _resultDataCollection = resultDataCollection; + _resultData = _resultDataCollection.First(); + } + + /// + /// Create particular ResultMerger class depending on the result type. + /// + public static ResultMerger Create(IList resultDataCollection) + { + var resultData = resultDataCollection.FirstOrDefault(); + if (resultData == null) + throw new Exception("Empty result data list provided."); + + var resultType = resultData.ResultType; + switch (resultType) + { + case ResultTypes.LTSEvents: + return new LTSResultMergerExtreme(resultDataCollection); + + case ResultTypes.LTSAnnual: + case ResultTypes.LTSMonthly: + return new LTSResultMergerPeriodic(resultDataCollection); + + default: + return new ResultMerger(resultDataCollection); + } + } + + #region Static Merge methods + + /// + /// Merge result files given by their file names + /// + public static ResultData Merge(IList resultFileNames, string mergedFileName = null) + { + var resultFilePaths = resultFileNames.Select(name => new FilePath(name)).ToList(); + return Merge(resultFilePaths, mergedFileName); + } + + /// + /// Merge result files given by their FilePath specification. + /// + public static ResultData Merge(IList resultFilePaths, string mergedFileName = null) + { + var resultData = resultFilePaths.Select(path => LoadFileHeader(path.FullFilePath)).ToList(); + return Merge(resultData, mergedFileName); + } + + /// + /// Merge result files given by their ResultData specification. 
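+    /// The concrete merger is selected by Create based on the result type
+    /// of the first ResultData instance.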
+ /// + public static ResultData Merge(IList resultDataCollection, string mergedFileName = null) + { + var merger = Create(resultDataCollection); + return merger.Merge(mergedFileName); + } + + /// + /// Loads a file based on the filename. + /// + private static ResultData LoadFileHeader(string fileName) + { + var res = new ResultData(); + res.Connection = Connection.Create(fileName); + + var diagnostics = new Diagnostics("Result merging"); + res.LoadHeader(diagnostics); + + return res; + } + + #endregion Static Merge methods + + /// + /// Performs the actual merging of result files. + /// + /// File name to save to. + /// ResultData corresponding to a merged file. + public virtual ResultData Merge(string mergedFileName = null) + { + if (string.IsNullOrEmpty(mergedFileName)) + throw new ArgumentException("To merge regular res1d files destination file name needs to be specified."); + + CopyFileIfNeeded(mergedFileName); + + foreach (var sourceResultData in _resultDataCollection) + { + if (sourceResultData.Equals(_resultData)) + continue; + + AppendToFile(mergedFileName, sourceResultData.Connection.FilePath.FullFilePath); + } + + var resultData = LoadFileHeader(mergedFileName); + return resultData; + } + + private void CopyFileIfNeeded(string mergedFileName) + { + var mergedFilePath = new FilePath(mergedFileName); + string firstFullPath = _resultData.Connection.FilePath.FullFilePath; + string mergedFullPath = mergedFilePath.FullFilePath; + if (firstFullPath.Equals(mergedFullPath)) + return; + + CreateDirectory(mergedFullPath); + File.Copy(firstFullPath, mergedFileName, true); + } + + private void CreateDirectory(string fileName) + { + string directory = Path.GetDirectoryName(fileName); + if (!string.IsNullOrWhiteSpace(directory)) + Directory.CreateDirectory(directory); + } + + /// + /// Appends data from one file to another. It is assumed that: + /// + /// The files has identical dynamic and static items + /// The last time step of the target file is equal to the first + /// time step of the source file, and therefor the first time step + /// from the source file is not added to the target file + /// + /// + /// This example uses the generic DFS functionality, and will work for any type + /// of DFS file. + /// + /// + /// Taken from https://github.com/DHI/MIKECore-Examples/blob/master/Examples/CSharp/ExamplesMisc.cs + /// ExamplesMisc.AppendToFile + /// + /// + private void AppendToFile(string targetFile, string sourceFile) + { + // Open target for appending and source for reading + var target = DfsFileFactory.DfsGenericOpenAppend(targetFile); + var source = DfsFileFactory.DfsGenericOpen(sourceFile); + + // Time of last time step of file, in the time unit of the time axis. + // This is sufficient as long as TimeAxis.StartTimeOffset equals in + // source and target file (it is zero for most files) + var targetEndTime = target.FileInfo.TimeAxis.TimeSpan(); + + // Do not add initial time step 0 of source to target file, + // so go directly to time step 1 in source + source.FindTimeStep(1); + + // Copy over data + IDfsItemData sourceData2; + while (null != (sourceData2 = source.ReadItemTimeStepNext())) + target.WriteItemTimeStepNext(targetEndTime + sourceData2.Time, sourceData2.Data); + + // Close the files + target.Close(); + source.Close(); + } + } +} From 4eedc9681e600dc89ff7e267a5da0eeb5fd88d4f Mon Sep 17 00:00:00 2001 From: Gediminas Kirsanskas Date: Fri, 19 Jan 2024 10:58:00 +0100 Subject: [PATCH 4/5] Implement IComparable for LTSResultEvent. 
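
With LTSResultEvent implementing IComparable, the event lists are sorted
with a plain List.Sort() call, so the SortResults overrides in the extreme
and periodic mergers become redundant and are removed.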
--- util/DHI.Mike1D.MikeIO/LTSResultEvents.cs | 44 +++++++------------ util/DHI.Mike1D.MikeIO/LTSResultMerger.cs | 5 ++- .../LTSResultMergerExtreme.cs | 6 --- .../LTSResultMergerPeriodic.cs | 6 --- 4 files changed, 21 insertions(+), 40 deletions(-) diff --git a/util/DHI.Mike1D.MikeIO/LTSResultEvents.cs b/util/DHI.Mike1D.MikeIO/LTSResultEvents.cs index 25bd0868..28d7bf4b 100644 --- a/util/DHI.Mike1D.MikeIO/LTSResultEvents.cs +++ b/util/DHI.Mike1D.MikeIO/LTSResultEvents.cs @@ -8,38 +8,12 @@ namespace DHI.Mike1D.MikeIO /// public class LTSResultEvents : List { - /// - /// Sort the event list on the first value of each event - /// - public void SortOnValue() - { - Sort(CompareValue); - } - - /// - /// Sort on first value, and if the same, then on time. - /// - public int CompareValue(LTSResultEvent e1, LTSResultEvent e2) - { - int cvalue = e2.Value.CompareTo(e1.Value); - if (cvalue == 0) - cvalue = e1.Time.CompareTo(e2.Time); - return cvalue; - } - - /// - /// Sort the event list on time stamps. - /// - public void SortOnTimePeriod() - { - Sort((e1, e2) => ((LTSResultEventPeriodic)e1).TimePeriod.CompareTo(((LTSResultEventPeriodic)e2).TimePeriod)); - } } /// /// LTS event. /// - public class LTSResultEvent + public class LTSResultEvent : IComparable { /// /// Value of the LTS event. @@ -50,6 +24,15 @@ public class LTSResultEvent /// Time of the LTS event. /// public double Time; + + /// + public virtual int CompareTo(LTSResultEvent other) + { + int cvalue = other.Value.CompareTo(Value); + if (cvalue == 0) + cvalue = Time.CompareTo(other.Time); + return cvalue; + } } /// @@ -71,5 +54,12 @@ public class LTSResultEventPeriodic : LTSResultEvent /// Time period (year or month) represented as DateTime. /// public DateTime TimePeriod; + + /// + public override int CompareTo(LTSResultEvent other) + { + var otherPeriodic = (LTSResultEventPeriodic) other; + return TimePeriod.CompareTo(otherPeriodic.TimePeriod); + } } } diff --git a/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs b/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs index e7b2f4df..17da9810 100644 --- a/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs +++ b/util/DHI.Mike1D.MikeIO/LTSResultMerger.cs @@ -156,7 +156,10 @@ public static IQuantity Create(IQuantity quantity, string extra, eumItem item = /// /// Sort LTSResultEvents on value or time inside /// - protected abstract void SortResults(); + protected virtual void SortResults() + { + _mapIdToResultEvents.Values.ToList().ForEach(x => x.Sort()); + } /// /// Apply processing on LTSResultEvents inside diff --git a/util/DHI.Mike1D.MikeIO/LTSResultMergerExtreme.cs b/util/DHI.Mike1D.MikeIO/LTSResultMergerExtreme.cs index 0001bb7d..07f9aa4c 100644 --- a/util/DHI.Mike1D.MikeIO/LTSResultMergerExtreme.cs +++ b/util/DHI.Mike1D.MikeIO/LTSResultMergerExtreme.cs @@ -60,12 +60,6 @@ protected override bool IsDerivedQuantity(IQuantity quantity) #endregion - /// - protected override void SortResults() - { - _mapIdToResultEvents.Values.ToList().ForEach(x => x.SortOnValue()); - } - /// protected override void ProcessResults() { diff --git a/util/DHI.Mike1D.MikeIO/LTSResultMergerPeriodic.cs b/util/DHI.Mike1D.MikeIO/LTSResultMergerPeriodic.cs index 96c76b44..f39e3b06 100644 --- a/util/DHI.Mike1D.MikeIO/LTSResultMergerPeriodic.cs +++ b/util/DHI.Mike1D.MikeIO/LTSResultMergerPeriodic.cs @@ -59,12 +59,6 @@ protected override bool IsDerivedQuantity(IQuantity quantity) return false; } - /// - protected override void SortResults() - { - _mapIdToResultEvents.Values.ToList().ForEach(x => x.SortOnTimePeriod()); - } - /// protected override 
void ProcessResults() { From fcecbd7668b30a22fd2a45022a1dbffe8d720b61 Mon Sep 17 00:00:00 2001 From: Gediminas Kirsanskas Date: Thu, 25 Jan 2024 10:00:50 +0100 Subject: [PATCH 5/5] Add possibility to merge files using info from Res1D object. Also add type hints and improve the docstring. --- mikeio1d/res1d.py | 26 +++++++++++++++++-- .../result_reader_writer/result_merger.py | 15 ++++++++--- tests/test_res1d_catchments.py | 8 +++--- 3 files changed, 40 insertions(+), 9 deletions(-) diff --git a/mikeio1d/res1d.py b/mikeio1d/res1d.py index 712ce830..e89f954a 100644 --- a/mikeio1d/res1d.py +++ b/mikeio1d/res1d.py @@ -443,16 +443,38 @@ def to_txt( self.extract(file_path, queries, time_step_skipping_number, ExtractorOutputFileType.TXT) @staticmethod - def merge(file_names, merged_file_name): + def merge(file_names: List[str] | List[Res1D], merged_file_name: str): """ Merges res1d files. + It is possible to merge three kinds of result files: + * Regular res1d (HD, RR, etc.) + * LTS extreme statistics + * LTS chronological statistics + + For regular res1d files the requirement is that the simulation start time + of the first file matches the simulation end time of the second file + (the same principle for subsequent files). + + For LTS result files, meaningful merged result file is obtained when + simulation periods for the files do not overlap. + Parameters ---------- - file_names : list of str + file_names : list of str or Res1D objects List of res1d file names to merge. merged_file_name : str File name of the res1d file to store the merged data. """ + file_names = Res1D._convert_res1d_to_str_for_file_names(file_names) result_merger = ResultMerger(file_names) result_merger.merge(merged_file_name) + + @staticmethod + def _convert_res1d_to_str_for_file_names(file_names: List[str] | List[Res1D]): + file_names_new = [] + for i in range(len(file_names)): + entry = file_names[i] + file_name = entry.file_path if isinstance(entry, Res1D) else entry + file_names_new.append(file_name) + return file_names_new diff --git a/mikeio1d/result_reader_writer/result_merger.py b/mikeio1d/result_reader_writer/result_merger.py index 0ef9eaa8..fcf80217 100644 --- a/mikeio1d/result_reader_writer/result_merger.py +++ b/mikeio1d/result_reader_writer/result_merger.py @@ -1,4 +1,11 @@ -from System.Collections.Generic import List +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import List + +from System.Collections.Generic import List as DotNetList from System import String from DHI.Mike1D.MikeIO import ResultMerger as Res1DResultMerger @@ -14,10 +21,10 @@ class ResultMerger: List of res1d file names to merge. """ - def __init__(self, file_names): + def __init__(self, file_names: List[str]): self.file_names = file_names - def merge(self, merged_file_name): + def merge(self, merged_file_name: str): """ Merges the data from in file_names to a file specified by merged_file_name. 
@@ -31,7 +38,7 @@ def merge(self, merged_file_name): Res1DResultMerger.Merge(file_names_dotnet, merged_file_name) def _get_file_name_dotnet(self): - file_names_dotnet = List[String]() + file_names_dotnet = DotNetList[String]() for file_name in self.file_names: file_names_dotnet.Add(file_name) return file_names_dotnet diff --git a/tests/test_res1d_catchments.py b/tests/test_res1d_catchments.py index a06abe96..421829e3 100644 --- a/tests/test_res1d_catchments.py +++ b/tests/test_res1d_catchments.py @@ -236,10 +236,12 @@ def test_res1d_merging(test_file_path_for_merging): file_b = test_file_path_for_merging.replace(".res1d", "_b.res1d") file_c = test_file_path_for_merging.replace(".res1d", "_c.res1d") - Res1D.merge([file_a, file_b], file_c) + res1d_a = Res1D(file_a) + res1d_b = Res1D(file_b) + Res1D.merge([res1d_a, res1d_b], file_c) - df_a = Res1D(file_a).read() - df_b = Res1D(file_b).read().tail(-1) + df_a = res1d_a.read() + df_b = res1d_b.read().tail(-1) df_c = Res1D(file_c).read() df_merged = pd.concat([df_a, df_b])