diff --git a/pyproject.toml b/pyproject.toml
index 4550a932..9bda3669 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,7 +17,7 @@ dependencies = [
     "click",
     "h5py",
     "softioc>=4.4.0",
-    "pandablocks~=0.8.0",
+    "pandablocks~=0.9.0",
     "pvi~=0.7.0",
     "typing-extensions;python_version<'3.8'",
 ] # Add project dependencies here, e.g. ["click", "numpy"]
diff --git a/src/pandablocks_ioc/_hdf_ioc.py b/src/pandablocks_ioc/_hdf_ioc.py
index 46d4c6bf..84851d4b 100644
--- a/src/pandablocks_ioc/_hdf_ioc.py
+++ b/src/pandablocks_ioc/_hdf_ioc.py
@@ -3,10 +3,11 @@ import os
 from asyncio import CancelledError
 from collections import deque
+from dataclasses import dataclass
 from enum import Enum
 from importlib.util import find_spec
 from pathlib import Path
-from typing import Callable, Deque, Optional, Union
+from typing import Callable, Deque, Dict, Optional, Union
 
 from pandablocks.asyncio import AsyncioClient
 from pandablocks.hdf import (
@@ -22,6 +23,7 @@ from softioc.pythonSoftIoc import RecordWrapper
 
 from ._pvi import PviGroup, add_automatic_pvi_info, add_data_capture_pvi_info
+from ._tables import ReadOnlyPvaTable
 from ._types import ONAM_STR, ZNAM_STR, EpicsName
 
 HDFReceived = Union[ReadyData, StartData, FrameData, EndData]
@@ -70,6 +72,7 @@ def __init__(
         status_message_setter: Callable,
         number_received_setter: Callable,
         number_captured_setter_pipeline: NumCapturedSetter,
+        dataset_name_cache: Dict[str, Dict[str, str]],
     ):
         # Only one filename - user must stop capture and set new FileName/FilePath
         # for new files
@@ -94,6 +97,8 @@ def __init__(
         self.number_captured_setter_pipeline = number_captured_setter_pipeline
         self.number_captured_setter_pipeline.number_captured_setter(0)
 
+        self.dataset_name_cache = dataset_name_cache
+
         if (
             self.capture_mode == CaptureMode.LAST_N
             and self.number_of_rows_to_capture <= 0
@@ -114,7 +119,9 @@ def put_data_to_file(self, data: HDFReceived):
 
     def start_pipeline(self):
         self.pipeline = create_default_pipeline(
-            iter([self.filepath]), self.number_captured_setter_pipeline
+            iter([self.filepath]),
+            self.dataset_name_cache,
+            self.number_captured_setter_pipeline,
         )
 
     def _handle_StartData(self, data: StartData):
@@ -304,10 +311,59 @@ def handle_data(self, data: HDFReceived):
         )
 
 
+@dataclass
+class Dataset:
+    name: str
+    capture: str
+
+
+class DatasetNameCache:
+    def __init__(self, datasets: Dict[str, Dataset], datasets_record_name: EpicsName):
+        self.datasets = datasets
+
+        self._datasets_table_record = ReadOnlyPvaTable(
+            datasets_record_name, ["Name", "Type"]
+        )
+        self._datasets_table_record.set_rows(
+            ["Name", "Type"], [[], []], length=300, default_data_type=str
+        )
+
+    def hdf_writer_names(self):
+        """Formats the current dataset names for use in the HDFWriter"""
+
+        hdf_names: Dict[str, Dict[str, str]] = {}
+        for record_name, dataset in self.datasets.items():
+            if not dataset.name or dataset.capture == "No":
+                continue
+
+            field_name = record_name.replace(":", ".")
+
+            hdf_names[field_name] = hdf_name = {}
+
+            hdf_name[dataset.capture.split(" ")[-1]] = dataset.name
+            # Suffix -min and -max if both are present
+            if "Min Max" in dataset.capture:
+                hdf_name["Min"] = f"{dataset.name}-min"
+                hdf_name["Max"] = f"{dataset.name}-max"
+        return hdf_names
+
+    def update_datasets_record(self):
+        dataset_name_list = [
+            dataset.name
+            for dataset in self.datasets.values()
+            if dataset.name and dataset.capture != "No"
+        ]
+        self._datasets_table_record.update_row("Name", dataset_name_list)
+        self._datasets_table_record.update_row(
+            "Type",
+            ["float64"] * len(dataset_name_list),
+        )
+
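For context: `hdf_writer_names()` builds the nested mapping that the new `create_default_pipeline` signature consumes. Outer keys are PandA field names (the record `:` separator becomes `.`), inner keys are capture-mode suffixes. A minimal sketch of the expected behaviour, illustrative only and using made-up record names:

    # Assumes the Dataset dataclass and DatasetNameCache added in this diff.
    datasets = {
        "COUNTER1:OUT": Dataset(name="energy", capture="Value"),
        "INENC1:VAL": Dataset(name="pos", capture="Min Max Mean"),
        "PGEN1:OUT": Dataset(name="", capture="Value"),  # unnamed, so skipped
    }
    # DatasetNameCache(datasets, ...).hdf_writer_names() would return:
    # {
    #     "COUNTER1.OUT": {"Value": "energy"},
    #     "INENC1.VAL": {"Mean": "pos", "Min": "pos-min", "Max": "pos-max"},
    # }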
+
 class HDF5RecordController:
     """Class to create and control the records that handle HDF5 processing"""
 
-    _DATA_PREFIX = "DATA"
+    DATA_PREFIX = "DATA"
 
     _client: AsyncioClient
@@ -325,19 +381,28 @@ class HDF5RecordController:
 
     _handle_hdf5_data_task: Optional[asyncio.Task] = None
 
-    def __init__(self, client: AsyncioClient, record_prefix: str):
+    def __init__(
+        self,
+        client: AsyncioClient,
+        dataset_cache: Dict[str, Dataset],
+        record_prefix: str,
+    ):
         if find_spec("h5py") is None:
             logging.warning("No HDF5 support detected - skipping creating HDF5 records")
             return
 
         self._client = client
+        _datasets_record_name = EpicsName(
+            HDF5RecordController.DATA_PREFIX + ":DATASETS"
+        )
+        self._datasets = DatasetNameCache(dataset_cache, _datasets_record_name)
 
         path_length = os.pathconf("/", "PC_PATH_MAX")
         filename_length = os.pathconf("/", "PC_NAME_MAX")
 
         # Create the records, including an uppercase alias for each
         # Naming convention and settings (mostly) copied from FSCN2 HDF5 records
-        directory_record_name = EpicsName(self._DATA_PREFIX + ":HDF_DIRECTORY")
+        directory_record_name = EpicsName(self.DATA_PREFIX + ":HDF_DIRECTORY")
         self._directory_record = builder.longStringOut(
             directory_record_name,
             length=path_length,
@@ -353,10 +418,10 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             builder.longStringOut,
         )
         self._directory_record.add_alias(
-            record_prefix + ":" + self._DATA_PREFIX + ":HDFDirectory"
+            record_prefix + ":" + self.DATA_PREFIX + ":HDFDirectory"
         )
 
-        create_directory_record_name = EpicsName(self._DATA_PREFIX + ":CreateDirectory")
+        create_directory_record_name = EpicsName(self.DATA_PREFIX + ":CreateDirectory")
         self._create_directory_record = builder.longOut(
             create_directory_record_name,
             initial_value=0,
@@ -372,7 +437,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             record_prefix + ":" + create_directory_record_name.upper()
         )
 
-        directory_exists_name = EpicsName(self._DATA_PREFIX + ":DirectoryExists")
+        directory_exists_name = EpicsName(self.DATA_PREFIX + ":DirectoryExists")
         self._directory_exists_record = builder.boolIn(
             directory_exists_name,
             ZNAM="No",
@@ -390,7 +455,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             record_prefix + ":" + directory_exists_name.upper()
         )
 
-        file_name_record_name = EpicsName(self._DATA_PREFIX + ":HDF_FILE_NAME")
+        file_name_record_name = EpicsName(self.DATA_PREFIX + ":HDF_FILE_NAME")
         self._file_name_record = builder.longStringOut(
             file_name_record_name,
             length=filename_length,
@@ -405,12 +470,10 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             builder.longStringOut,
         )
         self._file_name_record.add_alias(
-            record_prefix + ":" + self._DATA_PREFIX + ":HDFFileName"
+            record_prefix + ":" + self.DATA_PREFIX + ":HDFFileName"
         )
 
-        full_file_path_record_name = EpicsName(
-            self._DATA_PREFIX + ":HDF_FULL_FILE_PATH"
-        )
+        full_file_path_record_name = EpicsName(self.DATA_PREFIX + ":HDF_FULL_FILE_PATH")
         self._full_file_path_record = builder.longStringIn(
             full_file_path_record_name,
             length=path_length + 1 + filename_length,
@@ -423,10 +486,10 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             builder.longStringIn,
         )
         self._full_file_path_record.add_alias(
-            record_prefix + ":" + self._DATA_PREFIX + ":HDFFullFilePath"
+            record_prefix + ":" + self.DATA_PREFIX + ":HDFFullFilePath"
         )
 
-        num_capture_record_name = EpicsName(self._DATA_PREFIX + ":NUM_CAPTURE")
+        num_capture_record_name = EpicsName(self.DATA_PREFIX + ":NUM_CAPTURE")
         self._num_capture_record = builder.longOut(
             num_capture_record_name,
             initial_value=0,  # Infinite capture
@@ -442,10 +505,10 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
         )
         # No validate - users are allowed to change this at any time
         self._num_capture_record.add_alias(
-            record_prefix + ":" + self._DATA_PREFIX + ":NumCapture"
+            record_prefix + ":" + self.DATA_PREFIX + ":NumCapture"
         )
 
-        num_captured_record_name = EpicsName(self._DATA_PREFIX + ":NUM_CAPTURED")
+        num_captured_record_name = EpicsName(self.DATA_PREFIX + ":NUM_CAPTURED")
         self._num_captured_record = builder.longIn(
             num_captured_record_name,
             initial_value=0,
@@ -459,10 +522,10 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             builder.longIn,
         )
         self._num_captured_record.add_alias(
-            record_prefix + ":" + self._DATA_PREFIX + ":NumCaptured"
+            record_prefix + ":" + self.DATA_PREFIX + ":NumCaptured"
         )
 
-        num_received_record_name = EpicsName(self._DATA_PREFIX + ":NUM_RECEIVED")
+        num_received_record_name = EpicsName(self.DATA_PREFIX + ":NUM_RECEIVED")
         self._num_received_record = builder.longIn(
             num_received_record_name,
             initial_value=0,
@@ -476,10 +539,10 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             builder.longIn,
         )
         self._num_received_record.add_alias(
-            record_prefix + ":" + self._DATA_PREFIX + ":NumReceived"
+            record_prefix + ":" + self.DATA_PREFIX + ":NumReceived"
        )
 
-        flush_period_record_name = EpicsName(self._DATA_PREFIX + ":FLUSH_PERIOD")
+        flush_period_record_name = EpicsName(self.DATA_PREFIX + ":FLUSH_PERIOD")
         self._flush_period_record = builder.aOut(
             flush_period_record_name,
             initial_value=1.0,
@@ -493,10 +556,10 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             builder.aOut,
         )
         self._flush_period_record.add_alias(
-            record_prefix + ":" + self._DATA_PREFIX + ":FlushPeriod"
+            record_prefix + ":" + self.DATA_PREFIX + ":FlushPeriod"
         )
 
-        capture_control_record_name = EpicsName(self._DATA_PREFIX + ":CAPTURE")
+        capture_control_record_name = EpicsName(self.DATA_PREFIX + ":CAPTURE")
         self._capture_control_record = builder.boolOut(
             capture_control_record_name,
             ZNAM=ZNAM_STR,
@@ -511,10 +574,10 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             self._capture_control_record,
         )
         self._capture_control_record.add_alias(
-            record_prefix + ":" + self._DATA_PREFIX + ":Capture"
+            record_prefix + ":" + self.DATA_PREFIX + ":Capture"
         )
 
-        capture_mode_record_name = EpicsName(self._DATA_PREFIX + ":CAPTURE_MODE")
+        capture_mode_record_name = EpicsName(self.DATA_PREFIX + ":CAPTURE_MODE")
         self._capture_mode_record = builder.mbbOut(
             capture_mode_record_name,
             *[capture_mode.name for capture_mode in CaptureMode],
@@ -528,10 +591,10 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             builder.mbbOut,
         )
         self._capture_mode_record.add_alias(
-            record_prefix + ":" + self._DATA_PREFIX + ":CaptureMode"
+            record_prefix + ":" + self.DATA_PREFIX + ":CaptureMode"
         )
 
-        status_message_record_name = EpicsName(self._DATA_PREFIX + ":STATUS")
+        status_message_record_name = EpicsName(self.DATA_PREFIX + ":STATUS")
         self._status_message_record = builder.longStringIn(
             status_message_record_name,
             initial_value="OK",
@@ -545,7 +608,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             builder.stringIn,
         )
         self._status_message_record.add_alias(
-            record_prefix + ":" + self._DATA_PREFIX + ":Status"
+            record_prefix + ":" + self.DATA_PREFIX + ":Status"
         )
 
     def _parameter_validate(self, record: RecordWrapper, new_val) -> bool:
@@ -615,10 +678,10 @@ async def _update_directory_path(self, new_val) -> None:
             self._directory_exists_record.set(0)
 
         if self._directory_exists_record.get() == 0:
-            sevr = alarm.MAJOR_ALARM, alrm = alarm.STATE_ALARM
+            sevr, alrm = alarm.MAJOR_ALARM, alarm.STATE_ALARM
             logging.error(status_msg)
         else:
-            sevr = alarm.NO_ALARM, alrm = alarm.NO_ALARM
+            sevr, alrm = alarm.NO_ALARM, alarm.NO_ALARM
             logging.debug(status_msg)
 
         self._status_message_record.set(status_msg, severity=sevr, alarm=alrm)
@@ -647,6 +710,11 @@ async def _handle_hdf5_data(self) -> None:
             number_captured_setter_pipeline = NumCapturedSetter(
                 self._num_captured_record.set
             )
+
+            # Update `DATA:DATASETS` to match the names of the datasets
+            # in the HDF5 file
+            self._datasets.update_datasets_record()
+
             buffer = HDF5Buffer(
                 capture_mode,
                 filepath,
@@ -654,6 +722,7 @@ async def _handle_hdf5_data(self) -> None:
                 self._status_message_record.set,
                 self._num_received_record.set,
                 number_captured_setter_pipeline,
+                self._datasets.hdf_writer_names(),
             )
             flush_period: float = self._flush_period_record.get()
             async for data in self._client.data(
diff --git a/src/pandablocks_ioc/_pvi.py b/src/pandablocks_ioc/_pvi.py
index 293bee17..22237f95 100644
--- a/src/pandablocks_ioc/_pvi.py
+++ b/src/pandablocks_ioc/_pvi.py
@@ -164,7 +164,7 @@ def add_automatic_pvi_info(
 _positions_table_group = Group(
     name="PositionsTable", layout=Grid(labelled=True), children=[]
 )
-_positions_table_headers = ["VALUE", "UNITS", "SCALE", "OFFSET", "CAPTURE"]
+_positions_table_headers = ["VALUE", "UNITS", "SCALE", "OFFSET", "DATASET", "CAPTURE"]
 
 
 # TODO: Replicate this for the BITS table
@@ -174,6 +174,7 @@ def add_positions_table_row(
     units_record_name: EpicsName,
     scale_record_name: EpicsName,
     offset_record_name: EpicsName,
+    dataset_record_name: EpicsName,
     capture_record_name: EpicsName,
 ) -> None:
     """Add a Row to the Positions table"""
@@ -205,6 +206,12 @@ def add_positions_table_row(
             pv=offset_record_name,
             widget=TextWrite(),
         ),
+        SignalRW(
+            name=epics_to_pvi_name(dataset_record_name),
+            label=dataset_record_name,
+            pv=dataset_record_name,
+            widget=TextWrite(),
+        ),
         SignalRW(
             name=epics_to_pvi_name(capture_record_name),
             label=capture_record_name,
diff --git a/src/pandablocks_ioc/_tables.py b/src/pandablocks_ioc/_tables.py
index 76f11adf..451f9f32 100644
--- a/src/pandablocks_ioc/_tables.py
+++ b/src/pandablocks_ioc/_tables.py
@@ -5,7 +5,7 @@
 from collections import OrderedDict
 from dataclasses import dataclass
 from enum import Enum
-from typing import Dict, List, Optional
+from typing import Dict, List, Optional, Type
 
 import numpy as np
 from epicsdbbuilder import RecordName
@@ -73,6 +73,86 @@ class TableModeEnum(Enum):
     DISCARD = 3  # Discard all EPICS records, re-fetch from PandA
 
 
+class ReadOnlyPvaTable:
+    def __init__(
+        self,
+        epics_table_name: EpicsName,
+        labels: List[str],
+    ):
+        self.epics_table_name = epics_table_name
+        self.pva_table_name = RecordName(epics_table_name)
+        self.rows: Dict[str, RecordWrapper] = {}
+
+        block, field = self.epics_table_name.split(":", maxsplit=1)
+
+        columns: RecordWrapper = builder.WaveformOut(
+            self.epics_table_name + ":LABELS",
+            initial_value=np.array([k.encode() for k in labels]),
+        )
+        columns.add_info(
+            "Q:group",
+            {
+                self.pva_table_name: {
+                    "+id": "epics:nt/NTTable:1.0",
+                    "labels": {"+type": "plain", "+channel": "VAL"},
+                }
+            },
+        )
+        pv_rec = builder.longStringIn(
+            self.epics_table_name + ":PV",
+            initial_value=self.pva_table_name,
+        )
+        pv_rec.add_info(
+            "Q:group",
+            {
+                RecordName(f"{block}:PVI"): {
+                    f"pvi.{field.lower().replace(':', '_')}.rw": {
+                        "+channel": "VAL",
+                        "+type": "plain",
+                    }
+                },
+            },
+        )
+
+    def set_rows(
+        self,
+        row_names: List[str],
+        initial_values: List[List],
+        length: Optional[int] = None,
+        default_data_type: Optional[Type] = None,
+    ):
+        for idx, (row_name, initial_value) in enumerate(zip(row_names, initial_values)):
+            full_name = EpicsName(self.epics_table_name + ":" + row_name)
+            pva_row_name = row_name.replace(":", "_").lower()
+            dtype = type(initial_value[0]) if initial_value else default_data_type
+            initial_value_np = np.array(initial_value, dtype=dtype)
+
+            field_record: RecordWrapper = builder.WaveformIn(
+                full_name,
+                DESC="",  # Description not provided yet
+                initial_value=initial_value_np,
+                length=length or len(initial_value),
+            )
+
+            field_pva_info = {
+                "+type": "plain",
+                "+channel": "VAL",
+                "+trigger": "*" if idx == len(row_names) - 1 else "",
+            }
+
+            pva_info = {f"value.{pva_row_name}": field_pva_info}
+
+            field_record.add_info(
+                "Q:group",
+                {self.pva_table_name: pva_info},
+            )
+            self.rows[row_name] = field_record
+
+    def update_row(self, row_name: str, new_value: List):
+        new_value_np = np.array(new_value)
+        self.rows[row_name].set(new_value_np)
+
+
 class TableUpdater:
     """Class to handle creating and updating tables."""
diff --git a/src/pandablocks_ioc/ioc.py b/src/pandablocks_ioc/ioc.py
index eb2b4697..603106a6 100644
--- a/src/pandablocks_ioc/ioc.py
+++ b/src/pandablocks_ioc/ioc.py
@@ -41,7 +41,7 @@ from softioc.pythonSoftIoc import RecordWrapper
 
 from ._connection_status import ConnectionStatus, Statuses
-from ._hdf_ioc import HDF5RecordController
+from ._hdf_ioc import Dataset, HDF5RecordController
 from ._pvi import (
     Pvi,
     PviGroup,
@@ -543,6 +543,8 @@ def __init__(
         # All records should be blocking
         builder.SetBlocking(True)
 
+        self._dataset_cache: Dict[str, Dataset] = {}
+
     def _process_labels(
         self, labels: List[str], record_value: ScalarRecordValue
     ) -> Tuple[List[str], int]:
@@ -863,9 +865,20 @@ def _make_pos_out(
         )
 
         capture_record_name = EpicsName(record_name + ":CAPTURE")
+        dataset_record_name = EpicsName(record_name + ":DATASET")
         labels, capture_index = self._process_labels(
             field_info.capture_labels, values[capture_record_name]
         )
+
+        capture_record_updater: _RecordUpdater
+
+        def capture_record_on_update(new_capture_mode):
+            self._dataset_cache[record_name] = Dataset(
+                record_dict[dataset_record_name].record.get(),
+                labels[new_capture_mode],
+            )
+            return capture_record_updater.update(new_capture_mode)
+
         record_dict[capture_record_name] = self._create_record_info(
             capture_record_name,
             "Capture options",
@@ -874,6 +887,31 @@ def _make_pos_out(
             PviGroup.CAPTURE,
             labels=labels,
             initial_value=capture_index,
+            on_update=capture_record_on_update,
+        )
+
+        capture_record_updater = _RecordUpdater(
+            record_dict[capture_record_name],
+            self._record_prefix,
+            self._client,
+            self._all_values_dict,
+            labels if labels else None,
+        )
+
+        def dataset_record_on_update(new_dataset_name):
+            self._dataset_cache[record_name] = Dataset(
+                new_dataset_name,
+                labels[record_dict[capture_record_name].record.get()],
+            )
+
+        record_dict[dataset_record_name] = self._create_record_info(
+            dataset_record_name,
+            "Used to adjust the dataset name to a more scientifically relevant one",
+            builder.stringOut,
+            str,
+            PviGroup.CAPTURE,
+            initial_value="",
+            on_update=dataset_record_on_update,
         )
 
         offset_record_name = EpicsName(record_name + ":OFFSET")
@@ -957,6 +995,13 @@ def _make_pos_out(
             + ":"
             + units_record_name.split(":")[-1]
         )
+        record_dict[dataset_record_name].record.add_alias(
+            self._record_prefix
+            + ":"
+            + positions_record_name
+            + ":"
+            + dataset_record_name.split(":")[-1]
+        )
 
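(Aside, not part of the patch: the `:CAPTURE` and `:DATASET` callbacks above are deliberately symmetric. Each one re-caches a `Dataset` built from its own new value plus the other record's current value, and only the capture callback then forwards to the usual `_RecordUpdater`. A simplified, self-contained sketch of that interplay, with the softioc record lookups stubbed out by a plain dict:)

    from dataclasses import dataclass

    @dataclass
    class Dataset:
        name: str
        capture: str

    labels = ["No", "Value", "Min Max"]
    dataset_cache = {}
    current = {"dataset": "", "capture": 0}  # stand-ins for the two records

    def dataset_on_update(record_name, new_name):
        current["dataset"] = new_name
        dataset_cache[record_name] = Dataset(new_name, labels[current["capture"]])

    def capture_on_update(record_name, new_index):
        current["capture"] = new_index
        dataset_cache[record_name] = Dataset(current["dataset"], labels[new_index])
        # the real callback also returns capture_record_updater.update(new_index)

    dataset_on_update("INENC1:VAL", "position")
    capture_on_update("INENC1:VAL", 2)
    assert dataset_cache["INENC1:VAL"] == Dataset("position", "Min Max")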
         self._pos_out_row_counter += 1
         add_positions_table_row(
@@ -965,6 +1010,7 @@ def _make_pos_out(
             units_record_name,
             scale_record_name,
             offset_record_name,
+            dataset_record_name,
             capture_record_name,
         )
 
@@ -981,13 +1027,40 @@ def _make_ext_out(
         record_dict: Dict[EpicsName, RecordInfo] = {}
 
         # There is no record for the ext_out field itself - the only thing
-        # you do with them is to turn their Capture attribute on/off.
-        # The field itself has no value.
+        # you do with them is to turn their Capture attribute on/off, and give
+        # them an alternative dataset name
         capture_record_name = EpicsName(record_name + ":CAPTURE")
+        dataset_record_name = EpicsName(record_name + ":DATASET")
         labels, capture_index = self._process_labels(
             field_info.capture_labels, values[capture_record_name]
         )
+
+        def dataset_record_on_update(new_dataset_name):
+            self._dataset_cache[record_name] = Dataset(
+                new_dataset_name,
+                labels[record_dict[capture_record_name].record.get()],
+            )
+
+        record_dict[dataset_record_name] = self._create_record_info(
+            dataset_record_name,
+            "Used to adjust the dataset name to a more scientifically relevant one",
+            builder.stringOut,
+            str,
+            PviGroup.OUTPUTS,
+            initial_value="",
+            on_update=dataset_record_on_update,
+        )
+
+        capture_record_updater: _RecordUpdater
+
+        def capture_record_on_update(new_capture_mode):
+            self._dataset_cache[record_name] = Dataset(
+                record_dict[dataset_record_name].record.get(),
+                labels[new_capture_mode],
+            )
+            return capture_record_updater.update(new_capture_mode)
+
         record_dict[capture_record_name] = self._create_record_info(
             capture_record_name,
             field_info.description,
@@ -996,6 +1069,14 @@ def _make_ext_out(
             PviGroup.OUTPUTS,
             labels=labels,
             initial_value=capture_index,
+            on_update=capture_record_on_update,
+        )
+        capture_record_updater = _RecordUpdater(
+            record_dict[capture_record_name],
+            self._record_prefix,
+            self._client,
+            self._all_values_dict,
+            labels if labels else None,
         )
 
         return record_dict
@@ -1758,7 +1839,11 @@ def create_block_records(
 
         add_pcap_arm_pvi_info(PviGroup.INPUTS, pcap_arm_record)
 
-        HDF5RecordController(self._client, self._record_prefix)
+        HDF5RecordController(
+            self._client,
+            self._dataset_cache,
+            self._record_prefix,
+        )
 
         return record_dict
 
diff --git a/tests/test_hdf_ioc.py b/tests/test_hdf_ioc.py
index d9d16427..58bb3a74 100644
--- a/tests/test_hdf_ioc.py
+++ b/tests/test_hdf_ioc.py
@@ -38,6 +38,7 @@
 )
 from pandablocks_ioc._hdf_ioc import (
     CaptureMode,
+    Dataset,
     HDF5Buffer,
     HDF5RecordController,
     NumCapturedSetter,
@@ -228,7 +229,13 @@ async def hdf5_controller(
 
     test_prefix, hdf5_test_prefix = new_random_hdf5_prefix
 
-    hdf5_controller = HDF5RecordController(AsyncioClient("localhost"), test_prefix)
+    dataset_name_cache = {
+        "COUNTER1:OUT": Dataset("some_other_dataset_name", "Value"),
+    }
+
+    hdf5_controller = HDF5RecordController(
+        AsyncioClient("localhost"), dataset_name_cache, test_prefix
+    )
 
     # When using tests w/o CA, need to manually set _directory_exists to 1
     hdf5_controller._directory_exists_record.set(1)
@@ -247,7 +254,7 @@ def subprocess_func(
     async def wrapper():
         builder.SetDeviceName(namespace_prefix)
         client = MockedAsyncioClient(standard_responses)
-        HDF5RecordController(client, namespace_prefix)
+        HDF5RecordController(client, {}, namespace_prefix)
         dispatcher = asyncio_dispatcher.AsyncioDispatcher()
         builder.LoadDatabase()
         softioc.iocInit(dispatcher)
@@ -619,7 +626,7 @@ async def test_hdf5_file_writing_last_n_endreason_not_ok(
 
     # Initially Status should be "Dir exists and is writable"
     val = await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR)
":Status", datatype=DBR_CHAR_STR) - assert val == "OK" + assert val == "Dir exists and is writable" await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT) @@ -762,6 +769,7 @@ def test_hdf_buffer_forever(differently_sized_framedata, tmp_path): status_output.append, num_received_output.append, num_captured_setter_pipeline, + {}, ) buffer.put_data_to_file = frames_written_to_file.append @@ -805,6 +813,7 @@ def test_hdf_buffer_last_n(differently_sized_framedata, tmp_path): status_output.append, num_received_output.append, num_captured_setter_pipeline, + {}, ) buffer.put_data_to_file = frames_written_to_file.append @@ -881,6 +890,7 @@ def test_hdf_buffer_last_n_large_data(tmp_path): status_output.append, num_received_output.append, num_captured_setter_pipeline, + {}, ) buffer.put_data_to_file = frames_written_to_file.append diff --git a/tests/test_ioc.py b/tests/test_ioc.py index eff6b7e2..1236a5db 100644 --- a/tests/test_ioc.py +++ b/tests/test_ioc.py @@ -201,6 +201,7 @@ def idfn(val): f"{TEST_RECORD}:CAPTURE": "Diff", f"{TEST_RECORD}:OFFSET": "5", f"{TEST_RECORD}:SCALE": "0.5", + f"{TEST_RECORD}:DATASET": "", f"{TEST_RECORD}:UNITS": "MyUnits", }, [ @@ -208,6 +209,7 @@ def idfn(val): f"{TEST_RECORD}:CAPTURE", f"{TEST_RECORD}:OFFSET", f"{TEST_RECORD}:SCALE", + f"{TEST_RECORD}:DATASET", f"{TEST_RECORD}:UNITS", ], ), @@ -217,18 +219,19 @@ def idfn(val): ), { f"{TEST_RECORD}:CAPTURE": "Diff", + f"{TEST_RECORD}:DATASET": "MyDataset", }, - [ - f"{TEST_RECORD}:CAPTURE", - ], + [f"{TEST_RECORD}:CAPTURE", f"{TEST_RECORD}:DATASET"], ), ( ExtOutFieldInfo("ext_out", "samples", None, capture_labels=["No", "Diff"]), { f"{TEST_RECORD}:CAPTURE": "Diff", + f"{TEST_RECORD}:DATASET": "MyDataset", }, [ f"{TEST_RECORD}:CAPTURE", + f"{TEST_RECORD}:DATASET", ], ), (