Skip to content

Commit

Permalink
Allow `_pos_out` to keep the old `on_update` behaviour alongside the new one
Browse files Browse the repository at this point in the history
  • Loading branch information
evalott100 committed Jun 13, 2024
1 parent eb5f694 commit 2aa4d46
Show file tree
Hide file tree
Showing 3 changed files with 57 additions and 21 deletions.
8 changes: 6 additions & 2 deletions src/pandablocks_ioc/_hdf_ioc.py
Original file line number Diff line number Diff line change
Expand Up @@ -335,6 +335,7 @@ def __init__(
self,
client: AsyncioClient,
dataset_name_cache: Dict[str, Dict[str, str]],
datasets_record_updater: Callable,
record_prefix: str,
):
if find_spec("h5py") is None:
Expand All @@ -343,6 +344,7 @@ def __init__(

self._client = client
self.dataset_name_cache = dataset_name_cache
self.datasets_record_updater = datasets_record_updater

path_length = os.pathconf("/", "PC_PATH_MAX")
filename_length = os.pathconf("/", "PC_NAME_MAX")
Expand Down Expand Up @@ -658,8 +660,10 @@ async def _handle_hdf5_data(self) -> None:
self._num_captured_record.set
)

# Get the dataset names, or use the record name if no
# dataset name is provided
# Update `DATA:DATASETS` to match the names of the datasets
# in the HDF5 file
self.datasets_record_updater()

buffer = HDF5Buffer(
capture_mode,
filepath,
Expand Down
66 changes: 49 additions & 17 deletions src/pandablocks_ioc/ioc.py
Original file line number Diff line number Diff line change
Expand Up @@ -537,8 +537,6 @@ def update_cache(
else:
self._record_name_to_dataset_name.pop(record_name, None)

self.update_dataset_name_to_type()

def update_dataset_name_to_type(self):
dataset_name_list = list(self._record_name_to_dataset_name.values())
self._datasets_table.update_row("Name", dataset_name_list)
Expand Down Expand Up @@ -913,6 +911,17 @@ def _make_pos_out(
labels, capture_index = self._process_labels(
field_info.capture_labels, values[capture_record_name]
)

capture_record_updater: _RecordUpdater

def capture_record_on_update(new_capture_mode):
self._dataset_name_cache.update_cache(
record_name,
record_dict[dataset_record_name].record.get(),
labels[new_capture_mode],
)
return capture_record_updater.update(new_capture_mode)

record_dict[capture_record_name] = self._create_record_info(
capture_record_name,
"Capture options",
Expand All @@ -921,14 +930,20 @@ def _make_pos_out(
PviGroup.CAPTURE,
labels=labels,
initial_value=capture_index,
on_update=lambda new_capture_mode: (
self._dataset_name_cache.update_cache(
record_name,
record_dict[dataset_record_name].record.get(),
labels[new_capture_mode],
)
),
on_update=capture_record_on_update,
)

capture_record_info = RecordInfo(
data_type_func=builder.mbbOut, labels=labels, is_in_record=False
)
capture_record_updater = _RecordUpdater(
capture_record_info,
self._record_prefix,
self._client,
self._all_values_dict,
labels if labels else None,
)

record_dict[dataset_record_name] = self._create_record_info(
dataset_record_name,
"Used to adjust the dataset name to one more scientifically relevant",
Expand Down Expand Up @@ -1082,6 +1097,16 @@ def _make_ext_out(
),
)

capture_record_updater: _RecordUpdater

def capture_record_on_update(new_capture_mode):
self._dataset_name_cache.update_cache(
record_name,
record_dict[dataset_record_name].record.get(),
labels[new_capture_mode],
)
return capture_record_updater.update(new_capture_mode)

record_dict[capture_record_name] = self._create_record_info(
capture_record_name,
field_info.description,
Expand All @@ -1090,13 +1115,17 @@ def _make_ext_out(
PviGroup.OUTPUTS,
labels=labels,
initial_value=capture_index,
on_update=lambda new_capture_mode: (
self._dataset_name_cache.update_cache(
record_name,
record_dict[dataset_record_name].record.get(),
labels[new_capture_mode],
)
),
on_update=capture_record_on_update,
)
capture_record_info = RecordInfo(
data_type_func=builder.mbbOut, labels=labels, is_in_record=False
)
capture_record_updater = _RecordUpdater(
capture_record_info,
self._record_prefix,
self._client,
self._all_values_dict,
labels if labels else None,
)

return record_dict
Expand Down Expand Up @@ -1860,7 +1889,10 @@ def create_block_records(
add_pcap_arm_pvi_info(PviGroup.INPUTS, pcap_arm_record)

HDF5RecordController(
self._client, self._dataset_name_cache.cache, self._record_prefix
self._client,
self._dataset_name_cache.cache,
self._dataset_name_cache.update_dataset_name_to_type,
self._record_prefix,
)

return record_dict
Expand Down
4 changes: 2 additions & 2 deletions tests/test_hdf_ioc.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,7 +234,7 @@ async def hdf5_controller(
}

hdf5_controller = HDF5RecordController(
AsyncioClient("localhost"), dataset_name_cache, test_prefix
AsyncioClient("localhost"), dataset_name_cache, lambda: None, test_prefix
)

# When using tests w/o CA, need to manually set _directory_exists to 1
Expand All @@ -254,7 +254,7 @@ def subprocess_func(
async def wrapper():
builder.SetDeviceName(namespace_prefix)
client = MockedAsyncioClient(standard_responses)
HDF5RecordController(client, {}, namespace_prefix)
HDF5RecordController(client, {}, lambda: None, namespace_prefix)
dispatcher = asyncio_dispatcher.AsyncioDispatcher()
builder.LoadDatabase()
softioc.iocInit(dispatcher)
Expand Down

0 comments on commit 2aa4d46

Please sign in to comment.