Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add new :DATASET pv to capture records and pass the name to the client #118

Merged
merged 11 commits into from
Jun 14, 2024
26 changes: 21 additions & 5 deletions src/pandablocks_ioc/_hdf_ioc.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,11 @@
import os
from asyncio import CancelledError
from collections import deque
from copy import deepcopy
from enum import Enum
from importlib.util import find_spec
from pathlib import Path
from typing import Callable, Deque, Optional, Union
from typing import Callable, Deque, Dict, Optional, Union

from pandablocks.asyncio import AsyncioClient
from pandablocks.hdf import (
Expand Down Expand Up @@ -70,6 +71,7 @@ def __init__(
status_message_setter: Callable,
number_received_setter: Callable,
number_captured_setter_pipeline: NumCapturedSetter,
capture_record_hdf_names: Dict[EpicsName, Dict[str, str]],
):
# Only one filename - user must stop capture and set new FileName/FilePath
# for new files
Expand All @@ -94,6 +96,8 @@ def __init__(
self.number_captured_setter_pipeline = number_captured_setter_pipeline
self.number_captured_setter_pipeline.number_captured_setter(0)

self.capture_record_hdf_names = capture_record_hdf_names

if (
self.capture_mode == CaptureMode.LAST_N
and self.number_of_rows_to_capture <= 0
Expand All @@ -114,7 +118,9 @@ def put_data_to_file(self, data: HDFReceived):

def start_pipeline(self):
self.pipeline = create_default_pipeline(
iter([self.filepath]), self.number_captured_setter_pipeline
iter([self.filepath]),
self.capture_record_hdf_names,
self.number_captured_setter_pipeline,
)

def _handle_StartData(self, data: StartData):
Expand Down Expand Up @@ -325,12 +331,18 @@ class HDF5RecordController:

_handle_hdf5_data_task: Optional[asyncio.Task] = None

def __init__(self, client: AsyncioClient, record_prefix: str):
def __init__(
self,
client: AsyncioClient,
capture_record_hdf_name: Dict[str, Callable[[str], str]],
record_prefix: str,
):
if find_spec("h5py") is None:
logging.warning("No HDF5 support detected - skipping creating HDF5 records")
return

self._client = client
self.capture_record_hdf_name = capture_record_hdf_name

path_length = os.pathconf("/", "PC_PATH_MAX")
filename_length = os.pathconf("/", "PC_NAME_MAX")
Expand Down Expand Up @@ -615,10 +627,10 @@ async def _update_directory_path(self, new_val) -> None:
self._directory_exists_record.set(0)

if self._directory_exists_record.get() == 0:
sevr = alarm.MAJOR_ALARM, alrm = alarm.STATE_ALARM
sevr, alrm = alarm.MAJOR_ALARM, alarm.STATE_ALARM
logging.error(status_msg)
else:
sevr = alarm.NO_ALARM, alrm = alarm.NO_ALARM
sevr, alrm = alarm.NO_ALARM, alarm.NO_ALARM
logging.debug(status_msg)

self._status_message_record.set(status_msg, severity=sevr, alarm=alrm)
Expand Down Expand Up @@ -647,13 +659,17 @@ async def _handle_hdf5_data(self) -> None:
number_captured_setter_pipeline = NumCapturedSetter(
self._num_captured_record.set
)

# Get the dataset names, or use the record name if no
# dataset name is provided
buffer = HDF5Buffer(
capture_mode,
filepath,
num_capture,
self._status_message_record.set,
self._num_received_record.set,
number_captured_setter_pipeline,
deepcopy(self.capture_record_hdf_name),
)
flush_period: float = self._flush_period_record.get()
async for data in self._client.data(
Expand Down
9 changes: 8 additions & 1 deletion src/pandablocks_ioc/_pvi.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,7 @@ def add_automatic_pvi_info(
_positions_table_group = Group(
name="PositionsTable", layout=Grid(labelled=True), children=[]
)
_positions_table_headers = ["VALUE", "UNITS", "SCALE", "OFFSET", "CAPTURE"]
_positions_table_headers = ["VALUE", "UNITS", "SCALE", "OFFSET", "DATASET", "CAPTURE"]


# TODO: Replicate this for the BITS table
Expand All @@ -174,6 +174,7 @@ def add_positions_table_row(
units_record_name: EpicsName,
scale_record_name: EpicsName,
offset_record_name: EpicsName,
dataset_record_name: EpicsName,
capture_record_name: EpicsName,
) -> None:
"""Add a Row to the Positions table"""
Expand Down Expand Up @@ -205,6 +206,12 @@ def add_positions_table_row(
pv=offset_record_name,
widget=TextWrite(),
),
SignalRW(
name=epics_to_pvi_name(dataset_record_name),
label=dataset_record_name,
pv=dataset_record_name,
widget=TextWrite(),
),
SignalRW(
name=epics_to_pvi_name(capture_record_name),
label=capture_record_name,
Expand Down
52 changes: 51 additions & 1 deletion src/pandablocks_ioc/ioc.py
Original file line number Diff line number Diff line change
Expand Up @@ -537,6 +537,10 @@ def __init__(
self._client = client
self._all_values_dict = all_values_dict

# A dictionary of record name to capture type to HDF dataset name
# e.g. {"COUNTER1": {"Max": "SOME_OTHER_DATASET_NAME"}}
self._capture_record_hdf_names: Dict[EpicsName, Dict[str, str]] = {}

# Set the record prefix
builder.SetDeviceName(self._record_prefix)

Expand Down Expand Up @@ -863,9 +867,35 @@ def _make_pos_out(
)

capture_record_name = EpicsName(record_name + ":CAPTURE")
dataset_record_name = EpicsName(record_name + ":DATASET")
labels, capture_index = self._process_labels(
field_info.capture_labels, values[capture_record_name]
)

def adjust_hdf_field_name_based_on_dataset(dataset_name) -> None:
    """Rebuild this record's capture-mode -> HDF dataset-name mapping.

    Called when the ``:DATASET`` record is updated. The record's current
    capture mode decides whether per-quantity suffixes (-Min/-Max/-Mean)
    are appended so multiple captured quantities get distinct datasets.

    Args:
        dataset_name: User-supplied dataset name; an empty string falls
            back to the EPICS record name.
    """
    # Map the capture record's index back to its label, e.g. "Min Max".
    current_capture_mode = labels[record_dict[capture_record_name].record.get()]

    # Throw away all the old settings for this record before rebuilding.
    self._capture_record_hdf_names[record_name] = {}

    # Multi-quantity modes need one dataset per quantity, disambiguated
    # by a -Min/-Max/-Mean suffix.
    if current_capture_mode in ("Min Max", "Min Max Mean"):
        for capture_mode_instance in current_capture_mode.split(" "):
            self._capture_record_hdf_names[record_name][
                capture_mode_instance
            ] = f"{(dataset_name or record_name)}-{capture_mode_instance}"

    # Single-quantity mode: the capture mode is its own instance.
    # NOTE(review): a user-supplied dataset_name is used unsuffixed here,
    # but the fallback appends the capture mode (e.g. "COUNTER1-Value"),
    # whereas the branch above suffixes even user-supplied names —
    # confirm this asymmetry is intended.
    else:
        self._capture_record_hdf_names[record_name][current_capture_mode] = (
            dataset_name or f"{record_name}-{current_capture_mode}"
        )
evalott100 marked this conversation as resolved.
Show resolved Hide resolved

def adjust_hdf_field_name_based_on_capture_mode(capture_mode) -> None:
    """Re-derive the HDF dataset names after the ``:CAPTURE`` mode changes.

    Reads the current ``:DATASET`` record value and regenerates the
    capture-mode -> dataset-name mapping so suffixes track the new mode.
    """
    # Read the DATASET record, not the CAPTURE record: the capture
    # record's .get() returns the capture-mode index, not a dataset name.
    current_dataset_name = record_dict[dataset_record_name].record.get()
    adjust_hdf_field_name_based_on_dataset(current_dataset_name)
record_dict[capture_record_name] = self._create_record_info(
capture_record_name,
"Capture options",
Expand All @@ -874,6 +904,16 @@ def _make_pos_out(
PviGroup.CAPTURE,
labels=labels,
initial_value=capture_index,
on_update=adjust_hdf_field_name_based_on_capture_mode,
)
record_dict[dataset_record_name] = self._create_record_info(
dataset_record_name,
"Used to adjust the dataset name to one more scientifically relevant",
builder.stringOut,
str,
PviGroup.CAPTURE,
initial_value="",
on_update=adjust_hdf_field_name_based_on_dataset,
)

offset_record_name = EpicsName(record_name + ":OFFSET")
Expand Down Expand Up @@ -957,6 +997,13 @@ def _make_pos_out(
+ ":"
+ units_record_name.split(":")[-1]
)
record_dict[dataset_record_name].record.add_alias(
self._record_prefix
+ ":"
+ positions_record_name
+ ":"
+ dataset_record_name.split(":")[-1]
)

self._pos_out_row_counter += 1
add_positions_table_row(
Expand All @@ -965,6 +1012,7 @@ def _make_pos_out(
units_record_name,
scale_record_name,
offset_record_name,
dataset_record_name,
capture_record_name,
)

Expand Down Expand Up @@ -1758,7 +1806,9 @@ def create_block_records(

add_pcap_arm_pvi_info(PviGroup.INPUTS, pcap_arm_record)

HDF5RecordController(self._client, self._record_prefix)
HDF5RecordController(
self._client, self._capture_record_hdf_names, self._record_prefix
)

return record_dict

Expand Down
18 changes: 15 additions & 3 deletions tests/test_hdf_ioc.py
Original file line number Diff line number Diff line change
Expand Up @@ -228,7 +228,16 @@ async def hdf5_controller(

test_prefix, hdf5_test_prefix = new_random_hdf5_prefix

hdf5_controller = HDF5RecordController(AsyncioClient("localhost"), test_prefix)
dataset_name_getters = {
"COUNTER1": lambda: "some_other_dataset_name",
# these datasets haven't been overwritten, they should be the default
"COUNTER2": lambda: "",
"COUNTER3": lambda: "",
}

hdf5_controller = HDF5RecordController(
AsyncioClient("localhost"), dataset_name_getters, test_prefix
)

# When using tests w/o CA, need to manually set _directory_exists to 1
hdf5_controller._directory_exists_record.set(1)
Expand All @@ -247,7 +256,7 @@ def subprocess_func(
async def wrapper():
builder.SetDeviceName(namespace_prefix)
client = MockedAsyncioClient(standard_responses)
HDF5RecordController(client, namespace_prefix)
HDF5RecordController(client, {}, namespace_prefix)
dispatcher = asyncio_dispatcher.AsyncioDispatcher()
builder.LoadDatabase()
softioc.iocInit(dispatcher)
Expand Down Expand Up @@ -619,7 +628,7 @@ async def test_hdf5_file_writing_last_n_endreason_not_ok(

# Initially Status should be "Dir exists and is writable"
val = await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR)
assert val == "OK"
assert val == "Dir exists and is writable"

await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT)

Expand Down Expand Up @@ -762,6 +771,7 @@ def test_hdf_buffer_forever(differently_sized_framedata, tmp_path):
status_output.append,
num_received_output.append,
num_captured_setter_pipeline,
{},
)
buffer.put_data_to_file = frames_written_to_file.append

Expand Down Expand Up @@ -805,6 +815,7 @@ def test_hdf_buffer_last_n(differently_sized_framedata, tmp_path):
status_output.append,
num_received_output.append,
num_captured_setter_pipeline,
{},
)
buffer.put_data_to_file = frames_written_to_file.append

Expand Down Expand Up @@ -881,6 +892,7 @@ def test_hdf_buffer_last_n_large_data(tmp_path):
status_output.append,
num_received_output.append,
num_captured_setter_pipeline,
{},
)
buffer.put_data_to_file = frames_written_to_file.append

Expand Down
2 changes: 2 additions & 0 deletions tests/test_ioc.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,13 +201,15 @@ def idfn(val):
f"{TEST_RECORD}:CAPTURE": "Diff",
f"{TEST_RECORD}:OFFSET": "5",
f"{TEST_RECORD}:SCALE": "0.5",
f"{TEST_RECORD}:DATASET": "",
f"{TEST_RECORD}:UNITS": "MyUnits",
},
[
f"{TEST_RECORD}",
f"{TEST_RECORD}:CAPTURE",
f"{TEST_RECORD}:OFFSET",
f"{TEST_RECORD}:SCALE",
f"{TEST_RECORD}:DATASET",
f"{TEST_RECORD}:UNITS",
],
),
Expand Down
Loading