Add two records: CreateDirectories and DirectoryExists. Automatically create directories based on this PV like in AD #102

Merged · 15 commits · Jun 5, 2024
2 changes: 2 additions & 0 deletions docs/how-to/capture-hdf.md
@@ -9,6 +9,8 @@ These can be viewed from the DATA screen.
```

- The file directory and name are chosen with `:DATA:HDFDirectory` and `:DATA:HDFFileName`.
- The number of directories that the IOC is allowed to create, should they not already exist, is determined by `:DATA:CreateDirectory`. The behavior of this signal is the same as that of the identical PV in [`areaDetector`](https://areadetector.github.io/areaDetector/ADCore/NDPluginFile.html).
- `:DATA:DirectoryExists` indicates whether the specified directory exists and is writable by the user under which the IOC is running.
- `:DATA:NumCapture` is the number of frames to capture in the file.
- `:DATA:NumCaptured` is the number of frames written to file.
- `:DATA:NumReceived` is the number of frames received from the panda.
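For context, the sketch below shows how a client might drive the two new records; it is an illustration rather than code from this PR, assuming the `aioca` client API used by the tests further down and a hypothetical `PANDA` record prefix.

```python
# Hypothetical client-side usage of the new records (not part of this PR).
# Assumes aioca exposes caget/caput/DBR_CHAR_STR as used in the tests below.
import asyncio

from aioca import DBR_CHAR_STR, caget, caput

PREFIX = "PANDA:DATA"  # hypothetical record prefix


async def configure_hdf_directory() -> None:
    # Allow the IOC to create up to two missing trailing directories.
    await caput(f"{PREFIX}:CreateDirectory", -2, wait=True)
    # Writing the directory path triggers the existence check / creation.
    await caput(
        f"{PREFIX}:HDFDirectory",
        "/data/panda/2024/run1",
        datatype=DBR_CHAR_STR,
        wait=True,
    )
    # 1 => directory exists (or was created) and is writable; 0 => unusable.
    exists = await caget(f"{PREFIX}:DirectoryExists")
    print("Directory usable:", bool(exists))


asyncio.run(configure_hdf_directory())
```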
115 changes: 114 additions & 1 deletion src/pandablocks_ioc/_hdf_ioc.py
@@ -5,6 +5,7 @@
from collections import deque
from enum import Enum
from importlib.util import find_spec
from pathlib import Path
from typing import Callable, Deque, Optional, Union

from pandablocks.asyncio import AsyncioClient
@@ -311,6 +312,8 @@ class HDF5RecordController:
_client: AsyncioClient

_directory_record: RecordWrapper
_create_directory_record: RecordWrapper
_directory_exists_record: RecordWrapper
_file_name_record: RecordWrapper
_file_number_record: RecordWrapper
_file_format_record: RecordWrapper
@@ -340,7 +343,8 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
length=path_length,
DESC="File path for HDF5 files",
validate=self._parameter_validate,
on_update=self._update_full_file_path,
on_update=self._update_directory_path,
always_update=True,
)
add_automatic_pvi_info(
PviGroup.HDF,
@@ -352,6 +356,40 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
record_prefix + ":" + self._DATA_PREFIX + ":HDFDirectory"
)

create_directory_record_name = EpicsName(self._DATA_PREFIX + ":CreateDirectory")
self._create_directory_record = builder.longOut(
create_directory_record_name,
initial_value=0,
DESC="Directory creation depth",
)
add_automatic_pvi_info(
PviGroup.HDF,
self._create_directory_record,
create_directory_record_name,
builder.longOut,
)
self._create_directory_record.add_alias(
record_prefix + ":" + create_directory_record_name.upper()
)

directory_exists_name = EpicsName(self._DATA_PREFIX + ":DirectoryExists")
self._directory_exists_record = builder.boolIn(
directory_exists_name,
ZNAM="No",
ONAM="Yes",
initial_value=0,
DESC="Directory exists",
)
add_automatic_pvi_info(
PviGroup.HDF,
self._directory_exists_record,
directory_exists_name,
builder.boolIn,
)
self._directory_exists_record.add_alias(
record_prefix + ":" + directory_exists_name.upper()
)

file_name_record_name = EpicsName(self._DATA_PREFIX + ":HDF_FILE_NAME")
self._file_name_record = builder.longStringOut(
file_name_record_name,
@@ -523,6 +561,75 @@ def _parameter_validate(self, record: RecordWrapper, new_val) -> bool:
return False
return True

async def _update_directory_path(self, new_val) -> None:
"""Handles writes to the directory path PV, creating
directories based on the setting of the CreateDirectory record"""
new_path = Path(new_val).absolute()
create_dir_depth = self._create_directory_record.get()
max_dirs_to_create = 0
if create_dir_depth < 0:
max_dirs_to_create = abs(create_dir_depth)
elif create_dir_depth > len(new_path.parents):
max_dirs_to_create = 0
Contributor comment:

Need a test covering line 561

elif create_dir_depth > 0:
max_dirs_to_create = len(new_path.parents) - create_dir_depth

logging.debug(f"Permitted to create up to {max_dirs_to_create} dirs.")
dirs_to_create = 0
for p in reversed(new_path.parents):
if not p.exists():
if dirs_to_create == 0:
# First directory level that does not exist, log it.
logging.error(f"All dir from {str(p)} and below do not exist!")
dirs_to_create += 1
else:
logging.info(f"{str(p)} exists")

# Account for target path itself not existing
if not os.path.exists(new_path):
dirs_to_create += 1

logging.debug(f"Need to create {dirs_to_create} directories.")

# Default message is "OK"
status_msg = "OK"

# Case where all dirs exist
if dirs_to_create == 0:
if os.access(new_path, os.W_OK):
status_msg = "Dir exists and is writable"
self._directory_exists_record.set(1)
else:
status_msg = "Dirs exist but aren't writable."
self._directory_exists_record.set(0)
# Case where we will create directories
elif dirs_to_create <= max_dirs_to_create:
logging.debug(f"Attempting to create {dirs_to_create} dir(s)...")
try:
os.makedirs(new_path, exist_ok=True)
status_msg = f"Created {dirs_to_create} dirs."
self._directory_exists_record.set(1)
except PermissionError:
status_msg = "Permission error creating dirs!"
self._directory_exists_record.set(0)
# Case where too many directories need to be created
else:
status_msg = f"Need to create {dirs_to_create} > {max_dirs_to_create} dirs."
self._directory_exists_record.set(0)

sevr = alarm.NO_ALARM
alrm = alarm.NO_ALARM
if self._directory_exists_record.get() == 0:
sevr = alarm.MAJOR_ALARM
alrm = alarm.STATE_ALARM
logging.error(status_msg)
else:
logging.debug(status_msg)

self._status_message_record.set(status_msg, severity=sevr, alarm=alrm)

await self._update_full_file_path(new_val)

async def _update_full_file_path(self, new_val) -> None:
self._full_file_path_record.set(self._get_filepath())
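
The depth arithmetic in `_update_directory_path` is easiest to see with a concrete path. The following standalone sketch mirrors the rule above; it is an illustration only (not code from this PR) and assumes POSIX-style absolute paths.

```python
# Illustrative re-statement of the CreateDirectory depth rule (not part of this PR).
from pathlib import Path


def max_dirs_to_create(target: str, create_dir_depth: int) -> int:
    """How many missing directories the IOC may create for `target`."""
    parents = Path(target).absolute().parents  # /a/b/c/d -> /a/b/c, /a/b, /a, /
    if create_dir_depth < 0:
        # Negative depth: may create up to |depth| missing trailing directories.
        return abs(create_dir_depth)
    if create_dir_depth == 0 or create_dir_depth > len(parents):
        # Zero, or a positive depth deeper than the path itself: create nothing.
        return 0
    # Positive depth: only the deepest (len(parents) - depth) levels,
    # counting the target itself, may be missing.
    return len(parents) - create_dir_depth


# /a/b/c/d has four parents (/a/b/c, /a/b, /a and /):
assert max_dirs_to_create("/a/b/c/d", -1) == 1  # may create only the leaf
assert max_dirs_to_create("/a/b/c/d", 2) == 2   # /, /a and /a/b must already exist
assert max_dirs_to_create("/a/b/c/d", 0) == 0   # never create anything
assert max_dirs_to_create("/a/b/c/d", 9) == 0   # deeper than the path: nothing
```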

@@ -532,6 +639,12 @@ async def _handle_hdf5_data(self) -> None:
This method expects to be run as an asyncio Task."""
try:
# Set up the hdf buffer

if self._directory_exists_record.get() != 1:
raise RuntimeError(
"Configured HDF directory does not exist or is not writable!"
)

num_capture: int = self._num_capture_record.get()
capture_mode: CaptureMode = CaptureMode(self._capture_mode_record.get())
filepath = self._get_filepath()
19 changes: 19 additions & 0 deletions tests/conftest.py
@@ -1,4 +1,23 @@
# flake8: noqa

import os

from fixtures.mocked_panda import *
from fixtures.panda_data import *


# Autouse fixture that will set all EPICS networking env vars to use lo interface
# to avoid false failures caused by things like firewalls blocking EPICS traffic.
@pytest.fixture(scope="session", autouse=True)
def configure_epics_environment():
os.environ["EPICS_CAS_INTF_ADDR_LIST"] = "127.0.0.1"
os.environ["EPICS_CAS_BEACON_ADDR_LIST"] = "127.0.0.1"
os.environ["EPICS_CA_ADDR_LIST"] = "127.0.0.1"
os.environ["EPICS_CAS_AUTO_ADDR_LIST"] = "NO"
os.environ["EPICS_CA_AUTO_BEACON_ADDR_LIST"] = "NO"

os.environ["EPICS_PVAS_INTF_ADDR_LIST"] = "127.0.0.1"
os.environ["EPICS_PVAS_BEACON_ADDR_LIST"] = "127.0.0.1"
os.environ["EPICS_PVA_ADDR_LIST"] = "127.0.0.1"
os.environ["EPICS_PVAS_AUTO_BEACON_ADDR_LIST"] = "NO"
os.environ["EPICS_PVA_AUTO_ADDR_LIST"] = "NO"
103 changes: 103 additions & 0 deletions tests/test_hdf_ioc.py
@@ -2,6 +2,7 @@

import asyncio
import logging
import os
from asyncio import CancelledError
from collections import deque
from multiprocessing.connection import Connection
@@ -228,6 +229,10 @@ async def hdf5_controller(
test_prefix, hdf5_test_prefix = new_random_hdf5_prefix

hdf5_controller = HDF5RecordController(AsyncioClient("localhost"), test_prefix)

# When running tests without Channel Access, _directory_exists_record must be set to 1 manually
hdf5_controller._directory_exists_record.set(1)

yield hdf5_controller
# Give time for asyncio to fully close its connections
await asyncio.sleep(0)
@@ -345,6 +350,12 @@ async def test_hdf5_ioc(hdf5_subprocess_ioc):
val = await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR)
assert val == "OK"

val = await caget(hdf5_test_prefix + ":CreateDirectory")
assert val == 0

val = await caget(hdf5_test_prefix + ":DirectoryExists")
assert val == 0


async def test_hdf5_ioc_parameter_validate_works(
hdf5_subprocess_ioc_no_logging_check, tmp_path
@@ -384,6 +395,98 @@ async def test_hdf5_ioc_parameter_validate_works(
assert val == str(tmp_path) + "/name.h5" # put should have been stopped


@pytest.mark.parametrize(
"create_depth, path, expect_exists, restrict_permissions",
[
(0, ".", True, False),
(0, "panda_test1", False, False),
(-2, "panda_test2", True, False),
(-1, "panda_test3/depth_2", False, False),
(1, "panda_test4/depth_2", True, False),
(0, ".", False, True),
(1, "panda_test5", False, True),
(-1, "panda_test6", False, True),
],
)
async def test_hdf5_dir_creation(
hdf5_subprocess_ioc,
tmp_path: Path,
create_depth: int,
path: str,
expect_exists: bool,
restrict_permissions: bool,
):
"""Test to see if directory creation/exists works as expected"""

if restrict_permissions:
# Artificially restrict perms for temp folder to simulate perm issues.
tmp_path.chmod(0o444)

_, hdf5_test_prefix = hdf5_subprocess_ioc

target_path = str(tmp_path / path)

await caput(
hdf5_test_prefix + ":CreateDirectory",
create_depth,
wait=True,
)
await caput(
hdf5_test_prefix + ":HDFDirectory",
target_path,
datatype=DBR_CHAR_STR,
wait=True,
)
exists = await caget(hdf5_test_prefix + ":DirectoryExists")
Contributor comment on lines +430 to +441:

It would be nice to add timeout=TIMEOUT to all these ca* calls, just to ensure a known maximum termination time. I do note we haven't been particularly good about always doing it in other tests.

assert (exists > 0) == expect_exists
if expect_exists:
assert os.path.exists(target_path)
assert os.access(target_path, os.W_OK)

if restrict_permissions:
# Put back default permissions
tmp_path.chmod(0o700)


async def test_hdf5_file_writing_no_dir(hdf5_subprocess_ioc, tmp_path: Path, caplog):
"""Test that if dir doesn't exist, HDF file writing fails with a runtime error"""
test_prefix, hdf5_test_prefix = hdf5_subprocess_ioc

test_dir = tmp_path
test_filename = "test.h5"
await caput(
hdf5_test_prefix + ":HDFDirectory",
str(test_dir / "panda_test1"),
wait=True,
datatype=DBR_CHAR_STR,
)

exists = await caget(hdf5_test_prefix + ":DirectoryExists")
assert exists == 0

await caput(
hdf5_test_prefix + ":HDFFileName", "name.h5", wait=True, datatype=DBR_CHAR_STR
)
await caput(
hdf5_test_prefix + ":HDFFileName",
test_filename,
wait=True,
timeout=TIMEOUT,
datatype=DBR_CHAR_STR,
)

val = await caget(hdf5_test_prefix + ":HDFFullFilePath", datatype=DBR_CHAR_STR)
assert val == "/".join([str(tmp_path), "panda_test1", test_filename])

await caput(hdf5_test_prefix + ":NumCapture", 1, wait=True, timeout=TIMEOUT)

await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT)

val = await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR)
assert val == "Capture disabled, unexpected exception"


@pytest.mark.parametrize("num_capture", [1, 1000, 10000])
async def test_hdf5_file_writing_first_n(
hdf5_subprocess_ioc, tmp_path: Path, caplog, num_capture