From 84289337a592ae2cd7f7dac9debe3449ae44a904 Mon Sep 17 00:00:00 2001
From: Dominic Oram
Date: Mon, 29 Apr 2024 10:20:22 +0100
Subject: [PATCH 1/4] (#475) Fix tests against latest ophyd_async version

---
 tests/devices/unit_tests/test_slits.py       |  6 +++---
 tests/devices/unit_tests/test_synchrotron.py | 14 ++++++--------
 tests/devices/unit_tests/test_tetramm.py     | 10 +++++-----
 3 files changed, 14 insertions(+), 16 deletions(-)

diff --git a/tests/devices/unit_tests/test_slits.py b/tests/devices/unit_tests/test_slits.py
index 7ea883ec4f..8cd584e1b2 100644
--- a/tests/devices/unit_tests/test_slits.py
+++ b/tests/devices/unit_tests/test_slits.py
@@ -16,9 +16,9 @@ async def slits() -> Slits:


 async def test_reading_slits_reads_gaps_and_centres(slits: Slits):
-    set_sim_value(slits.x_gap.readback, 0.5)
-    set_sim_value(slits.y_centre.readback, 1.0)
-    set_sim_value(slits.y_gap.readback, 1.5)
+    set_sim_value(slits.x_gap.user_readback, 0.5)
+    set_sim_value(slits.y_centre.user_readback, 1.0)
+    set_sim_value(slits.y_gap.user_readback, 1.5)

     await assert_reading(
         slits,

diff --git a/tests/devices/unit_tests/test_synchrotron.py b/tests/devices/unit_tests/test_synchrotron.py
index b935df7e48..1144c583db 100644
--- a/tests/devices/unit_tests/test_synchrotron.py
+++ b/tests/devices/unit_tests/test_synchrotron.py
@@ -7,8 +7,6 @@
 from ophyd_async.core import DeviceCollector, StandardReadable, set_sim_value

 from dodal.devices.synchrotron import (
-    Prefix,
-    Suffix,
     Synchrotron,
     SynchrotronMode,
 )
@@ -29,15 +27,15 @@ READING_FIELDS = ["value", "alarm_severity"]
 DESCRIPTION_FIELDS = ["source", "dtype", "shape"]
 READING_ADDRESSES = [
-    f"sim://{Prefix.SIGNAL + Suffix.SIGNAL}",
-    f"sim://{Prefix.STATUS + Suffix.USER_COUNTDOWN}",
-    f"sim://{Prefix.TOP_UP + Suffix.COUNTDOWN}",
-    f"sim://{Prefix.TOP_UP + Suffix.END_COUNTDOWN}",
+    "soft://synchrotron-ring_current",
+    "soft://synchrotron-machine_user_countdown",
+    "soft://synchrotron-topup_start_countdown",
+    "soft://synchrotron-top_up_end_countdown",
 ]

 CONFIG_ADDRESSES = [
-    f"sim://{Prefix.STATUS + Suffix.BEAM_ENERGY}",
-    f"sim://{Prefix.STATUS + Suffix.MODE}",
+    "soft://synchrotron-beam_energy",
+    "soft://synchrotron-synchrotron_mode",
 ]

 READ_SIGNALS = [

diff --git a/tests/devices/unit_tests/test_tetramm.py b/tests/devices/unit_tests/test_tetramm.py
index ae790812bf..fb7ffe1ee6 100644
--- a/tests/devices/unit_tests/test_tetramm.py
+++ b/tests/devices/unit_tests/test_tetramm.py
@@ -243,7 +243,7 @@ async def test_prepare_arms_tetramm(
 async def test_stage_sets_up_writer(
     tetramm: TetrammDetector,
 ):
-    set_sim_value(tetramm.hdf.file_path_exists, 1)
+    set_sim_value(tetramm.hdf.file_path_exists, True)
     await tetramm.stage()

     assert (await tetramm.hdf.num_capture.get_value()) == 0
@@ -257,14 +257,14 @@ async def test_stage_sets_up_writer(
 async def test_stage_sets_up_accurate_describe_output(
     tetramm: TetrammDetector,
 ):
-    assert tetramm.describe() == {}
+    assert await tetramm.describe() == {}

-    set_sim_value(tetramm.hdf.file_path_exists, 1)
+    set_sim_value(tetramm.hdf.file_path_exists, True)
     await tetramm.stage()

-    assert tetramm.describe() == {
+    assert await tetramm.describe() == {
         TEST_TETRAMM_NAME: {
-            "source": "sim://MY-TETRAMM:HDF5:FullFileName_RBV",
+            "source": "soft://foobar-hdf-full_file_name",
             "shape": (11, 1000),
             "dtype": "array",
             "external": "STREAM:",
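
For context: in the ophyd-async version these tests now target, motor
readbacks are exposed as user_readback rather than readback, sim signal
sources are reported with a soft:// scheme derived from device and signal
names instead of sim:// plus a PV, and describe() is awaitable. A minimal
sketch of the updated sim-test pattern, assuming ophyd-async's Motor class
(the SIM:MOT: prefix is illustrative):

    import asyncio

    from ophyd_async.core import DeviceCollector, set_sim_value
    from ophyd_async.epics.motion import Motor


    async def main():
        # sim=True connects against in-memory signals; no real PVs needed
        async with DeviceCollector(sim=True):
            motor = Motor("SIM:MOT:")
        set_sim_value(motor.user_readback, 0.5)  # previously motor.readback
        assert await motor.user_readback.get_value() == 0.5


    asyncio.run(main())
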
From e6a39d2dc94994dc91b9d36886c59601490c561b Mon Sep 17 00:00:00 2001
From: Robert Tuck
Date: Mon, 29 Apr 2024 10:49:17 +0100
Subject: [PATCH 2/4] (DiamondLightSource/dodal#446) Remove dodal implementation
 of epics_signal_rw_rbv() in favour of the one in ophyd-async

---
 src/dodal/devices/robot.py           | 2 +-
 src/dodal/devices/util/epics_util.py | 7 -------
 2 files changed, 1 insertion(+), 8 deletions(-)

diff --git a/src/dodal/devices/robot.py b/src/dodal/devices/robot.py
index 3fd436ce99..7339ceed3c 100644
--- a/src/dodal/devices/robot.py
+++ b/src/dodal/devices/robot.py
@@ -7,8 +7,8 @@
 from bluesky.protocols import Descriptor, Movable, Reading
 from ophyd_async.core import AsyncStatus, StandardReadable, wait_for_value
 from ophyd_async.epics.signal import epics_signal_r, epics_signal_x
+from ophyd_async.epics.signal.signal import epics_signal_rw_rbv

-from dodal.devices.util.epics_util import epics_signal_rw_rbv
 from dodal.log import LOGGER

diff --git a/src/dodal/devices/util/epics_util.py b/src/dodal/devices/util/epics_util.py
index 0af3a88b21..083460b9fd 100644
--- a/src/dodal/devices/util/epics_util.py
+++ b/src/dodal/devices/util/epics_util.py
@@ -3,7 +3,6 @@
 from ophyd import Component, Device, EpicsSignal
 from ophyd.status import Status, StatusBase
-from ophyd_async.epics.signal import epics_signal_rw

 from dodal.devices.status import await_value
 from dodal.log import LOGGER
@@ -126,9 +125,3 @@ def set(self, proc: int) -> Status:
             lambda: self.proc.set(proc),
         ]
     )
-
-
-def epics_signal_rw_rbv(
-    T, write_pv: str
-):  # Remove when https://github.com/bluesky/ophyd-async/issues/139 is done
-    return epics_signal_rw(T, write_pv + "_RBV", write_pv)
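
The removed helper built a read/write signal whose readback comes from the
write PV plus an "_RBV" suffix. The robot.py diff above swaps only the
import, which implies the ophyd-async replacement is call-compatible. A
sketch of the usage both versions support (the PV name is illustrative, and
the default "_RBV" suffix is assumed):

    from ophyd_async.epics.signal.signal import epics_signal_rw_rbv

    # Writes go to BL03I-MO-ROBOT-01:SPEED; reads come back from the
    # matching BL03I-MO-ROBOT-01:SPEED_RBV readback PV.
    speed = epics_signal_rw_rbv(float, "BL03I-MO-ROBOT-01:SPEED")
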
From 7c57aca8cd7bfcc2036f2bb93393db9270209f07 Mon Sep 17 00:00:00 2001
From: Joe Shannon
Date: Mon, 29 Apr 2024 14:32:44 +0100
Subject: [PATCH 3/4] Add fswitch device for i22 (#469)

This is an initial implementation for providing the required data for i22
Nexus files. In the future this device should be combined with the i04
version. See #399 for further details.
---
 src/dodal/beamlines/i22.py        | 14 ++++++
 src/dodal/devices/i22/fswitch.py  | 56 +++++++++++++++++++++++++++++++
 tests/devices/i22/test_fswitch.py | 28 ++++++++++++++++
 3 files changed, 98 insertions(+)
 create mode 100644 src/dodal/devices/i22/fswitch.py
 create mode 100644 tests/devices/i22/test_fswitch.py

diff --git a/src/dodal/beamlines/i22.py b/src/dodal/beamlines/i22.py
index 9366bc8260..3cf7e4c31e 100644
--- a/src/dodal/beamlines/i22.py
+++ b/src/dodal/beamlines/i22.py
@@ -5,6 +5,7 @@
 )
 from dodal.beamlines.beamline_utils import set_beamline as set_utils_beamline
 from dodal.common.visit import StaticVisitDirectoryProvider
+from dodal.devices.i22.fswitch import FSwitch
 from dodal.devices.slits import Slits
 from dodal.devices.tetramm import TetrammDetector
 from dodal.log import set_beamline as set_log_beamline
@@ -120,3 +121,16 @@ def slits_6(
         wait_for_connection,
         fake_with_ophyd_sim,
     )
+
+
+def fswitch(
+    wait_for_connection: bool = True,
+    fake_with_ophyd_sim: bool = False,
+) -> FSwitch:
+    return device_instantiation(
+        FSwitch,
+        "fswitch",
+        "-MO-FSWT-01:",
+        wait_for_connection,
+        fake_with_ophyd_sim,
+    )

diff --git a/src/dodal/devices/i22/fswitch.py b/src/dodal/devices/i22/fswitch.py
new file mode 100644
index 0000000000..bf07175ade
--- /dev/null
+++ b/src/dodal/devices/i22/fswitch.py
@@ -0,0 +1,56 @@
+import asyncio
+import time
+from enum import Enum
+from typing import Dict
+
+from bluesky.protocols import Reading
+from ophyd_async.core import StandardReadable
+from ophyd_async.core.device import DeviceVector
+from ophyd_async.epics.signal import epics_signal_r
+
+
+class FilterState(str, Enum):
+    """
+    Note that the in/out here refers to the internal rocker
+    position, so a PV value of IN implies a filter OUT of the beam.
+    """
+
+    IN_BEAM = "OUT"
+    OUT_BEAM = "IN"
+
+
+class FSwitch(StandardReadable):
+    """
+    Device for i22's fswitch: a filter switch for manipulating
+    compound refractive lenses, also referred to as a transfocator.
+
+    This currently only implements the minimum
+    functionality for retrieving the number of lenses inserted.
+
+    Eventually this should be combined with the transfocator device in the i04
+    module but is currently incompatible as the EPICS interfaces are different.
+    See https://github.com/DiamondLightSource/dodal/issues/399
+    """
+
+    NUM_FILTERS = 128
+
+    def __init__(self, prefix: str, name: str = "") -> None:
+        self.filters = DeviceVector(
+            {
+                i: epics_signal_r(FilterState, f"{prefix}FILTER-{i:03}:STATUS_RBV")
+                for i in range(FSwitch.NUM_FILTERS)
+            }
+        )
+        super().__init__(name)
+
+    async def read(self) -> Dict[str, Reading]:
+        result = await asyncio.gather(
+            *(filter.get_value() for filter in self.filters.values())
+        )
+        num_in = sum(r.value == FilterState.IN_BEAM for r in result)
+        default_reading = await super().read()
+        return {
+            "number_of_lenses": Reading(value=num_in, timestamp=time.time()),
+            **default_reading,
+        }

diff --git a/tests/devices/i22/test_fswitch.py b/tests/devices/i22/test_fswitch.py
new file mode 100644
index 0000000000..6c28a287a0
--- /dev/null
+++ b/tests/devices/i22/test_fswitch.py
@@ -0,0 +1,28 @@
+from unittest import mock
+
+import pytest
+from ophyd_async.core import DeviceCollector, set_sim_value
+
+from dodal.devices.i22.fswitch import FilterState, FSwitch
+
+
+@pytest.fixture
+async def fswitch() -> FSwitch:
+    async with DeviceCollector(sim=True):
+        fswitch = FSwitch("DEMO-FSWT-01:")
+
+    return fswitch
+
+
+async def test_reading_fswitch(fswitch: FSwitch):
+    set_sim_value(fswitch.filters.get(0), FilterState.OUT_BEAM)
+    set_sim_value(fswitch.filters.get(1), FilterState.OUT_BEAM)
+    set_sim_value(fswitch.filters.get(2), FilterState.OUT_BEAM)
+
+    reading = await fswitch.read()
+    assert reading == {
+        "number_of_lenses": {
+            "timestamp": mock.ANY,
+            "value": 125,  # three of the 128 filters are out of the beam
+        }
+    }
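
The test above leans on two behaviours of the device: simulated enum signals
start at the first member, FilterState.IN_BEAM (hence 125 when only three
filters are switched out), and read() folds the derived lens count into an
otherwise standard reading. A sketch of direct use under the same sim
assumptions:

    import asyncio

    from ophyd_async.core import DeviceCollector

    from dodal.devices.i22.fswitch import FSwitch


    async def main():
        async with DeviceCollector(sim=True):
            fswitch = FSwitch("DEMO-FSWT-01:")
        # No sim values set, so all 128 filters read the default IN_BEAM
        reading = await fswitch.read()
        assert reading["number_of_lenses"]["value"] == 128


    asyncio.run(main())
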
From 295b6ffcf5d65bd2f90d5fcc7c28e995f48896ac Mon Sep 17 00:00:00 2001
From: DiamondJoseph <53935796+DiamondJoseph@users.noreply.github.com>
Date: Mon, 29 Apr 2024 14:33:58 +0100
Subject: [PATCH 4/4] Restore tests for DirectoryProvider and associated
 pre-processor (#484)

---
 src/dodal/common/visit.py                     |  10 +-
 .../preprocessors/test_filesystem_metadata.py | 401 ++++++++++++++++++
 2 files changed, 408 insertions(+), 3 deletions(-)
 create mode 100644 tests/preprocessors/test_filesystem_metadata.py

diff --git a/src/dodal/common/visit.py b/src/dodal/common/visit.py
index 19cebbe447..19f91ca46b 100644
--- a/src/dodal/common/visit.py
+++ b/src/dodal/common/visit.py
@@ -163,7 +163,7 @@ def __call__(self) -> DirectoryInfo:


 def attach_metadata(
-    plan: MsgGenerator,
+    plan: MsgGenerator, provider: UpdatingDirectoryProvider | None = None
 ) -> MsgGenerator:
     """
     Attach data session metadata to the runs within a plan and make it correlate
@@ -183,10 +183,14 @@
     Yields:
         Iterator[Msg]: Plan messages
     """
-    provider = beamline_utils.get_directory_provider()
+    if provider is None:
+        provider = beamline_utils.get_directory_provider()
     yield from bps.wait_for([provider.update])
     directory_info: DirectoryInfo = provider()
-    yield from bpp.inject_md_wrapper(plan, md={DATA_SESSION: directory_info.prefix})
+    # https://github.com/DiamondLightSource/dodal/issues/452
+    # As part of #452, each data collection will be written to its own folder,
+    # after which resource_dir can be used directly.
+    data_session = directory_info.prefix.removesuffix("-")
+    yield from bpp.inject_md_wrapper(plan, md={DATA_SESSION: data_session})


 attach_metadata_decorator = make_decorator(attach_metadata)
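
For context, attach_metadata is applied by wrapping a plan before it is
passed to the RunEngine, which is exactly what collect_docs() in the restored
tests below does. A minimal sketch (det and provider stand in for a real
detector and UpdatingDirectoryProvider):

    import bluesky.plans as bp
    from bluesky import RunEngine

    from dodal.common.visit import attach_metadata

    RE = RunEngine()
    # Every run opened by the wrapped plan gains a "data_session" key in its
    # start document, derived from the provider's prefix (e.g. "example-1-"
    # is trimmed to "example-1" by the removesuffix call above).
    RE(attach_metadata(bp.count([det]), provider))
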
diff --git a/tests/preprocessors/test_filesystem_metadata.py b/tests/preprocessors/test_filesystem_metadata.py
new file mode 100644
index 0000000000..cd4d84da64
--- /dev/null
+++ b/tests/preprocessors/test_filesystem_metadata.py
@@ -0,0 +1,401 @@
+from collections.abc import Callable, Mapping
+from pathlib import Path
+from typing import Any
+
+import bluesky.plan_stubs as bps
+import bluesky.plans as bp
+import pytest
+from aiohttp import ClientResponseError
+from bluesky import RunEngine
+from bluesky.preprocessors import (
+    run_decorator,
+    run_wrapper,
+    set_run_key_decorator,
+    set_run_key_wrapper,
+    stage_wrapper,
+)
+from bluesky.protocols import (
+    HasName,
+    Readable,
+    Reading,
+    Status,
+    Triggerable,
+)
+from event_model.documents.event_descriptor import DataKey
+from ophyd.status import StatusBase
+from ophyd_async.core import DeviceCollector, DirectoryProvider
+from pydantic import BaseModel
+
+from dodal.common.types import MsgGenerator, UpdatingDirectoryProvider
+from dodal.common.visit import (
+    DATA_SESSION,
+    DataCollectionIdentifier,
+    DirectoryServiceClientBase,
+    LocalDirectoryServiceClient,
+    StaticVisitDirectoryProvider,
+    attach_metadata,
+)
+
+
+class FakeDetector(Readable, HasName, Triggerable):
+    _name: str
+    _provider: DirectoryProvider
+
+    def __init__(
+        self,
+        name: str,
+        provider: DirectoryProvider,
+    ) -> None:
+        self._name = name
+        self._provider = provider
+
+    async def read(self) -> dict[str, Reading]:
+        return {
+            f"{self.name}_data": {
+                "value": "test",
+                "timestamp": 0.0,
+            },
+        }
+
+    async def describe(self) -> dict[str, DataKey]:
+        directory_info = self._provider()
+        source = str(
+            directory_info.root
+            / directory_info.resource_dir
+            / f"{directory_info.prefix}{self.name}{directory_info.suffix}.h5"
+        )
+        return {
+            f"{self.name}_data": {
+                "dtype": "string",
+                "shape": [1],
+                "source": source,
+            }
+        }
+
+    def trigger(self) -> Status:
+        status = StatusBase()
+        status.set_finished()
+        return status
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    @property
+    def parent(self) -> None:
+        return None
+
+
+class MockDirectoryServiceClient(LocalDirectoryServiceClient):
+    def __init__(self):
+        self.fail = False
+        super().__init__()
+
+    async def create_new_collection(self) -> DataCollectionIdentifier:
+        if self.fail:
+            raise ClientResponseError(None, ())  # type: ignore
+
+        return await super().create_new_collection()
+
+    async def get_current_collection(self) -> DataCollectionIdentifier:
+        if self.fail:
+            raise ClientResponseError(None, ())  # type: ignore
+
+        return await super().get_current_collection()
+
+
+class DataEvent(BaseModel):
+    name: str
+    doc: Any
+
+
+@pytest.fixture
+def client() -> DirectoryServiceClientBase:
+    return MockDirectoryServiceClient()
+
+
+@pytest.fixture
+def provider(
+    client: DirectoryServiceClientBase, tmp_path: Path
+) -> UpdatingDirectoryProvider:
+    return StaticVisitDirectoryProvider("example", tmp_path, client=client)
+
+
+@pytest.fixture(params=[1, 2])
+def detectors(request, provider: UpdatingDirectoryProvider) -> list[Readable]:
+    number_of_detectors = request.param
+    with DeviceCollector(sim=True):
+        dets: list[Readable] = [
+            FakeDetector(name=f"det{i}", provider=provider)
+            for i in range(number_of_detectors)
+        ]
+    return dets
+
+
+def simple_run(detectors: list[Readable]) -> MsgGenerator:
+    yield from bp.count(detectors)
+
+
+def multi_run(detectors: list[Readable]) -> MsgGenerator:
+    yield from bp.count(detectors)
+    yield from bp.count(detectors)
+
+
+def multi_nested_plan(detectors: list[Readable]) -> MsgGenerator:
+    yield from simple_run(detectors)
+    yield from simple_run(detectors)
+
+
+def multi_run_single_stage(detectors: list[Readable]) -> MsgGenerator:
+    def stageless_count() -> MsgGenerator:
+        return (yield from bps.one_shot(detectors))
+
+    def inner_plan() -> MsgGenerator:
+        yield from run_wrapper(stageless_count())
+        yield from run_wrapper(stageless_count())
+
+    yield from stage_wrapper(inner_plan(), detectors)
+
+
+def multi_run_single_stage_multi_group(
+    detectors: list[Readable],
+) -> MsgGenerator:
+    def stageless_count() -> MsgGenerator:
+        return (yield from bps.one_shot(detectors))
+
+    def inner_plan() -> MsgGenerator:
+        yield from run_wrapper(stageless_count(), md={DATA_SESSION: 1})
+        yield from run_wrapper(stageless_count(), md={DATA_SESSION: 1})
+        yield from run_wrapper(stageless_count(), md={DATA_SESSION: 2})
+        yield from run_wrapper(stageless_count(), md={DATA_SESSION: 2})
+
+    yield from stage_wrapper(inner_plan(), detectors)
+
+
+@run_decorator(md={DATA_SESSION: 12345})
+@set_run_key_decorator("outer")
+def nested_run_with_metadata(detectors: list[Readable]) -> MsgGenerator:
+    yield from set_run_key_wrapper(bp.count(detectors), "inner")
+    yield from set_run_key_wrapper(bp.count(detectors), "inner")
+
+
+@run_decorator()
+@set_run_key_decorator("outer")
+def nested_run_without_metadata(
+    detectors: list[Readable],
+) -> MsgGenerator:
+    yield from set_run_key_wrapper(bp.count(detectors), "inner")
+    yield from set_run_key_wrapper(bp.count(detectors), "inner")
+
+
+def test_simple_run_gets_scan_number(
+    RE: RunEngine,
+    detectors: list[Readable],
+    provider: UpdatingDirectoryProvider,
+    tmp_path: Path,
+) -> None:
+    docs = collect_docs(
+        RE,
+        simple_run(detectors),
+        provider,
+    )
+    assert docs[0].name == "start"
+    assert docs[0].doc[DATA_SESSION] == "example-1"
+    assert_all_detectors_used_collection_numbers(tmp_path, docs, detectors, ["1"])
+
+
+@pytest.mark.parametrize("plan", [multi_run, multi_nested_plan])
+def test_multi_run_gets_scan_numbers(
+    RE: RunEngine,
+    detectors: list[Readable],
+    plan: Callable[[list[Readable]], MsgGenerator],
+    provider: UpdatingDirectoryProvider,
+    tmp_path: Path,
+) -> None:
+    """Test is here to demonstrate that multi run plans will overwrite files."""
+    docs = collect_docs(
+        RE,
+        plan(detectors),
+        provider,
+    )
+    start_docs = find_start_docs(docs)
+    assert len(start_docs) == 2
+    assert start_docs[0].doc[DATA_SESSION] == "example-1"
+    assert start_docs[1].doc[DATA_SESSION] == "example-1"
+    assert_all_detectors_used_collection_numbers(tmp_path, docs, detectors, ["1", "1"])
+
+
+def test_multi_run_single_stage(
+    RE: RunEngine,
+    detectors: list[Readable],
+    provider: UpdatingDirectoryProvider,
+    tmp_path: Path,
+) -> None:
+    docs = collect_docs(
+        RE,
+        multi_run_single_stage(detectors),
+        provider,
+    )
+    start_docs = find_start_docs(docs)
+    assert len(start_docs) == 2
+    assert start_docs[0].doc[DATA_SESSION] == "example-1"
+    assert start_docs[1].doc[DATA_SESSION] == "example-1"
+    assert_all_detectors_used_collection_numbers(
+        tmp_path,
+        docs,
+        detectors,
+        [
+            "1",
+            "1",
+        ],
+    )
+
+
+def test_multi_run_single_stage_multi_group(
+    RE: RunEngine,
+    detectors: list[Readable],
+    provider: UpdatingDirectoryProvider,
+    tmp_path: Path,
+) -> None:
+    docs = collect_docs(
+        RE,
+        multi_run_single_stage_multi_group(detectors),
+        provider,
+    )
+    start_docs = find_start_docs(docs)
+    assert len(start_docs) == 4
+    assert start_docs[0].doc[DATA_SESSION] == "example-1"
+    assert start_docs[1].doc[DATA_SESSION] == "example-1"
+    assert start_docs[2].doc[DATA_SESSION] == "example-1"
+    assert start_docs[3].doc[DATA_SESSION] == "example-1"
+    assert_all_detectors_used_collection_numbers(
+        tmp_path,
+        docs,
+        detectors,
+        ["1", "1", "1", "1"],
+    )
+
+
+def test_nested_run_with_metadata(
+    RE: RunEngine,
+    detectors: list[Readable],
+    provider: UpdatingDirectoryProvider,
+    tmp_path: Path,
+) -> None:
+    """Test is here to demonstrate that nested runs will be treated as a single run.
+
+    That means detectors in such runs will overwrite files.
+    """
+    docs = collect_docs(
+        RE,
+        nested_run_with_metadata(detectors),
+        provider,
+    )
+    start_docs = find_start_docs(docs)
+    assert len(start_docs) == 3
+    assert start_docs[0].doc[DATA_SESSION] == "example-1"
+    assert start_docs[1].doc[DATA_SESSION] == "example-1"
+    assert start_docs[2].doc[DATA_SESSION] == "example-1"
+    assert_all_detectors_used_collection_numbers(tmp_path, docs, detectors, ["1", "1"])
+
+
+def test_nested_run_without_metadata(
+    RE: RunEngine,
+    detectors: list[Readable],
+    provider: UpdatingDirectoryProvider,
+    tmp_path: Path,
+) -> None:
+    """Test is here to demonstrate that nested runs will be treated as a single run.
+
+    That means detectors in such runs will overwrite files.
+    """
+    docs = collect_docs(
+        RE,
+        nested_run_without_metadata(detectors),
+        provider,
+    )
+    start_docs = find_start_docs(docs)
+    assert len(start_docs) == 3
+    assert start_docs[0].doc[DATA_SESSION] == "example-1"
+    assert start_docs[1].doc[DATA_SESSION] == "example-1"
+    assert start_docs[2].doc[DATA_SESSION] == "example-1"
+    assert_all_detectors_used_collection_numbers(tmp_path, docs, detectors, ["1", "1"])
+
+
+def test_visit_directory_provider_fails(
+    RE: RunEngine,
+    detectors: list[Readable],
+    provider: UpdatingDirectoryProvider,
+    client: MockDirectoryServiceClient,
+) -> None:
+    client.fail = True
+    with pytest.raises(ValueError):
+        collect_docs(
+            RE,
+            simple_run(detectors),
+            provider,
+        )
+
+
+def test_visit_directory_provider_fails_after_one_success(
+    RE: RunEngine,
+    detectors: list[Readable],
+    provider: UpdatingDirectoryProvider,
+    client: MockDirectoryServiceClient,
+) -> None:
+    collect_docs(
+        RE,
+        simple_run(detectors),
+        provider,
+    )
+    client.fail = True
+    with pytest.raises(ValueError):
+        collect_docs(
+            RE,
+            simple_run(detectors),
+            provider,
+        )
+
+
+def collect_docs(
+    RE: RunEngine,
+    plan: MsgGenerator,
+    provider: UpdatingDirectoryProvider,
+) -> list[DataEvent]:
+    events = []
+
+    def on_event(name: str, doc: Mapping[str, Any]) -> None:
+        events.append(DataEvent(name=name, doc=doc))
+
+    wrapped_plan = attach_metadata(plan, provider)
+    RE(wrapped_plan, on_event)
+    return events
+
+
+def assert_all_detectors_used_collection_numbers(
+    tmp_path: Path,
+    docs: list[DataEvent],
+    detectors: list[Readable],
+    dataCollectionIds: list[str],
+) -> None:
+    descriptors = find_descriptor_docs(docs)
+    assert len(descriptors) == len(dataCollectionIds)
+
+    for descriptor, dataCollectionId in zip(
+        descriptors, dataCollectionIds, strict=False
+    ):
+        for detector in detectors:
+            source = descriptor.doc.get("data_keys", {}).get(f"{detector.name}_data")[
+                "source"
+            ]
+            expected_source = f"example-{dataCollectionId}-{detector.name}.h5"
+            assert Path(source) == tmp_path / expected_source
+
+
+def find_start_docs(docs: list[DataEvent]) -> list[DataEvent]:
+    return list(filter(lambda event: event.name == "start", docs))
+
+
+def find_descriptor_docs(docs: list[DataEvent]) -> list[DataEvent]:
+    return list(filter(lambda event: event.name == "descriptor", docs))
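
Taken together, these tests assert that every start document emitted under
attach_metadata carries the same data session, regardless of run nesting. A
sketch of the verification pattern, using a RunEngine subscriber as
collect_docs() above does (simple_run is from the test module; det and
provider are placeholders for the fixtures):

    from bluesky import RunEngine

    from dodal.common.visit import DATA_SESSION, attach_metadata

    RE = RunEngine()
    sessions = []

    def capture(name, doc):
        # Record the data session stamped onto each start document
        if name == "start":
            sessions.append(doc[DATA_SESSION])

    RE(attach_metadata(simple_run([det]), provider), capture)
    assert sessions == ["example-1"]
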