Merge branch 'main' into adsim_async
ZohebShaikh authored Nov 14, 2024
2 parents 10c39ce + ad6cc0d commit 055b074
Showing 17 changed files with 904 additions and 21 deletions.
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -33,6 +33,7 @@ dependencies = [
"redis",
"deepdiff",
"h5py",
"scanspec>=0.7.3",
]

dynamic = ["version"]
@@ -48,6 +49,7 @@ dev = [
# Commented out due to dependency version conflict with pydantic 1.x
# "copier",
"myst-parser",
"ophyd_async[sim]",
"pipdeptree",
"pre-commit",
"psutil",
Empty file.
File renamed without changes.
@@ -2,7 +2,7 @@
from bluesky import preprocessors as bpp
from bluesky.utils import MsgGenerator, make_decorator

from dodal.common.beamlines import beamline_utils
from dodal.common.beamlines.beamline_utils import get_path_provider
from dodal.common.types import UpdatingPathProvider

DATA_SESSION = "data_session"
@@ -31,7 +31,7 @@ def attach_data_session_metadata_wrapper(
Iterator[Msg]: Plan messages
"""
if provider is None:
provider = beamline_utils.get_path_provider()
provider = get_path_provider()
yield from bps.wait_for([provider.update])
ress = yield from bps.wait_for([provider.data_session])
data_session = ress[0].result()
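For context, a rough sketch of how this wrapper is applied around a plan (the detector, plan name, and any call details beyond what this hunk shows are assumptions; a path provider must already have been configured for the beamline):

from bluesky import plans as bp

from dodal.plan_stubs.data_session import attach_data_session_metadata_wrapper


def counting_plan(det):
    # The wrapper waits for the configured path provider to update, reads the
    # current data session and attaches it to the run's metadata before the
    # wrapped plan starts emitting messages.
    yield from attach_data_session_metadata_wrapper(bp.count([det]))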
@@ -23,7 +23,7 @@ def __init__(
super().__init__(*args)


def _check_and_cache_values(
def check_and_cache_values(
devices_and_positions: dict[MovableReadableDevice, float],
smallest_move: float,
maximum_move: float,
@@ -89,7 +89,7 @@ def move_and_reset_wrapper(
on. If false it is left up to the caller to wait on
them. Defaults to True.
"""
initial_positions = yield from _check_and_cache_values(
initial_positions = yield from check_and_cache_values(
device_and_positions, smallest_move, maximum_move
)

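For orientation, a small sketch of calling the now-public check_and_cache_values from a plan (the motor, limit values, and restore step are illustrative; the argument names and return value are taken from this hunk and the tests further down):

import bluesky.plan_stubs as bps

from dodal.plan_stubs.motor_utils import check_and_cache_values


def move_with_restore(motor):
    # Raises MoveTooLarge if a requested move exceeds maximum_move; caches the
    # current position of every device that will actually move (moves smaller
    # than smallest_move are skipped).
    initial_positions = yield from check_and_cache_values(
        {motor: 0.0}, smallest_move=0.01, maximum_move=100.0
    )
    yield from bps.mv(motor, 0.0)
    # ... do work at the new position, then restore the cached positions ...
    for device, old_position in initial_positions.items():
        yield from bps.abs_set(device, old_position, wait=True)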
150 changes: 150 additions & 0 deletions src/dodal/plan_stubs/wrapped.py
@@ -0,0 +1,150 @@
import itertools
from collections.abc import Mapping
from typing import Annotated, Any

import bluesky.plan_stubs as bps
from bluesky.protocols import Movable
from bluesky.utils import MsgGenerator

"""
Wrappers for Bluesky built-in plan stubs with type hinting
"""

Group = Annotated[str, "String identifier used by 'wait' or stubs that await"]


# After bluesky 1.14, bounds for stubs that move can be narrowed
# https://github.com/bluesky/bluesky/issues/1821
def set_absolute(
movable: Movable, value: Any, group: Group | None = None, wait: bool = False
) -> MsgGenerator:
"""
Set a device to an absolute value, wrapper for `bps.abs_set`.
Args:
movable (Movable): The device to set
value (T): The new value
group (Group | None, optional): The message group to associate with the
setting, for sequencing. Defaults to None.
wait (bool, optional): Whether to wait for the set to complete before continuing
(e.g. a motor has finished moving). Defaults to False.
Returns:
MsgGenerator: Plan
Yields:
Iterator[MsgGenerator]: Bluesky messages
"""
return (yield from bps.abs_set(movable, value, group=group, wait=wait))


def set_relative(
movable: Movable, value: Any, group: Group | None = None, wait: bool = False
) -> MsgGenerator:
"""
Set a device relative to its current value, wrapper for `bps.rel_set`.
Args:
movable (Movable): The device to set
value (T): The new value
group (Group | None, optional): The message group to associate with the
setting, for sequencing. Defaults to None.
wait (bool, optional): Whether to wait for the set to complete before continuing
(e.g. a motor has finished moving). Defaults to False.
Returns:
MsgGenerator: Plan
Yields:
Iterator[MsgGenerator]: Bluesky messages
"""

return (yield from bps.rel_set(movable, value, group=group, wait=wait))


def move(moves: Mapping[Movable, Any], group: Group | None = None) -> MsgGenerator:
"""
Move one or more devices to absolute positions, wrapper for `bps.mv`.
Args:
moves (Mapping[Movable, T]): Mapping of Movables to target positions
group (Group | None, optional): The message group to associate with the
setting, for sequencing. Defaults to None.
Returns:
MsgGenerator: Plan
Yields:
Iterator[MsgGenerator]: Bluesky messages
"""

return (
# type ignore until https://github.com/bluesky/bluesky/issues/1809
yield from bps.mv(*itertools.chain.from_iterable(moves.items()), group=group) # type: ignore
)


def move_relative(
moves: Mapping[Movable, Any], group: Group | None = None
) -> MsgGenerator:
"""
Move one or more devices relative to their current positions, wrapper for `bps.mvr`.
Args:
moves (Mapping[Movable, T]): Mapping of Movables to target deltas
group (Group | None, optional): The message group to associate with the
setting, for sequencing. Defaults to None.
Returns:
MsgGenerator: Plan
Yields:
Iterator[MsgGenerator]: Bluesky messages
"""

return (
# type ignore until https://github.com/bluesky/bluesky/issues/1809
yield from bps.mvr(*itertools.chain.from_iterable(moves.items()), group=group) # type: ignore
)


def sleep(time: float) -> MsgGenerator:
"""
Suspend all action for a given time, wrapper for `bps.sleep`.
Args:
time (float): Time to wait in seconds
Returns:
MsgGenerator: Plan
Yields:
Iterator[MsgGenerator]: Bluesky messages
"""

return (yield from bps.sleep(time))


def wait(
group: Group | None = None,
timeout: float | None = None,
) -> MsgGenerator:
"""
Wait for a group status to complete, wrapper for `bps.wait`.
Does not expose move_on, as when used as a stub it will not fail on timeout.
Args:
group (Group | None, optional): The name of the group to wait for, defaults
to None, in which case waits for all
groups that have not yet been awaited.
timeout (float | None, default=None): a timeout in seconds
Returns:
MsgGenerator: Plan
Yields:
Iterator[MsgGenerator]: Bluesky messages
"""

return (yield from bps.wait(group, timeout=timeout))
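A minimal sketch of how these wrapped stubs compose inside a plan (the devices are hypothetical, and any RunEngine or device setup is assumed):

from dodal.plan_stubs.wrapped import move, set_absolute, sleep, wait


def example_plan(x_motor, y_motor, shutter):
    # Move two motors concurrently as one named group, then wait for both.
    yield from move({x_motor: 1.0, y_motor: 2.5}, group="align")
    yield from wait(group="align", timeout=30.0)
    # Start a set without waiting, pause briefly, then wait for that group.
    yield from set_absolute(shutter, "Open", group="shutter")
    yield from sleep(0.5)
    yield from wait(group="shutter")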
4 changes: 4 additions & 0 deletions src/dodal/plans/__init__.py
@@ -0,0 +1,4 @@
from .scanspec import spec_scan
from .wrapped import count

__all__ = ["count", "spec_scan"]
66 changes: 66 additions & 0 deletions src/dodal/plans/scanspec.py
@@ -0,0 +1,66 @@
import operator
from functools import reduce
from typing import Annotated, Any

import bluesky.plans as bp
from bluesky.protocols import Movable, Readable
from cycler import Cycler, cycler
from pydantic import Field, validate_call
from scanspec.specs import Spec

from dodal.common import MsgGenerator
from dodal.plan_stubs.data_session import attach_data_session_metadata_decorator


@attach_data_session_metadata_decorator()
@validate_call(config={"arbitrary_types_allowed": True})
def spec_scan(
detectors: Annotated[
set[Readable],
Field(
description="Set of readable devices, will take a reading at each point, \
in addition to any Movables in the Spec",
),
],
spec: Annotated[
Spec[Movable],
Field(description="ScanSpec modelling the path of the scan"),
],
metadata: dict[str, Any] | None = None,
) -> MsgGenerator:
"""Generic plan for reading `detectors` at every point of a ScanSpec `Spec`.
A `Spec` is an N-dimensional path.
"""
# TODO: https://github.com/bluesky/scanspec/issues/154
# support Static.duration: Spec[Literal["DURATION"]]

_md = {
"plan_args": {
"detectors": {det.name for det in detectors},
"spec": repr(spec),
},
"plan_name": "spec_scan",
"shape": spec.shape(),
**(metadata or {}),
}

yield from bp.scan_nd(tuple(detectors), _as_cycler(spec), md=_md)


def _as_cycler(spec: Spec[Movable]) -> Cycler:
"""
Convert a scanspec to a cycler for compatibility with legacy Bluesky plans such as
`bp.scan_nd`. Use the midpoints of the scanspec since cyclers are normally used
for software triggered scans.
Args:
spec: A scanspec
Returns:
Cycler: A new cycler
"""

midpoints = spec.frames().midpoints
# Need to "add" the cyclers for all the axes together. The code below is
# effectively: cycler(motor1, [...]) + cycler(motor2, [...]) + ...
return reduce(operator.add, (cycler(*args) for args in midpoints.items()))
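A minimal usage sketch for spec_scan (the detector and motors are placeholders; because the plan carries the data-session metadata decorator, a configured path provider is assumed):

from scanspec.specs import Line

from dodal.plans import spec_scan


def grid(det, x_motor, y_motor):
    # 3 x 5 grid: the outer Line steps y, the inner Line sweeps x at each y,
    # and every detector is read at each of the 15 frames.
    spec = Line(y_motor, 0.0, 1.0, 3) * Line(x_motor, 0.0, 10.0, 5)
    yield from spec_scan({det}, spec)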
57 changes: 57 additions & 0 deletions src/dodal/plans/wrapped.py
@@ -0,0 +1,57 @@
from collections.abc import Sequence
from typing import Annotated, Any

import bluesky.plans as bp
from bluesky.protocols import Readable
from pydantic import Field, NonNegativeFloat, validate_call

from dodal.common import MsgGenerator
from dodal.plan_stubs.data_session import attach_data_session_metadata_decorator

"""This module wraps plan(s) from bluesky.plans until required handling for them is
moved into bluesky or better handled in downstream services.
Required decorators are installed on plan import
https://github.com/DiamondLightSource/blueapi/issues/474
Non-serialisable fields are ignored when they are optional
https://github.com/DiamondLightSource/blueapi/issues/711
We may also need other adjustments for UI purposes, e.g.:
- forcing uniqueness or orderedness of Readables
- limits and metadata (e.g. units)
"""


@attach_data_session_metadata_decorator()
@validate_call(config={"arbitrary_types_allowed": True})
def count(
detectors: Annotated[
set[Readable],
Field(
description="Set of readable devices, will take a reading at each point",
min_length=1,
),
],
num: Annotated[int, Field(description="Number of frames to collect", ge=1)] = 1,
delay: Annotated[
NonNegativeFloat | Sequence[NonNegativeFloat],
Field(
description="Delay between readings: if tuple, len(delay) == num - 1 and \
the delays are between each point, if value or None is the delay for every \
gap",
json_schema_extra={"units": "s"},
),
] = 0.0,
metadata: dict[str, Any] | None = None,
) -> MsgGenerator:
"""Reads from a number of devices.
Wraps bluesky.plans.count(det, num, delay, md=metadata) exposing only serializable
parameters and metadata."""
if isinstance(delay, Sequence):
assert (
len(delay) == num - 1
), f"Number of delays given must be {num - 1}: was given {len(delay)}"
metadata = metadata or {}
metadata["shape"] = (num,)
yield from bp.count(tuple(detectors), num, delay=delay, md=metadata)
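And a small sketch of calling the wrapped count plan (the detector and delay values are illustrative; the same path-provider assumption applies):

from dodal.plans import count


def take_three(det):
    # Three readings from one detector, with 1 s between the first and second
    # reading and 2 s between the second and third (len(delay) == num - 1).
    yield from count({det}, num=3, delay=[1.0, 2.0])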
@@ -14,9 +14,9 @@
from ophyd_async.epics.motor import Motor

from dodal.devices.util.test_utils import patch_motor
from dodal.plans.motor_util_plans import (
from dodal.plan_stubs.motor_utils import (
MoveTooLarge,
_check_and_cache_values,
check_and_cache_values,
home_and_reset_wrapper,
)

@@ -59,7 +59,7 @@ def my_device(RE):
"device_type",
[DeviceWithOnlyMotors, DeviceWithNoMotors, DeviceWithSomeMotors],
)
@patch("dodal.plans.motor_util_plans.move_and_reset_wrapper")
@patch("dodal.plan_stubs.motor_utils.move_and_reset_wrapper")
def test_given_types_of_device_when_home_and_reset_wrapper_called_then_motors_and_zeros_passed_to_move_and_reset_wrapper(
patch_move_and_reset, device_type, RE
):
@@ -80,7 +80,7 @@ def test_given_a_device_when_check_and_cache_values_then_motor_values_returned(
set_mock_value(motor.user_readback, i * 100)

motors_and_positions: dict[Motor, float] = RE(
_check_and_cache_values(
check_and_cache_values(
{motor_obj: 0.0 for motor_obj in my_device.motors}, 0, 1000
)
).plan_result # type: ignore
@@ -109,7 +109,7 @@ def test_given_a_device_with_a_too_large_move_when_check_and_cache_values_then_e
motors_and_positions = {motor_obj: new_position for motor_obj in my_device.motors}

with pytest.raises(MoveTooLarge) as e:
RE(_check_and_cache_values(motors_and_positions, 0, max))
RE(check_and_cache_values(motors_and_positions, 0, max))
assert e.value.axis == my_device.y
assert e.value.maximum_move == max

@@ -136,7 +136,7 @@ def test_given_a_device_where_one_move_too_small_when_check_and_cache_values_the
}

motors_and_positions: dict[Motor, float] = RE(
_check_and_cache_values(motors_and_new_positions, min, 1000)
check_and_cache_values(motors_and_new_positions, min, 1000)
).plan_result # type: ignore
cached_positions = motors_and_positions.values()

@@ -156,7 +156,7 @@ def test_given_a_device_where_all_moves_too_small_when_check_and_cache_values_th
motors_and_new_positions = {motor_obj: 0.0 for motor_obj in my_device.motors}

motors_and_positions: dict[Motor, float] = RE(
_check_and_cache_values(motors_and_new_positions, 40, 1000)
check_and_cache_values(motors_and_new_positions, 40, 1000)
).plan_result # type: ignore
cached_positions = motors_and_positions.values()
