Commit

Merge pull request #77 from evalott100/allow_for_gate_duration_sample_name

adjusted code to allow the server to send either name (GATE_DURATION or SAMPLES)
evalott100 authored Feb 28, 2024
2 parents 0385fd5 + eac8ef5 commit 2ecafc4
Showing 10 changed files with 84 additions and 44 deletions.
15 changes: 10 additions & 5 deletions src/pandablocks/commands.py
@@ -92,31 +92,36 @@ class CommandException(Exception):
# zip() because typing does not support variadic type variables. See
# typeshed PR #1550 for discussion.
@overload
def _execute_commands(c1: Command[T]) -> ExchangeGenerator[Tuple[T]]: ...
def _execute_commands(c1: Command[T]) -> ExchangeGenerator[Tuple[T]]:
...


@overload
def _execute_commands(
c1: Command[T], c2: Command[T2]
) -> ExchangeGenerator[Tuple[T, T2]]: ...
) -> ExchangeGenerator[Tuple[T, T2]]:
...


@overload
def _execute_commands(
c1: Command[T], c2: Command[T2], c3: Command[T3]
) -> ExchangeGenerator[Tuple[T, T2, T3]]: ...
) -> ExchangeGenerator[Tuple[T, T2, T3]]:
...


@overload
def _execute_commands(
c1: Command[T], c2: Command[T2], c3: Command[T3], c4: Command[T4]
) -> ExchangeGenerator[Tuple[T, T2, T3, T4]]: ...
) -> ExchangeGenerator[Tuple[T, T2, T3, T4]]:
...


@overload
def _execute_commands(
*commands: Command[Any],
) -> ExchangeGenerator[Tuple[Any, ...]]: ...
) -> ExchangeGenerator[Tuple[Any, ...]]:
...


def _execute_commands(*commands):
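
The @overload stubs in this hunk only inform the type checker; the untyped def _execute_commands(*commands) at the end is the single runtime implementation, so where the "..." bodies sit is purely cosmetic. A minimal standalone sketch of the same pattern (hypothetical as_tuple function, not from this repository):

    from typing import Any, Tuple, TypeVar, overload

    T = TypeVar("T")
    T2 = TypeVar("T2")

    @overload
    def as_tuple(a: T) -> Tuple[T]:
        ...

    @overload
    def as_tuple(a: T, b: T2) -> Tuple[T, T2]:
        ...

    @overload
    def as_tuple(*args: Any) -> Tuple[Any, ...]:
        ...

    def as_tuple(*args):
        # Runtime implementation; the overload bodies above are never executed.
        return tuple(args)
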
5 changes: 4 additions & 1 deletion src/pandablocks/connections.py
@@ -28,7 +28,10 @@
"DataConnection",
]

# The name of the samples field used for averaging unscaled fields
# The names of the samples field used for averaging unscaled fields
# In newer versions it's GATE_DURATION but we keep SAMPLES for backwards
# compatibility
GATE_DURATION_FIELD = "PCAP.GATE_DURATION.Value"
SAMPLES_FIELD = "PCAP.SAMPLES.Value"


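With both constants in place, downstream code can check for the new field name first and fall back to the old one. A usage sketch (the helper below is hypothetical, not part of this PR):

    from pandablocks.connections import GATE_DURATION_FIELD, SAMPLES_FIELD

    def resolve_samples_field(column_names) -> str:
        """Return whichever samples field the server sent, preferring the newer name."""
        for name in (GATE_DURATION_FIELD, SAMPLES_FIELD):
            if name in column_names:
                return name
        raise KeyError("capture contains neither GATE_DURATION nor SAMPLES")

    # e.g. resolve_samples_field(data.dtype.names) for a structured frame array,
    # or resolve_samples_field(hdf_file) for a written HDF5 file.
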
17 changes: 12 additions & 5 deletions src/pandablocks/hdf.py
@@ -9,7 +9,7 @@
from pandablocks.commands import Arm

from .asyncio import AsyncioClient
from .connections import SAMPLES_FIELD
from .connections import GATE_DURATION_FIELD, SAMPLES_FIELD
from .responses import EndData, EndReason, FieldCapture, FrameData, ReadyData, StartData

# Define the public API of this module
@@ -159,11 +159,18 @@ def __init__(self) -> None:

def create_processor(self, field: FieldCapture, raw: bool):
column_name = f"{field.name}.{field.capture}"

if raw and field.capture == "Mean":
return (
lambda data: data[column_name] * field.scale / data[SAMPLES_FIELD]
+ field.offset
)

def mean_callable(data):
if GATE_DURATION_FIELD in data.dtype.names:
gate_duration = data[GATE_DURATION_FIELD]
else:
gate_duration = data[SAMPLES_FIELD]

return (data[column_name] * field.scale / gate_duration) + field.offset

return mean_callable
elif raw and (field.scale != 1 or field.offset != 0):
return lambda data: data[column_name] * field.scale + field.offset
else:
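
To see the new behaviour in isolation, here is a self-contained sketch (a standalone copy of the fallback logic with made-up data, not the library's API): the averaged column is divided by GATE_DURATION when the server sends it, and by SAMPLES otherwise.

    import numpy as np

    GATE_DURATION_FIELD = "PCAP.GATE_DURATION.Value"
    SAMPLES_FIELD = "PCAP.SAMPLES.Value"

    def make_mean_processor(column_name, scale=1.0, offset=0.0):
        # Same fallback logic as mean_callable above
        def mean_callable(data):
            if GATE_DURATION_FIELD in data.dtype.names:
                gate_duration = data[GATE_DURATION_FIELD]
            else:
                gate_duration = data[SAMPLES_FIELD]
            return (data[column_name] * scale / gate_duration) + offset

        return mean_callable

    # An older PandA only sends PCAP.SAMPLES.Value
    frame = np.array(
        [(10.0, 5)],
        dtype=[("COUNTER1.OUT.Mean", "f8"), (SAMPLES_FIELD, "u4")],
    )
    print(make_mean_processor("COUNTER1.OUT.Mean")(frame))  # -> [2.]
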
29 changes: 25 additions & 4 deletions tests/conftest.py
@@ -30,28 +30,35 @@ def chunked_read(f: BufferedReader, size: int) -> Iterator[bytes]:

@pytest_asyncio.fixture
def slow_dump():
with open(Path(__file__).parent / "slow_dump.txt", "rb") as f:
with open(Path(__file__).parent / "data_dumps/slow_dump.bin", "rb") as f:
# Simulate small chunked read, sized so we hit the middle of a "BIN " marker
yield chunked_read(f, 44)


@pytest_asyncio.fixture
def fast_dump():
with open(Path(__file__).parent / "fast_dump.txt", "rb") as f:
with open(Path(__file__).parent / "data_dumps/fast_dump.bin", "rb") as f:
# Simulate larger chunked read
yield chunked_read(f, 500)


@pytest_asyncio.fixture
def raw_dump():
with open(Path(__file__).parent / "raw_dump.txt", "rb") as f:
with open(Path(__file__).parent / "data_dumps/raw_dump.bin", "rb") as f:
# Simulate largest chunked read
yield chunked_read(f, 200000)


@pytest_asyncio.fixture
def raw_dump_no_duration():
with open(Path(__file__).parent / "data_dumps/raw_dump_no_duration.bin", "rb") as f:
# Simulate largest chunked read
yield chunked_read(f, 200000)


@pytest.fixture
def overrun_dump():
with open(Path(__file__).parent / "raw_dump.txt", "rb") as f:
with open(Path(__file__).parent / "data_dumps/raw_dump.bin", "rb") as f:
# All in one go
return f.read().replace(b"Disarmed", b"Data overrun")

@@ -116,6 +123,20 @@ def overrun_dump():
]


def assert_all_data_in_hdf_file(hdf_file, samples_name):
def multiples(num, offset=0):
return pytest.approx(np.arange(1, 10001) * num + offset)

assert hdf_file["/COUNTER1.OUT.Max"][:] == multiples(1)
assert hdf_file["/COUNTER1.OUT.Mean"][:] == multiples(1)
assert hdf_file["/COUNTER1.OUT.Min"][:] == multiples(1)
assert hdf_file["/COUNTER2.OUT.Mean"][:] == multiples(2)
assert hdf_file["/COUNTER3.OUT.Value"][:] == multiples(3)
assert hdf_file["/PCAP.BITS2.Value"][:] == multiples(0)
assert hdf_file[f"/{samples_name}"][:] == multiples(0, offset=125)
assert hdf_file["/PCAP.TS_START.Value"][:] == multiples(2e-6, offset=7.2e-8 - 2e-6)


class Rows:
def __init__(self, *rows):
self.rows = rows
File renamed without changes.
Binary file added tests/data_dumps/raw_dump.bin
File renamed without changes.
File renamed without changes.
49 changes: 25 additions & 24 deletions tests/test_cli.py
@@ -3,18 +3,33 @@
from unittest.mock import patch

import h5py
import numpy as np
import pytest
from click.testing import CliRunner

from pandablocks import cli
from pandablocks.hdf import HDFDataOverrunException
from tests.conftest import STATE_RESPONSES, STATE_SAVEFILE, DummyServer


def test_writing_fast_hdf(dummy_server_in_thread: DummyServer, raw_dump, tmp_path):
from pandablocks.hdf import GATE_DURATION_FIELD, SAMPLES_FIELD, HDFDataOverrunException
from tests.conftest import (
STATE_RESPONSES,
STATE_SAVEFILE,
DummyServer,
assert_all_data_in_hdf_file,
)


@pytest.mark.parametrize("samples_name", [GATE_DURATION_FIELD, SAMPLES_FIELD])
def test_writing_fast_hdf(
samples_name,
dummy_server_in_thread: DummyServer,
raw_dump,
raw_dump_no_duration,
tmp_path,
):
dummy_server_in_thread.send.append("OK")
dummy_server_in_thread.data = raw_dump
if samples_name == GATE_DURATION_FIELD:
dummy_server_in_thread.data = raw_dump
else:
dummy_server_in_thread.data = raw_dump_no_duration

runner = CliRunner()
result = runner.invoke(
cli.cli, ["hdf", "localhost", str(tmp_path / "%d.h5"), "--arm"]
@@ -28,11 +43,11 @@ def test_writing_fast_hdf(dummy_server_in_thread: DummyServer, raw_dump, tmp_pat
"COUNTER2.OUT.Mean",
"COUNTER3.OUT.Value",
"PCAP.BITS2.Value",
"PCAP.SAMPLES.Value",
samples_name,
"PCAP.TS_START.Value",
]
assert dummy_server_in_thread.received == ["*PCAP.ARM="]
assert_all_data_in_hdf_file(hdf_file)
assert_all_data_in_hdf_file(hdf_file, samples_name)


def test_writing_overrun_hdf(
@@ -47,21 +62,7 @@ def test_writing_overrun_hdf(
assert result.exit_code == 1
assert isinstance(result.exception, HDFDataOverrunException)
hdf_file = h5py.File(tmp_path / "1.h5", "r")
assert_all_data_in_hdf_file(hdf_file)


def assert_all_data_in_hdf_file(hdf_file):
def multiples(num, offset=0):
return pytest.approx(np.arange(1, 10001) * num + offset)

assert hdf_file["/COUNTER1.OUT.Max"][:] == multiples(1)
assert hdf_file["/COUNTER1.OUT.Mean"][:] == multiples(1)
assert hdf_file["/COUNTER1.OUT.Min"][:] == multiples(1)
assert hdf_file["/COUNTER2.OUT.Mean"][:] == multiples(2)
assert hdf_file["/COUNTER3.OUT.Value"][:] == multiples(3)
assert hdf_file["/PCAP.BITS2.Value"][:] == multiples(0)
assert hdf_file["/PCAP.SAMPLES.Value"][:] == multiples(0, offset=125)
assert hdf_file["/PCAP.TS_START.Value"][:] == multiples(2e-6, offset=7.2e-8 - 2e-6)
assert_all_data_in_hdf_file(hdf_file, "PCAP.GATE_DURATION.Value")


class MockInput:
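
The new test is parametrized over both field names, so both server variants are exercised in one run; a standard pytest invocation (shown only as an example, not from the repository's CI) selects it on its own:

    import pytest

    # runs test_writing_fast_hdf once per samples_name parametrization
    pytest.main(["tests/test_cli.py", "-k", "test_writing_fast_hdf"])
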
13 changes: 8 additions & 5 deletions tests/test_pandablocks.py
@@ -344,18 +344,21 @@ def test_get_fields():
]


def test_get_fields_type_ext_out():
@pytest.mark.parametrize("gate_duration_name", ["GATE_DURATION", "SAMPLES"])
def test_get_fields_type_ext_out(gate_duration_name):
"""Test for field type == ext_out, ensuring we add .CAPTURE to the end of the
*ENUMS command"""
conn = ControlConnection()
cmd = GetFieldInfo("PCAP")
assert conn.send(cmd) == b"PCAP.*?\n"

# First yield, the response to "PCAP.*?"
assert (
conn.receive_bytes(b"!SAMPLES 9 ext_out samples\n.\n")
== b"*DESC.PCAP.SAMPLES?\n*ENUMS.PCAP.SAMPLES.CAPTURE?\n"
request_str = bytes(f"!{gate_duration_name} 9 ext_out samples\n.\n", "utf-8")
response_str = bytes(
f"*DESC.PCAP.{gate_duration_name}?\n*ENUMS.PCAP.{gate_duration_name}.CAPTURE?\n",
"utf-8",
)
assert conn.receive_bytes(request_str) == response_str

# Responses to the *DESC and *ENUM commands
responses = [
Expand All @@ -371,7 +374,7 @@ def test_get_fields_type_ext_out():
(
cmd,
{
"SAMPLES": ExtOutFieldInfo(
gate_duration_name: ExtOutFieldInfo(
type="ext_out",
subtype="samples",
description="Number of gated samples in the current capture",
