Skip to content

Commit

Permalink
edited to test panda with both fields
Browse files Browse the repository at this point in the history
  • Loading branch information
evalott100 committed Mar 12, 2024
1 parent 2277585 commit 7316225
Show file tree
Hide file tree
Showing 6 changed files with 50 additions and 28 deletions.
29 changes: 25 additions & 4 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,28 +30,35 @@ def chunked_read(f: BufferedReader, size: int) -> Iterator[bytes]:

@pytest_asyncio.fixture
def slow_dump():
    """Yield the slow PCAP dump as an iterator of small byte chunks."""
    with open(Path(__file__).parent / "data_dumps/slow_dump.bin", "rb") as f:
        # Simulate small chunked read, sized so we hit the middle of a "BIN " marker
        yield chunked_read(f, 44)


@pytest_asyncio.fixture
def fast_dump():
    """Yield the fast PCAP dump as an iterator of medium-sized byte chunks."""
    with open(Path(__file__).parent / "data_dumps/fast_dump.bin", "rb") as f:
        # Simulate larger chunked read
        yield chunked_read(f, 500)


@pytest_asyncio.fixture
def raw_dump():
    """Yield the raw PCAP dump as an iterator of very large byte chunks."""
    with open(Path(__file__).parent / "data_dumps/raw_dump.bin", "rb") as f:
        # Simulate largest chunked read
        yield chunked_read(f, 200000)


@pytest_asyncio.fixture
def raw_dump_no_duration():
    """Yield the raw dump that lacks a gate-duration field, in very large chunks."""
    dump_path = Path(__file__).parent / "data_dumps/raw_dump_no_duration.bin"
    with open(dump_path, "rb") as f:
        # Simulate largest chunked read
        yield chunked_read(f, 200000)


@pytest.fixture
def overrun_dump():
    """Return the raw dump bytes with the end-of-capture marker rewritten.

    Replacing b"Disarmed" with b"Data overrun" makes the stream report an
    overrun instead of a clean disarm, so tests can exercise the error path.
    """
    with open(Path(__file__).parent / "data_dumps/raw_dump.bin", "rb") as f:
        # All in one go
        return f.read().replace(b"Disarmed", b"Data overrun")

Expand Down Expand Up @@ -116,6 +123,20 @@ def overrun_dump():
]


def assert_all_data_in_hdf_file(hdf_file, samples_name):
    """Assert every expected dataset in *hdf_file* holds the synthetic ramp data.

    *samples_name* is the dataset holding the per-frame sample count
    (its name differs between captures with and without gate duration).
    """

    def ramp(scale, offset=0):
        # 10000-point arithmetic ramp: scale, 2*scale, ... (+ offset), approx-compared
        return pytest.approx(np.arange(1, 10001) * scale + offset)

    expected = {
        "COUNTER1.OUT.Max": ramp(1),
        "COUNTER1.OUT.Mean": ramp(1),
        "COUNTER1.OUT.Min": ramp(1),
        "COUNTER2.OUT.Mean": ramp(2),
        "COUNTER3.OUT.Value": ramp(3),
        "PCAP.BITS2.Value": ramp(0),
        samples_name: ramp(0, offset=125),
        "PCAP.TS_START.Value": ramp(2e-6, offset=7.2e-8 - 2e-6),
    }
    for dataset, value in expected.items():
        assert hdf_file[f"/{dataset}"][:] == value


class Rows:
def __init__(self, *rows):
self.rows = rows
Expand Down
File renamed without changes.
Binary file added tests/data_dumps/raw_dump.bin
Binary file not shown.
File renamed without changes.
File renamed without changes.
49 changes: 25 additions & 24 deletions tests/test_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,18 +3,33 @@
from unittest.mock import patch

import h5py
import numpy as np
import pytest
from click.testing import CliRunner

from pandablocks import cli
from pandablocks.hdf import HDFDataOverrunException
from tests.conftest import STATE_RESPONSES, STATE_SAVEFILE, DummyServer


def test_writing_fast_hdf(dummy_server_in_thread: DummyServer, raw_dump, tmp_path):
from pandablocks.hdf import GATE_DURATION_FIELD, SAMPLES_FIELD, HDFDataOverrunException
from tests.conftest import (
STATE_RESPONSES,
STATE_SAVEFILE,
DummyServer,
assert_all_data_in_hdf_file,
)


@pytest.mark.parametrize("samples_name", [GATE_DURATION_FIELD, SAMPLES_FIELD])
def test_writing_fast_hdf(
samples_name,
dummy_server_in_thread: DummyServer,
raw_dump,
raw_dump_no_duration,
tmp_path,
):
dummy_server_in_thread.send.append("OK")
dummy_server_in_thread.data = raw_dump
if samples_name == GATE_DURATION_FIELD:
dummy_server_in_thread.data = raw_dump
else:
dummy_server_in_thread.data = raw_dump_no_duration

runner = CliRunner()
result = runner.invoke(
cli.cli, ["hdf", "localhost", str(tmp_path / "%d.h5"), "--arm"]
Expand All @@ -28,11 +43,11 @@ def test_writing_fast_hdf(dummy_server_in_thread: DummyServer, raw_dump, tmp_pat
"COUNTER2.OUT.Mean",
"COUNTER3.OUT.Value",
"PCAP.BITS2.Value",
"PCAP.SAMPLES.Value",
samples_name,
"PCAP.TS_START.Value",
]
assert dummy_server_in_thread.received == ["*PCAP.ARM="]
assert_all_data_in_hdf_file(hdf_file)
assert_all_data_in_hdf_file(hdf_file, samples_name)


def test_writing_overrun_hdf(
Expand All @@ -47,21 +62,7 @@ def test_writing_overrun_hdf(
assert result.exit_code == 1
assert isinstance(result.exception, HDFDataOverrunException)
hdf_file = h5py.File(tmp_path / "1.h5", "r")
assert_all_data_in_hdf_file(hdf_file)


def assert_all_data_in_hdf_file(hdf_file):
def multiples(num, offset=0):
return pytest.approx(np.arange(1, 10001) * num + offset)

assert hdf_file["/COUNTER1.OUT.Max"][:] == multiples(1)
assert hdf_file["/COUNTER1.OUT.Mean"][:] == multiples(1)
assert hdf_file["/COUNTER1.OUT.Min"][:] == multiples(1)
assert hdf_file["/COUNTER2.OUT.Mean"][:] == multiples(2)
assert hdf_file["/COUNTER3.OUT.Value"][:] == multiples(3)
assert hdf_file["/PCAP.BITS2.Value"][:] == multiples(0)
assert hdf_file["/PCAP.SAMPLES.Value"][:] == multiples(0, offset=125)
assert hdf_file["/PCAP.TS_START.Value"][:] == multiples(2e-6, offset=7.2e-8 - 2e-6)
assert_all_data_in_hdf_file(hdf_file, "PCAP.GATE_DURATION.Value")


class MockInput:
Expand Down

0 comments on commit 7316225

Please sign in to comment.