Skip to content

Commit

Permalink
Write tests
Browse files Browse the repository at this point in the history
  • Loading branch information
Cadair committed Feb 7, 2024
1 parent 4f457db commit 5c45731
Show file tree
Hide file tree
Showing 3 changed files with 188 additions and 10 deletions.
19 changes: 11 additions & 8 deletions dkist/net/attrs_values.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from dkist.net import attrs as dattrs
from dkist.net import conf as net_conf

__all__ = ['attempt_local_update', "get_search_attrs_values"]
__all__ = ["attempt_local_update", "get_search_attrs_values"]

# Map keys in dataset inventory to Fido attrs
INVENTORY_ATTR_MAP = {
Expand All @@ -30,13 +30,13 @@
}


def _get_file_age(path):
def _get_file_age(path: Path) -> dt.timedelta:
last_modified = dt.datetime.fromtimestamp(path.stat().st_mtime)
now = dt.datetime.now()
return (now - last_modified).total_seconds()
return (now - last_modified)


def _get_cached_json():
def _get_cached_json() -> list[Path, bool]:
"""
Return the path to a local copy of the JSON file, and if the file should be updated.
Expand All @@ -52,13 +52,13 @@ def _get_cached_json():
update_needed = False
if not user_file_exists:
update_needed = True
if not user_file_exists and _get_file_age(return_file) > net_conf.attr_max_age:
if user_file_exists and _get_file_age(return_file) > dt.timedelta(days=net_conf.attr_max_age):
update_needed = True

return return_file, update_needed


def _fetch_values_to_file(filepath, *, timeout=1):
def _fetch_values_to_file(filepath: Path, *, timeout: int = 1):
data = urllib.request.urlopen(
net_conf.dataset_endpoint + net_conf.dataset_search_values_path, timeout=timeout
)
Expand Down Expand Up @@ -92,7 +92,7 @@ def attempt_local_update(*, timeout: int = 1, user_file: Path = None, silence_er
user_file = Path(user_file)
user_file.parent.mkdir(exist_ok=True, parents=True)

log.info("Fetching updated search values for the DKIST client.")
log.info(f"Fetching updated search values for the DKIST client to {user_file}")

success = False
try:
Expand All @@ -107,15 +107,18 @@ def attempt_local_update(*, timeout: int = 1, user_file: Path = None, silence_er
if not silence_errors:
raise

return success

# Test that the file we just saved can be parsed as json
try:
with open(user_file, "r") as f:
json.load(f)
except Exception:
log.error("Downloaded file is not valid JSON.")
user_file.unlink(missing_ok=True)
if not silence_errors:
raise
return False
success = False

return success

Expand Down
13 changes: 11 additions & 2 deletions dkist/net/tests/strategies.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
Hypothesis strategies for testing the DKIST client.
"""
import datetime
from functools import cache

import hypothesis.strategies as st
from hypothesis import HealthCheck, assume, settings
Expand All @@ -17,8 +18,16 @@
from dkist.net.attr_walker import walker


@cache
def get_registered_values():
    """
    Return ``DKISTClient.register_values()``, memoised so the (potentially
    expensive) lookup happens at most once per session.
    """
    registered = DKISTClient.register_values()
    return registered


def _generate_from_register_values(attr_type):
    """
    Build a hypothesis strategy producing ``attr_type`` instances drawn from
    the values registered with the DKIST client.
    """
    registered = get_registered_values()[attr_type]
    # Each registered entry's first element is the attr value itself; the
    # remaining elements (presumably descriptions) are not needed here.
    values = [entry[0] for entry in registered]
    return st.builds(attr_type, st.sampled_from(values))
Expand Down Expand Up @@ -73,7 +82,7 @@ def _embargo_end(draw, time=Times(
return a.dkist.EmbargoEndTime(t1, t2)


# Register a strategy for every attr the DKIST client knows about, plus an
# explicit strategy for time ranges.
for attr_type in get_registered_values():
    st.register_type_strategy(attr_type, _generate_from_register_values)

st.register_type_strategy(a.Time, time_attr())
Expand Down
166 changes: 166 additions & 0 deletions dkist/net/tests/test_attrs_values.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,166 @@
import os
import json
import shutil
import logging
import datetime
import importlib

import platformdirs
import pytest

from sunpy.net import attrs as a

import dkist.data
from dkist.net.attrs_values import (_fetch_values_to_file, _get_cached_json,
attempt_local_update, get_search_attrs_values)

PACKAGE_FILE = importlib.resources.files(dkist.data) / "api_search_values.json"


@pytest.fixture
def tmp_homedir(tmp_path):
    """
    Point ``$HOME`` at a temporary directory for the duration of a test.

    Yields the temporary path and restores the original ``$HOME`` on teardown.
    """
    ori_home = os.environ.get("HOME")
    os.environ["HOME"] = tmp_path.as_posix()
    yield tmp_path
    # If HOME was not set before the test, assigning None back would raise a
    # TypeError — remove the variable instead of restoring it.
    if ori_home is None:
        os.environ.pop("HOME", None)
    else:
        os.environ["HOME"] = ori_home


@pytest.fixture
def user_file(tmp_homedir):
    """Path where the user's cached attrs-values JSON is expected to live (inside the fake home)."""
    return platformdirs.user_data_path("dkist") / "api_search_values.json"


@pytest.fixture
def values_in_home(tmp_homedir, user_file):
    """Install a copy of the packaged values JSON into the (temporary) user data dir."""
    # Sanity check that platformdirs is doing what we expect
    assert tmp_homedir in user_file.parents
    user_file.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy(PACKAGE_FILE, user_file)


def test_get_cached_json_no_local(tmp_homedir):
    """With no user copy on disk, the packaged file is returned and an update is required."""
    path, needs_update = _get_cached_json()
    assert path == PACKAGE_FILE
    # No local file at all -> an update is unconditionally required
    assert needs_update is True


def test_get_cached_json_local(tmp_homedir, values_in_home, user_file):
    """A freshly copied user file is preferred over the packaged one and needs no update."""
    path, needs_update = _get_cached_json()
    assert path == user_file
    # The copy was made moments ago, so it is well within the maximum age
    assert needs_update is False


def test_get_cached_json_local_out_of_date(tmp_homedir, values_in_home, user_file):
    """A user file older than the maximum age triggers an update."""
    stale = (datetime.datetime.now() - datetime.timedelta(days=10)).timestamp()
    os.utime(user_file, (stale, stale))
    path, needs_update = _get_cached_json()
    assert path == user_file
    # mtime was pushed back past the threshold, so a refresh is due
    assert needs_update is True


def test_get_cached_json_local_not_quite_out_of_date(tmp_homedir, values_in_home, user_file):
    """A user file younger than the maximum age does not trigger an update."""
    almost_stale = (datetime.datetime.now() - datetime.timedelta(days=6)).timestamp()
    os.utime(user_file, (almost_stale, almost_stale))
    path, needs_update = _get_cached_json()
    assert path == user_file
    # mtime is still inside the allowed age, so we don't need an update
    assert needs_update is False


@pytest.mark.remote_data
def test_fetch_values_to_file(tmp_path):
    """The downloader creates a file whose payload parses as the expected JSON shape."""
    json_file = tmp_path / "api_search_values.json"

    assert json_file.exists() is False
    _fetch_values_to_file(json_file)
    assert json_file.exists() is True

    # The payload must be loadable JSON with the coarse structure of the
    # real API response.
    with open(json_file) as fobj:
        payload = json.load(fobj)
    assert "parameterValues" in payload.keys()
    assert isinstance(payload["parameterValues"], list)


def _local_fetch_values(user_file, *, timeout):
    """Fake downloader: install the packaged values file instead of hitting the API."""
    user_file.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy(PACKAGE_FILE, user_file)


def test_attempt_local_update(mocker, tmp_path, caplog_dkist):
    """A successful fetch reports success and logs the destination path."""
    json_file = tmp_path / "api_search_values.json"
    mocker.patch("dkist.net.attrs_values._fetch_values_to_file",
                 new_callable=lambda: _local_fetch_values)
    success = attempt_local_update(user_file=json_file, silence_errors=False)
    assert success

    expected_record = ("dkist", logging.INFO,
                       f"Fetching updated search values for the DKIST client to {json_file}")
    assert caplog_dkist.record_tuples == [expected_record]


def raise_error(*args, **kwargs):
    """Stand-in download hook that always fails, regardless of arguments."""
    message = "This is a value error"
    raise ValueError(message)


def test_attempt_local_update_error_download(mocker, caplog_dkist, tmp_homedir, user_file):
    """A failing download is logged and, when errors are not silenced, re-raised."""
    mocker.patch("dkist.net.attrs_values._fetch_values_to_file",
                 side_effect=raise_error)
    success = attempt_local_update(silence_errors=True)
    assert not success

    assert caplog_dkist.record_tuples == [
        ("dkist", logging.INFO,
         f"Fetching updated search values for the DKIST client to {user_file}"),
        ("dkist", logging.ERROR, "Failed to download new attrs values."),
    ]

    # With errors un-silenced the underlying exception propagates
    with pytest.raises(ValueError):
        success = attempt_local_update(silence_errors=False)


def _definately_not_json(user_file, *, timeout):
with open(user_file, "w") as f:
f.write("This is not json")


def test_attempt_local_update_fail_invalid_download(mocker, tmp_path, caplog_dkist):
    """An un-parseable download is logged, discarded and reported as a failure."""
    json_file = tmp_path / "api_search_values.json"
    mocker.patch("dkist.net.attrs_values._fetch_values_to_file",
                 new_callable=lambda: _definately_not_json)
    success = attempt_local_update(user_file=json_file, silence_errors=True)
    assert not success

    assert caplog_dkist.record_tuples == [
        ("dkist", logging.INFO,
         f"Fetching updated search values for the DKIST client to {json_file}"),
        ("dkist", logging.ERROR, "Downloaded file is not valid JSON."),
    ]

    # Un-silenced, the JSON parse error propagates to the caller
    with pytest.raises(json.JSONDecodeError):
        success = attempt_local_update(user_file=json_file, silence_errors=False)


# Each row: (user_file fixture param, update_needed flag returned by the
# mocked _get_cached_json, allow_update argument, whether an update attempt
# is expected).
@pytest.mark.parametrize("user_file, update_needed, allow_update, should_update", [
    ("user_file", False, True, False),
    ("user_file", True, True, True),
    ("user_file", True, False, False),
], indirect=["user_file"])
def test_get_search_attrs_values(mocker, caplog_dkist, values_in_home, user_file, update_needed, allow_update, should_update):
    """attempt_local_update is invoked iff an update is both needed and allowed, and a dict of attrs is always returned."""
    # Force _get_cached_json to report the parametrised file/update state.
    mocker.patch("dkist.net.attrs_values._get_cached_json",
                 new_callable=lambda: lambda: (user_file, update_needed))

    alu_mock = mocker.patch("dkist.net.attrs_values.attempt_local_update")

    attr_values = get_search_attrs_values(allow_update=allow_update)

    if should_update:
        alu_mock.assert_called_once()
    else:
        alu_mock.assert_not_called()

    assert isinstance(attr_values, dict)
    # Test that some known attrs are in the result
    assert set((a.Instrument, a.dkist.HeaderVersion, a.dkist.WorkflowName)).issubset(attr_values.keys())

0 comments on commit 5c45731

Please sign in to comment.