From dda0859c45d752b0621716d674bca4c83cefcaac Mon Sep 17 00:00:00 2001
From: James Meakin <12661555+jmsmkn@users.noreply.github.com>
Date: Wed, 7 Aug 2024 19:00:58 +0200
Subject: [PATCH] Add DeprecationWarnings for item getting, setting and
 deleting (#162)

Closes #160
---
 gcapi/client.py                  |  36 ++++---
 gcapi/model_base.py              |  18 ++++
 gcapi/retries.py                 |   6 +-
 tests/async_integration_tests.py | 163 +++++++++++++++--------------
 tests/integration_tests.py       | 170 ++++++++++++++-----------------
 tests/test_models.py             |  39 +++++++
 6 files changed, 234 insertions(+), 198 deletions(-)

diff --git a/gcapi/client.py b/gcapi/client.py
index 14500af..5287f89 100644
--- a/gcapi/client.py
+++ b/gcapi/client.py
@@ -654,7 +654,7 @@ def run_external_job(self, *, algorithm: str, inputs: dict[str, Any]):
             The created job
         """
         alg = yield from self.__org_api_meta.algorithms.detail(slug=algorithm)
-        input_interfaces = {ci["slug"]: ci for ci in alg["inputs"]}
+        input_interfaces = {ci.slug: ci for ci in alg.inputs}
 
         for ci in input_interfaces:
             if (
@@ -663,16 +663,16 @@ def run_external_job(self, *, algorithm: str, inputs: dict[str, Any]):
             ):
                 raise ValueError(f"{ci} is not provided")
 
-        job = {"algorithm": alg["api_url"], "inputs": []}
+        job = {"algorithm": alg.api_url, "inputs": []}
         for input_title, value in inputs.items():
-            ci = input_interfaces.get(input_title, None)
+            ci = input_interfaces.get(input_title, None)  # type: ignore
             if not ci:
                 raise ValueError(
                     f"{input_title} is not an input interface for this algorithm"
                 )
 
-            i = {"interface": ci["slug"]}
-            if ci["super_kind"].lower() == "image":
+            i = {"interface": ci.slug}  # type: ignore
+            if ci.super_kind.lower() == "image":  # type: ignore
                 if isinstance(value, list):
                     raw_image_upload_session = (
                         yield from self._upload_image_files(files=value)
@@ -680,16 +680,16 @@ def run_external_job(self, *, algorithm: str, inputs: dict[str, Any]):
                     i["upload_session"] = raw_image_upload_session["api_url"]
                 elif isinstance(value, str):
                     i["image"] = value
-            elif ci["super_kind"].lower() == "file":
+            elif ci["super_kind"].lower() == "file":  # type: ignore
                 if len(value) != 1:
                     raise ValueError(
-                        f"Only a single file can be provided for {ci['title']}."
+                        f"Only a single file can be provided for {ci['title']}."  # type: ignore
                     )
                 upload = yield from self._upload_file(value)
                 i["user_upload"] = upload["api_url"]
             else:
                 i["value"] = value
-            job["inputs"].append(i)
+            job["inputs"].append(i)  # type: ignore
 
         return (yield from self.__org_api_meta.algorithm_jobs.create(**job))
 
@@ -744,14 +744,12 @@ def update_archive_item(
                     f"Please provide one from this list: "
                     f"https://grand-challenge.org/algorithms/interfaces/"
                 ) from e
-            i = {"interface": ci["slug"]}
+            i = {"interface": ci.slug}
 
             if not value:
-                raise ValueError(
-                    f"You need to provide a value for {ci['slug']}"
-                )
+                raise ValueError(f"You need to provide a value for {ci.slug}")
 
-            if ci["super_kind"].lower() == "image":
+            if ci.super_kind.lower() == "image":
                 if isinstance(value, list):
                     raw_image_upload_session = (
                         yield from self._upload_image_files(files=value)
@@ -759,11 +757,11 @@ def update_archive_item(
                     i["upload_session"] = raw_image_upload_session["api_url"]
                 elif isinstance(value, str):
                     i["image"] = value
-            elif ci["super_kind"].lower() == "file":
+            elif ci.super_kind.lower() == "file":
                 if len(value) != 1:
                     raise ValueError(
                         f"You can only upload one single file "
-                        f"to a {ci['slug']} interface."
+                        f"to a {ci.slug} interface."
                     )
                 upload = yield from self._upload_file(value)
                 i["user_upload"] = upload["api_url"]
@@ -773,7 +771,7 @@ def update_archive_item(
 
         return (
             yield from self.__org_api_meta.archive_items.partial_update(
-                pk=item["pk"], **civs
+                pk=item.pk, **civs
             )
         )
 
@@ -799,7 +797,7 @@ def _validate_display_set_values(self, values, interfaces):
             else:
                 interface = yield from self._fetch_interface(slug)
                 interfaces[slug] = interface
-            super_kind = interface["super_kind"].casefold()
+            super_kind = interface.super_kind.casefold()
             if super_kind != "value":
                 if not isinstance(value, list):
                     raise ValueError(
@@ -854,7 +852,7 @@ def add_cases_to_reader_study(
             The pks of the newly created display sets.
         """
         res = []
-        interfaces: dict[str, dict] = {}
+        interfaces: dict[str, gcapi.models.ComponentInterface] = {}
         for display_set in display_sets:
             new_interfaces = yield from self._validate_display_set_values(
                 display_set.items(), interfaces
@@ -869,7 +867,7 @@ def add_cases_to_reader_study(
             for slug, value in display_set.items():
                 interface = interfaces[slug]
                 data = {"interface": slug}
-                super_kind = interface["super_kind"].casefold()
+                super_kind = interface.super_kind.casefold()
                 if super_kind == "image":
                     yield from self._upload_image_files(
                         display_set=ds["pk"], interface=slug, files=value
diff --git a/gcapi/model_base.py b/gcapi/model_base.py
index 85fbe93..c67843f 100644
--- a/gcapi/model_base.py
+++ b/gcapi/model_base.py
@@ -1,9 +1,27 @@
+import warnings
+
+
 class BaseModel:
     def __getitem__(self, key):
+        self._warn_deprecated_access(key, "getting")
         return getattr(self, key)
 
     def __setitem__(self, key, value):
+        self._warn_deprecated_access(key, "setting")
         return setattr(self, key, value)
 
     def __delitem__(self, key):
+        self._warn_deprecated_access(key, "deleting")
         return delattr(self, key)
+
+    @staticmethod
+    def _warn_deprecated_access(key, action):
+        warnings.warn(
+            message=(
+                f'Using ["{key}"] for {action} attributes is deprecated '
+                "and will be removed in the next release. "
+                f'Suggestion: Replace ["{key}"] with .{key}'
+            ),
+            category=DeprecationWarning,
+            stacklevel=3,
+        )
diff --git a/gcapi/retries.py b/gcapi/retries.py
index 32c7ffc..a08700b 100644
--- a/gcapi/retries.py
+++ b/gcapi/retries.py
@@ -30,9 +30,9 @@ class SelectiveBackoffStrategy(BaseRetryStrategy):
     Each response code has its own backoff counter.
""" - def __init__(self, backoff_factor, maximum_number_of_retries): - self.backoff_factor: float = backoff_factor - self.maximum_number_of_retries: int = maximum_number_of_retries + def __init__(self, backoff_factor: float, maximum_number_of_retries: int): + self.backoff_factor = backoff_factor + self.maximum_number_of_retries = maximum_number_of_retries self.earlier_number_of_retries: dict[int, int] = dict() def __call__(self) -> BaseRetryStrategy: diff --git a/tests/async_integration_tests.py b/tests/async_integration_tests.py index 01a015a..bd22758 100644 --- a/tests/async_integration_tests.py +++ b/tests/async_integration_tests.py @@ -18,7 +18,7 @@ @async_recurse_call async def get_upload_session(client, upload_pk): upl = await client.raw_image_upload_sessions.detail(upload_pk) - if upl["status"] != "Succeeded": + if upl.status != "Succeeded": raise ValueError return upl @@ -134,27 +134,25 @@ async def test_upload_cases_to_archive( us = await get_upload_session(c, us["pk"]) # Check that only one image was created - assert len(us["image_set"]) == 1 - image = await get_file(c, us["image_set"][0]) + assert len(us.image_set) == 1 + image = await get_file(c, us.image_set[0]) # And that it was added to the archive archive = await c.archives.iterate_all( params={"slug": "archive"} ).__anext__() - archive_images = c.images.iterate_all( - params={"archive": archive["pk"]} - ) - assert image["pk"] in [im["pk"] async for im in archive_images] + archive_images = c.images.iterate_all(params={"archive": archive.pk}) + assert image["pk"] in [im.pk async for im in archive_images] archive_items = c.archive_items.iterate_all( - params={"archive": archive["pk"]} + params={"archive": archive.pk} ) # with the correct interface image_url_to_interface_slug = { - val["image"]: val["interface"]["slug"] + val.image: val.interface.slug async for item in archive_items - for val in item["values"] - if val["image"] + for val in item.values + if val.image } if interface: @@ -184,12 +182,12 @@ async def test_upload_cases_to_archive_item_without_interface( params={"slug": "archive"} ).__anext__() item = await c.archive_items.iterate_all( - params={"archive": archive["pk"]} + params={"archive": archive.pk} ).__anext__() with pytest.raises(ValueError) as e: _ = await c.upload_cases( - archive_item=item["pk"], + archive_item=item.pk, files=[ Path(__file__).parent / "testdata" / "image10x10x101.mha" ], @@ -211,7 +209,7 @@ async def test_upload_cases_to_archive_item_with_existing_interface( archive = await c.archives.iterate_all( params={"slug": "archive"} ).__anext__() - items = c.archive_items.iterate_all(params={"archive": archive["pk"]}) + items = c.archive_items.iterate_all(params={"archive": archive.pk}) old_items_list = [item async for item in items] # create new archive item @@ -222,11 +220,11 @@ async def test_upload_cases_to_archive_item_with_existing_interface( # retrieve existing archive item pk items_list = await get_archive_items( - c, archive["pk"], len(old_items_list) + c, archive.pk, len(old_items_list) ) us = await c.upload_cases( - archive_item=items_list[-1]["pk"], + archive_item=items_list[-1].pk, interface="generic-medical-image", files=[Path(__file__).parent / "testdata" / "image10x10x101.mha"], ) @@ -234,15 +232,15 @@ async def test_upload_cases_to_archive_item_with_existing_interface( us = await get_upload_session(c, us["pk"]) # Check that only one image was created - assert len(us["image_set"]) == 1 - image = await get_file(c, us["image_set"][0]) + assert len(us.image_set) == 1 + image = await 
get_file(c, us.image_set[0]) # And that it was added to the archive item - item = await c.archive_items.detail(pk=items_list[-1]["pk"]) - assert image["api_url"] in [civ["image"] for civ in item["values"]] + item = await c.archive_items.detail(pk=items_list[-1].pk) + assert image["api_url"] in [civ.image for civ in item.values] # with the correct interface im_to_interface = { - civ["image"]: civ["interface"]["slug"] for civ in item["values"] + civ.image: civ.interface.slug for civ in item.values } assert im_to_interface[image["api_url"]] == "generic-medical-image" @@ -257,7 +255,7 @@ async def test_upload_cases_to_archive_item_with_new_interface( archive = await c.archives.iterate_all( params={"slug": "archive"} ).__anext__() - items = c.archive_items.iterate_all(params={"archive": archive["pk"]}) + items = c.archive_items.iterate_all(params={"archive": archive.pk}) old_items_list = [item async for item in items] # create new archive item @@ -267,11 +265,11 @@ async def test_upload_cases_to_archive_item_with_new_interface( ) items_list = await get_archive_items( - c, archive["pk"], len(old_items_list) + c, archive.pk, len(old_items_list) ) us = await c.upload_cases( - archive_item=items_list[-1]["pk"], + archive_item=items_list[-1].pk, interface="generic-overlay", files=[Path(__file__).parent / "testdata" / "image10x10x101.mha"], ) @@ -279,15 +277,15 @@ async def test_upload_cases_to_archive_item_with_new_interface( us = await get_upload_session(c, us["pk"]) # Check that only one image was created - assert len(us["image_set"]) == 1 - image = await get_file(c, us["image_set"][0]) + assert len(us.image_set) == 1 + image = await get_file(c, us.image_set[0]) # And that it was added to the archive item - item = await c.archive_items.detail(pk=items_list[-1]["pk"]) - assert image["api_url"] in [civ["image"] for civ in item["values"]] + item = await c.archive_items.detail(pk=items_list[-1].pk) + assert image["api_url"] in [civ.image for civ in item.values] # with the correct interface im_to_interface = { - civ["image"]: civ["interface"]["slug"] for civ in item["values"] + civ.image: civ.interface.slug for civ in item.values } assert im_to_interface[image["api_url"]] == "generic-overlay" @@ -311,7 +309,7 @@ async def test_download_cases(local_grand_challenge, files, tmpdir): @async_recurse_call async def get_download(): return await c.images.download( - filename=tmpdir / "image", url=us["image_set"][0] + filename=tmpdir / "image", url=us.image_set[0] ) downloaded_files = await get_download() @@ -362,8 +360,9 @@ async def run_job(): assert job["status"] == "Queued" assert len(job["inputs"]) == 1 + job = await c.algorithm_jobs.detail(job["pk"]) - assert job["status"] in {"Queued", "Started"} + assert job.status in {"Queued", "Started"} @pytest.mark.parametrize( @@ -398,7 +397,7 @@ async def test_get_algorithm_by_slug(local_grand_challenge): by_slug = await c.algorithms.detail( slug="test-algorithm-evaluation-image-1" ) - by_pk = await c.algorithms.detail(pk=by_slug["pk"]) + by_pk = await c.algorithms.detail(pk=by_slug.pk) assert by_pk == by_slug @@ -409,7 +408,7 @@ async def test_get_reader_study_by_slug(local_grand_challenge): base_url=local_grand_challenge, verify=False, token=READERSTUDY_TOKEN ) as c: by_slug = await c.reader_studies.detail(slug="reader-study") - by_pk = await c.reader_studies.detail(pk=by_slug["pk"]) + by_pk = await c.reader_studies.detail(pk=by_slug.pk) assert by_pk == by_slug @@ -459,7 +458,7 @@ async def test_add_and_update_file_to_archive_item(local_grand_challenge): archive 
= await c.archives.iterate_all( params={"slug": "archive"} ).__anext__() - items = c.archive_items.iterate_all(params={"archive": archive["pk"]}) + items = c.archive_items.iterate_all(params={"archive": archive.pk}) old_items_list = [item async for item in items] # create new archive item @@ -470,14 +469,14 @@ async def test_add_and_update_file_to_archive_item(local_grand_challenge): # retrieve existing archive item pk items_list = await get_archive_items( - c, archive["pk"], len(old_items_list) + c, archive.pk, len(old_items_list) ) - old_civ_count = len(items_list[-1]["values"]) + old_civ_count = len(items_list[-1].values) with pytest.raises(ValueError) as e: _ = await c.update_archive_item( - archive_item_pk=items_list[-1]["pk"], + archive_item_pk=items_list[-1].pk, values={ "predictions-csv-file": [ Path(__file__).parent / "testdata" / f @@ -491,7 +490,7 @@ async def test_add_and_update_file_to_archive_item(local_grand_challenge): ) _ = await c.update_archive_item( - archive_item_pk=items_list[-1]["pk"], + archive_item_pk=items_list[-1].pk, values={ "predictions-csv-file": [ Path(__file__).parent / "testdata" / "test.csv" @@ -501,22 +500,22 @@ async def test_add_and_update_file_to_archive_item(local_grand_challenge): @async_recurse_call async def get_archive_detail(): - item = await c.archive_items.detail(items_list[-1]["pk"]) - if len(item["values"]) != old_civ_count + 1: + item = await c.archive_items.detail(items_list[-1].pk) + if len(item.values) != old_civ_count + 1: # csv interface value has not been added to item yet raise ValueError return item item_updated = await get_archive_detail() - csv_civ = item_updated["values"][-1] - assert csv_civ["interface"]["slug"] == "predictions-csv-file" - assert "test.csv" in csv_civ["file"] + csv_civ = item_updated.values[-1] + assert csv_civ.interface.slug == "predictions-csv-file" + assert "test.csv" in csv_civ.file - updated_civ_count = len(item_updated["values"]) + updated_civ_count = len(item_updated.values) # a new pdf upload will overwrite the old pdf interface value _ = await c.update_archive_item( - archive_item_pk=items_list[-1]["pk"], + archive_item_pk=items_list[-1].pk, values={ "predictions-csv-file": [ Path(__file__).parent / "testdata" / "test.csv" @@ -526,17 +525,17 @@ async def get_archive_detail(): @async_recurse_call async def get_updated_again_archive_item(): - item = await c.archive_items.detail(items_list[-1]["pk"]) - if csv_civ in item["values"]: + item = await c.archive_items.detail(items_list[-1].pk) + if csv_civ in item.values: # csv interface value has been added to item raise ValueError return item item_updated_again = await get_updated_again_archive_item() - assert len(item_updated_again["values"]) == updated_civ_count - new_csv_civ = item_updated_again["values"][-1] - assert new_csv_civ["interface"]["slug"] == "predictions-csv-file" + assert len(item_updated_again.values) == updated_civ_count + new_csv_civ = item_updated_again.values[-1] + assert new_csv_civ.interface.slug == "predictions-csv-file" @pytest.mark.anyio @@ -548,7 +547,7 @@ async def test_add_and_update_value_to_archive_item(local_grand_challenge): archive = await c.archives.iterate_all( params={"slug": "archive"} ).__anext__() - items = c.archive_items.iterate_all(params={"archive": archive["pk"]}) + items = c.archive_items.iterate_all(params={"archive": archive.pk}) old_items_list = [item async for item in items] # create new archive item @@ -559,39 +558,39 @@ async def test_add_and_update_value_to_archive_item(local_grand_challenge): # retrieve 
existing archive item pk items_list = await get_archive_items( - c, archive["pk"], len(old_items_list) + c, archive.pk, len(old_items_list) ) - old_civ_count = len(items_list[-1]["values"]) + old_civ_count = len(items_list[-1].values) _ = await c.update_archive_item( - archive_item_pk=items_list[-1]["pk"], + archive_item_pk=items_list[-1].pk, values={"results-json-file": {"foo": 0.5}}, ) @async_recurse_call async def get_archive_detail(): - item = await c.archive_items.detail(items_list[-1]["pk"]) - if len(item["values"]) != old_civ_count + 1: + item = await c.archive_items.detail(items_list[-1].pk) + if len(item.values) != old_civ_count + 1: # csv interface value has been added to item raise ValueError return item item_updated = await get_archive_detail() - json_civ = item_updated["values"][-1] - assert json_civ["interface"]["slug"] == "results-json-file" - assert json_civ["value"] == {"foo": 0.5} - updated_civ_count = len(item_updated["values"]) + json_civ = item_updated.values[-1] + assert json_civ.interface.slug == "results-json-file" + assert json_civ.value == {"foo": 0.5} + updated_civ_count = len(item_updated.values) _ = await c.update_archive_item( - archive_item_pk=items_list[-1]["pk"], + archive_item_pk=items_list[-1].pk, values={"results-json-file": {"foo": 0.8}}, ) @async_recurse_call async def get_updated_archive_detail(): - item = await c.archive_items.detail(items_list[-1]["pk"]) - if json_civ in item["values"]: + item = await c.archive_items.detail(items_list[-1].pk) + if json_civ in item.values: # results json interface value has been added to the item and # the previously added json civ is no longer attached # to this archive item @@ -600,10 +599,10 @@ async def get_updated_archive_detail(): item_updated_again = await get_updated_archive_detail() - assert len(item_updated_again["values"]) == updated_civ_count - new_json_civ = item_updated_again["values"][-1] - assert new_json_civ["interface"]["slug"] == "results-json-file" - assert new_json_civ["value"] == {"foo": 0.8} + assert len(item_updated_again.values) == updated_civ_count + new_json_civ = item_updated_again.values[-1] + assert new_json_civ.interface.slug == "results-json-file" + assert new_json_civ.value == {"foo": 0.8} @pytest.mark.anyio @@ -617,8 +616,8 @@ async def test_update_archive_item_with_non_existing_interface( archive = await c.archives.iterate_all( params={"slug": "archive"} ).__anext__() - items = c.archive_items.iterate_all(params={"archive": archive["pk"]}) - item_ids = [item["pk"] async for item in items] + items = c.archive_items.iterate_all(params={"archive": archive.pk}) + item_ids = [item.pk async for item in items] with pytest.raises(ValueError) as e: _ = await c.update_archive_item( archive_item_pk=item_ids[0], values={"new-interface": 5} @@ -635,8 +634,8 @@ async def test_update_archive_item_without_value(local_grand_challenge): archive = await c.archives.iterate_all( params={"slug": "archive"} ).__anext__() - items = c.archive_items.iterate_all(params={"archive": archive["pk"]}) - item_ids = [item["pk"] async for item in items] + items = c.archive_items.iterate_all(params={"archive": archive.pk}) + item_ids = [item.pk async for item in items] with pytest.raises(ValueError) as e: _ = await c.update_archive_item( @@ -738,22 +737,22 @@ async def test_add_cases_to_reader_study(display_sets, local_grand_challenge): params={"slug": "reader-study"} ).__anext__() all_display_sets = c.reader_studies.display_sets.iterate_all( - params={"reader_study": reader_study["pk"]} + params={"reader_study": 
reader_study.pk} ) - all_display_sets = {x["pk"]: x async for x in all_display_sets} + all_display_sets = {x.pk: x async for x in all_display_sets} assert all([x in all_display_sets for x in added_display_sets]) @async_recurse_call async def check_image(interface_value, expected_name): - image = await get_file(c, interface_value["image"]) + image = await get_file(c, interface_value.image) assert image["name"] == expected_name def check_annotation(interface_value, expected): - assert interface_value["value"] == expected + assert interface_value.value == expected @async_recurse_call async def check_file(interface_value, expected_name): - response = await get_file(c, interface_value["file"]) + response = await get_file(c, interface_value.file) assert response.url.path.endswith(expected_name) # Check for each display set that the values are added @@ -762,25 +761,23 @@ async def check_file(interface_value, expected_name): ): ds = await c.reader_studies.display_sets.detail(pk=display_set_pk) # may take a while for the images to be added - while len(ds["values"]) != len(display_set): + while len(ds.values) != len(display_set): ds = await c.reader_studies.display_sets.detail( pk=display_set_pk ) for interface, value in display_set.items(): civ = [ - civ - for civ in ds["values"] - if civ["interface"]["slug"] == interface + civ for civ in ds.values if civ.interface.slug == interface ][0] - if civ["interface"]["super_kind"] == "Image": + if civ.interface.super_kind == "Image": file_name = value[0].name await check_image(civ, file_name) - elif civ["interface"]["kind"] == "2D bounding box": + elif civ.interface.kind == "2D bounding box": check_annotation(civ, value) pass - elif civ["interface"]["super_kind"] == "File": + elif civ.interface.super_kind == "File": file_name = value[0].name await check_file(civ, file_name) diff --git a/tests/integration_tests.py b/tests/integration_tests.py index bc824b7..2665a66 100644 --- a/tests/integration_tests.py +++ b/tests/integration_tests.py @@ -18,7 +18,7 @@ @recurse_call def get_upload_session(client, upload_pk): upl = client.raw_image_upload_sessions.detail(upload_pk) - if upl["status"] != "Succeeded": + if upl.status != "Succeeded": raise ValueError return upl @@ -126,23 +126,21 @@ def test_upload_cases_to_archive(local_grand_challenge, files, interface): us = get_upload_session(c, us["pk"]) # Check that only one image was created - assert len(us["image_set"]) == 1 + assert len(us.image_set) == 1 - image = get_file(c, us["image_set"][0]) + image = get_file(c, us.image_set[0]) # And that it was added to the archive archive = next(c.archives.iterate_all(params={"slug": "archive"})) - archive_images = c.images.iterate_all(params={"archive": archive["pk"]}) - assert image["pk"] in [im["pk"] for im in archive_images] - archive_items = c.archive_items.iterate_all( - params={"archive": archive["pk"]} - ) + archive_images = c.images.iterate_all(params={"archive": archive.pk}) + assert image["pk"] in [im.pk for im in archive_images] + archive_items = c.archive_items.iterate_all(params={"archive": archive.pk}) # with the correct interface image_url_to_interface_slug_dict = { - value["image"]: value["interface"]["slug"] + value.image: value.interface.slug for item in archive_items - for value in item["values"] - if value["image"] + for value in item.values + if value.image } if interface: assert image_url_to_interface_slug_dict[image["api_url"]] == interface @@ -163,12 +161,12 @@ def test_upload_cases_to_archive_item_without_interface(local_grand_challenge): ) # 
retrieve existing archive item pk archive = next(c.archives.iterate_all(params={"slug": "archive"})) - item = next(c.archive_items.iterate_all(params={"archive": archive["pk"]})) + item = next(c.archive_items.iterate_all(params={"archive": archive.pk})) # try upload without providing interface with pytest.raises(ValueError) as e: _ = c.upload_cases( - archive_item=item["pk"], + archive_item=item.pk, files=[Path(__file__).parent / "testdata" / "image10x10x101.mha"], ) assert "You need to define an interface for archive item uploads" in str(e) @@ -194,10 +192,10 @@ def test_upload_cases_to_archive_item_with_existing_interface( ) # retrieve existing archive item pk archive = next(c.archives.iterate_all(params={"slug": "archive"})) - item = next(c.archive_items.iterate_all(params={"archive": archive["pk"]})) + item = next(c.archive_items.iterate_all(params={"archive": archive.pk})) us = c.upload_cases( - archive_item=item["pk"], + archive_item=item.pk, interface="generic-medical-image", files=[Path(__file__).parent / "testdata" / "image10x10x101.mha"], ) @@ -205,19 +203,15 @@ def test_upload_cases_to_archive_item_with_existing_interface( us = get_upload_session(c, us["pk"]) # Check that only one image was created - assert len(us["image_set"]) == 1 + assert len(us.image_set) == 1 - image = get_file(c, us["image_set"][0]) + image = get_file(c, us.image_set[0]) # And that it was added to the archive item - item = c.archive_items.detail(pk=item["pk"]) - assert image["api_url"] in [ - civ["image"] for civ in item["values"] if civ["image"] - ] + item = c.archive_items.detail(pk=item.pk) + assert image["api_url"] in [civ.image for civ in item.values if civ.image] # with the correct interface - im_to_interface = { - civ["image"]: civ["interface"]["slug"] for civ in item["values"] - } + im_to_interface = {civ.image: civ.interface.slug for civ in item.values} assert im_to_interface[image["api_url"]] == "generic-medical-image" @@ -229,29 +223,25 @@ def test_upload_cases_to_archive_item_with_new_interface( ) # retrieve existing archive item pk archive = next(c.archives.iterate_all(params={"slug": "archive"})) - item = next(c.archive_items.iterate_all(params={"archive": archive["pk"]})) + item = next(c.archive_items.iterate_all(params={"archive": archive.pk})) us = c.upload_cases( - archive_item=item["pk"], + archive_item=item.pk, interface="generic-overlay", files=[Path(__file__).parent / "testdata" / "image10x10x101.mha"], ) us = get_upload_session(c, us["pk"]) # Check that only one image was created - assert len(us["image_set"]) == 1 + assert len(us.image_set) == 1 - image = get_file(c, us["image_set"][0]) + image = get_file(c, us.image_set[0]) # And that it was added to the archive item - item = c.archive_items.detail(pk=item["pk"]) - assert image["api_url"] in [ - civ["image"] for civ in item["values"] if civ["image"] - ] + item = c.archive_items.detail(pk=item.pk) + assert image["api_url"] in [civ.image for civ in item.values if civ.image] # with the correct interface - im_to_interface = { - civ["image"]: civ["interface"]["slug"] for civ in item["values"] - } + im_to_interface = {civ.image: civ.interface.slug for civ in item.values} assert im_to_interface[image["api_url"]] == "generic-overlay" @@ -274,7 +264,7 @@ def test_download_cases(local_grand_challenge, files, tmpdir): @recurse_call def get_download(): return c.images.download( - filename=tmpdir / "image", url=us["image_set"][0] + filename=tmpdir / "image", url=us.image_set[0] ) downloaded_files = get_download() @@ -329,7 +319,7 @@ def 
run_job(): assert job["status"] == "Queued" assert len(job["inputs"]) == 1 job = c.algorithm_jobs.detail(job["pk"]) - assert job["status"] in {"Queued", "Started"} + assert job.status in {"Queued", "Started"} def test_get_algorithm_by_slug(local_grand_challenge): @@ -340,7 +330,7 @@ def test_get_algorithm_by_slug(local_grand_challenge): ) by_slug = c.algorithms.detail(slug="test-algorithm-evaluation-image-1") - by_pk = c.algorithms.detail(pk=by_slug["pk"]) + by_pk = c.algorithms.detail(pk=by_slug.pk) assert by_pk == by_slug @@ -351,7 +341,7 @@ def test_get_reader_study_by_slug(local_grand_challenge): ) by_slug = c.reader_studies.detail(slug="reader-study") - by_pk = c.reader_studies.detail(pk=by_slug["pk"]) + by_pk = c.reader_studies.detail(pk=by_slug.pk) assert by_pk == by_slug @@ -393,7 +383,7 @@ def test_add_and_update_file_to_archive_item(local_grand_challenge): # check number of archive items archive = next(c.archives.iterate_all(params={"slug": "archive"})) old_items_list = list( - c.archive_items.iterate_all(params={"archive": archive["pk"]}) + c.archive_items.iterate_all(params={"archive": archive.pk}) ) # create new archive item @@ -403,13 +393,13 @@ def test_add_and_update_file_to_archive_item(local_grand_challenge): ) # retrieve existing archive item pk - items = get_archive_items(c, archive["pk"], len(old_items_list)) + items = get_archive_items(c, archive.pk, len(old_items_list)) - old_civ_count = len(items[-1]["values"]) + old_civ_count = len(items[-1].values) with pytest.raises(ValueError) as e: _ = c.update_archive_item( - archive_item_pk=items[-1]["pk"], + archive_item_pk=items[-1].pk, values={ "predictions-csv-file": [ Path(__file__).parent / "testdata" / f @@ -423,7 +413,7 @@ def test_add_and_update_file_to_archive_item(local_grand_challenge): ) _ = c.update_archive_item( - archive_item_pk=items[-1]["pk"], + archive_item_pk=items[-1].pk, values={ "predictions-csv-file": [ Path(__file__).parent / "testdata" / "test.csv" @@ -433,22 +423,22 @@ def test_add_and_update_file_to_archive_item(local_grand_challenge): @recurse_call def get_updated_archive_item(): - archive_item = c.archive_items.detail(items[-1]["pk"]) - if len(archive_item["values"]) != old_civ_count + 1: + archive_item = c.archive_items.detail(items[-1].pk) + if len(archive_item.values) != old_civ_count + 1: # item has not been added raise ValueError return archive_item item_updated = get_updated_archive_item() - csv_civ = item_updated["values"][-1] - assert csv_civ["interface"]["slug"] == "predictions-csv-file" - assert "test.csv" in csv_civ["file"] + csv_civ = item_updated.values[-1] + assert csv_civ.interface.slug == "predictions-csv-file" + assert "test.csv" in csv_civ.file - updated_civ_count = len(item_updated["values"]) + updated_civ_count = len(item_updated.values) # a new pdf upload will overwrite the old pdf interface value _ = c.update_archive_item( - archive_item_pk=items[-1]["pk"], + archive_item_pk=items[-1].pk, values={ "predictions-csv-file": [ Path(__file__).parent / "testdata" / "test.csv" @@ -458,17 +448,17 @@ def get_updated_archive_item(): @recurse_call def get_updated_again_archive_item(): - archive_item = c.archive_items.detail(items[-1]["pk"]) - if csv_civ in archive_item["values"]: + archive_item = c.archive_items.detail(items[-1].pk) + if csv_civ in archive_item.values: # item has not been added raise ValueError return archive_item item_updated_again = get_updated_again_archive_item() - assert len(item_updated_again["values"]) == updated_civ_count - new_csv_civ = 
item_updated_again["values"][-1] - assert new_csv_civ["interface"]["slug"] == "predictions-csv-file" + assert len(item_updated_again.values) == updated_civ_count + new_csv_civ = item_updated_again.values[-1] + assert new_csv_civ.interface.slug == "predictions-csv-file" def test_add_and_update_value_to_archive_item(local_grand_challenge): @@ -478,7 +468,7 @@ def test_add_and_update_value_to_archive_item(local_grand_challenge): # check number of archive items archive = next(c.archives.iterate_all(params={"slug": "archive"})) old_items_list = list( - c.archive_items.iterate_all(params={"archive": archive["pk"]}) + c.archive_items.iterate_all(params={"archive": archive.pk}) ) # create new archive item @@ -488,48 +478,48 @@ def test_add_and_update_value_to_archive_item(local_grand_challenge): ) # retrieve existing archive item pk - items = get_archive_items(c, archive["pk"], len(old_items_list)) - old_civ_count = len(items[-1]["values"]) + items = get_archive_items(c, archive.pk, len(old_items_list)) + old_civ_count = len(items[-1].values) _ = c.update_archive_item( - archive_item_pk=items[-1]["pk"], + archive_item_pk=items[-1].pk, values={"results-json-file": {"foo": 0.5}}, ) @recurse_call def get_archive_item_detail(): - i = c.archive_items.detail(items[-1]["pk"]) - if len(i["values"]) != old_civ_count + 1: + i = c.archive_items.detail(items[-1].pk) + if len(i.values) != old_civ_count + 1: # item has been added raise ValueError return i item_updated = get_archive_item_detail() - json_civ = item_updated["values"][-1] - assert json_civ["interface"]["slug"] == "results-json-file" - assert json_civ["value"] == {"foo": 0.5} - updated_civ_count = len(item_updated["values"]) + json_civ = item_updated.values[-1] + assert json_civ.interface.slug == "results-json-file" + assert json_civ.value == {"foo": 0.5} + updated_civ_count = len(item_updated.values) _ = c.update_archive_item( - archive_item_pk=items[-1]["pk"], + archive_item_pk=items[-1].pk, values={"results-json-file": {"foo": 0.8}}, ) @recurse_call def get_updated_archive_item_detail(): - i = c.archive_items.detail(items[-1]["pk"]) - if json_civ in i["values"]: + i = c.archive_items.detail(items[-1].pk) + if json_civ in i.values: # item has not been added yet raise ValueError return i item_updated_again = get_updated_archive_item_detail() - assert len(item_updated_again["values"]) == updated_civ_count - new_json_civ = item_updated_again["values"][-1] - assert new_json_civ["interface"]["slug"] == "results-json-file" - assert new_json_civ["value"] == {"foo": 0.8} + assert len(item_updated_again.values) == updated_civ_count + new_json_civ = item_updated_again.values[-1] + assert new_json_civ.interface.slug == "results-json-file" + assert new_json_civ.value == {"foo": 0.8} def test_update_archive_item_with_non_existing_interface( @@ -541,12 +531,10 @@ def test_update_archive_item_with_non_existing_interface( # retrieve existing archive item pk archive = next(c.archives.iterate_all(params={"slug": "archive"})) - items = list( - c.archive_items.iterate_all(params={"archive": archive["pk"]}) - ) + items = list(c.archive_items.iterate_all(params={"archive": archive.pk})) with pytest.raises(ValueError) as e: _ = c.update_archive_item( - archive_item_pk=items[0]["pk"], values={"new-interface": 5} + archive_item_pk=items[0].pk, values={"new-interface": 5} ) assert "new-interface is not an existing interface" in str(e) @@ -558,13 +546,11 @@ def test_update_archive_item_without_value(local_grand_challenge): # retrieve existing archive item pk archive = 
next(c.archives.iterate_all(params={"slug": "archive"})) - items = list( - c.archive_items.iterate_all(params={"archive": archive["pk"]}) - ) + items = list(c.archive_items.iterate_all(params={"archive": archive.pk})) with pytest.raises(ValueError) as e: _ = c.update_archive_item( - archive_item_pk=items[0]["pk"], + archive_item_pk=items[0].pk, values={"generic-medical-image": None}, ) assert "You need to provide a value for generic-medical-image" in str(e) @@ -661,47 +647,45 @@ def test_add_cases_to_reader_study(display_sets, local_grand_challenge): ) all_display_sets = list( c.reader_studies.display_sets.iterate_all( - params={"reader_study": reader_study["pk"]} + params={"reader_study": reader_study.pk} ) ) assert all( - [x in [y["pk"] for y in all_display_sets] for x in added_display_sets] + [x in [y.pk for y in all_display_sets] for x in added_display_sets] ) @recurse_call def check_image(interface_value, expected_name): - image = get_file(c, interface_value["image"]) + image = get_file(c, interface_value.image) assert image["name"] == expected_name def check_annotation(interface_value, expected): - assert interface_value["value"] == expected + assert interface_value.value == expected @recurse_call def check_file(interface_value, expected_name): - response = get_file(c, interface_value["file"]) + response = get_file(c, interface_value.file) assert response.url.path.endswith(expected_name) for display_set_pk, display_set in zip(added_display_sets, display_sets): ds = c.reader_studies.display_sets.detail(pk=display_set_pk) # may take a while for the images to be added - while len(ds["values"]) != len(display_set): + while len(ds.values) != len(display_set): ds = c.reader_studies.display_sets.detail(pk=display_set_pk) for interface, value in display_set.items(): civ = [ - civ - for civ in ds["values"] - if civ["interface"]["slug"] == interface + civ for civ in ds.values if civ.interface.slug == interface ][0] - if civ["interface"]["super_kind"] == "Image": + if civ.interface.super_kind == "Image": file_name = value[0].name check_image(civ, file_name) - elif civ["interface"]["kind"] == "2D bounding box": + elif civ.interface.kind == "2D bounding box": check_annotation(civ, value) pass - elif civ["interface"]["super_kind"] == "File": + elif civ.interface.super_kind == "File": file_name = value[0].name check_file(civ, file_name) diff --git a/tests/test_models.py b/tests/test_models.py index 788d06d..d38f51e 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -16,6 +16,7 @@ } +@pytest.mark.filterwarnings("ignore::DeprecationWarning") def test_extra_definitions_allowed(): a = Algorithm(**DEFAULT_ALGORITHM_ARGS, extra="extra") @@ -25,6 +26,7 @@ def test_extra_definitions_allowed(): a.extra +@pytest.mark.filterwarnings("ignore::DeprecationWarning") def test_getitem(): a = Algorithm(**DEFAULT_ALGORITHM_ARGS) @@ -32,6 +34,7 @@ def test_getitem(): assert a.pk == "1234" +@pytest.mark.filterwarnings("ignore::DeprecationWarning") def test_setattribute(): a = Algorithm(**DEFAULT_ALGORITHM_ARGS) @@ -41,6 +44,7 @@ def test_setattribute(): assert a.pk == "5678" +@pytest.mark.filterwarnings("ignore::DeprecationWarning") def test_setitem(): a = Algorithm(**DEFAULT_ALGORITHM_ARGS) @@ -50,6 +54,7 @@ def test_setitem(): assert a.pk == "5678" +@pytest.mark.filterwarnings("ignore::DeprecationWarning") def test_delattr(): a = Algorithm(**DEFAULT_ALGORITHM_ARGS) @@ -62,6 +67,7 @@ def test_delattr(): assert a.pk == "5678" +@pytest.mark.filterwarnings("ignore::DeprecationWarning") def test_delitem(): a 
= Algorithm(**DEFAULT_ALGORITHM_ARGS) @@ -72,3 +78,36 @@ def test_delitem(): with pytest.raises(AttributeError): assert a.pk == "5678" + + +def test_deprecation_warning_for_getitem(): + a = Algorithm(**DEFAULT_ALGORITHM_ARGS) + + with pytest.warns(DeprecationWarning) as checker: + _ = a["pk"] + + assert 'Using ["pk"] for getting attributes is deprecated' in str( + checker.list[0].message + ) + + +def test_deprecation_warning_for_setitem(): + a = Algorithm(**DEFAULT_ALGORITHM_ARGS) + + with pytest.warns(DeprecationWarning) as checker: + a["pk"] = "5678" + + assert 'Using ["pk"] for setting attributes is deprecated' in str( + checker.list[0].message + ) + + +def test_deprecation_warning_for_delitem(): + a = Algorithm(**DEFAULT_ALGORITHM_ARGS) + + with pytest.warns(DeprecationWarning) as checker: + del a["pk"] + + assert 'Using ["pk"] for deleting attributes is deprecated' in str( + checker.list[0].message + )
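
Illustrative sketch (not part of the patch): after this change, dict-style item access on any gcapi model derived from BaseModel still works but emits a DeprecationWarning that names the attribute-style replacement. The snippet assumes Algorithm is importable from gcapi.models and that DEFAULT_ALGORITHM_ARGS stands in for whatever keyword arguments the model needs, as in tests/test_models.py above.

    import warnings

    from gcapi.models import Algorithm

    algorithm = Algorithm(**DEFAULT_ALGORITHM_ARGS)  # assumed kwargs, as in tests/test_models.py

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        pk = algorithm["pk"]  # deprecated item access: still returns the value, but warns

    assert issubclass(caught[0].category, DeprecationWarning)
    assert 'Replace ["pk"] with .pk' in str(caught[0].message)

    pk = algorithm.pk  # preferred attribute access, no warning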
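For downstream code that needs time to migrate, the warning can be silenced the same way the patch does for its own legacy tests (pytest.mark.filterwarnings("ignore::DeprecationWarning")). A narrower, message-based filter is sketched below as one assumed option, since the warning is raised with stacklevel=3 and is therefore attributed to the calling module; the regex is an assumption matching the message format introduced in gcapi/model_base.py.

    import warnings

    # Ignore only the item-access deprecation emitted by gcapi's BaseModel,
    # rather than every DeprecationWarning in the process.
    warnings.filterwarnings(
        "ignore",
        message=r'Using \[.+\] for (getting|setting|deleting) attributes is deprecated',
        category=DeprecationWarning,
    )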