From 66d29efb58cf2bc02d0e67624f5416b9ab02b650 Mon Sep 17 00:00:00 2001 From: John Wilkie Date: Wed, 8 Jan 2025 11:19:40 +0000 Subject: [PATCH 1/4] Removed NifTI export pixdim scaling --- darwin/exporter/formats/nifti.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/darwin/exporter/formats/nifti.py b/darwin/exporter/formats/nifti.py index ce32ee78c..baea46801 100644 --- a/darwin/exporter/formats/nifti.py +++ b/darwin/exporter/formats/nifti.py @@ -549,7 +549,7 @@ def shift_polygon_coords( else: return [{"x": p["y"], "y": p["x"]} for p in polygon] else: - return [{"x": p["y"] // pixdim[1], "y": p["x"] // pixdim[0]} for p in polygon] + return [{"x": p["y"], "y": p["x"]} for p in polygon] def get_view_idx(frame_idx: int, groups: List) -> int: From 10d1b3c9624e61704ea1b660d9db86bfa6811c7e Mon Sep 17 00:00:00 2001 From: John Wilkie Date: Fri, 7 Feb 2025 15:01:32 +0000 Subject: [PATCH 2/4] Replace Iterator with Iterable for better flexibility --- darwin/dataset/remote_dataset.py | 24 ++++++++++++------------ darwin/dataset/remote_dataset_v2.py | 25 +++++++++++++------------ 2 files changed, 25 insertions(+), 24 deletions(-) diff --git a/darwin/dataset/remote_dataset.py b/darwin/dataset/remote_dataset.py index e0ded8fc0..8acbf8068 100644 --- a/darwin/dataset/remote_dataset.py +++ b/darwin/dataset/remote_dataset.py @@ -467,68 +467,68 @@ def fetch_remote_files( """ @abstractmethod - def archive(self, items: Iterator[DatasetItem]) -> None: + def archive(self, items: Iterable[DatasetItem]) -> None: """ Archives (soft-deletion) the given ``DatasetItem``\\s belonging to this ``RemoteDataset``. Parameters ---------- - items : Iterator[DatasetItem] + items : Iterable[DatasetItem] The ``DatasetItem``\\s to be archived. """ @abstractmethod - def restore_archived(self, items: Iterator[DatasetItem]) -> None: + def restore_archived(self, items: Iterable[DatasetItem]) -> None: """ Restores the archived ``DatasetItem``\\s that belong to this ``RemoteDataset``. Parameters ---------- - items : Iterator[DatasetItem] + items : Iterable[DatasetItem] The ``DatasetItem``\\s to be restored. """ @abstractmethod - def move_to_new(self, items: Iterator[DatasetItem]) -> None: + def move_to_new(self, items: Iterable[DatasetItem]) -> None: """ Changes the given ``DatasetItem``\\s status to ``new``. Parameters ---------- - items : Iterator[DatasetItem] + items : Iterable[DatasetItem] The ``DatasetItem``\\s whose status will change. """ @abstractmethod - def reset(self, items: Iterator[DatasetItem]) -> None: + def reset(self, items: Iterable[DatasetItem]) -> None: """ Resets the given ``DatasetItem``\\s. Parameters ---------- - items : Iterator[DatasetItem] + items : Iterable[DatasetItem] The ``DatasetItem``\\s to be reset. """ @abstractmethod - def complete(self, items: Iterator[DatasetItem]) -> None: + def complete(self, items: Iterable[DatasetItem]) -> None: """ Completes the given ``DatasetItem``\\s. Parameters ---------- - items : Iterator[DatasetItem] + items : Iterable[DatasetItem] The ``DatasetItem``\\s to be completed. """ @abstractmethod - def delete_items(self, items: Iterator[DatasetItem]) -> None: + def delete_items(self, items: Iterable[DatasetItem]) -> None: """ Deletes the given ``DatasetItem``\\s. Parameters ---------- - items : Iterator[DatasetItem] + items : Iterable[DatasetItem] The ``DatasetItem``\\s to be deleted. 
""" diff --git a/darwin/dataset/remote_dataset_v2.py b/darwin/dataset/remote_dataset_v2.py index 8956a8a3a..7819a671c 100644 --- a/darwin/dataset/remote_dataset_v2.py +++ b/darwin/dataset/remote_dataset_v2.py @@ -10,6 +10,7 @@ Sequence, Tuple, Union, + Iterable, ) import numpy as np from pydantic import ValidationError @@ -362,13 +363,13 @@ def fetch_remote_files( else: return - def archive(self, items: Iterator[DatasetItem]) -> None: + def archive(self, items: Iterable[DatasetItem]) -> None: """ Archives (soft-deletion) the given ``DatasetItem``\\s belonging to this ``RemoteDataset``. Parameters ---------- - items : Iterator[DatasetItem] + items : Iterable[DatasetItem] The ``DatasetItem``\\s to be archived. """ payload: Dict[str, Any] = { @@ -379,13 +380,13 @@ def archive(self, items: Iterator[DatasetItem]) -> None: } self.client.api_v2.archive_items(payload, team_slug=self.team) - def restore_archived(self, items: Iterator[DatasetItem]) -> None: + def restore_archived(self, items: Iterable[DatasetItem]) -> None: """ Restores the archived ``DatasetItem``\\s that belong to this ``RemoteDataset``. Parameters ---------- - items : Iterator[DatasetItem] + items : Iterable[DatasetItem] The ``DatasetItem``\\s to be restored. """ payload: Dict[str, Any] = { @@ -396,13 +397,13 @@ def restore_archived(self, items: Iterator[DatasetItem]) -> None: } self.client.api_v2.restore_archived_items(payload, team_slug=self.team) - def move_to_new(self, items: Iterator[DatasetItem]) -> None: + def move_to_new(self, items: Iterable[DatasetItem]) -> None: """ Changes the given ``DatasetItem``\\s status to ``new``. Parameters ---------- - items : Iterator[DatasetItem] + items : Iterable[DatasetItem] The ``DatasetItem``\\s whose status will change. """ @@ -417,25 +418,25 @@ def move_to_new(self, items: Iterator[DatasetItem]) -> None: team_slug=self.team, ) - def reset(self, items: Iterator[DatasetItem]) -> None: + def reset(self, items: Iterable[DatasetItem]) -> None: """ Deprecated Resets the given ``DatasetItem``\\s. Parameters ---------- - items : Iterator[DatasetItem] + items : Iterable[DatasetItem] The ``DatasetItem``\\s to be resetted. """ raise ValueError("Reset is deprecated for version 2 datasets") - def complete(self, items: Iterator[DatasetItem]) -> None: + def complete(self, items: Iterable[DatasetItem]) -> None: """ Completes the given ``DatasetItem``\\s. Parameters ---------- - items : Iterator[DatasetItem] + items : Iterable[DatasetItem] The ``DatasetItem``\\s to be completed. """ (workflow_id, stages) = self._fetch_stages("complete") @@ -449,13 +450,13 @@ def complete(self, items: Iterator[DatasetItem]) -> None: team_slug=self.team, ) - def delete_items(self, items: Iterator[DatasetItem]) -> None: + def delete_items(self, items: Iterable[DatasetItem]) -> None: """ Deletes the given ``DatasetItem``\\s. Parameters ---------- - items : Iterator[DatasetItem] + items : Iterable[DatasetItem] The ``DatasetItem``\\s to be deleted. 
""" self.client.api_v2.delete_items( From ef8061581cdeb16be96c17606272fa25994a6ad2 Mon Sep 17 00:00:00 2001 From: John Wilkie Date: Fri, 7 Feb 2025 15:08:46 +0000 Subject: [PATCH 3/4] Removed deprecated dataset.reset method --- darwin/cli_functions.py | 4 +--- darwin/dataset/remote_dataset.py | 11 ----------- darwin/dataset/remote_dataset_v2.py | 12 ------------ tests/darwin/cli_functions_test.py | 19 ------------------- 4 files changed, 1 insertion(+), 45 deletions(-) diff --git a/darwin/cli_functions.py b/darwin/cli_functions.py index c1954c60c..74d77e550 100644 --- a/darwin/cli_functions.py +++ b/darwin/cli_functions.py @@ -1059,7 +1059,7 @@ def set_file_status(dataset_slug: str, status: str, files: List[str]) -> None: files: List[str] Names of the files we want to update. """ - available_statuses = ["archived", "clear", "new", "restore-archived", "complete"] + available_statuses = ["archived", "new", "restore-archived", "complete"] if status not in available_statuses: _error( f"Invalid status '{status}', available statuses: {', '.join(available_statuses)}" @@ -1075,8 +1075,6 @@ def set_file_status(dataset_slug: str, status: str, files: List[str]) -> None: ) if status == "archived": dataset.archive(items) - elif status == "clear": - dataset.reset(items) elif status == "new": dataset.move_to_new(items) elif status == "restore-archived": diff --git a/darwin/dataset/remote_dataset.py b/darwin/dataset/remote_dataset.py index 8acbf8068..9f9752fbc 100644 --- a/darwin/dataset/remote_dataset.py +++ b/darwin/dataset/remote_dataset.py @@ -499,17 +499,6 @@ def move_to_new(self, items: Iterable[DatasetItem]) -> None: The ``DatasetItem``\\s whose status will change. """ - @abstractmethod - def reset(self, items: Iterable[DatasetItem]) -> None: - """ - Resets the given ``DatasetItem``\\s. - - Parameters - ---------- - items : Iterable[DatasetItem] - The ``DatasetItem``\\s to be reset. - """ - @abstractmethod def complete(self, items: Iterable[DatasetItem]) -> None: """ diff --git a/darwin/dataset/remote_dataset_v2.py b/darwin/dataset/remote_dataset_v2.py index 7819a671c..db8d55932 100644 --- a/darwin/dataset/remote_dataset_v2.py +++ b/darwin/dataset/remote_dataset_v2.py @@ -418,18 +418,6 @@ def move_to_new(self, items: Iterable[DatasetItem]) -> None: team_slug=self.team, ) - def reset(self, items: Iterable[DatasetItem]) -> None: - """ - Deprecated - Resets the given ``DatasetItem``\\s. - - Parameters - ---------- - items : Iterable[DatasetItem] - The ``DatasetItem``\\s to be resetted. - """ - raise ValueError("Reset is deprecated for version 2 datasets") - def complete(self, items: Iterable[DatasetItem]) -> None: """ Completes the given ``DatasetItem``\\s. 
diff --git a/tests/darwin/cli_functions_test.py b/tests/darwin/cli_functions_test.py index 182aa5904..6f783b78e 100644 --- a/tests/darwin/cli_functions_test.py +++ b/tests/darwin/cli_functions_test.py @@ -289,25 +289,6 @@ def test_calls_dataset_archive( ) mock.assert_called_once_with(fetch_remote_files_mock.return_value) - def test_calls_dataset_clear( - self, dataset_identifier: str, remote_dataset: RemoteDataset - ): - with patch.object( - Client, "get_remote_dataset", return_value=remote_dataset - ) as get_remote_dataset_mock: - with patch.object( - RemoteDatasetV2, "fetch_remote_files" - ) as fetch_remote_files_mock: - with patch.object(RemoteDatasetV2, "reset") as mock: - set_file_status(dataset_identifier, "clear", ["one.jpg", "two.jpg"]) - get_remote_dataset_mock.assert_called_once_with( - dataset_identifier=dataset_identifier - ) - fetch_remote_files_mock.assert_called_once_with( - {"item_names": "one.jpg,two.jpg"} - ) - mock.assert_called_once_with(fetch_remote_files_mock.return_value) - def test_calls_dataset_new( self, dataset_identifier: str, remote_dataset: RemoteDataset ): From a3c254ec0feaaab8b508f2b940eeb4dc98bb85b9 Mon Sep 17 00:00:00 2001 From: John Wilkie Date: Sat, 8 Feb 2025 22:15:08 +0000 Subject: [PATCH 4/4] Unit tests --- darwin/exporter/formats/nifti.py | 3 +- .../exporter/formats/export_nifti_test.py | 55 +++++++++++++++++++ 2 files changed, 56 insertions(+), 2 deletions(-) diff --git a/darwin/exporter/formats/nifti.py b/darwin/exporter/formats/nifti.py index baea46801..ef878b674 100644 --- a/darwin/exporter/formats/nifti.py +++ b/darwin/exporter/formats/nifti.py @@ -3,7 +3,6 @@ import re from dataclasses import dataclass from enum import Enum -from numbers import Number from pathlib import Path from typing import Dict, Iterable, List, Optional, Tuple, Union @@ -538,7 +537,7 @@ def _get_reoriented_nifti_image( def shift_polygon_coords( - polygon: List[Dict], pixdim: List[Number], legacy: bool = False + polygon: List[Dict], pixdim: List[float], legacy: bool = False ) -> List: if legacy: # Need to make it clear that we flip x/y because we need to take the transpose later. 
diff --git a/tests/darwin/exporter/formats/export_nifti_test.py b/tests/darwin/exporter/formats/export_nifti_test.py index 0bfd6d167..629f00ef2 100644 --- a/tests/darwin/exporter/formats/export_nifti_test.py +++ b/tests/darwin/exporter/formats/export_nifti_test.py @@ -169,3 +169,58 @@ def test_export_creates_file_for_polygons_and_masks( # Empty the directory for the next test for output_file in video_annotation_files[video_annotation_file]: (Path(tmpdir) / output_file).unlink() + + +def test_shift_polygon_coords_legacy(): + """Test the `shift_polygon_coords` function in legacy mode with different pixdim ratios.""" + # Case 1: pixdim[1] > pixdim[0] + polygon = [{"x": 10, "y": 20}, {"x": 30, "y": 40}, {"x": 50, "y": 60}] + pixdim = [1.0, 2.0, 1.0] + result = nifti.shift_polygon_coords(polygon, pixdim, legacy=True) + expected = [ + {"x": 20, "y": 20}, + {"x": 40, "y": 60}, + {"x": 60, "y": 100}, + ] + assert result == expected + + # Case 2: pixdim[1] < pixdim[0] + polygon = [{"x": 10, "y": 20}, {"x": 30, "y": 40}, {"x": 50, "y": 60}] + pixdim = [2.0, 1.0, 1.0] + result = nifti.shift_polygon_coords(polygon, pixdim, legacy=True) + expected = [ + {"x": 40, "y": 10}, + {"x": 80, "y": 30}, + {"x": 120, "y": 50}, + ] + assert result == expected + + # Case 3: pixdim[1] == pixdim[0] + polygon = [{"x": 10, "y": 20}, {"x": 30, "y": 40}, {"x": 50, "y": 60}] + pixdim = [1.0, 1.0, 1.0] + result = nifti.shift_polygon_coords(polygon, pixdim, legacy=True) + expected = [ + {"x": 20, "y": 10}, + {"x": 40, "y": 30}, + {"x": 60, "y": 50}, + ] + assert result == expected + + +def test_shift_polygon_coords_no_legacy(): + """Test the `shift_polygon_coords` function in non-legacy mode.""" + polygon = [{"x": 10, "y": 20}, {"x": 30, "y": 40}, {"x": 50, "y": 60}] + pixdim = [2.0, 1.0, 1.0] + result = nifti.shift_polygon_coords(polygon, pixdim, legacy=False) + expected = [{"x": 20, "y": 10}, {"x": 40, "y": 30}, {"x": 60, "y": 50}] + assert result == expected + + +def test_shift_polygon_coords_empty_polygon(): + """Test the `shift_polygon_coords` function with an empty polygon.""" + empty_polygon = [] + pixdim = [1.0, 1.0, 1.0] + result = nifti.shift_polygon_coords(empty_polygon, pixdim, legacy=True) + assert result == [] + result = nifti.shift_polygon_coords(empty_polygon, pixdim, legacy=False) + assert result == []
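
A minimal sketch (not part of the patch series) of the non-legacy behaviour change from PATCH 1/4, assuming the pre-patch floor-division shown in the nifti.py hunk above; the helper names below are illustrative only and do not exist in darwin-py:

# Illustrative only: mirrors the non-legacy branch of shift_polygon_coords
# before and after PATCH 1/4 ("Removed NifTI export pixdim scaling").
from typing import Dict, List


def shift_non_legacy_before_patch(
    polygon: List[Dict], pixdim: List[float]
) -> List[Dict]:
    # Pre-patch: coordinates were floor-divided by the voxel size (pixdim).
    return [{"x": p["y"] // pixdim[1], "y": p["x"] // pixdim[0]} for p in polygon]


def shift_non_legacy_after_patch(
    polygon: List[Dict], pixdim: List[float]
) -> List[Dict]:
    # Post-patch: x/y are swapped (for the later transpose step) with no pixdim
    # scaling, matching test_shift_polygon_coords_no_legacy above.
    return [{"x": p["y"], "y": p["x"]} for p in polygon]


if __name__ == "__main__":
    polygon = [{"x": 10, "y": 20}, {"x": 30, "y": 40}, {"x": 50, "y": 60}]
    pixdim = [2.0, 1.0, 1.0]
    print(shift_non_legacy_before_patch(polygon, pixdim))
    # [{'x': 20.0, 'y': 5.0}, {'x': 40.0, 'y': 15.0}, {'x': 60.0, 'y': 25.0}]
    print(shift_non_legacy_after_patch(polygon, pixdim))
    # [{'x': 20, 'y': 10}, {'x': 40, 'y': 30}, {'x': 60, 'y': 50}]

Separately, PATCH 2/4 widens the status-change signatures from Iterator[DatasetItem] to Iterable[DatasetItem], so callers can pass a plain list or tuple of DatasetItem (not only an iterator) to archive, restore_archived, move_to_new, complete, and delete_items.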