From 679802bdc37ab2bf66122b074c38bfb7c9c63f8b Mon Sep 17 00:00:00 2001
From: Benoit Crickboom
Date: Fri, 23 Aug 2024 10:26:59 +0200
Subject: [PATCH] improve OrthancCleaner to handle more studies than LimitFindResults

---
 orthanc_tools/orthanc_cleaner.py      | 38 ++++++++++++++++++---------
 tests/docker-setup/docker-compose.yml |  1 +
 tests/test_3_orthancs.py              | 30 +++++++++++++++++++++
 3 files changed, 56 insertions(+), 13 deletions(-)

diff --git a/orthanc_tools/orthanc_cleaner.py b/orthanc_tools/orthanc_cleaner.py
index acd291a..a88d853 100644
--- a/orthanc_tools/orthanc_cleaner.py
+++ b/orthanc_tools/orthanc_cleaner.py
@@ -65,9 +65,29 @@ def clean(self):
         for rule in labels_rules:
             logger.info(f"{rule.label_name} - {rule.retention_duration} weeks")
 
-        studies_to_delete = []
+        # Get the list of studies to delete
+        studies_to_delete = self.get_studies_to_delete(labels_rules=labels_rules)
+
+        while len(studies_to_delete) > 0:
+
+            # Delete the found studies
+            for s in studies_to_delete:
+                try:
+                    self._api_client.studies.delete(s.orthanc_id)
+                    logger.info(f"Deleting study {s.dicom_id} from {s.main_dicom_tags.get('StudyDate')}...")
+                except Exception as ex:
+                    logger.error(f"ERROR: {str(ex)}")
+
+            # Get the list of studies to delete again (the previous query may have been capped by 'LimitFindResults')
+            studies_to_delete = self.get_studies_to_delete(labels_rules=labels_rules)
+
+        logger.info("Clean up done!")
 
-        # Query Orthanc and delete the studies for each label rule
+    def get_studies_to_delete(self, labels_rules: List[LabelRule]) -> List:
+        '''
+        Query Orthanc to get the list of studies to delete (depending on the date and the label)
+        '''
+        studies_to_delete = []
         for label_rule in labels_rules:
 
             # Let's compute the date
@@ -79,21 +99,13 @@ def clean(self):
                 labels=[label_rule.label_name]
             )
 
-            # Filter the old studies which were recently stored in Orthanc
+            # Filter out the old studies which were recently stored in Orthanc
             for s in studies_to_delete_by_study_date:
                 if limit_date > s.last_update.date():
                     studies_to_delete.append(s)
 
-            # Delete the found studies
-            for s in studies_to_delete:
-                try:
-                    self._api_client.studies.delete(s.orthanc_id)
-                    logger.info(f"Deleting study {s.dicom_id} from {s.main_dicom_tags.get('StudyDate')}...")
-                except Exception as ex:
-                    logger.error(f"ERROR: {str(ex)}")
-
-        logger.info("Clean up done!")
-
+        logger.info(f"Found {len(studies_to_delete)} studies to delete...")
+        return studies_to_delete
 
     def compute_limit_date(self, number_of_weeks) -> datetime.date:
         limit_date = datetime.date.today() - datetime.timedelta(weeks=number_of_weeks)
diff --git a/tests/docker-setup/docker-compose.yml b/tests/docker-setup/docker-compose.yml
index 32868ca..63036e2 100644
--- a/tests/docker-setup/docker-compose.yml
+++ b/tests/docker-setup/docker-compose.yml
@@ -6,6 +6,7 @@ services:
     environment:
       VERBOSE_STARTUP: "true"
       VERBOSE_ENABLED: "true"
+      ORTHANC__LIMIT_FIND_RESULTS: 100
      TRANSFERS_PLUGIN_ENABLED: "true"
       ORTHANC__STABLE_AGE: "1"
       ORTHANC__KEEP_ALIVE_TIMEOUT: "5"
diff --git a/tests/test_3_orthancs.py b/tests/test_3_orthancs.py
index 225cbc9..b622780 100644
--- a/tests/test_3_orthancs.py
+++ b/tests/test_3_orthancs.py
@@ -693,6 +693,36 @@ def test_orthanc_cleaner_with_future_studies(self):
         self.assertEqual(len(self.oa.studies.get_all_ids()), 1)
         self.assertNotEqual(old_study_id, self.oa.studies.get_all_ids()[0])
 
+    def test_orthanc_cleaner_with_more_than_100_studies(self):
+        self.oa.delete_all_content()
+
+        # populate Orthanc with 120 studies (study dates a few weeks in the future)
+        populator = OrthancTestDbPopulator(
+            api_client=self.oa,
+            studies_count=120,
+            series_count=1,
+            instances_count=1,
+            from_study_date=datetime.date.today() + datetime.timedelta(weeks=3),
+            to_study_date=datetime.date.today() + datetime.timedelta(weeks=4)
+        )
+        populator.execute()
+
+        # apply a label to all the studies
+        studies_ids = self.oa.studies.get_all_ids()
+        for id in studies_ids:
+            self.oa.studies.add_label(id, "LABEL3")
+
+        # then, remove the label from a single study
+        self.oa.studies.delete_label(studies_ids[0], "LABEL3")
+
+        # execute the cleaner
+        cleaner = OrthancCleaner(api_client=self.oa, execution_time=None, labels_file_path=here / "stimuli/labels.csv")
+        cleaner.execute()
+
+        # only a single study should be kept
+        self.assertEqual(len(self.oa.studies.get_all_ids()), 1)
+
+
 if __name__ == '__main__':
     logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
     unittest.main()
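
For reference, a minimal sketch of how the cleaner exercised by this test can be run outside the test suite. The Orthanc URL, credentials and labels file path are placeholder assumptions, and the imports assume OrthancApiClient comes from orthanc_api_client and OrthancCleaner is exported by orthanc_tools; the constructor arguments mirror the ones used in the test above.

    # Sketch only: the URL, credentials and CSV path are assumptions, not part of the patch.
    from orthanc_api_client import OrthancApiClient
    from orthanc_tools import OrthancCleaner

    # client pointing to the Orthanc instance to clean up
    client = OrthancApiClient('http://localhost:8042', user='orthanc', pwd='orthanc')

    # the labels file lists, per label, the retention duration in weeks
    cleaner = OrthancCleaner(
        api_client=client,
        execution_time=None,  # as in the test: run once, immediately, via execute()
        labels_file_path='labels.csv'
    )
    cleaner.execute()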