From d44a24230456ab5291711e5dddfb9cd1dab2d86f Mon Sep 17 00:00:00 2001
From: MartinBelthle
Date: Thu, 24 Oct 2024 11:25:25 +0200
Subject: [PATCH] fix(archive): raise Exception when (un)archiving wrong
 outputs (#2199)

Fix [ANT-2369]
---
 antarest/core/exceptions.py                   | 20 +++++++
 antarest/study/service.py                     | 23 +++++++-
 .../study/storage/abstract_storage_service.py | 10 ----
 antarest/study/storage/utils.py               | 12 +++--
 tests/integration/test_integration.py         | 52 ++++++++++++++++++-
 tests/storage/test_service.py                 | 32 +++++++-----
 6 files changed, 120 insertions(+), 29 deletions(-)

diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py
index af5639b5ff..c02cbf966b 100644
--- a/antarest/core/exceptions.py
+++ b/antarest/core/exceptions.py
@@ -414,6 +414,26 @@ def __str__(self) -> str:
         return self.detail


+class OutputAlreadyArchived(HTTPException):
+    """
+    Exception raised when a user wants to archive an output which is already archived.
+    """
+
+    def __init__(self, output_id: str) -> None:
+        message = f"Output '{output_id}' is already archived"
+        super().__init__(HTTPStatus.EXPECTATION_FAILED, message)
+
+
+class OutputAlreadyUnarchived(HTTPException):
+    """
+    Exception raised when a user wants to unarchive an output which is already unarchived.
+    """
+
+    def __init__(self, output_id: str) -> None:
+        message = f"Output '{output_id}' is already unarchived"
+        super().__init__(HTTPStatus.EXPECTATION_FAILED, message)
+
+
 class OutputSubFolderNotFound(HTTPException):
     """
     Exception raised when an output sub folders do not exist
diff --git a/antarest/study/service.py b/antarest/study/service.py
index 436f7af64d..9e3ad837ed 100644
--- a/antarest/study/service.py
+++ b/antarest/study/service.py
@@ -37,6 +37,9 @@
     FileDeletionNotAllowed,
     IncorrectPathError,
     NotAManagedStudyException,
+    OutputAlreadyArchived,
+    OutputAlreadyUnarchived,
+    OutputNotFound,
     ReferencedObjectDeletionNotAllowed,
     StudyDeletionNotAllowed,
     StudyNotFoundError,
@@ -136,7 +139,13 @@
 from antarest.study.storage.storage_service import StudyStorageService
 from antarest.study.storage.study_download_utils import StudyDownloader, get_output_variables_information
 from antarest.study.storage.study_upgrader import StudyUpgrader, check_versions_coherence, find_next_version
-from antarest.study.storage.utils import assert_permission, get_start_date, is_managed, remove_from_cache
+from antarest.study.storage.utils import (
+    assert_permission,
+    get_start_date,
+    is_managed,
+    is_output_archived,
+    remove_from_cache,
+)
 from antarest.study.storage.variantstudy.business.utils import transform_command_to_dto
 from antarest.study.storage.variantstudy.model.command.generate_thermal_cluster_timeseries import (
     GenerateThermalClusterTimeSeries,
@@ -2328,6 +2337,12 @@ def archive_output(
         assert_permission(params.user, study, StudyPermissionType.WRITE)
         self._assert_study_unarchived(study)

+        output_path = Path(study.path) / "output" / output_id
+        if is_output_archived(output_path):
+            raise OutputAlreadyArchived(output_id)
+        if not output_path.exists():
+            raise OutputNotFound(output_id)
+
         archive_task_names = StudyService._get_output_archive_task_names(study, output_id)
         task_name = archive_task_names[0]

@@ -2386,6 +2401,12 @@ def unarchive_output(
         assert_permission(params.user, study, StudyPermissionType.READ)
         self._assert_study_unarchived(study)

+        output_path = Path(study.path) / "output" / output_id
+        if not is_output_archived(output_path):
+            if not output_path.exists():
+                raise OutputNotFound(output_id)
+            raise OutputAlreadyUnarchived(output_id)
+
         archive_task_names = StudyService._get_output_archive_task_names(study, output_id)
         task_name = archive_task_names[1]

diff --git a/antarest/study/storage/abstract_storage_service.py b/antarest/study/storage/abstract_storage_service.py
index d8d8993e2e..3de3c3a3aa 100644
--- a/antarest/study/storage/abstract_storage_service.py
+++ b/antarest/study/storage/abstract_storage_service.py
@@ -331,11 +331,6 @@ def _read_additional_data_from_files(self, file_study: FileStudy) -> StudyAdditi
         return study_additional_data

     def archive_study_output(self, study: T, output_id: str) -> bool:
-        if not (Path(study.path) / "output" / output_id).exists():
-            logger.warning(
-                f"Failed to archive study {study.name} output {output_id}. Maybe it's already archived",
-            )
-            return False
         try:
             zip_dir(
                 Path(study.path) / "output" / output_id,
@@ -352,11 +347,6 @@ def archive_study_output(self, study: T, output_id: str) -> bool:
             return False

     def unarchive_study_output(self, study: T, output_id: str, keep_src_zip: bool) -> bool:
-        if not (Path(study.path) / "output" / f"{output_id}.zip").exists():
-            logger.warning(
-                f"Failed to archive study {study.name} output {output_id}. Maybe it's already unarchived",
-            )
-            return False
         try:
             unzip(
                 Path(study.path) / "output" / output_id,
diff --git a/antarest/study/storage/utils.py b/antarest/study/storage/utils.py
index 82b4c7f4bd..d1ab8828a0 100644
--- a/antarest/study/storage/utils.py
+++ b/antarest/study/storage/utils.py
@@ -116,10 +116,16 @@ def find_single_output_path(all_output_path: Path) -> Path:
     return all_output_path


+def is_output_archived(path_output: Path) -> bool:
+    # Returns True if the given path is archived or if adding a suffix to the path points to an existing path
+    suffixes = [".zip"]
+    return path_output.suffix in suffixes or any(path_output.with_suffix(suffix).exists() for suffix in suffixes)
+
+
 def extract_output_name(path_output: Path, new_suffix_name: t.Optional[str] = None) -> str:
     ini_reader = IniReader()
-    is_output_archived = path_output.suffix == ".zip"
-    if is_output_archived:
+    archived = is_output_archived(path_output)
+    if archived:
         temp_dir = tempfile.TemporaryDirectory()
         s = StopWatch()
         with ZipFile(path_output, "r") as zip_obj:
@@ -140,7 +146,7 @@ def extract_output_name(path_output: Path, new_suffix_name: t.Optional[str] = No
     if new_suffix_name:
         suffix_name = new_suffix_name
     general_info["name"] = suffix_name
-    if not is_output_archived:
+    if not archived:
         ini_writer = IniWriter()
         ini_writer.write(info_antares_output, path_output / "info.antares-output")
     else:
diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py
index 42b8cb2407..e3d614ea9d 100644
--- a/tests/integration/test_integration.py
+++ b/tests/integration/test_integration.py
@@ -1486,9 +1486,59 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None:
     ]


-def test_archive(client: TestClient, admin_access_token: str, tmp_path: Path) -> None:
+def test_archive(client: TestClient, admin_access_token: str, tmp_path: Path, internal_study_id: str) -> None:
     client.headers = {"Authorization": f"Bearer {admin_access_token}"}

+    # =============================
+    # OUTPUT PART
+    # =============================
+
+    res = client.get(f"/v1/studies/{internal_study_id}/outputs")
+    outputs = res.json()
+    fake_output = "fake_output"
+    unarchived_outputs = [output["name"] for output in outputs if not output["archived"]]
+    usual_output = unarchived_outputs[0]
+
+    # Archive
+    res = client.post(f"/v1/studies/{internal_study_id}/outputs/{fake_output}/_archive")
+    assert res.json()["exception"] == "OutputNotFound"
+    assert res.json()["description"] == f"Output '{fake_output}' not found"
+    assert res.status_code == 404
+
+    res = client.post(f"/v1/studies/{internal_study_id}/outputs/{usual_output}/_archive")
+    assert res.status_code == 200
+    task_id = res.json()
+    wait_for(
+        lambda: client.get(
+            f"/v1/tasks/{task_id}",
+        ).json()["status"]
+        == 3
+    )
+
+    res = client.post(f"/v1/studies/{internal_study_id}/outputs/{usual_output}/_archive")
+    assert res.json()["exception"] == "OutputAlreadyArchived"
+    assert res.json()["description"] == f"Output '{usual_output}' is already archived"
+    assert res.status_code == 417
+
+    # Unarchive
+    res = client.post(f"/v1/studies/{internal_study_id}/outputs/{fake_output}/_unarchive")
+    assert res.json()["exception"] == "OutputNotFound"
+    assert res.json()["description"] == f"Output '{fake_output}' not found"
+    assert res.status_code == 404
+
+    unarchived_output = unarchived_outputs[1]
+    res = client.post(f"/v1/studies/{internal_study_id}/outputs/{unarchived_output}/_unarchive")
+    assert res.json()["exception"] == "OutputAlreadyUnarchived"
+    assert res.json()["description"] == f"Output '{unarchived_output}' is already unarchived"
+    assert res.status_code == 417
+
+    res = client.post(f"/v1/studies/{internal_study_id}/outputs/{usual_output}/_unarchive")
+    assert res.status_code == 200
+
+    # =============================
+    # STUDY PART
+    # =============================
+
     study_res = client.post("/v1/studies?name=foo")
     study_id = study_res.json()

diff --git a/tests/storage/test_service.py b/tests/storage/test_service.py
index caf14dd2c2..a1d0724c76 100644
--- a/tests/storage/test_service.py
+++ b/tests/storage/test_service.py
@@ -1377,6 +1377,7 @@ def test_unarchive_output(tmp_path: Path) -> None:
     output_id = "some-output"
     service.task_service.add_worker_task.return_value = None  # type: ignore
     service.task_service.list_tasks.return_value = []  # type: ignore
+    (tmp_path / "output" / f"{output_id}.zip").mkdir(parents=True, exist_ok=True)
     service.unarchive_output(
         study_id,
         output_id,
@@ -1433,13 +1434,16 @@ def test_archive_output_locks(tmp_path: Path) -> None:

     service.task_service.reset_mock()

-    output_id = "some-output"
+    output_zipped = "some-output_zipped"
+    output_unzipped = "some-output_unzipped"
     service.task_service.add_worker_task.return_value = None  # type: ignore
+    (tmp_path / "output" / output_unzipped).mkdir(parents=True)
+    (tmp_path / "output" / f"{output_zipped}.zip").touch()
     service.task_service.list_tasks.side_effect = [
         [
             TaskDTO(
                 id="1",
-                name=f"Archive output {study_id}/{output_id}",
+                name=f"Archive output {study_id}/{output_zipped}",
                 status=TaskStatus.PENDING,
                 creation_date_utc=str(datetime.utcnow()),
                 type=TaskType.ARCHIVE,
@@ -1449,7 +1453,7 @@
         [
             TaskDTO(
                 id="1",
-                name=f"Unarchive output {study_name}/{output_id} ({study_id})",
+                name=f"Unarchive output {study_name}/{output_zipped} ({study_id})",
                 status=TaskStatus.PENDING,
                 creation_date_utc=str(datetime.utcnow()),
                 type=TaskType.UNARCHIVE,
@@ -1459,7 +1463,7 @@
         [
             TaskDTO(
                 id="1",
-                name=f"Archive output {study_id}/{output_id}",
+                name=f"Archive output {study_id}/{output_unzipped}",
                 status=TaskStatus.PENDING,
                 creation_date_utc=str(datetime.utcnow()),
                 type=TaskType.ARCHIVE,
@@ -1469,7 +1473,7 @@
         [
             TaskDTO(
                 id="1",
-                name=f"Unarchive output {study_name}/{output_id} ({study_id})",
+                name=f"Unarchive output {study_name}/{output_unzipped} ({study_id})",
                 status=TaskStatus.RUNNING,
                 creation_date_utc=str(datetime.utcnow()),
                 type=TaskType.UNARCHIVE,
@@ -1482,7 +1486,7 @@
     with pytest.raises(TaskAlreadyRunning):
         service.unarchive_output(
             study_id,
-            output_id,
+            output_zipped,
             keep_src_zip=True,
             params=RequestParameters(user=DEFAULT_ADMIN_USER),
         )
@@ -1490,7 +1494,7 @@
     with pytest.raises(TaskAlreadyRunning):
         service.unarchive_output(
             study_id,
-            output_id,
+            output_zipped,
             keep_src_zip=True,
             params=RequestParameters(user=DEFAULT_ADMIN_USER),
         )
@@ -1498,20 +1502,20 @@
     with pytest.raises(TaskAlreadyRunning):
         service.archive_output(
             study_id,
-            output_id,
+            output_unzipped,
             params=RequestParameters(user=DEFAULT_ADMIN_USER),
         )

     with pytest.raises(TaskAlreadyRunning):
         service.archive_output(
             study_id,
-            output_id,
+            output_unzipped,
             params=RequestParameters(user=DEFAULT_ADMIN_USER),
         )

     service.unarchive_output(
         study_id,
-        output_id,
+        output_zipped,
         keep_src_zip=True,
         params=RequestParameters(user=DEFAULT_ADMIN_USER),
     )
@@ -1520,17 +1524,17 @@
         TaskType.UNARCHIVE,
         "unarchive_other_workspace",
         ArchiveTaskArgs(
-            src=str(tmp_path / "output" / f"{output_id}.zip"),
-            dest=str(tmp_path / "output" / output_id),
+            src=str(tmp_path / "output" / f"{output_zipped}.zip"),
+            dest=str(tmp_path / "output" / output_zipped),
             remove_src=False,
         ).model_dump(),
-        name=f"Unarchive output {study_name}/{output_id} ({study_id})",
+        name=f"Unarchive output {study_name}/{output_zipped} ({study_id})",
         ref_id=study_id,
         request_params=RequestParameters(user=DEFAULT_ADMIN_USER),
     )
     service.task_service.add_task.assert_called_once_with(
         ANY,
-        f"Unarchive output {study_name}/{output_id} ({study_id})",
+        f"Unarchive output {study_name}/{output_zipped} ({study_id})",
         task_type=TaskType.UNARCHIVE,
         ref_id=study_id,
         progress=None,