Skip to content

Commit

Permalink
fix(archive): raise Exception when (un)archiving wrong outputs (#2199)
Browse files Browse the repository at this point in the history
Fix [ANT-2369]
  • Loading branch information
MartinBelthle authored Oct 24, 2024
1 parent 2b5795b commit d44a242
Show file tree
Hide file tree
Showing 6 changed files with 120 additions and 29 deletions.
20 changes: 20 additions & 0 deletions antarest/core/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -414,6 +414,26 @@ def __str__(self) -> str:
return self.detail


class OutputAlreadyArchived(HTTPException):
    """
    Raised when an archive operation targets an output that is already stored as an archive.

    Translates to an HTTP 417 (Expectation Failed) response carrying the output id.
    """

    def __init__(self, output_id: str) -> None:
        super().__init__(
            HTTPStatus.EXPECTATION_FAILED,
            f"Output '{output_id}' is already archived",
        )


class OutputAlreadyUnarchived(HTTPException):
    """
    Raised when an unarchive operation targets an output that is not archived.

    Translates to an HTTP 417 (Expectation Failed) response carrying the output id.
    """

    def __init__(self, output_id: str) -> None:
        super().__init__(
            HTTPStatus.EXPECTATION_FAILED,
            f"Output '{output_id}' is already unarchived",
        )


class OutputSubFolderNotFound(HTTPException):
"""
Exception raised when an output sub folders do not exist
Expand Down
23 changes: 22 additions & 1 deletion antarest/study/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,9 @@
FileDeletionNotAllowed,
IncorrectPathError,
NotAManagedStudyException,
OutputAlreadyArchived,
OutputAlreadyUnarchived,
OutputNotFound,
ReferencedObjectDeletionNotAllowed,
StudyDeletionNotAllowed,
StudyNotFoundError,
Expand Down Expand Up @@ -136,7 +139,13 @@
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.study_download_utils import StudyDownloader, get_output_variables_information
from antarest.study.storage.study_upgrader import StudyUpgrader, check_versions_coherence, find_next_version
from antarest.study.storage.utils import assert_permission, get_start_date, is_managed, remove_from_cache
from antarest.study.storage.utils import (
assert_permission,
get_start_date,
is_managed,
is_output_archived,
remove_from_cache,
)
from antarest.study.storage.variantstudy.business.utils import transform_command_to_dto
from antarest.study.storage.variantstudy.model.command.generate_thermal_cluster_timeseries import (
GenerateThermalClusterTimeSeries,
Expand Down Expand Up @@ -2328,6 +2337,12 @@ def archive_output(
assert_permission(params.user, study, StudyPermissionType.WRITE)
self._assert_study_unarchived(study)

output_path = Path(study.path) / "output" / output_id
if is_output_archived(output_path):
raise OutputAlreadyArchived(output_id)
if not output_path.exists():
raise OutputNotFound(output_id)

archive_task_names = StudyService._get_output_archive_task_names(study, output_id)
task_name = archive_task_names[0]

Expand Down Expand Up @@ -2386,6 +2401,12 @@ def unarchive_output(
assert_permission(params.user, study, StudyPermissionType.READ)
self._assert_study_unarchived(study)

output_path = Path(study.path) / "output" / output_id
if not is_output_archived(output_path):
if not output_path.exists():
raise OutputNotFound(output_id)
raise OutputAlreadyUnarchived(output_id)

archive_task_names = StudyService._get_output_archive_task_names(study, output_id)
task_name = archive_task_names[1]

Expand Down
10 changes: 0 additions & 10 deletions antarest/study/storage/abstract_storage_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -331,11 +331,6 @@ def _read_additional_data_from_files(self, file_study: FileStudy) -> StudyAdditi
return study_additional_data

def archive_study_output(self, study: T, output_id: str) -> bool:
if not (Path(study.path) / "output" / output_id).exists():
logger.warning(
f"Failed to archive study {study.name} output {output_id}. Maybe it's already archived",
)
return False
try:
zip_dir(
Path(study.path) / "output" / output_id,
Expand All @@ -352,11 +347,6 @@ def archive_study_output(self, study: T, output_id: str) -> bool:
return False

def unarchive_study_output(self, study: T, output_id: str, keep_src_zip: bool) -> bool:
if not (Path(study.path) / "output" / f"{output_id}.zip").exists():
logger.warning(
f"Failed to archive study {study.name} output {output_id}. Maybe it's already unarchived",
)
return False
try:
unzip(
Path(study.path) / "output" / output_id,
Expand Down
12 changes: 9 additions & 3 deletions antarest/study/storage/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,10 +116,16 @@ def find_single_output_path(all_output_path: Path) -> Path:
return all_output_path


def is_output_archived(path_output: Path) -> bool:
    """Tell whether a study output is stored in archived form.

    An output counts as archived when the given path itself carries an archive
    suffix, or when adding such a suffix to the path yields an existing file.
    """
    archive_suffixes = (".zip",)
    if path_output.suffix in archive_suffixes:
        return True
    return any(path_output.with_suffix(sfx).exists() for sfx in archive_suffixes)


def extract_output_name(path_output: Path, new_suffix_name: t.Optional[str] = None) -> str:
ini_reader = IniReader()
is_output_archived = path_output.suffix == ".zip"
if is_output_archived:
archived = is_output_archived(path_output)
if archived:
temp_dir = tempfile.TemporaryDirectory()
s = StopWatch()
with ZipFile(path_output, "r") as zip_obj:
Expand All @@ -140,7 +146,7 @@ def extract_output_name(path_output: Path, new_suffix_name: t.Optional[str] = No
if new_suffix_name:
suffix_name = new_suffix_name
general_info["name"] = suffix_name
if not is_output_archived:
if not archived:
ini_writer = IniWriter()
ini_writer.write(info_antares_output, path_output / "info.antares-output")
else:
Expand Down
52 changes: 51 additions & 1 deletion tests/integration/test_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -1486,9 +1486,59 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None:
]


def test_archive(client: TestClient, admin_access_token: str, tmp_path: Path) -> None:
def test_archive(client: TestClient, admin_access_token: str, tmp_path: Path, internal_study_id: str) -> None:
client.headers = {"Authorization": f"Bearer {admin_access_token}"}

# =============================
# OUTPUT PART
# =============================

res = client.get(f"/v1/studies/{internal_study_id}/outputs")
outputs = res.json()
fake_output = "fake_output"
unarchived_outputs = [output["name"] for output in outputs if not output["archived"]]
usual_output = unarchived_outputs[0]

# Archive
res = client.post(f"/v1/studies/{internal_study_id}/outputs/{fake_output}/_archive")
assert res.json()["exception"] == "OutputNotFound"
assert res.json()["description"] == f"Output '{fake_output}' not found"
assert res.status_code == 404

res = client.post(f"/v1/studies/{internal_study_id}/outputs/{usual_output}/_archive")
assert res.status_code == 200
task_id = res.json()
wait_for(
lambda: client.get(
f"/v1/tasks/{task_id}",
).json()["status"]
== 3
)

res = client.post(f"/v1/studies/{internal_study_id}/outputs/{usual_output}/_archive")
assert res.json()["exception"] == "OutputAlreadyArchived"
assert res.json()["description"] == f"Output '{usual_output}' is already archived"
assert res.status_code == 417

# Unarchive
res = client.post(f"/v1/studies/{internal_study_id}/outputs/{fake_output}/_unarchive")
assert res.json()["exception"] == "OutputNotFound"
assert res.json()["description"] == f"Output '{fake_output}' not found"
assert res.status_code == 404

unarchived_output = unarchived_outputs[1]
res = client.post(f"/v1/studies/{internal_study_id}/outputs/{unarchived_output}/_unarchive")
assert res.json()["exception"] == "OutputAlreadyUnarchived"
assert res.json()["description"] == f"Output '{unarchived_output}' is already unarchived"
assert res.status_code == 417

res = client.post(f"/v1/studies/{internal_study_id}/outputs/{usual_output}/_unarchive")
assert res.status_code == 200

# =============================
# STUDY PART
# =============================

study_res = client.post("/v1/studies?name=foo")
study_id = study_res.json()

Expand Down
32 changes: 18 additions & 14 deletions tests/storage/test_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -1377,6 +1377,7 @@ def test_unarchive_output(tmp_path: Path) -> None:
output_id = "some-output"
service.task_service.add_worker_task.return_value = None # type: ignore
service.task_service.list_tasks.return_value = [] # type: ignore
(tmp_path / "output" / f"{output_id}.zip").mkdir(parents=True, exist_ok=True)
service.unarchive_output(
study_id,
output_id,
Expand Down Expand Up @@ -1433,13 +1434,16 @@ def test_archive_output_locks(tmp_path: Path) -> None:

service.task_service.reset_mock()

output_id = "some-output"
output_zipped = "some-output_zipped"
output_unzipped = "some-output_unzipped"
service.task_service.add_worker_task.return_value = None # type: ignore
(tmp_path / "output" / output_unzipped).mkdir(parents=True)
(tmp_path / "output" / f"{output_zipped}.zip").touch()
service.task_service.list_tasks.side_effect = [
[
TaskDTO(
id="1",
name=f"Archive output {study_id}/{output_id}",
name=f"Archive output {study_id}/{output_zipped}",
status=TaskStatus.PENDING,
creation_date_utc=str(datetime.utcnow()),
type=TaskType.ARCHIVE,
Expand All @@ -1449,7 +1453,7 @@ def test_archive_output_locks(tmp_path: Path) -> None:
[
TaskDTO(
id="1",
name=f"Unarchive output {study_name}/{output_id} ({study_id})",
name=f"Unarchive output {study_name}/{output_zipped} ({study_id})",
status=TaskStatus.PENDING,
creation_date_utc=str(datetime.utcnow()),
type=TaskType.UNARCHIVE,
Expand All @@ -1459,7 +1463,7 @@ def test_archive_output_locks(tmp_path: Path) -> None:
[
TaskDTO(
id="1",
name=f"Archive output {study_id}/{output_id}",
name=f"Archive output {study_id}/{output_unzipped}",
status=TaskStatus.PENDING,
creation_date_utc=str(datetime.utcnow()),
type=TaskType.ARCHIVE,
Expand All @@ -1469,7 +1473,7 @@ def test_archive_output_locks(tmp_path: Path) -> None:
[
TaskDTO(
id="1",
name=f"Unarchive output {study_name}/{output_id} ({study_id})",
name=f"Unarchive output {study_name}/{output_unzipped} ({study_id})",
status=TaskStatus.RUNNING,
creation_date_utc=str(datetime.utcnow()),
type=TaskType.UNARCHIVE,
Expand All @@ -1482,36 +1486,36 @@ def test_archive_output_locks(tmp_path: Path) -> None:
with pytest.raises(TaskAlreadyRunning):
service.unarchive_output(
study_id,
output_id,
output_zipped,
keep_src_zip=True,
params=RequestParameters(user=DEFAULT_ADMIN_USER),
)

with pytest.raises(TaskAlreadyRunning):
service.unarchive_output(
study_id,
output_id,
output_zipped,
keep_src_zip=True,
params=RequestParameters(user=DEFAULT_ADMIN_USER),
)

with pytest.raises(TaskAlreadyRunning):
service.archive_output(
study_id,
output_id,
output_unzipped,
params=RequestParameters(user=DEFAULT_ADMIN_USER),
)

with pytest.raises(TaskAlreadyRunning):
service.archive_output(
study_id,
output_id,
output_unzipped,
params=RequestParameters(user=DEFAULT_ADMIN_USER),
)

service.unarchive_output(
study_id,
output_id,
output_zipped,
keep_src_zip=True,
params=RequestParameters(user=DEFAULT_ADMIN_USER),
)
Expand All @@ -1520,17 +1524,17 @@ def test_archive_output_locks(tmp_path: Path) -> None:
TaskType.UNARCHIVE,
"unarchive_other_workspace",
ArchiveTaskArgs(
src=str(tmp_path / "output" / f"{output_id}.zip"),
dest=str(tmp_path / "output" / output_id),
src=str(tmp_path / "output" / f"{output_zipped}.zip"),
dest=str(tmp_path / "output" / output_zipped),
remove_src=False,
).model_dump(),
name=f"Unarchive output {study_name}/{output_id} ({study_id})",
name=f"Unarchive output {study_name}/{output_zipped} ({study_id})",
ref_id=study_id,
request_params=RequestParameters(user=DEFAULT_ADMIN_USER),
)
service.task_service.add_task.assert_called_once_with(
ANY,
f"Unarchive output {study_name}/{output_id} ({study_id})",
f"Unarchive output {study_name}/{output_zipped} ({study_id})",
task_type=TaskType.UNARCHIVE,
ref_id=study_id,
progress=None,
Expand Down

0 comments on commit d44a242

Please sign in to comment.