diff --git a/antarest/__init__.py b/antarest/__init__.py index f4fae4cd35..9895b016bd 100644 --- a/antarest/__init__.py +++ b/antarest/__init__.py @@ -7,9 +7,9 @@ # Standard project metadata -__version__ = "2.16.8" +__version__ = "2.17" __author__ = "RTE, Antares Web Team" -__date__ = "2024-04-19" +__date__ = "2024-05-15" # noinspection SpellCheckingInspection __credits__ = "(c) Réseau de Transport de l’Électricité (RTE)" diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py index 87804de393..0ca8af4f7d 100644 --- a/antarest/core/exceptions.py +++ b/antarest/core/exceptions.py @@ -376,11 +376,6 @@ def __init__(self, message: str) -> None: super().__init__(HTTPStatus.NOT_FOUND, message) -class ConstraintAlreadyExistError(HTTPException): - def __init__(self, message: str) -> None: - super().__init__(HTTPStatus.NOT_FOUND, message) - - class DuplicateConstraintName(HTTPException): def __init__(self, message: str) -> None: super().__init__(HTTPStatus.CONFLICT, message) @@ -406,14 +401,61 @@ def __init__(self, message: str) -> None: super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message) -class MissingDataError(HTTPException): - def __init__(self, message: str) -> None: +class ConstraintTermNotFound(HTTPException): + """ + Exception raised when a constraint term is not found. 
+ """ + + def __init__(self, binding_constraint_id: str, *ids: str) -> None: + count = len(ids) + id_enum = ", ".join(f"'{term}'" for term in ids) + message = { + 0: f"Constraint terms not found in BC '{binding_constraint_id}'", + 1: f"Constraint term {id_enum} not found in BC '{binding_constraint_id}'", + 2: f"Constraint terms {id_enum} not found in BC '{binding_constraint_id}'", + }[min(count, 2)] super().__init__(HTTPStatus.NOT_FOUND, message) + def __str__(self) -> str: + """Return a string representation of the exception.""" + return self.detail -class ConstraintIdNotFoundError(HTTPException): - def __init__(self, message: str) -> None: - super().__init__(HTTPStatus.NOT_FOUND, message) + +class DuplicateConstraintTerm(HTTPException): + """ + Exception raised when an attempt is made to create a constraint term which already exists. + """ + + def __init__(self, binding_constraint_id: str, *ids: str) -> None: + count = len(ids) + id_enum = ", ".join(f"'{term}'" for term in ids) + message = { + 0: f"Constraint terms already exist in BC '{binding_constraint_id}'", + 1: f"Constraint term {id_enum} already exists in BC '{binding_constraint_id}'", + 2: f"Constraint terms {id_enum} already exist in BC '{binding_constraint_id}'", + }[min(count, 2)] + super().__init__(HTTPStatus.CONFLICT, message) + + def __str__(self) -> str: + """Return a string representation of the exception.""" + return self.detail + + +class InvalidConstraintTerm(HTTPException): + """ + Exception raised when a constraint term is not correctly specified (no term data). 
+ """ + + def __init__(self, binding_constraint_id: str, term_json: str) -> None: + message = ( + f"Invalid constraint term for binding constraint '{binding_constraint_id}': {term_json}," + f" term 'data' is missing or empty" + ) + super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message) + + def __str__(self) -> str: + """Return a string representation of the exception.""" + return self.detail class LayerNotFound(HTTPException): diff --git a/antarest/study/business/advanced_parameters_management.py b/antarest/study/business/advanced_parameters_management.py index 6cd0825322..f18e47a71f 100644 --- a/antarest/study/business/advanced_parameters_management.py +++ b/antarest/study/business/advanced_parameters_management.py @@ -91,7 +91,7 @@ def check_accuracy_on_correlation(cls, v: str) -> str: return "" allowed_values = ["wind", "load", "solar"] - values_list = re.split("\s*,\s*", v.strip()) + values_list = re.split(r"\s*,\s*", v.strip()) if len(values_list) != len(set(values_list)): raise ValueError("Duplicate value") diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py index db04120884..8f0758d9dc 100644 --- a/antarest/study/business/area_management.py +++ b/antarest/study/business/area_management.py @@ -232,8 +232,11 @@ def _to_optimization(self) -> OptimizationProperties: nodal_optimization=nodal_optimization_section, ) - def _to_adequacy_patch(self) -> AdequacyPathProperties: + def _to_adequacy_patch(self) -> t.Optional[AdequacyPathProperties]: obj = {name: getattr(self, name) for name in AdequacyPathProperties.AdequacyPathSection.__fields__} + # If all fields are `None`, the object is empty. 
+ if all(value is None for value in obj.values()): + return None adequacy_path_section = AdequacyPathProperties.AdequacyPathSection(**obj) return AdequacyPathProperties(adequacy_patch=adequacy_path_section) diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index 9c29c5925d..28881ef874 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -10,14 +10,13 @@ from antarest.core.exceptions import ( BindingConstraintNotFound, - ConstraintAlreadyExistError, - ConstraintIdNotFoundError, + ConstraintTermNotFound, DuplicateConstraintName, + DuplicateConstraintTerm, InvalidConstraintName, + InvalidConstraintTerm, InvalidFieldForVersionError, MatrixWidthMismatchError, - MissingDataError, - NoConstraintError, WrongMatrixHeightError, ) from antarest.core.model import JSON @@ -805,90 +804,92 @@ def remove_binding_constraint(self, study: Study, binding_constraint_id: str) -> command = RemoveBindingConstraint(id=bc.id, command_context=command_context) execute_or_add_commands(study, file_study, [command], self.storage_service) - def update_constraint_term( - self, - study: Study, - binding_constraint_id: str, - term: ConstraintTerm, + def _update_constraint_with_terms( + self, study: Study, bc: ConstraintOutput, terms: t.Mapping[str, ConstraintTerm] ) -> None: - file_study = self.storage_service.get_storage(study).get_raw(study) - constraint = self.get_binding_constraint(study, binding_constraint_id) - constraint_terms = constraint.terms # existing constraint terms - if not constraint_terms: - raise NoConstraintError(study.id) - - term_id = term.id if isinstance(term, ConstraintTerm) else term - if term_id is None: - raise ConstraintIdNotFoundError(study.id) - - term_id_index = find_constraint_term_id(constraint_terms, term_id) - if term_id_index < 0: - raise ConstraintIdNotFoundError(study.id) - - if isinstance(term, 
ConstraintTerm): - updated_term_id = term.data.generate_id() if term.data else term_id - current_constraint = constraint_terms[term_id_index] - - constraint_terms[term_id_index] = ConstraintTerm( - id=updated_term_id, - weight=term.weight or current_constraint.weight, - offset=term.offset, - data=term.data or current_constraint.data, - ) - else: - del constraint_terms[term_id_index] - - coeffs = {term.id: [term.weight, term.offset] if term.offset else [term.weight] for term in constraint_terms} - + coeffs = { + term_id: [term.weight, term.offset] if term.offset else [term.weight] for term_id, term in terms.items() + } command = UpdateBindingConstraint( - id=constraint.id, + id=bc.id, coeffs=coeffs, command_context=self.storage_service.variant_study_service.command_factory.command_context, ) + file_study = self.storage_service.get_storage(study).get_raw(study) execute_or_add_commands(study, file_study, [command], self.storage_service) - def create_constraint_term( + def update_constraint_terms( self, study: Study, binding_constraint_id: str, - constraint_term: ConstraintTerm, + constraint_terms: t.Sequence[ConstraintTerm], + update_mode: str = "replace", ) -> None: - file_study = self.storage_service.get_storage(study).get_raw(study) - constraint = self.get_binding_constraint(study, binding_constraint_id) - - if constraint_term.data is None: - raise MissingDataError("Add new constraint term : data is missing") + """ + Update or add the specified constraint terms. - constraint_id = constraint_term.data.generate_id() - constraint_terms = constraint.terms or [] - if find_constraint_term_id(constraint_terms, constraint_id) >= 0: - raise ConstraintAlreadyExistError(study.id) + Args: + study: The study from which to update the binding constraint. + binding_constraint_id: The ID of the binding constraint to update. + constraint_terms: The constraint terms to update. + update_mode: The update mode, either "replace" or "add". 
+ """ + if update_mode == "add": + for term in constraint_terms: + if term.data is None: + raise InvalidConstraintTerm(binding_constraint_id, term.json()) - constraint_terms.append( - ConstraintTerm( - id=constraint_id, - weight=constraint_term.weight if constraint_term.weight is not None else 0.0, - offset=constraint_term.offset, - data=constraint_term.data, - ) - ) + constraint = self.get_binding_constraint(study, binding_constraint_id) + existing_terms = collections.OrderedDict((term.generate_id(), term) for term in constraint.terms) + updated_terms = collections.OrderedDict((term.generate_id(), term) for term in constraint_terms) + + if update_mode == "replace": + missing_terms = set(updated_terms) - set(existing_terms) + if missing_terms: + raise ConstraintTermNotFound(binding_constraint_id, *missing_terms) + elif update_mode == "add": + duplicate_terms = set(updated_terms) & set(existing_terms) + if duplicate_terms: + raise DuplicateConstraintTerm(binding_constraint_id, *duplicate_terms) + else: # pragma: no cover + raise NotImplementedError(f"Unsupported update mode: {update_mode}") + + existing_terms.update(updated_terms) + self._update_constraint_with_terms(study, constraint, existing_terms) + + def create_constraint_terms( + self, study: Study, binding_constraint_id: str, constraint_terms: t.Sequence[ConstraintTerm] + ) -> None: + """ + Adds new constraint terms to an existing binding constraint. - coeffs = {term.id: [term.weight] + [term.offset] if term.offset else [term.weight] for term in constraint_terms} - command = UpdateBindingConstraint( - id=constraint.id, - coeffs=coeffs, - command_context=self.storage_service.variant_study_service.command_factory.command_context, - ) - execute_or_add_commands(study, file_study, [command], self.storage_service) + Args: + study: The study from which to update the binding constraint. + binding_constraint_id: The ID of the binding constraint to update. + constraint_terms: The constraint terms to add. 
+ """ + return self.update_constraint_terms(study, binding_constraint_id, constraint_terms, update_mode="add") - # FIXME create a dedicated delete service def remove_constraint_term( self, study: Study, binding_constraint_id: str, term_id: str, ) -> None: - return self.update_constraint_term(study, binding_constraint_id, term_id) # type: ignore + """ + Remove a constraint term from an existing binding constraint. + + Args: + study: The study from which to update the binding constraint. + binding_constraint_id: The ID of the binding constraint to update. + term_id: The ID of the term to remove. + """ + constraint = self.get_binding_constraint(study, binding_constraint_id) + existing_terms = collections.OrderedDict((term.generate_id(), term) for term in constraint.terms) + removed_term = existing_terms.pop(term_id, None) + if removed_term is None: + raise ConstraintTermNotFound(binding_constraint_id, term_id) + self._update_constraint_with_terms(study, constraint, existing_terms) @staticmethod def get_table_schema() -> JSON: @@ -918,14 +919,6 @@ def _replace_matrices_according_to_frequency_and_version( return args -def find_constraint_term_id(constraints_term: t.Sequence[ConstraintTerm], constraint_term_id: str) -> int: - try: - index = [elm.id for elm in constraints_term].index(constraint_term_id) - return index - except ValueError: - return -1 - - def check_attributes_coherence(data: t.Union[ConstraintCreation, ConstraintInput], study_version: int) -> None: if study_version < 870: if data.group: diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index cbbf5358cc..65687af9c9 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -174,10 +174,10 @@ def update_table_data( The updated properties of the objects including the old ones. 
""" if table_type == TableModeType.AREA: - # Use AreaOutput to update properties of areas + # Use AreaOutput to update properties of areas, which may include `None` values area_props_by_ids = {key: AreaOutput(**values) for key, values in data.items()} areas_map = self._area_manager.update_areas_props(study, area_props_by_ids) - data = {area_id: area.dict(by_alias=True) for area_id, area in areas_map.items()} + data = {area_id: area.dict(by_alias=True, exclude_none=True) for area_id, area in areas_map.items()} return data elif table_type == TableModeType.LINK: links_map = {tuple(key.split(" / ")): LinkOutput(**values) for key, values in data.items()} diff --git a/antarest/study/storage/variantstudy/model/command/icommand.py b/antarest/study/storage/variantstudy/model/command/icommand.py index 72eb6bfa02..1c5c704320 100644 --- a/antarest/study/storage/variantstudy/model/command/icommand.py +++ b/antarest/study/storage/variantstudy/model/command/icommand.py @@ -19,7 +19,7 @@ logger = logging.getLogger(__name__) -class ICommand(ABC, BaseModel, extra=Extra.forbid, arbitrary_types_allowed=True): +class ICommand(ABC, BaseModel, extra=Extra.forbid, arbitrary_types_allowed=True, copy_on_model_validation="deep"): """ Interface for all commands that can be applied to a study. 
diff --git a/antarest/study/web/raw_studies_blueprint.py b/antarest/study/web/raw_studies_blueprint.py index 65becf59a2..2ce44c5d48 100644 --- a/antarest/study/web/raw_studies_blueprint.py +++ b/antarest/study/web/raw_studies_blueprint.py @@ -25,6 +25,12 @@ from antarest.study.service import StudyService from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency +try: + import tables # type: ignore + import xlsxwriter # type: ignore +except ImportError: + raise ImportError("The 'xlsxwriter' and 'tables' packages are required") from None + logger = logging.getLogger(__name__) @@ -69,7 +75,10 @@ class TableExportFormat(EnumIgnoreCase): """Export format for tables.""" XLSX = "xlsx" + HDF5 = "hdf5" TSV = "tsv" + CSV = "csv" + CSV_SEMICOLON = "csv (semicolon)" def __str__(self) -> str: """Return the format as a string for display.""" @@ -83,6 +92,10 @@ def media_type(self) -> str: return "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" elif self == TableExportFormat.TSV: return "text/tab-separated-values" + elif self in (TableExportFormat.CSV, TableExportFormat.CSV_SEMICOLON): + return "text/csv" + elif self == TableExportFormat.HDF5: + return "application/x-hdf5" else: # pragma: no cover raise NotImplementedError(f"Export format '{self}' is not implemented") @@ -93,6 +106,10 @@ def suffix(self) -> str: return ".xlsx" elif self == TableExportFormat.TSV: return ".tsv" + elif self in (TableExportFormat.CSV, TableExportFormat.CSV_SEMICOLON): + return ".csv" + elif self == TableExportFormat.HDF5: + return ".h5" else: # pragma: no cover raise NotImplementedError(f"Export format '{self}' is not implemented") @@ -106,9 +123,45 @@ def export_table( ) -> None: """Export a table to a file in the given format.""" if self == TableExportFormat.XLSX: - return df.to_excel(export_path, index=with_index, header=with_header, engine="openpyxl") + return df.to_excel( + export_path, + index=with_index, + header=with_header, + 
engine="xlsxwriter", + ) elif self == TableExportFormat.TSV: - return df.to_csv(export_path, sep="\t", index=with_index, header=with_header, float_format="%.6f") + return df.to_csv( + export_path, + sep="\t", + index=with_index, + header=with_header, + float_format="%.6f", + ) + elif self == TableExportFormat.CSV: + return df.to_csv( + export_path, + sep=",", + index=with_index, + header=with_header, + float_format="%.6f", + ) + elif self == TableExportFormat.CSV_SEMICOLON: + return df.to_csv( + export_path, + sep=";", + decimal=",", + index=with_index, + header=with_header, + float_format="%.6f", + ) + elif self == TableExportFormat.HDF5: + return df.to_hdf( + export_path, + key="data", + mode="w", + format="table", + data_columns=True, + ) else: # pragma: no cover raise NotImplementedError(f"Export format '{self}' is not implemented") @@ -464,7 +517,10 @@ def get_matrix( return FileResponse( export_path, - headers={"Content-Disposition": f'attachment; filename="{export_file_download.filename}"'}, + headers={ + "Content-Disposition": f'attachment; filename="{export_file_download.filename}"', + "Content-Type": f"{export_format.media_type}; charset=utf-8", + }, media_type=export_format.media_type, ) diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index ecdd5ff191..467aa49805 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -59,7 +59,6 @@ from antarest.study.business.timeseries_config_management import TSFormFields from antarest.study.model import PatchArea, PatchCluster from antarest.study.service import StudyService -from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( BindingConstraintFrequency, BindingConstraintOperator, @@ -1204,14 +1203,49 @@ def add_constraint_term( binding_constraint_id: str, term: ConstraintTerm, current_user: JWTUser 
= Depends(auth.get_current_user), - ) -> t.Any: + ) -> None: + """ + Append a new term to a given binding constraint + + Args: + - `uuid`: The UUID of the study. + - `binding_constraint_id`: The binding constraint ID. + - `term`: The term to create. + """ logger.info( f"Add constraint term {term.id} to {binding_constraint_id} for study {uuid}", extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params) - return study_service.binding_constraint_manager.create_constraint_term(study, binding_constraint_id, term) + return study_service.binding_constraint_manager.create_constraint_terms(study, binding_constraint_id, [term]) + + @bp.post( + "/studies/{uuid}/bindingconstraints/{binding_constraint_id}/terms", + tags=[APITag.study_data], + summary="Create terms for a given binding constraint", + ) + def add_constraint_terms( + uuid: str, + binding_constraint_id: str, + terms: t.Sequence[ConstraintTerm], + current_user: JWTUser = Depends(auth.get_current_user), + ) -> None: + """ + Append new terms to a given binding constraint + + Args: + - `uuid`: The UUID of the study. + - `binding_constraint_id`: The binding constraint ID. + - `terms`: The list of terms to create. 
+ """ + logger.info( + f"Adding constraint terms to {binding_constraint_id} for study {uuid}", + extra={"user": current_user.id}, + ) + params = RequestParameters(user=current_user) + study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params) + return study_service.binding_constraint_manager.create_constraint_terms(study, binding_constraint_id, terms) @bp.put( "/studies/{uuid}/bindingconstraints/{binding_constraint_id}/term", @@ -1223,14 +1257,49 @@ def update_constraint_term( binding_constraint_id: str, term: ConstraintTerm, current_user: JWTUser = Depends(auth.get_current_user), - ) -> t.Any: + ) -> None: + """ + Update a term for a given binding constraint + + Args: + - `uuid`: The UUID of the study. + - `binding_constraint_id`: The binding constraint ID. + - `term`: The term to update. + """ logger.info( f"Update constraint term {term.id} from {binding_constraint_id} for study {uuid}", extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params) - return study_service.binding_constraint_manager.update_constraint_term(study, binding_constraint_id, term) + return study_service.binding_constraint_manager.update_constraint_terms(study, binding_constraint_id, [term]) + + @bp.put( + "/studies/{uuid}/bindingconstraints/{binding_constraint_id}/terms", + tags=[APITag.study_data], + summary="Update terms for a given binding constraint", + ) + def update_constraint_terms( + uuid: str, + binding_constraint_id: str, + terms: t.Sequence[ConstraintTerm], + current_user: JWTUser = Depends(auth.get_current_user), + ) -> None: + """ + Update several terms for a given binding constraint + + Args: + - `uuid`: The UUID of the study. + - `binding_constraint_id`: The binding constraint ID. + - `terms`: The list of terms to update. 
+ """ + logger.info( + f"Updating constraint terms from {binding_constraint_id} for study {uuid}", + extra={"user": current_user.id}, + ) + params = RequestParameters(user=current_user) + study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params) + return study_service.binding_constraint_manager.update_constraint_terms(study, binding_constraint_id, terms) @bp.delete( "/studies/{uuid}/bindingconstraints/{binding_constraint_id}/term/{term_id}", diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 3ab8134941..8b4933a3eb 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -1,6 +1,36 @@ Antares Web Changelog ===================== +v2.17 (2024-05-15) +------------------ + +Support for evolutions relating to studies in versions 8.7: +- Scenarized RHS for binding constraints, +- Thermal cluster new properties (cost generation mode, efficiency, variable OM cost) + +Support for evolutions relating to studies in versions 8.8: +- Short-term storage: add `enabled` property +- Experimental "MILP" mode (using launcher options) + +### Features + +* **bc:** add endpoint for multiple terms edition [`#2020`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2020) +* **table-mode:** add missing properties for v8.6 and 8.7 [`#1643`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1643) +* **ui-table-mode:** translate table types in add/edit modal + + +### Bug Fixes + +* **bc:** handle undefined v8.3 fields [`#2026`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2026) +* **table-mode:** hide `adequacy_patch_mode` column from table-mode before v8.3 [`#2022`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2022) +* **ui-common:** allow only import of TSV file in `MatrixInput` [`#2027`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2027) +* **ui-settings:** prevent false duplicates on group form updates [`#1998`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1998) +* **ui-table-mode:** reset 'column' field when 
'type' field change in create/update modal +* **ui-table-mode:** unable to edit tables with old types +* **ui-table-mode:** add missing "ST Storage" in Table Mode template [`#2016`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2016) +* **download**: improve performance of Excel file download + + v2.16.8 (2024-04-19) -------------------- diff --git a/requirements-test.txt b/requirements-test.txt index 10e44592a1..8e408b2677 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,4 +1,8 @@ -r requirements.txt checksumdir~=1.2.0 pytest~=6.2.5 -pytest-cov~=4.0.0 \ No newline at end of file +pytest-cov~=4.0.0 + +# In this version DataFrame conversion to Excel is done using 'xlsxwriter' library. +# But Excel files reading is done using 'openpyxl' library, during testing only. +openpyxl~=3.1.2 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 40373fbc18..835af45e10 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,7 +13,6 @@ Jinja2~=3.0.3 jsonref~=0.2 MarkupSafe~=2.0.1 numpy~=1.22.1 -openpyxl~=3.1.2 pandas~=1.4.0 paramiko~=2.12.0 plyer~=2.0.0 @@ -28,5 +27,7 @@ redis~=4.1.2 requests~=2.27.1 SQLAlchemy~=1.4.46 starlette~=0.17.1 +tables typing_extensions~=4.7.1 -uvicorn[standard]~=0.15.0 \ No newline at end of file +uvicorn[standard]~=0.15.0 +xlsxwriter~=3.2.0 diff --git a/scripts/package_antares_web.sh b/scripts/package_antares_web.sh index 21008c15f8..8b1999cfb9 100755 --- a/scripts/package_antares_web.sh +++ b/scripts/package_antares_web.sh @@ -9,7 +9,7 @@ set -e ANTARES_SOLVER_VERSION="8.8" -ANTARES_SOLVER_FULL_VERSION="8.8.3" +ANTARES_SOLVER_FULL_VERSION="8.8.4" ANTARES_SOLVER_VERSION_INT="880" SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd -P) diff --git a/setup.py b/setup.py index ce3b1690ea..76e6c3becb 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name="AntaREST", - version="2.16.8", + version="2.17", description="Antares Server", 
long_description=Path("README.md").read_text(encoding="utf-8"), long_description_content_type="text/markdown", diff --git a/sonar-project.properties b/sonar-project.properties index ed0c9da1c1..56212f62f5 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -6,5 +6,5 @@ sonar.exclusions=antarest/gui.py,antarest/main.py sonar.python.coverage.reportPaths=coverage.xml sonar.python.version=3.8 sonar.javascript.lcov.reportPaths=webapp/coverage/lcov.info -sonar.projectVersion=2.16.8 +sonar.projectVersion=2.17 sonar.coverage.exclusions=antarest/gui.py,antarest/main.py,antarest/singleton_services.py,antarest/worker/archive_worker_service.py,webapp/**/* \ No newline at end of file diff --git a/tests/integration/raw_studies_blueprint/test_download_matrices.py b/tests/integration/raw_studies_blueprint/test_download_matrices.py index ca2c501374..0fcf2683c5 100644 --- a/tests/integration/raw_studies_blueprint/test_download_matrices.py +++ b/tests/integration/raw_studies_blueprint/test_download_matrices.py @@ -160,7 +160,9 @@ def test_download_matrices(self, client: TestClient, user_access_token: str, stu ) assert res.status_code == 200 # noinspection SpellCheckingInspection - assert res.headers["content-type"] == "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" + assert res.headers["content-type"] == ( + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet; charset=utf-8" + ) # load into dataframe # noinspection PyTypeChecker diff --git a/tests/integration/study_data_blueprint/test_binding_constraints.py b/tests/integration/study_data_blueprint/test_binding_constraints.py index 29394c3b2f..fff973ae20 100644 --- a/tests/integration/study_data_blueprint/test_binding_constraints.py +++ b/tests/integration/study_data_blueprint/test_binding_constraints.py @@ -375,17 +375,15 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st # Update constraint cluster term with invalid id res = client.put( 
f"/v1/studies/{study_id}/bindingconstraints/{bc_id}/term", - json={ - "id": f"{area1_id}.!!Invalid#cluster%%", - "weight": 4, - }, + json={"id": f"{area1_id}.!!invalid#cluster%%", "weight": 4}, headers=user_headers, ) assert res.status_code == 404, res.json() - assert res.json() == { - "description": f"{study_id}", - "exception": "ConstraintIdNotFoundError", - } + exception = res.json()["exception"] + description = res.json()["description"] + assert exception == "ConstraintTermNotFound" + assert bc_id in description + assert f"{area1_id}.!!invalid#cluster%%" in description # Update constraint cluster term with empty data res = client.put( @@ -683,6 +681,19 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud ) assert res.status_code == 200, res.json() + # Create a cluster in area1 + res = client.post( + f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal", + headers=admin_headers, + json={ + "name": "Cluster 1", + "group": "Nuclear", + }, + ) + assert res.status_code == 200, res.json() + cluster_id = res.json()["id"] + assert cluster_id == "Cluster 1" + # ============================= # CREATION # ============================= @@ -744,34 +755,103 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud # CONSTRAINT TERM MANAGEMENT # ============================= - # Add binding constraint link term + # Add binding constraint terms res = client.post( - f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/term", - json={ - "weight": 1, - "offset": 2.5, + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/terms", + json=[ + {"weight": 1, "offset": 2, "data": {"area1": area1_id, "area2": area2_id}}, + {"weight": 1, "offset": 2, "data": {"area": area1_id, "cluster": cluster_id}}, + ], + headers=admin_headers, + ) + assert res.status_code == 200, res.json() + + # Attempt to add a term with missing data + res = client.post( + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/terms", 
+ json=[{"weight": 1, "offset": 2}], + headers=admin_headers, + ) + assert res.status_code == 422, res.json() + exception = res.json()["exception"] + description = res.json()["description"] + assert exception == "InvalidConstraintTerm" + assert bc_id_w_group in description, "Error message should contain the binding constraint ID" + assert "term 'data' is missing" in description, "Error message should indicate the missing field" + + # Attempt to add a duplicate term + res = client.post( + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/terms", + json=[{"weight": 99, "offset": 0, "data": {"area1": area1_id, "area2": area2_id}}], + headers=admin_headers, + ) + assert res.status_code == 409, res.json() + exception = res.json()["exception"] + description = res.json()["description"] + assert exception == "DuplicateConstraintTerm" + assert bc_id_w_group in description, "Error message should contain the binding constraint ID" + assert f"{area1_id}%{area2_id}" in description, "Error message should contain the duplicate term ID" + + # Get binding constraints list to check added terms + res = client.get( + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}", + headers=admin_headers, + ) + assert res.status_code == 200, res.json() + binding_constraint = res.json() + constraint_terms = binding_constraint["terms"] + expected = [ + { "data": {"area1": area1_id, "area2": area2_id}, + "id": f"{area1_id}%{area2_id}", + "offset": 2, + "weight": 1.0, + }, + { + "data": {"area": area1_id, "cluster": cluster_id.lower()}, + "id": f"{area1_id}.{cluster_id.lower()}", + "offset": 2, + "weight": 1.0, }, + ] + assert constraint_terms == expected + + # Update binding constraint terms + res = client.put( + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/terms", + json=[ + {"id": f"{area1_id}%{area2_id}", "weight": 4.4, "offset": 1}, + { + "id": f"{area1_id}.{cluster_id}", + "weight": 5.1, + "data": {"area": area1_id, "cluster": cluster_id}, + }, + ], 
headers=admin_headers, ) assert res.status_code == 200, res.json() - # Get binding constraints list to check added term + # Asserts terms were updated res = client.get( f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}", headers=admin_headers, ) assert res.status_code == 200, res.json() binding_constraint = res.json() - assert binding_constraint["group"] == "specific_grp" # asserts the group wasn't altered constraint_terms = binding_constraint["terms"] expected = [ { "data": {"area1": area1_id, "area2": area2_id}, "id": f"{area1_id}%{area2_id}", - "offset": 2, # asserts the offset has been rounded - "weight": 1.0, - } + "offset": 1, + "weight": 4.4, + }, + { + "data": {"area": area1_id, "cluster": cluster_id.lower()}, + "id": f"{area1_id}.{cluster_id.lower()}", + "offset": None, + "weight": 5.1, + }, ] assert constraint_terms == expected diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index 15536b8426..45ca2cc961 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -1,9 +1,14 @@ +import typing as t + import pytest from starlette.testclient import TestClient from antarest.core.tasks.model import TaskStatus from tests.integration.utils import wait_task_completion +# noinspection SpellCheckingInspection +POLLUTANTS_860 = ("nh3", "nmvoc", "nox", "op1", "op2", "op3", "op4", "op5", "pm10", "pm25", "pm5", "so2") + # noinspection SpellCheckingInspection @pytest.mark.unit_test @@ -15,11 +20,9 @@ class TestTableMode: which contains the following areas: ["de", "es", "fr", "it"]. 
""" + @pytest.mark.parametrize("study_version", [0, 810, 830, 860, 870, 880]) def test_lifecycle__nominal( - self, - client: TestClient, - user_access_token: str, - study_id: str, + self, client: TestClient, user_access_token: str, study_id: str, study_version: int ) -> None: user_headers = {"Authorization": f"Bearer {user_access_token}"} @@ -28,30 +31,18 @@ def test_lifecycle__nominal( # or in version 8.6 for short-term storage and that the renewable clusters are enabled # in the study configuration. - # Upgrade the study to version 8.6 - res = client.put( - f"/v1/studies/{study_id}/upgrade", - headers={"Authorization": f"Bearer {user_access_token}"}, - params={"target_version": 860}, - ) - assert res.status_code == 200, res.json() + # Upgrade the study to the desired version + if study_version: + res = client.put( + f"/v1/studies/{study_id}/upgrade", + headers={"Authorization": f"Bearer {user_access_token}"}, + params={"target_version": study_version}, + ) + assert res.status_code == 200, res.json() - task_id = res.json() - task = wait_task_completion(client, user_access_token, task_id) - assert task.status == TaskStatus.COMPLETED, task - - # Parameter 'renewable-generation-modelling' must be set to 'clusters' instead of 'aggregated'. 
- # The `enr_modelling` value must be set to "clusters" instead of "aggregated" - args = { - "target": "settings/generaldata/other preferences", - "data": {"renewable-generation-modelling": "clusters"}, - } - res = client.post( - f"/v1/studies/{study_id}/commands", - headers={"Authorization": f"Bearer {user_access_token}"}, - json=[{"action": "update_config", "args": args}], - ) - assert res.status_code == 200, res.json() + task_id = res.json() + task = wait_task_completion(client, user_access_token, task_id) + assert task.status == TaskStatus.COMPLETED, task # Table Mode - Area # ================= @@ -79,26 +70,29 @@ def test_lifecycle__nominal( "adequacyPatchMode", } + _de_values = { + "averageUnsuppliedEnergyCost": 3456, + "dispatchableHydroPower": False, + "filterSynthesis": "daily, monthly", # not changed + "filterYearByYear": "annual, weekly", + } + _es_values = {"spreadSpilledEnergyCost": None} # not changed + + if study_version >= 830: + _es_values["adequacyPatchMode"] = "inside" + res = client.put( f"/v1/studies/{study_id}/table-mode/areas", headers=user_headers, json={ - "de": { - "averageUnsuppliedEnergyCost": 3456, - "dispatchableHydroPower": False, - "filterSynthesis": "daily, monthly", # not changed - "filterYearByYear": "annual, weekly", - }, - "es": { - "adequacyPatchMode": "inside", - "spreadSpilledEnergyCost": None, # not changed - }, + "de": _de_values, + "es": _es_values, }, ) assert res.status_code == 200, res.json() + expected_areas: t.Dict[str, t.Dict[str, t.Any]] expected_areas = { "de": { - "adequacyPatchMode": "outside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3456, "dispatchableHydroPower": False, @@ -110,7 +104,6 @@ def test_lifecycle__nominal( "spreadUnsuppliedEnergyCost": 0, }, "es": { - "adequacyPatchMode": "inside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3000, "dispatchableHydroPower": True, @@ -122,7 +115,6 @@ def test_lifecycle__nominal( "spreadUnsuppliedEnergyCost": 0, }, "fr": { - 
"adequacyPatchMode": "outside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3000, "dispatchableHydroPower": True, @@ -134,7 +126,6 @@ def test_lifecycle__nominal( "spreadUnsuppliedEnergyCost": 0, }, "it": { - "adequacyPatchMode": "outside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3000, "dispatchableHydroPower": True, @@ -146,6 +137,13 @@ def test_lifecycle__nominal( "spreadUnsuppliedEnergyCost": 0, }, } + + if study_version >= 830: + expected_areas["de"]["adequacyPatchMode"] = "outside" + expected_areas["es"]["adequacyPatchMode"] = "inside" + expected_areas["fr"]["adequacyPatchMode"] = "outside" + expected_areas["it"]["adequacyPatchMode"] = "outside" + actual = res.json() assert actual == expected_areas @@ -319,22 +317,19 @@ def test_lifecycle__nominal( "variableOMCost", } + _solar_values = {"group": "Other 2", "nominalCapacity": 500000, "unitCount": 17} + _wind_on_values = {"group": "Nuclear", "nominalCapacity": 314159, "unitCount": 15, "co2": 123} + if study_version >= 860: + _solar_values["so2"] = 8.25 + if study_version >= 870: + _solar_values.update({"costGeneration": "useCostTimeseries", "efficiency": 87, "variableOMCost": -12.5}) + res = client.put( f"/v1/studies/{study_id}/table-mode/thermals", headers=user_headers, json={ - "de / 01_solar": { - "group": "Other 2", - "nominalCapacity": 500000, - "so2": 8.25, - "unitCount": 17, - }, - "de / 02_wind_on": { - "group": "Nuclear", - "nominalCapacity": 314159, - "co2": 123, - "unitCount": 15, - }, + "de / 01_solar": _solar_values, + "de / 02_wind_on": _wind_on_values, }, ) assert res.status_code == 200, res.json() @@ -357,19 +352,7 @@ def test_lifecycle__nominal( "minStablePower": 0, "minUpTime": 1, "mustRun": False, - "nh3": 0, - "nmvoc": 0, "nominalCapacity": 500000, - "nox": 0, - "op1": 0, - "op2": 0, - "op3": 0, - "op4": 0, - "op5": 0, - "pm10": 0, - "pm25": 0, - "pm5": 0, - "so2": 8.25, "spinning": 0, "spreadCost": 0, "startupCost": 0, @@ -396,19 +379,7 @@ def 
test_lifecycle__nominal( "minStablePower": 0, "minUpTime": 1, "mustRun": False, - "nh3": 0, - "nmvoc": 0, "nominalCapacity": 314159, - "nox": 0, - "op1": 0, - "op2": 0, - "op3": 0, - "op4": 0, - "op5": 0, - "pm10": 0, - "pm25": 0, - "pm5": 0, - "so2": 0, "spinning": 0, "spreadCost": 0, "startupCost": 0, @@ -418,6 +389,22 @@ def test_lifecycle__nominal( "volatilityPlanned": 0, }, } + + if study_version >= 860: + _values = dict.fromkeys(POLLUTANTS_860, 0) + expected_thermals["de / 02_wind_on"].update(_values) + expected_thermals["de / 01_solar"].update(_values, **{"so2": 8.25}) + else: + _values = dict.fromkeys(POLLUTANTS_860) + expected_thermals["de / 02_wind_on"].update(_values) + expected_thermals["de / 01_solar"].update(_values) + + if study_version >= 870: + _values = {"costGeneration": "SetManually", "efficiency": 100, "variableOMCost": 0} + expected_thermals["de / 02_wind_on"].update(_values) + _values = {"costGeneration": "useCostTimeseries", "efficiency": 87, "variableOMCost": -12.5} + expected_thermals["de / 01_solar"].update(_values) + assert res.json()["de / 01_solar"] == expected_thermals["de / 01_solar"] assert res.json()["de / 02_wind_on"] == expected_thermals["de / 02_wind_on"] @@ -427,340 +414,376 @@ def test_lifecycle__nominal( params={"columns": ",".join(["group", "unitCount", "nominalCapacity", "so2"])}, ) assert res.status_code == 200, res.json() + expected: t.Dict[str, t.Dict[str, t.Any]] expected = { - "de / 01_solar": {"group": "Other 2", "nominalCapacity": 500000, "so2": 8.25, "unitCount": 17}, - "de / 02_wind_on": {"group": "Nuclear", "nominalCapacity": 314159, "so2": 0, "unitCount": 15}, - "de / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "de / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "de / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "de / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, 
"so2": 0, "unitCount": 1}, - "de / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "de / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "de / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 09_hydro_pump": {"group": "Other 1", 
"nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 01_solar": {"group": "Other 2", "nominalCapacity": 500000, "unitCount": 17}, + "de / 02_wind_on": {"group": "Nuclear", "nominalCapacity": 314159, "unitCount": 15}, + "de / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "de / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "de / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "de / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "de / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "de / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "de / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 04_res": {"group": "Other 1", 
"nominalCapacity": 1000000, "unitCount": 1}, + "es / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 
1000000, "unitCount": 1}, } + if study_version >= 860: + for key in expected: + expected[key]["so2"] = 0 + expected["de / 01_solar"]["so2"] = 8.25 + actual = res.json() assert actual == expected # Table Mode - Renewable Clusters # =============================== - # Prepare data for renewable clusters tests - generators_by_country = { - "fr": { - "La Rochelle": { - "name": "La Rochelle", - "group": "solar pv", - "nominalCapacity": 2.1, - "unitCount": 1, - "tsInterpretation": "production-factor", - }, - "Oleron": { - "name": "Oleron", - "group": "wind offshore", - "nominalCapacity": 15, - "unitCount": 70, - "tsInterpretation": "production-factor", + # only concerns studies after v8.1 + if study_version >= 810: + # Parameter 'renewable-generation-modelling' must be set to 'clusters' instead of 'aggregated'. + # The `enr_modelling` value must be set to "clusters" instead of "aggregated" + args = { + "target": "settings/generaldata/other preferences", + "data": {"renewable-generation-modelling": "clusters"}, + } + res = client.post( + f"/v1/studies/{study_id}/commands", + headers={"Authorization": f"Bearer {user_access_token}"}, + json=[{"action": "update_config", "args": args}], + ) + assert res.status_code == 200, res.json() + + # Prepare data for renewable clusters tests + generators_by_country = { + "fr": { + "La Rochelle": { + "name": "La Rochelle", + "group": "solar pv", + "nominalCapacity": 2.1, + "unitCount": 1, + "tsInterpretation": "production-factor", + }, + "Oleron": { + "name": "Oleron", + "group": "wind offshore", + "nominalCapacity": 15, + "unitCount": 70, + "tsInterpretation": "production-factor", + }, + "Dieppe": { + "name": "Dieppe", + "group": "wind offshore", + "nominalCapacity": 8, + "unitCount": 62, + "tsInterpretation": "power-generation", + }, }, - "Dieppe": { - "name": "Dieppe", - "group": "wind offshore", - "nominalCapacity": 8, - "unitCount": 62, - "tsInterpretation": "power-generation", + "it": { + "Sicile": { + "name": "Sicile", + "group": 
"solar pv", + "nominalCapacity": 1.8, + "unitCount": 1, + "tsInterpretation": "production-factor", + }, + "Sardaigne": { + "name": "Sardaigne", + "group": "wind offshore", + "nominalCapacity": 12, + "unitCount": 86, + "tsInterpretation": "power-generation", + }, + "Pouilles": { + "name": "Pouilles", + "enabled": False, + "group": "wind offshore", + "nominalCapacity": 11, + "unitCount": 40, + "tsInterpretation": "power-generation", + }, }, - }, - "it": { - "Sicile": { - "name": "Sicile", - "group": "solar pv", - "nominalCapacity": 1.8, - "unitCount": 1, - "tsInterpretation": "production-factor", - }, - "Sardaigne": { - "name": "Sardaigne", - "group": "wind offshore", - "nominalCapacity": 12, - "unitCount": 86, - "tsInterpretation": "power-generation", + } + + for area_id, generators in generators_by_country.items(): + for generator_id, generator in generators.items(): + res = client.post( + f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + headers=user_headers, + json=generator, + ) + res.raise_for_status() + + # Get the schema of the renewables table + res = client.get( + "/v1/table-schema/renewables", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + # read-only fields + "id", + "name", + # Renewables fields + "group", + "tsInterpretation", + "enabled", + "unitCount", + "nominalCapacity", + } + + # Update some generators using the table mode + res = client.put( + f"/v1/studies/{study_id}/table-mode/renewables", + headers=user_headers, + json={ + "fr / Dieppe": {"enabled": False}, + "fr / La Rochelle": {"enabled": True, "nominalCapacity": 3.1, "unitCount": 2}, + "it / Pouilles": {"group": "Wind Onshore"}, }, - "Pouilles": { - "name": "Pouilles", - "enabled": False, - "group": "wind offshore", - "nominalCapacity": 11, - "unitCount": 40, - "tsInterpretation": "power-generation", - }, - }, - } - - for area_id, generators in generators_by_country.items(): - for 
generator_id, generator in generators.items(): - res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", - headers=user_headers, - json=generator, - ) - res.raise_for_status() - - # Get the schema of the renewables table - res = client.get( - "/v1/table-schema/renewables", - headers=user_headers, - ) - assert res.status_code == 200, res.json() - actual = res.json() - assert set(actual["properties"]) == { - # read-only fields - "id", - "name", - # Renewables fields - "group", - "tsInterpretation", - "enabled", - "unitCount", - "nominalCapacity", - } - - # Update some generators using the table mode - res = client.put( - f"/v1/studies/{study_id}/table-mode/renewables", - headers=user_headers, - json={ - "fr / Dieppe": {"enabled": False}, - "fr / La Rochelle": {"enabled": True, "nominalCapacity": 3.1, "unitCount": 2}, - "it / Pouilles": {"group": "Wind Onshore"}, - }, - ) - assert res.status_code == 200, res.json() - - res = client.get( - f"/v1/studies/{study_id}/table-mode/renewables", - headers=user_headers, - params={"columns": ",".join(["group", "enabled", "unitCount", "nominalCapacity"])}, - ) - assert res.status_code == 200, res.json() - expected = { - "fr / Dieppe": {"enabled": False, "group": "Wind Offshore", "nominalCapacity": 8, "unitCount": 62}, - "fr / La Rochelle": {"enabled": True, "group": "Solar PV", "nominalCapacity": 3.1, "unitCount": 2}, - "fr / Oleron": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 15, "unitCount": 70}, - "it / Pouilles": {"enabled": False, "group": "Wind Onshore", "nominalCapacity": 11, "unitCount": 40}, - "it / Sardaigne": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 12, "unitCount": 86}, - "it / Sicile": {"enabled": True, "group": "Solar PV", "nominalCapacity": 1.8, "unitCount": 1}, - } - actual = res.json() - assert actual == expected + ) + assert res.status_code == 200, res.json() + + res = client.get( + f"/v1/studies/{study_id}/table-mode/renewables", + 
headers=user_headers, + params={"columns": ",".join(["group", "enabled", "unitCount", "nominalCapacity"])}, + ) + assert res.status_code == 200, res.json() + expected = { + "fr / Dieppe": {"enabled": False, "group": "Wind Offshore", "nominalCapacity": 8, "unitCount": 62}, + "fr / La Rochelle": {"enabled": True, "group": "Solar PV", "nominalCapacity": 3.1, "unitCount": 2}, + "fr / Oleron": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 15, "unitCount": 70}, + "it / Pouilles": {"enabled": False, "group": "Wind Onshore", "nominalCapacity": 11, "unitCount": 40}, + "it / Sardaigne": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 12, "unitCount": 86}, + "it / Sicile": {"enabled": True, "group": "Solar PV", "nominalCapacity": 1.8, "unitCount": 1}, + } + actual = res.json() + assert actual == expected # Table Mode - Short Term Storage # =============================== - # Get the schema of the short-term storages table - res = client.get( - "/v1/table-schema/st-storages", - headers=user_headers, - ) - assert res.status_code == 200, res.json() - actual = res.json() - assert set(actual["properties"]) == { - # read-only fields - "id", - "name", - # Short-term storage fields - "enabled", # since v8.8 - "group", - "injectionNominalCapacity", - "withdrawalNominalCapacity", - "reservoirCapacity", - "efficiency", - "initialLevel", - "initialLevelOptim", - } - - # Prepare data for short-term storage tests - storage_by_country = { - "fr": { - "siemens": { - "name": "Siemens", - "group": "battery", - "injectionNominalCapacity": 1500, - "withdrawalNominalCapacity": 1500, - "reservoirCapacity": 1500, + # only concerns studies after v8.6 + if study_version >= 860: + # Get the schema of the short-term storages table + res = client.get( + "/v1/table-schema/st-storages", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + # read-only fields + "id", + "name", + # Short-term 
storage fields + "enabled", # since v8.8 + "group", + "injectionNominalCapacity", + "withdrawalNominalCapacity", + "reservoirCapacity", + "efficiency", + "initialLevel", + "initialLevelOptim", + } + + # Prepare data for short-term storage tests + storage_by_country = { + "fr": { + "siemens": { + "name": "Siemens", + "group": "battery", + "injectionNominalCapacity": 1500, + "withdrawalNominalCapacity": 1500, + "reservoirCapacity": 1500, + "initialLevel": 0.5, + "initialLevelOptim": False, + }, + "tesla": { + "name": "Tesla", + "group": "battery", + "injectionNominalCapacity": 1200, + "withdrawalNominalCapacity": 1200, + "reservoirCapacity": 1200, + "initialLevelOptim": True, + }, + }, + "it": { + "storage3": { + "name": "storage3", + "group": "psp_open", + "injectionNominalCapacity": 1234, + "withdrawalNominalCapacity": 1020, + "reservoirCapacity": 1357, + "initialLevel": 1, + "initialLevelOptim": False, + }, + "storage4": { + "name": "storage4", + "group": "psp_open", + "injectionNominalCapacity": 567, + "withdrawalNominalCapacity": 456, + "reservoirCapacity": 500, + "initialLevelOptim": True, + }, + }, + } + for area_id, storages in storage_by_country.items(): + for storage_id, storage in storages.items(): + res = client.post( + f"/v1/studies/{study_id}/areas/{area_id}/storages", + headers=user_headers, + json=storage, + ) + res.raise_for_status() + + # Update some generators using the table mode + _fr_siemes_values = {"injectionNominalCapacity": 1550, "withdrawalNominalCapacity": 1550} + _fr_tesla_values = {"efficiency": 0.75, "initialLevel": 0.89, "initialLevelOptim": False} + _it_storage3_values = {"group": "Pondage"} + if study_version >= 880: + _it_storage3_values["enabled"] = False + + res = client.put( + f"/v1/studies/{study_id}/table-mode/st-storages", + headers=user_headers, + json={ + "fr / siemens": _fr_siemes_values, + "fr / tesla": _fr_tesla_values, + "it / storage3": _it_storage3_values, + }, + ) + assert res.status_code == 200, res.json() + actual = 
res.json() + expected = { + "fr / siemens": { + # "id": "siemens", + # "name": "Siemens", + "efficiency": 1, + "enabled": None, + "group": "Battery", "initialLevel": 0.5, "initialLevelOptim": False, + "injectionNominalCapacity": 1550, + "reservoirCapacity": 1500, + "withdrawalNominalCapacity": 1550, }, - "tesla": { - "name": "Tesla", - "group": "battery", + "fr / tesla": { + # "id": "tesla", + # "name": "Tesla", + "efficiency": 0.75, + "enabled": None, + "group": "Battery", + "initialLevel": 0.89, + "initialLevelOptim": False, "injectionNominalCapacity": 1200, - "withdrawalNominalCapacity": 1200, "reservoirCapacity": 1200, - "initialLevelOptim": True, + "withdrawalNominalCapacity": 1200, }, - }, - "it": { - "storage3": { - "name": "storage3", - "group": "psp_open", - "injectionNominalCapacity": 1234, - "withdrawalNominalCapacity": 1020, - "reservoirCapacity": 1357, + "it / storage3": { + # "id": "storage3", + # "name": "storage3", + "efficiency": 1, + "enabled": None, + "group": "Pondage", "initialLevel": 1, "initialLevelOptim": False, + "injectionNominalCapacity": 1234, + "reservoirCapacity": 1357, + "withdrawalNominalCapacity": 1020, }, - "storage4": { - "name": "storage4", - "group": "psp_open", + "it / storage4": { + # "id": "storage4", + # "name": "storage4", + "efficiency": 1, + "enabled": None, + "group": "PSP_open", + "initialLevel": 0.5, + "initialLevelOptim": True, "injectionNominalCapacity": 567, + "reservoirCapacity": 500, "withdrawalNominalCapacity": 456, + }, + } + + if study_version >= 880: + for key in expected: + expected[key]["enabled"] = True + expected["it / storage3"]["enabled"] = False + + assert actual == expected + + res = client.get( + f"/v1/studies/{study_id}/table-mode/st-storages", + headers=user_headers, + params={ + "columns": ",".join( + [ + "group", + "injectionNominalCapacity", + "withdrawalNominalCapacity", + "reservoirCapacity", + "unknowColumn", # should be ignored + ] + ), + }, + ) + assert res.status_code == 200, res.json() + 
expected = { + "fr / siemens": { + "group": "Battery", + "injectionNominalCapacity": 1550, + "reservoirCapacity": 1500, + "withdrawalNominalCapacity": 1550, + }, + "fr / tesla": { + "group": "Battery", + "injectionNominalCapacity": 1200, + "reservoirCapacity": 1200, + "withdrawalNominalCapacity": 1200, + }, + "it / storage3": { + "group": "Pondage", + "injectionNominalCapacity": 1234, + "reservoirCapacity": 1357, + "withdrawalNominalCapacity": 1020, + }, + "it / storage4": { + "group": "PSP_open", + "injectionNominalCapacity": 567, "reservoirCapacity": 500, - "initialLevelOptim": True, + "withdrawalNominalCapacity": 456, }, - }, - } - for area_id, storages in storage_by_country.items(): - for storage_id, storage in storages.items(): - res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers=user_headers, - json=storage, - ) - res.raise_for_status() - - # Update some generators using the table mode - res = client.put( - f"/v1/studies/{study_id}/table-mode/st-storages", - headers=user_headers, - json={ - "fr / siemens": {"injectionNominalCapacity": 1550, "withdrawalNominalCapacity": 1550}, - "fr / tesla": {"efficiency": 0.75, "initialLevel": 0.89, "initialLevelOptim": False}, - "it / storage3": {"group": "Pondage"}, - }, - ) - assert res.status_code == 200, res.json() - actual = res.json() - assert actual == { - "fr / siemens": { - # "id": "siemens", - # "name": "Siemens", - "efficiency": 1, - "enabled": None, - "group": "Battery", - "initialLevel": 0.5, - "initialLevelOptim": False, - "injectionNominalCapacity": 1550, - "reservoirCapacity": 1500, - "withdrawalNominalCapacity": 1550, - }, - "fr / tesla": { - # "id": "tesla", - # "name": "Tesla", - "efficiency": 0.75, - "enabled": None, - "group": "Battery", - "initialLevel": 0.89, - "initialLevelOptim": False, - "injectionNominalCapacity": 1200, - "reservoirCapacity": 1200, - "withdrawalNominalCapacity": 1200, - }, - "it / storage3": { - # "id": "storage3", - # "name": "storage3", - 
"efficiency": 1, - "enabled": None, - "group": "Pondage", - "initialLevel": 1, - "initialLevelOptim": False, - "injectionNominalCapacity": 1234, - "reservoirCapacity": 1357, - "withdrawalNominalCapacity": 1020, - }, - "it / storage4": { - # "id": "storage4", - # "name": "storage4", - "efficiency": 1, - "enabled": None, - "group": "PSP_open", - "initialLevel": 0.5, - "initialLevelOptim": True, - "injectionNominalCapacity": 567, - "reservoirCapacity": 500, - "withdrawalNominalCapacity": 456, - }, - } - - res = client.get( - f"/v1/studies/{study_id}/table-mode/st-storages", - headers=user_headers, - params={ - "columns": ",".join( - [ - "group", - "injectionNominalCapacity", - "withdrawalNominalCapacity", - "reservoirCapacity", - "unknowColumn", # should be ignored - ] - ), - }, - ) - assert res.status_code == 200, res.json() - expected = { - "fr / siemens": { - "group": "Battery", - "injectionNominalCapacity": 1550, - "reservoirCapacity": 1500, - "withdrawalNominalCapacity": 1550, - }, - "fr / tesla": { - "group": "Battery", - "injectionNominalCapacity": 1200, - "reservoirCapacity": 1200, - "withdrawalNominalCapacity": 1200, - }, - "it / storage3": { - "group": "Pondage", - "injectionNominalCapacity": 1234, - "reservoirCapacity": 1357, - "withdrawalNominalCapacity": 1020, - }, - "it / storage4": { - "group": "PSP_open", - "injectionNominalCapacity": 567, - "reservoirCapacity": 500, - "withdrawalNominalCapacity": 456, - }, - } - actual = res.json() - assert actual == expected + } + actual = res.json() + assert actual == expected # Table Mode - Binding Constraints # ================================ @@ -819,7 +842,7 @@ def test_lifecycle__nominal( "id", "name", # Binding Constraints fields - "group", + "group", # since v8.7 "enabled", "timeStep", "operator", @@ -831,34 +854,46 @@ def test_lifecycle__nominal( } # Update some binding constraints using the table mode + _bc1_values = {"comments": "Hello World!", "enabled": True} + _bc2_values = {"filterSynthesis": "hourly", 
"filterYearByYear": "hourly", "operator": "both"} + if study_version >= 870: + _bc2_values["group"] = "My BC Group" + res = client.put( f"/v1/studies/{study_id}/table-mode/binding-constraints", headers=user_headers, json={ - "binding constraint 1": {"comments": "Hello World!", "enabled": True}, - "binding constraint 2": {"filterSynthesis": "hourly", "filterYearByYear": "hourly", "operator": "both"}, + "binding constraint 1": _bc1_values, + "binding constraint 2": _bc2_values, }, ) assert res.status_code == 200, res.json() actual = res.json() - assert actual == { + expected_binding = { "binding constraint 1": { "comments": "Hello World!", "enabled": True, - "filterSynthesis": "", - "filterYearByYear": "", "operator": "less", "timeStep": "hourly", }, "binding constraint 2": { "comments": "This is a binding constraint", "enabled": False, - "filterSynthesis": "hourly", - "filterYearByYear": "hourly", "operator": "both", "timeStep": "daily", }, } + if study_version >= 830: + expected_binding["binding constraint 1"]["filterSynthesis"] = "" + expected_binding["binding constraint 1"]["filterYearByYear"] = "" + expected_binding["binding constraint 2"]["filterSynthesis"] = "hourly" + expected_binding["binding constraint 2"]["filterYearByYear"] = "hourly" + + if study_version >= 870: + expected_binding["binding constraint 1"]["group"] = "default" + expected_binding["binding constraint 2"]["group"] = "My BC Group" + + assert actual == expected_binding res = client.get( f"/v1/studies/{study_id}/table-mode/binding-constraints", @@ -866,24 +901,7 @@ def test_lifecycle__nominal( params={"columns": ""}, ) assert res.status_code == 200, res.json() - expected = { - "binding constraint 1": { - "comments": "Hello World!", - "enabled": True, - "filterSynthesis": "", - "filterYearByYear": "", - "operator": "less", - "timeStep": "hourly", - }, - "binding constraint 2": { - "comments": "This is a binding constraint", - "enabled": False, - "filterSynthesis": "hourly", - "filterYearByYear": 
"hourly", - "operator": "both", - "timeStep": "daily", - }, - } + expected = expected_binding actual = res.json() assert actual == expected diff --git a/tests/storage/repository/filesystem/test_folder_node.py b/tests/storage/repository/filesystem/test_folder_node.py index d08360e223..7927927d7e 100644 --- a/tests/storage/repository/filesystem/test_folder_node.py +++ b/tests/storage/repository/filesystem/test_folder_node.py @@ -13,7 +13,7 @@ from antarest.study.storage.rawstudy.model.filesystem.inode import INode from antarest.study.storage.rawstudy.model.filesystem.raw_file_node import RawFileNode from antarest.study.storage.rawstudy.model.filesystem.root.input.areas.list import InputAreasList -from tests.storage.repository.filesystem.utils import TestMiddleNode, TestSubNode +from tests.storage.repository.filesystem.utils import CheckSubNode, TestMiddleNode def build_tree() -> INode[t.Any, t.Any, t.Any]: @@ -24,8 +24,8 @@ def build_tree() -> INode[t.Any, t.Any, t.Any]: context=Mock(), config=config, children={ - "input": TestSubNode(value=100), - "output": TestSubNode(value=200), + "input": CheckSubNode(config, value=100), + "output": CheckSubNode(config, value=200), }, ) diff --git a/tests/storage/repository/filesystem/utils.py b/tests/storage/repository/filesystem/utils.py index 82e0107082..abef9e26e5 100644 --- a/tests/storage/repository/filesystem/utils.py +++ b/tests/storage/repository/filesystem/utils.py @@ -8,7 +8,7 @@ from antarest.study.storage.rawstudy.model.filesystem.inode import TREE, INode -class TestSubNode(INode[int, int, int]): +class CheckSubNode(INode[int, int, int]): def normalize(self) -> None: pass @@ -18,7 +18,8 @@ def denormalize(self) -> None: def build(self, config: FileStudyTreeConfig) -> "TREE": pass - def __init__(self, value: int): + def __init__(self, config: FileStudyTreeConfig, value: int): + super().__init__(config) self.value = value def get_node( diff --git a/webapp/package-lock.json b/webapp/package-lock.json index 
4eac54256e..10711529c7 100644 --- a/webapp/package-lock.json +++ b/webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "antares-web", - "version": "2.16.8", + "version": "2.17", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "antares-web", - "version": "2.16.8", + "version": "2.17", "dependencies": { "@emotion/react": "11.11.1", "@emotion/styled": "11.11.0", diff --git a/webapp/package.json b/webapp/package.json index 0f455c639b..106345bf84 100644 --- a/webapp/package.json +++ b/webapp/package.json @@ -1,6 +1,6 @@ { "name": "antares-web", - "version": "2.16.8", + "version": "2.17", "private": true, "type": "module", "scripts": { diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintFields.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintFields.tsx index 88fe3f2acc..3405540a6e 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintFields.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintFields.tsx @@ -115,7 +115,7 @@ function Fields({ study, constraintId }: Props) { control={control} sx={{ maxWidth: 150 }} /> - {Number(study.version) >= 840 && ( + {Number(study.version) >= 830 && ( setOpenImportDialog(false)} onImport={handleImport} + accept={{ + "text/tsv": [".tsv"], + }} /> )} {openMatrixAsignDialog && ( diff --git a/webapp/src/components/common/dialogs/ImportDialog.tsx b/webapp/src/components/common/dialogs/ImportDialog.tsx index a5e0402edd..e4aec2498f 100644 --- a/webapp/src/components/common/dialogs/ImportDialog.tsx +++ b/webapp/src/components/common/dialogs/ImportDialog.tsx @@ -1,7 +1,7 @@ import { useEffect, useState } from "react"; import * as R from "ramda"; import { Box, LinearProgress, Paper, Typography } from "@mui/material"; -import Dropzone from "react-dropzone"; +import Dropzone, 
{ type Accept } from "react-dropzone"; import { useMountedState } from "react-use"; import { useTranslation } from "react-i18next"; import BasicDialog, { BasicDialogProps } from "./BasicDialog"; @@ -10,6 +10,7 @@ interface Props { open: BasicDialogProps["open"]; title?: string; dropzoneText?: string; + accept?: Accept; onClose: VoidFunction; onImport: ( file: File, @@ -18,7 +19,7 @@ interface Props { } function ImportDialog(props: Props) { - const { open, title, dropzoneText, onClose, onImport } = props; + const { open, title, dropzoneText, accept, onClose, onImport } = props; const [t] = useTranslation(); const [isUploading, setIsUploading] = useState(false); const [uploadProgress, setUploadProgress] = useState(-1); @@ -92,7 +93,12 @@ function ImportDialog(props: Props) { value={uploadProgress} /> ) : ( - + {({ getRootProps, getInputProps }) => (