Commit

rebase from dev
MartinBelthle committed May 17, 2024
2 parents 96de0e2 + 428961e commit 3bb9d7b
Showing 188 changed files with 18,901 additions and 4,734 deletions.
4 changes: 2 additions & 2 deletions antarest/__init__.py
@@ -7,9 +7,9 @@

# Standard project metadata

__version__ = "2.16.7"
__version__ = "2.17"
__author__ = "RTE, Antares Web Team"
__date__ = "2024-03-05"
__date__ = "2024-05-15"
# noinspection SpellCheckingInspection
__credits__ = "(c) Réseau de Transport de l’Électricité (RTE)"

3 changes: 1 addition & 2 deletions antarest/core/configdata/model.py
@@ -2,8 +2,7 @@
from typing import Any, Optional

from pydantic import BaseModel
-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, Sequence, String # type: ignore
-from sqlalchemy.orm import relationship # type: ignore
+from sqlalchemy import Column, Integer, String # type: ignore

from antarest.core.persistence import Base

88 changes: 73 additions & 15 deletions antarest/core/exceptions.py
@@ -1,6 +1,6 @@
import re
+import typing as t
from http import HTTPStatus
-from typing import Any, Optional

from fastapi.exceptions import HTTPException

@@ -158,6 +158,17 @@ def __str__(self) -> str:
        return self.detail


class DuplicateSTStorageId(HTTPException):
    """Exception raised when trying to create a short-term storage with an already existing id."""

    def __init__(self, study_id: str, area_id: str, st_storage_id: str) -> None:
        detail = f"Short term storage '{st_storage_id}' already exists in area '{area_id}'"
        super().__init__(HTTPStatus.CONFLICT, detail)

    def __str__(self) -> str:
        return self.detail


class ThermalClusterMatrixNotFound(MatrixNotFound):
    """Matrix of the thermal cluster is not found (404 Not Found)"""

@@ -304,7 +315,7 @@ def __init__(self) -> None:


class StudyDeletionNotAllowed(HTTPException):
-    def __init__(self, uuid: str, message: Optional[str] = None) -> None:
+    def __init__(self, uuid: str, message: t.Optional[str] = None) -> None:
        msg = f"Study {uuid} (not managed) is not allowed to be deleted"
        if message:
            msg += f"\n{message}"
@@ -355,7 +366,7 @@ def __init__(self, message: str) -> None:
        super().__init__(HTTPStatus.BAD_REQUEST, message)


-class BindingConstraintNotFoundError(HTTPException):
+class BindingConstraintNotFound(HTTPException):
    def __init__(self, message: str) -> None:
        super().__init__(HTTPStatus.NOT_FOUND, message)

@@ -365,11 +376,6 @@ def __init__(self, message: str) -> None:
        super().__init__(HTTPStatus.NOT_FOUND, message)


-class ConstraintAlreadyExistError(HTTPException):
-    def __init__(self, message: str) -> None:
-        super().__init__(HTTPStatus.NOT_FOUND, message)


class DuplicateConstraintName(HTTPException):
    def __init__(self, message: str) -> None:
        super().__init__(HTTPStatus.CONFLICT, message)
@@ -385,20 +391,72 @@ def __init__(self, message: str) -> None:
        super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message)


-class IncoherenceBetweenMatricesLength(HTTPException):
-    def __init__(self, detail: Any) -> None:
-        super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, detail)
+class MatrixWidthMismatchError(HTTPException):
+    def __init__(self, message: str) -> None:
+        super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message)


-class MissingDataError(HTTPException):
+class WrongMatrixHeightError(HTTPException):
    def __init__(self, message: str) -> None:
-        super().__init__(HTTPStatus.NOT_FOUND, message)
+        super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message)


-class ConstraintIdNotFoundError(HTTPException):
-    def __init__(self, message: str) -> None:
class ConstraintTermNotFound(HTTPException):
    """
    Exception raised when a constraint term is not found.
    """

    def __init__(self, binding_constraint_id: str, *ids: str) -> None:
        count = len(ids)
        id_enum = ", ".join(f"'{term}'" for term in ids)
        message = {
            0: f"Constraint terms not found in BC '{binding_constraint_id}'",
            1: f"Constraint term {id_enum} not found in BC '{binding_constraint_id}'",
            2: f"Constraint terms {id_enum} not found in BC '{binding_constraint_id}'",
        }[min(count, 2)]
        super().__init__(HTTPStatus.NOT_FOUND, message)

    def __str__(self) -> str:
        """Return a string representation of the exception."""
        return self.detail


class DuplicateConstraintTerm(HTTPException):
    """
    Exception raised when an attempt is made to create a constraint term which already exists.
    """

    def __init__(self, binding_constraint_id: str, *ids: str) -> None:
        count = len(ids)
        id_enum = ", ".join(f"'{term}'" for term in ids)
        message = {
            0: f"Constraint terms already exist in BC '{binding_constraint_id}'",
            1: f"Constraint term {id_enum} already exists in BC '{binding_constraint_id}'",
            2: f"Constraint terms {id_enum} already exist in BC '{binding_constraint_id}'",
        }[min(count, 2)]
        super().__init__(HTTPStatus.CONFLICT, message)

    def __str__(self) -> str:
        """Return a string representation of the exception."""
        return self.detail


class InvalidConstraintTerm(HTTPException):
    """
    Exception raised when a constraint term is not correctly specified (no term data).
    """

    def __init__(self, binding_constraint_id: str, term_json: str) -> None:
        message = (
            f"Invalid constraint term for binding constraint '{binding_constraint_id}': {term_json},"
            f" term 'data' is missing or empty"
        )
        super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message)

    def __str__(self) -> str:
        """Return a string representation of the exception."""
        return self.detail


class LayerNotFound(HTTPException):
    def __init__(self) -> None:
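Note on the new constraint-term exceptions above: the message now adapts to the number of term ids passed. A minimal sketch of the resulting behaviour, assuming the classes are imported from antarest.core.exceptions (the binding constraint and term ids below are hypothetical):

from http import HTTPStatus

from antarest.core.exceptions import ConstraintTermNotFound, DuplicateConstraintTerm

# A single missing term produces a singular message and a 404 status.
exc = ConstraintTermNotFound("bc_1", "term_x")
assert exc.status_code == HTTPStatus.NOT_FOUND
assert str(exc) == "Constraint term 'term_x' not found in BC 'bc_1'"

# Two or more duplicated terms produce a plural message and a 409 Conflict status.
exc = DuplicateConstraintTerm("bc_1", "term_x", "term_y")
assert exc.status_code == HTTPStatus.CONFLICT
assert str(exc) == "Constraint terms 'term_x', 'term_y' already exist in BC 'bc_1'"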
1 change: 0 additions & 1 deletion antarest/core/filesystem_blueprint.py
@@ -15,7 +15,6 @@
from starlette.responses import PlainTextResponse, StreamingResponse

from antarest.core.config import Config
-from antarest.core.jwt import JWTUser
from antarest.core.utils.web import APITag
from antarest.login.auth import Auth

4 changes: 2 additions & 2 deletions antarest/launcher/web.py
@@ -231,7 +231,7 @@ def get_solver_versions(
"value": "local",
},
},
-)
+),
) -> List[str]:
"""
Get list of supported solver versions defined in the configuration.
@@ -268,7 +268,7 @@ def get_nb_cores(
"value": "local",
},
},
-)
+),
) -> Dict[str, int]:
"""
Retrieve the number of cores of the launcher.
2 changes: 1 addition & 1 deletion antarest/main.py
@@ -61,7 +61,7 @@ class PathType:
from antarest.main import PathType
parser = argparse.ArgumentParser()
-parser.add_argument('--input', type=PathType(file_ok=True, exists=True))
+parser.add_argument("--input", type=PathType(file_ok=True, exists=True))
args = parser.parse_args()
print(args.input)
1 change: 0 additions & 1 deletion antarest/matrixstore/main.py
@@ -1,7 +1,6 @@
from typing import Optional

from fastapi import FastAPI
-from fastapi_jwt_auth.exceptions import AuthJWTException # type: ignore

from antarest.core.config import Config
from antarest.core.filetransfer.service import FileTransferManager
4 changes: 2 additions & 2 deletions antarest/matrixstore/repository.py
@@ -6,8 +6,8 @@
import numpy as np
from filelock import FileLock
from numpy import typing as npt
-from sqlalchemy import and_, exists # type: ignore
-from sqlalchemy.orm import Session, aliased # type: ignore
+from sqlalchemy import exists # type: ignore
+from sqlalchemy.orm import Session # type: ignore

from antarest.core.utils.fastapi_sqlalchemy import db
from antarest.matrixstore.model import Matrix, MatrixContent, MatrixData, MatrixDataSet
2 changes: 1 addition & 1 deletion antarest/study/business/advanced_parameters_management.py
@@ -91,7 +91,7 @@ def check_accuracy_on_correlation(cls, v: str) -> str:
return ""

allowed_values = ["wind", "load", "solar"]
values_list = re.split("\s*,\s*", v.strip())
values_list = re.split(r"\s*,\s*", v.strip())

if len(values_list) != len(set(values_list)):
raise ValueError("Duplicate value")
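The only change in this hunk is the raw-string prefix on the regular expression, which avoids the invalid escape-sequence warning for "\s" while the splitting behaviour stays the same. A small illustrative sketch (the sample input is made up):

import re

# Splits a comma-separated list, tolerating whitespace around the commas.
values_list = re.split(r"\s*,\s*", "wind , load,solar".strip())
assert values_list == ["wind", "load", "solar"]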
94 changes: 94 additions & 0 deletions antarest/study/business/all_optional_meta.py
@@ -0,0 +1,94 @@
import typing as t

import pydantic.fields
import pydantic.main
from pydantic import BaseModel

from antarest.core.utils.string import to_camel_case


class AllOptionalMetaclass(pydantic.main.ModelMetaclass):
    """
    Metaclass that makes all fields of a Pydantic model optional.

    Usage:
        class MyModel(BaseModel, metaclass=AllOptionalMetaclass):
            field1: str
            field2: int
            ...

    Instances of the model can be created even if not all fields are provided during initialization.
    Default values, when provided, are used unless `use_none` is set to `True`.
    """

    def __new__(
        cls: t.Type["AllOptionalMetaclass"],
        name: str,
        bases: t.Tuple[t.Type[t.Any], ...],
        namespaces: t.Dict[str, t.Any],
        use_none: bool = False,
        **kwargs: t.Dict[str, t.Any],
    ) -> t.Any:
        """
        Create a new instance of the metaclass.

        Args:
            name: Name of the class to create.
            bases: Base classes of the class to create (a Pydantic model).
            namespaces: namespace of the class to create that defines the fields of the model.
            use_none: If `True`, the default value of the fields is set to `None`.
                Note that this field is not part of the Pydantic model, but it is an extension.
            **kwargs: Additional keyword arguments used by the metaclass.
        """
        # Modify the annotations of the class (but not of the ancestor classes)
        # in order to make all fields optional.
        # If the current model inherits from another model, the annotations of the ancestor models
        # are not modified, because the fields are already converted to `ModelField`.
        annotations = namespaces.get("__annotations__", {})
        for field_name, field_type in annotations.items():
            if not field_name.startswith("__"):
                # Making already optional fields optional is not a problem (nothing is changed).
                annotations[field_name] = t.Optional[field_type]
        namespaces["__annotations__"] = annotations

        if use_none:
            # Modify the namespace fields to set their default value to `None`.
            for field_name, field_info in namespaces.items():
                if isinstance(field_info, pydantic.fields.FieldInfo):
                    field_info.default = None
                    field_info.default_factory = None

        # Create the class: all annotations are converted into `ModelField`.
        instance = super().__new__(cls, name, bases, namespaces, **kwargs)

        # Modify the inherited fields of the class to make them optional
        # and set their default value to `None`.
        model_field: pydantic.fields.ModelField
        for field_name, model_field in instance.__fields__.items():
            model_field.required = False
            model_field.allow_none = True
            if use_none:
                model_field.default = None
                model_field.default_factory = None
                model_field.field_info.default = None

        return instance


MODEL = t.TypeVar("MODEL", bound=t.Type[BaseModel])


def camel_case_model(model: MODEL) -> MODEL:
    """
    This decorator can be used to modify a model to use camel case aliases.

    Args:
        model: The pydantic model to modify.

    Returns:
        The modified model.
    """
    model.__config__.alias_generator = to_camel_case
    for field_name, field in model.__fields__.items():
        field.alias = to_camel_case(field_name)
    return model
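To show how the two helpers in this new module are intended to be combined, here is a minimal, hypothetical sketch (the model name and its fields are invented for illustration):

from pydantic import BaseModel

from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model


@camel_case_model
class ClusterPropertiesExample(BaseModel, metaclass=AllOptionalMetaclass, use_none=True):
    unit_count: int = 1
    nominal_capacity: float = 0.0


# All fields become optional and are exposed with camelCase aliases.
props = ClusterPropertiesExample(unitCount=2)
assert props.unit_count == 2
assert props.nominal_capacity is None  # use_none=True replaced the declared default with None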