diff --git a/.codecov.yml b/.codecov.yml index 02666df0a13..f4a4f9cbcf4 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -10,10 +10,10 @@ flag_management: statuses: - type: project target: auto - threshold: 1% + threshold: 5% - type: patch target: auto - threshold: 1% + threshold: 5% component_management: @@ -22,7 +22,7 @@ component_management: statuses: - type: project target: auto - threshold: 1% + threshold: 5% branches: - "!master" individual_components: @@ -116,12 +116,12 @@ coverage: project: default: informational: true - threshold: 1% + threshold: 5% patch: default: informational: true - threshold: 1% + threshold: 5% comment: layout: "header,diff,flags,components,footer" diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index aa1efbee7a9..789c552cc81 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -772,7 +772,7 @@ jobs: if: ${{ !cancelled() }} run: ./ci/github/unit-testing/catalog.bash test - name: upload failed tests logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs @@ -879,7 +879,7 @@ jobs: if: ${{ !cancelled() }} run: ./ci/github/unit-testing/datcore-adapter.bash test - name: upload failed tests logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs @@ -930,7 +930,7 @@ jobs: if: ${{ !cancelled() }} run: ./ci/github/unit-testing/director.bash test - name: upload failed tests logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs @@ -981,7 +981,7 @@ jobs: if: ${{ !cancelled() }} run: ./ci/github/unit-testing/director-v2.bash test - name: upload failed tests logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs @@ -1910,7 +1910,7 @@ jobs: - name: test run: ./ci/github/integration-testing/webserver.bash test 01 - name: upload failed tests logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs @@ -1974,7 +1974,7 @@ jobs: - name: test run: ./ci/github/integration-testing/webserver.bash test 02 - name: upload failed tests logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs @@ -2038,7 +2038,7 @@ jobs: - name: test run: ./ci/github/integration-testing/director-v2.bash test 01 - name: upload failed tests logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs @@ -2111,7 +2111,7 @@ jobs: - name: test run: ./ci/github/integration-testing/director-v2.bash test 02 - name: upload failed tests logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs @@ -2177,7 +2177,7 @@ jobs: - name: test run: ./ci/github/integration-testing/dynamic-sidecar.bash test 01 - name: upload failed tests logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs @@ -2241,7 +2241,7 @@ jobs: - name: test run: ./ci/github/integration-testing/simcore-sdk.bash test - name: upload failed tests logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs @@ -2330,7 +2330,7 @@ jobs: - name: test run: 
./ci/github/system-testing/public-api.bash test - name: upload failed tests logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs @@ -2395,7 +2395,7 @@ jobs: name: ${{ github.job }}_services_settings_schemas path: ./services/**/settings-schema.json - name: upload failed tests logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs diff --git a/api/specs/web-server/_admin.py b/api/specs/web-server/_admin.py index 767661a0dfc..87c72ce371f 100644 --- a/api/specs/web-server/_admin.py +++ b/api/specs/web-server/_admin.py @@ -28,7 +28,7 @@ response_model=Envelope[Union[EmailTestFailed, EmailTestPassed]], ) async def test_email( - _test: TestEmail, x_simcore_products_name: str | None = Header(default=None) + _body: TestEmail, x_simcore_products_name: str | None = Header(default=None) ): # X-Simcore-Products-Name ... diff --git a/api/specs/web-server/_groups.py b/api/specs/web-server/_groups.py index 530460c6d8c..6f0f1f1e616 100644 --- a/api/specs/web-server/_groups.py +++ b/api/specs/web-server/_groups.py @@ -4,6 +4,7 @@ # pylint: disable=too-many-arguments +from enum import Enum from typing import Annotated, Any from fastapi import APIRouter, Depends, status @@ -87,19 +88,24 @@ async def delete_group(_path: Annotated[GroupsPathParams, Depends()]): """ +_extra_tags: list[str | Enum] = ["users"] + + @router.get( "/groups/{gid}/users", response_model=Envelope[list[GroupUserGet]], + tags=_extra_tags, ) async def get_all_group_users(_path: Annotated[GroupsPathParams, Depends()]): """ - Gets users in organization groups + Gets users in organization or primary groups """ @router.post( "/groups/{gid}/users", status_code=status.HTTP_204_NO_CONTENT, + tags=_extra_tags, ) async def add_group_user( _path: Annotated[GroupsPathParams, Depends()], @@ -113,6 +119,7 @@ async def add_group_user( @router.get( "/groups/{gid}/users/{uid}", response_model=Envelope[GroupUserGet], + tags=_extra_tags, ) async def get_group_user( _path: Annotated[GroupsUsersPathParams, Depends()], @@ -125,6 +132,7 @@ async def get_group_user( @router.patch( "/groups/{gid}/users/{uid}", response_model=Envelope[GroupUserGet], + tags=_extra_tags, ) async def update_group_user( _path: Annotated[GroupsUsersPathParams, Depends()], @@ -138,6 +146,7 @@ async def update_group_user( @router.delete( "/groups/{gid}/users/{uid}", status_code=status.HTTP_204_NO_CONTENT, + tags=_extra_tags, ) async def delete_group_user( _path: Annotated[GroupsUsersPathParams, Depends()], diff --git a/api/specs/web-server/_licensed_items_checkouts.py b/api/specs/web-server/_licensed_items_checkouts.py new file mode 100644 index 00000000000..cfc51a7c424 --- /dev/null +++ b/api/specs/web-server/_licensed_items_checkouts.py @@ -0,0 +1,57 @@ +""" Helper script to generate OAS automatically +""" + +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +from typing import Annotated + +from _common import as_query +from fastapi import APIRouter, Depends +from models_library.api_schemas_webserver.licensed_items_purchases import ( + LicensedItemPurchaseGet, +) +from models_library.generics import Envelope +from models_library.rest_error import EnvelopedError +from models_library.rest_pagination import Page +from simcore_service_webserver._meta import API_VTAG +from simcore_service_webserver.licenses._exceptions_handlers import 
_TO_HTTP_ERROR_MAP +from simcore_service_webserver.licenses._licensed_items_checkouts_models import ( + LicensedItemCheckoutPathParams, + LicensedItemsCheckoutsListQueryParams, +) +from simcore_service_webserver.wallets._handlers import WalletsPathParams + +router = APIRouter( + prefix=f"/{API_VTAG}", + tags=[ + "licenses", + ], + responses={ + i.status_code: {"model": EnvelopedError} for i in _TO_HTTP_ERROR_MAP.values() + }, +) + + +@router.get( + "/wallets/{wallet_id}/licensed-items-checkouts", + response_model=Page[LicensedItemPurchaseGet], + tags=["wallets"], +) +async def list_licensed_item_checkouts_for_wallet( + _path: Annotated[WalletsPathParams, Depends()], + _query: Annotated[as_query(LicensedItemsCheckoutsListQueryParams), Depends()], +): + ... + + +@router.get( + "/licensed-items-checkouts/{licensed_item_checkout_id}", + response_model=Envelope[LicensedItemPurchaseGet], +) +async def get_licensed_item_checkout( + _path: Annotated[LicensedItemCheckoutPathParams, Depends()], +): + ... diff --git a/api/specs/web-server/_users.py b/api/specs/web-server/_users.py index cb1904f3bb7..89d5eaaba2f 100644 --- a/api/specs/web-server/_users.py +++ b/api/specs/web-server/_users.py @@ -4,32 +4,35 @@ # pylint: disable=too-many-arguments +from enum import Enum from typing import Annotated from fastapi import APIRouter, Depends, status -from models_library.api_schemas_webserver.users import MyProfileGet, MyProfilePatch +from models_library.api_schemas_webserver.users import ( + MyPermissionGet, + MyProfileGet, + MyProfilePatch, + MyTokenCreate, + MyTokenGet, + UserForAdminGet, + UserGet, + UsersForAdminSearchQueryParams, + UsersSearch, +) from models_library.api_schemas_webserver.users_preferences import PatchRequestBody from models_library.generics import Envelope from models_library.user_preferences import PreferenceIdentifier from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.users._handlers import PreUserProfile, _SearchQueryParams +from simcore_service_webserver.users._common.schemas import PreRegisteredUserGet from simcore_service_webserver.users._notifications import ( UserNotification, UserNotificationCreate, UserNotificationPatch, ) -from simcore_service_webserver.users._notifications_handlers import ( - _NotificationPathParams, -) -from simcore_service_webserver.users._schemas import UserProfile -from simcore_service_webserver.users._tokens_handlers import _TokenPathParams -from simcore_service_webserver.users.schemas import ( - PermissionGet, - ThirdPartyToken, - TokenCreate, -) +from simcore_service_webserver.users._notifications_rest import _NotificationPathParams +from simcore_service_webserver.users._tokens_rest import _TokenPathParams -router = APIRouter(prefix=f"/{API_VTAG}", tags=["user"]) +router = APIRouter(prefix=f"/{API_VTAG}", tags=["users"]) @router.get( @@ -44,7 +47,7 @@ async def get_my_profile(): "/me", status_code=status.HTTP_204_NO_CONTENT, ) -async def update_my_profile(_profile: MyProfilePatch): +async def update_my_profile(_body: MyProfilePatch): ... @@ -54,7 +57,7 @@ async def update_my_profile(_profile: MyProfilePatch): deprecated=True, description="Use PATCH instead", ) -async def replace_my_profile(_profile: MyProfilePatch): +async def replace_my_profile(_body: MyProfilePatch): ... 
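# NOTE (editor's sketch, not part of the patch): the `_body` renames above follow the
# convention these OAS-generation stubs use in FastAPI signatures: `_path` for path
# parameters injected via Depends(), `_query` for query models, `_body` for request
# bodies. The underscore prefix marks the argument as intentionally unused; only the
# signature feeds the generated OpenAPI spec. A minimal, self-contained example of
# the pattern (`ItemPathParams` and `ItemUpdate` are hypothetical models, for
# illustration only):
from typing import Annotated

from fastapi import APIRouter, Depends
from pydantic import BaseModel


class ItemPathParams(BaseModel):  # hypothetical path-params model
    item_id: int


class ItemUpdate(BaseModel):  # hypothetical request-body model
    name: str


example_router = APIRouter()


@example_router.patch("/items/{item_id}")
async def update_item(
    _path: Annotated[ItemPathParams, Depends()],  # path params bound as a model
    _body: ItemUpdate,  # request body; unused on purpose in a spec stub
):
    ...  # intentionally empty: only the signature matters for OAS generation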
@@ -63,15 +66,15 @@ async def replace_my_profile(_profile: MyProfilePatch): status_code=status.HTTP_204_NO_CONTENT, ) async def set_frontend_preference( - preference_id: PreferenceIdentifier, # noqa: ARG001 - body_item: PatchRequestBody, # noqa: ARG001 + preference_id: PreferenceIdentifier, + _body: PatchRequestBody, ): ... @router.get( "/me/tokens", - response_model=Envelope[list[ThirdPartyToken]], + response_model=Envelope[list[MyTokenGet]], ) async def list_tokens(): ... @@ -79,18 +82,20 @@ async def list_tokens(): @router.post( "/me/tokens", - response_model=Envelope[ThirdPartyToken], + response_model=Envelope[MyTokenGet], status_code=status.HTTP_201_CREATED, ) -async def create_token(_token: TokenCreate): +async def create_token(_body: MyTokenCreate): ... @router.get( "/me/tokens/{service}", - response_model=Envelope[ThirdPartyToken], + response_model=Envelope[MyTokenGet], ) -async def get_token(_params: Annotated[_TokenPathParams, Depends()]): +async def get_token( + _path: Annotated[_TokenPathParams, Depends()], +): ... @@ -98,7 +103,7 @@ async def get_token(_params: Annotated[_TokenPathParams, Depends()]): "/me/tokens/{service}", status_code=status.HTTP_204_NO_CONTENT, ) -async def delete_token(_params: Annotated[_TokenPathParams, Depends()]): +async def delete_token(_path: Annotated[_TokenPathParams, Depends()]): ... @@ -114,7 +119,9 @@ async def list_user_notifications(): "/me/notifications", status_code=status.HTTP_204_NO_CONTENT, ) -async def create_user_notification(_notification: UserNotificationCreate): +async def create_user_notification( + _body: UserNotificationCreate, +): ... @@ -123,38 +130,57 @@ async def create_user_notification(_notification: UserNotificationCreate): status_code=status.HTTP_204_NO_CONTENT, ) async def mark_notification_as_read( - _params: Annotated[_NotificationPathParams, Depends()], - _notification: UserNotificationPatch, + _path: Annotated[_NotificationPathParams, Depends()], + _body: UserNotificationPatch, ): ... @router.get( "/me/permissions", - response_model=Envelope[list[PermissionGet]], + response_model=Envelope[list[MyPermissionGet]], ) async def list_user_permissions(): ... -@router.get( +# +# USERS public +# + + +@router.post( "/users:search", - response_model=Envelope[list[UserProfile]], - tags=[ - "po", - ], + response_model=Envelope[list[UserGet]], + description="Search among users who are publicly visible to the caller (i.e., me) based on their privacy settings.", ) -async def search_users(_params: Annotated[_SearchQueryParams, Depends()]): +async def search_users(_body: UsersSearch): + ... + + +# +# USERS admin +# + +_extra_tags: list[str | Enum] = ["admin"] + + +@router.get( + "/admin/users:search", + response_model=Envelope[list[UserForAdminGet]], + tags=_extra_tags, +) +async def search_users_for_admin( + _query: Annotated[UsersForAdminSearchQueryParams, Depends()] +): # NOTE: see `Search` in `Common Custom Methods` in https://cloud.google.com/apis/design/custom_methods ... @router.post( - "/users:pre-register", - response_model=Envelope[UserProfile], - tags=[ - "po", - ], + "/admin/users:pre-register", + response_model=Envelope[UserForAdminGet], + tags=_extra_tags, ) -async def pre_register_user(_body: PreUserProfile): +async def pre_register_user_for_admin(_body: PreRegisteredUserGet): ... 
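# NOTE (editor's sketch, not part of the patch): assuming the module path introduced
# by this diff, the new `UsersSearch` input model behaves as follows: `match_` is
# populated through its `match` alias, surrounding whitespace is stripped, and
# `limit` is constrained to the interval 1..50.
from models_library.api_schemas_webserver.users import UsersSearch
from pydantic import ValidationError

search = UsersSearch.model_validate({"match": "  mrpublic ", "limit": 5})
assert search.match_ == "mrpublic"  # alias populated, whitespace stripped

try:
    UsersSearch.model_validate({"match": "x", "limit": 99})  # violates le=50
except ValidationError:
    pass  # rejected by the Interval(ge=1, le=50) constraint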
diff --git a/api/specs/web-server/openapi.py b/api/specs/web-server/openapi.py index 6c3bc639fb4..cfcaf183591 100644 --- a/api/specs/web-server/openapi.py +++ b/api/specs/web-server/openapi.py @@ -38,6 +38,7 @@ "_long_running_tasks", "_licensed_items", "_licensed_items_purchases", + "_licensed_items_checkouts", "_metamodeling", "_nih_sparc", "_nih_sparc_redirections", diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/dict_tools.py b/packages/common-library/src/common_library/dict_tools.py similarity index 63% rename from packages/pytest-simcore/src/pytest_simcore/helpers/dict_tools.py rename to packages/common-library/src/common_library/dict_tools.py index b31123d5ff5..43ef7166308 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/dict_tools.py +++ b/packages/common-library/src/common_library/dict_tools.py @@ -1,9 +1,17 @@ -""" Utils to operate with dicts """ +""" A collection of free functions to manipulate dicts +""" -from copy import deepcopy -from typing import Any, Mapping +from collections.abc import Mapping +from copy import copy, deepcopy +from typing import Any -ConfigDict = dict[str, Any] + +def remap_keys(data: dict, rename: dict[str, str]) -> dict[str, Any]: + """A new dict that renames the keys of a dict while keeping the values unchanged + + NOTE: Does not support renaming of nested keys + """ + return {rename.get(k, k): v for k, v in data.items()} def get_from_dict(obj: Mapping[str, Any], dotted_key: str, default=None) -> Any: @@ -28,10 +36,10 @@ def copy_from_dict( # if include is None: - return deepcopy(data) if deep else data.copy() + return deepcopy(data) if deep else copy(data) if include == ...: - return deepcopy(data) if deep else data.copy() + return deepcopy(data) if deep else copy(data) if isinstance(include, set): return {key: data[key] for key in include} @@ -46,7 +54,7 @@ def copy_from_dict( def update_dict(obj: dict, **updates): for key, update_value in updates.items(): - if callable(update_value): - update_value = update_value(obj[key]) - obj.update({key: update_value}) + obj.update( + {key: update_value(obj[key]) if callable(update_value) else update_value} + ) return obj diff --git a/packages/common-library/src/common_library/unset.py b/packages/common-library/src/common_library/exclude.py similarity index 64% rename from packages/common-library/src/common_library/unset.py rename to packages/common-library/src/common_library/exclude.py index 3d4dfcbc947..6f635dfe643 100644 --- a/packages/common-library/src/common_library/unset.py +++ b/packages/common-library/src/common_library/exclude.py @@ -10,3 +10,7 @@ class UnSet: def as_dict_exclude_unset(**params) -> dict[str, Any]: return {k: v for k, v in params.items() if not isinstance(v, UnSet)} + + +def as_dict_exclude_none(**params) -> dict[str, Any]: + return {k: v for k, v in params.items() if v is not None} diff --git a/packages/common-library/src/common_library/users_enums.py b/packages/common-library/src/common_library/users_enums.py new file mode 100644 index 00000000000..7ebe4a617e9 --- /dev/null +++ b/packages/common-library/src/common_library/users_enums.py @@ -0,0 +1,59 @@ +from enum import Enum +from functools import total_ordering + +_USER_ROLE_TO_LEVEL = { + "ANONYMOUS": 0, + "GUEST": 10, + "USER": 20, + "TESTER": 30, + "PRODUCT_OWNER": 40, + "ADMIN": 100, +} + + +@total_ordering +class UserRole(Enum): + """SORTED enumeration of user roles + + A role defines a set of privileges the user can perform + Roles are sorted from lower to highest privileges + USER is the role 
assigned by default. A user with a higher/lower role is denoted super/infra user + + ANONYMOUS : The user is not logged in + GUEST : Temporary user with very limited access. Mainly used for demos and for a limited amount of time + USER : Registered user. Basic permissions to use the platform [default] + TESTER : Upgraded user. First level of super-user with privileges to test the framework. + Can use everything but does not have an effect on other users or actual data + ADMIN : Framework admin. + + See security_access.py + """ + + ANONYMOUS = "ANONYMOUS" + GUEST = "GUEST" + USER = "USER" + TESTER = "TESTER" + PRODUCT_OWNER = "PRODUCT_OWNER" + ADMIN = "ADMIN" + + @property + def privilege_level(self) -> int: + return _USER_ROLE_TO_LEVEL[self.name] + + def __lt__(self, other: "UserRole") -> bool: + if self.__class__ is other.__class__: + return self.privilege_level < other.privilege_level + return NotImplemented + + +class UserStatus(str, Enum): + # This is a transition state. The user is registered but not confirmed. NOTE that state is optional depending on LOGIN_REGISTRATION_CONFIRMATION_REQUIRED + CONFIRMATION_PENDING = "CONFIRMATION_PENDING" + # This user can now operate the platform + ACTIVE = "ACTIVE" + # This user is inactive because it expired after a trial period + EXPIRED = "EXPIRED" + # This user is inactive because he has been a bad boy + BANNED = "BANNED" + # This user is inactive because it was marked for deletion + DELETED = "DELETED" diff --git a/packages/pytest-simcore/tests/test_helpers_utils_dict.py b/packages/common-library/tests/test_dict_tools.py similarity index 89% rename from packages/pytest-simcore/tests/test_helpers_utils_dict.py rename to packages/common-library/tests/test_dict_tools.py index 9fa34442a99..fb374ff1791 100644 --- a/packages/pytest-simcore/tests/test_helpers_utils_dict.py +++ b/packages/common-library/tests/test_dict_tools.py @@ -3,16 +3,19 @@ # pylint: disable=unused-variable -import json -import sys +from typing import Any import pytest -from pytest_simcore.helpers.dict_tools import copy_from_dict, get_from_dict -from pytest_simcore.helpers.typing_docker import TaskDict +from common_library.dict_tools import ( + copy_from_dict, + get_from_dict, + remap_keys, + update_dict, +) @pytest.fixture -def data(): +def data() -> dict[str, Any]: return { "ID": "3ifd79yhz2vpgu1iz43mf9m2d", "Version": {"Index": 176}, @@ -113,7 +116,20 @@ def data(): } -def test_get_from_dict(data: TaskDict): +def test_remap_keys(): + assert remap_keys({"a": 1, "b": 2}, rename={"a": "A"}) == {"A": 1, "b": 2} + + +def test_update_dict(): + def _increment(x): + return x + 1 + + data = {"a": 1, "b": 2, "c": 3} + + assert update_dict(data, a=_increment, b=42) == {"a": 2, "b": 42, "c": 3} + + +def test_get_from_dict(data: dict[str, Any]): assert get_from_dict(data, "Spec.ContainerSpec.Labels") == { "com.docker.stack.namespace": "master-simcore" } assert get_from_dict(data, "Invalid.Invalid.Invalid", default=42) == 42 -def test_copy_from_dict(data: TaskDict): +def test_copy_from_dict(data: dict[str, Any]): selected_data = copy_from_dict( data, include={ "ID": ..., "Spec": { "ContainerSpec": { "Image", }, }, "Status": { "Timestamp", "State", }, "DesiredState": ..., }, ) - print(json.dumps(selected_data, indent=2)) - assert selected_data["ID"] == data["ID"] assert ( selected_data["Spec"]["ContainerSpec"]["Image"] == data["Spec"]["ContainerSpec"]["Image"] ) assert selected_data["Status"]["State"] == data["Status"]["State"] assert "Message" not in
selected_data["Status"]["State"] - assert "Message" in data["Status"]["State"] - - -if __name__ == "__main__": - # NOTE: use in vscode "Run and Debug" -> select 'Python: Current File' - sys.exit( - pytest.main(["-vv", "-s", "--pdb", "--log-cli-level=WARNING", sys.argv[0]]) - ) + assert "running" in data["Status"]["State"] diff --git a/packages/common-library/tests/test_unset.py b/packages/common-library/tests/test_exclude.py similarity index 59% rename from packages/common-library/tests/test_unset.py rename to packages/common-library/tests/test_exclude.py index 0fece0d466c..78f5712161e 100644 --- a/packages/common-library/tests/test_unset.py +++ b/packages/common-library/tests/test_exclude.py @@ -1,6 +1,6 @@ from typing import Any -from common_library.unset import UnSet, as_dict_exclude_unset +from common_library.exclude import UnSet, as_dict_exclude_none, as_dict_exclude_unset def test_as_dict_exclude_unset(): @@ -13,3 +13,10 @@ def f( assert f(par1="hi") == {"par1": "hi"} assert f(par2=4) == {"par2": 4} assert f(par1="hi", par2=4) == {"par1": "hi", "par2": 4} + + # still expected behavior + assert as_dict_exclude_unset(par1=None) == {"par1": None} + + +def test_as_dict_exclude_none(): + assert as_dict_exclude_none(par1=None) == {} diff --git a/packages/common-library/tests/test_users_enums.py b/packages/common-library/tests/test_users_enums.py new file mode 100644 index 00000000000..e52d66b3f11 --- /dev/null +++ b/packages/common-library/tests/test_users_enums.py @@ -0,0 +1,79 @@ +# pylint: disable=no-value-for-parameter +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +from common_library.users_enums import _USER_ROLE_TO_LEVEL, UserRole + + +def test_user_role_to_level_map_in_sync(): + # If fails, then update _USER_ROLE_TO_LEVEL map + assert set(_USER_ROLE_TO_LEVEL.keys()) == set(UserRole.__members__.keys()) + + +def test_user_roles_compares_to_admin(): + assert UserRole.ANONYMOUS < UserRole.ADMIN + assert UserRole.GUEST < UserRole.ADMIN + assert UserRole.USER < UserRole.ADMIN + assert UserRole.TESTER < UserRole.ADMIN + assert UserRole.PRODUCT_OWNER < UserRole.ADMIN + assert UserRole.ADMIN == UserRole.ADMIN + + +def test_user_roles_compares_to_product_owner(): + assert UserRole.ANONYMOUS < UserRole.PRODUCT_OWNER + assert UserRole.GUEST < UserRole.PRODUCT_OWNER + assert UserRole.USER < UserRole.PRODUCT_OWNER + assert UserRole.TESTER < UserRole.PRODUCT_OWNER + assert UserRole.PRODUCT_OWNER == UserRole.PRODUCT_OWNER + assert UserRole.ADMIN > UserRole.PRODUCT_OWNER + + +def test_user_roles_compares_to_tester(): + assert UserRole.ANONYMOUS < UserRole.TESTER + assert UserRole.GUEST < UserRole.TESTER + assert UserRole.USER < UserRole.TESTER + assert UserRole.TESTER == UserRole.TESTER + assert UserRole.PRODUCT_OWNER > UserRole.TESTER + assert UserRole.ADMIN > UserRole.TESTER + + +def test_user_roles_compares_to_user(): + assert UserRole.ANONYMOUS < UserRole.USER + assert UserRole.GUEST < UserRole.USER + assert UserRole.USER == UserRole.USER + assert UserRole.TESTER > UserRole.USER + assert UserRole.PRODUCT_OWNER > UserRole.USER + assert UserRole.ADMIN > UserRole.USER + + +def test_user_roles_compares_to_guest(): + assert UserRole.ANONYMOUS < UserRole.GUEST + assert UserRole.GUEST == UserRole.GUEST + assert UserRole.USER > UserRole.GUEST + assert UserRole.TESTER > UserRole.GUEST + assert UserRole.PRODUCT_OWNER > UserRole.GUEST + assert UserRole.ADMIN > UserRole.GUEST + + +def test_user_roles_compares_to_anonymous(): + assert 
UserRole.ANONYMOUS == UserRole.ANONYMOUS + assert UserRole.GUEST > UserRole.ANONYMOUS + assert UserRole.USER > UserRole.ANONYMOUS + assert UserRole.TESTER > UserRole.ANONYMOUS + assert UserRole.PRODUCT_OWNER > UserRole.ANONYMOUS + assert UserRole.ADMIN > UserRole.ANONYMOUS + + +def test_user_roles_compares(): + # < and > + assert UserRole.TESTER < UserRole.ADMIN + assert UserRole.ADMIN > UserRole.TESTER + + # >=, == and <= + assert UserRole.TESTER <= UserRole.ADMIN + assert UserRole.ADMIN >= UserRole.TESTER + + assert UserRole.ADMIN <= UserRole.ADMIN + assert UserRole.ADMIN == UserRole.ADMIN diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py index 151611271a4..d26acac0490 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py @@ -79,3 +79,5 @@ class DynamicServiceCreate(ServiceDetails): class GetProjectInactivityResponse(BaseModel): is_inactive: bool + + model_config = ConfigDict(json_schema_extra={"example": {"is_inactive": "false"}}) diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/services.py b/packages/models-library/src/models_library/api_schemas_directorv2/services.py index c797c687fd1..3d2fb51f302 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/services.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/services.py @@ -103,3 +103,7 @@ class ServiceExtras(BaseModel): CHARS_IN_VOLUME_NAME_BEFORE_DIR_NAME: Final[NonNegativeInt] = 89 + + +DYNAMIC_SIDECAR_SERVICE_PREFIX: Final[str] = "dy-sidecar" +DYNAMIC_PROXY_SERVICE_PREFIX: Final[str] = "dy-proxy" diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/licensed_items_checkouts.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/licensed_items_checkouts.py new file mode 100644 index 00000000000..f14117b3439 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/licensed_items_checkouts.py @@ -0,0 +1,47 @@ +from datetime import datetime +from typing import NamedTuple + +from models_library.licensed_items import LicensedItemID +from models_library.products import ProductName +from models_library.resource_tracker_licensed_items_checkouts import ( + LicensedItemCheckoutID, +) +from models_library.services_types import ServiceRunID +from models_library.users import UserID +from models_library.wallets import WalletID +from pydantic import BaseModel, ConfigDict, PositiveInt + + +class LicensedItemCheckoutGet(BaseModel): + licensed_item_checkout_id: LicensedItemCheckoutID + licensed_item_id: LicensedItemID + wallet_id: WalletID + user_id: UserID + product_name: ProductName + service_run_id: ServiceRunID + started_at: datetime + stopped_at: datetime | None + num_of_seats: int + + model_config = ConfigDict( + json_schema_extra={ + "examples": [ + { + "licensed_item_checkout_id": "beb16d18-d57d-44aa-a638-9727fa4a72ef", + "licensed_item_id": "303942ef-6d31-4ba8-afbe-dbb1fce2a953", + "wallet_id": 1, + "user_id": 1, + "product_name": "osparc", + "service_run_id": "run_1", + "started_at": "2023-01-11 13:11:47.293595", + "stopped_at": "2023-01-11 13:11:47.293595", + "num_of_seats": 1, + } + ] + } + ) + + +class LicensedItemsCheckoutsPage(NamedTuple): + items: list[LicensedItemCheckoutGet] + 
total: PositiveInt diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/service_runs.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/service_runs.py index 72001f8b550..e16ba7ce108 100644 --- a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/service_runs.py +++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/service_runs.py @@ -6,14 +6,15 @@ from ..projects import ProjectID from ..projects_nodes_io import NodeID -from ..resource_tracker import CreditTransactionStatus, ServiceRunId, ServiceRunStatus +from ..resource_tracker import CreditTransactionStatus, ServiceRunStatus from ..services import ServiceKey, ServiceVersion +from ..services_types import ServiceRunID from ..users import UserID from ..wallets import WalletID class ServiceRunGet(BaseModel): - service_run_id: ServiceRunId + service_run_id: ServiceRunID wallet_id: WalletID | None wallet_name: str | None user_id: UserID diff --git a/packages/models-library/src/models_library/api_schemas_webserver/_base.py b/packages/models-library/src/models_library/api_schemas_webserver/_base.py index 948c4c9b3ea..a5eaa42c006 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/_base.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/_base.py @@ -29,6 +29,14 @@ class InputSchema(BaseModel): ) +class OutputSchemaWithoutCamelCase(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + extra="ignore", + frozen=True, + ) + + class OutputSchema(BaseModel): model_config = ConfigDict( alias_generator=snake_to_camel, diff --git a/packages/models-library/src/models_library/api_schemas_webserver/groups.py b/packages/models-library/src/models_library/api_schemas_webserver/groups.py index 3b2b77199fb..ec9738044b4 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/groups.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/groups.py @@ -1,8 +1,8 @@ from contextlib import suppress -from typing import Annotated, Any, Self, TypeVar +from typing import Annotated, Self, TypeVar from common_library.basic_types import DEFAULT_FACTORY -from models_library.groups import EVERYONE_GROUP_ID +from common_library.dict_tools import remap_keys from pydantic import ( AnyHttpUrl, AnyUrl, @@ -14,27 +14,26 @@ field_validator, model_validator, ) +from pydantic.config import JsonDict from ..emails import LowerCaseEmailStr from ..groups import ( + EVERYONE_GROUP_ID, AccessRightsDict, Group, GroupID, GroupMember, + GroupsByTypeTuple, StandardGroupCreate, StandardGroupUpdate, ) from ..users import UserID, UserNameID from ..utils.common_validators import create__check_only_one_is_set__root_validator -from ._base import InputSchema, OutputSchema +from ._base import InputSchema, OutputSchema, OutputSchemaWithoutCamelCase S = TypeVar("S", bound=BaseModel) -def _rename_keys(source: dict, name_map: dict[str, str]) -> dict[str, Any]: - return {name_map.get(k, k): v for k, v in source.items()} - - class GroupAccessRights(BaseModel): """ defines acesss rights for the user @@ -75,10 +74,10 @@ class GroupGet(OutputSchema): @classmethod def from_model(cls, group: Group, access_rights: AccessRightsDict) -> Self: - # Merges both service models into this schema + # Adapts these domain models into this schema return cls.model_validate( { - **_rename_keys( + **remap_keys( group.model_dump( include={ "gid", @@ -86,11 +85,13 @@ def from_model(cls, group: Group, 
access_rights: AccessRightsDict) -> Self: "description", "thumbnail", }, - exclude={"access_rights", "inclusion_rules"}, + exclude={ + "inclusion_rules", # deprecated + }, exclude_unset=True, by_alias=False, ), - name_map={ + rename={ "name": "label", }, ), @@ -98,38 +99,42 @@ def from_model(cls, group: Group, access_rights: AccessRightsDict) -> Self: } ) - model_config = ConfigDict( - json_schema_extra={ - "examples": [ - { - "gid": "27", - "label": "A user", - "description": "A very special user", - "thumbnail": "https://placekitten.com/10/10", - "accessRights": {"read": True, "write": False, "delete": False}, - }, - { - "gid": 1, - "label": "ITIS Foundation", - "description": "The Foundation for Research on Information Technologies in Society", - "accessRights": {"read": True, "write": False, "delete": False}, - }, - { - "gid": "1", - "label": "All", - "description": "Open to all users", - "accessRights": {"read": True, "write": True, "delete": True}, - }, - { - "gid": 5, - "label": "SPARCi", - "description": "Stimulating Peripheral Activity to Relieve Conditions", - "thumbnail": "https://placekitten.com/15/15", - "accessRights": {"read": True, "write": True, "delete": True}, - }, - ] - } - ) + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "gid": "27", + "label": "A user", + "description": "A very special user", + "thumbnail": "https://placekitten.com/10/10", + "accessRights": {"read": True, "write": False, "delete": False}, + }, + { + "gid": 1, + "label": "ITIS Foundation", + "description": "The Foundation for Research on Information Technologies in Society", + "accessRights": {"read": True, "write": False, "delete": False}, + }, + { + "gid": "1", + "label": "All", + "description": "Open to all users", + "accessRights": {"read": True, "write": True, "delete": True}, + }, + { + "gid": 5, + "label": "SPARCi", + "description": "Stimulating Peripheral Activity to Relieve Conditions", + "thumbnail": "https://placekitten.com/15/15", + "accessRights": {"read": True, "write": True, "delete": True}, + }, + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) @field_validator("thumbnail", mode="before") @classmethod @@ -147,14 +152,14 @@ class GroupCreate(InputSchema): thumbnail: AnyUrl | None = None def to_model(self) -> StandardGroupCreate: - data = _rename_keys( + data = remap_keys( self.model_dump( mode="json", # NOTE: intentionally inclusion_rules are not exposed to the REST api include={"label", "description", "thumbnail"}, exclude_unset=True, ), - name_map={"label": "name"}, + rename={"label": "name"}, ) return StandardGroupCreate(**data) @@ -165,14 +170,14 @@ class GroupUpdate(InputSchema): thumbnail: AnyUrl | None = None def to_model(self) -> StandardGroupUpdate: - data = _rename_keys( + data = remap_keys( self.model_dump( mode="json", # NOTE: intentionally inclusion_rules are not exposed to the REST api include={"label", "description", "thumbnail"}, exclude_unset=True, ), - name_map={"label": "name"}, + rename={"label": "name"}, ) return StandardGroupUpdate(**data) @@ -224,9 +229,26 @@ class MyGroupsGet(OutputSchema): } ) + @classmethod + def from_model( + cls, + groups_by_type: GroupsByTypeTuple, + my_product_group: tuple[Group, AccessRightsDict] | None, + ) -> Self: + assert groups_by_type.primary # nosec + assert groups_by_type.everyone # nosec + + return cls( + me=GroupGet.from_model(*groups_by_type.primary), + organizations=[GroupGet.from_model(*gi) for gi in 
groups_by_type.standard], all=GroupGet.from_model(*groups_by_type.everyone), product=GroupGet.from_model(*my_product_group) if my_product_group else None, ) -class GroupUserGet(BaseModel): - # OutputSchema + +class GroupUserGet(OutputSchemaWithoutCamelCase): # Identifiers id: Annotated[UserID | None, Field(description="the user's id")] = None @@ -252,7 +274,14 @@ class GroupUserGet(BaseModel): ] = None # Access Rights - access_rights: GroupAccessRights = Field(..., alias="accessRights") + access_rights: Annotated[ + GroupAccessRights | None, + Field( + alias="accessRights", + description="If group is standard, these are the access rights of the user to it. " + "None if primary group.", + ), + ] = None model_config = ConfigDict( populate_by_name=True, @@ -270,7 +299,23 @@ class GroupUserGet(BaseModel): "write": False, "delete": False, }, - } + }, + "examples": [ + # unique member on a primary group with two different privacy settings + { + "id": "16", + "userName": "mrprivate", + "gid": "55", + }, + { + "id": "56", + "userName": "mrpublic", + "login": "mrpublic@email.me", + "first_name": "Mr", + "last_name": "Public", + "gid": "42", + }, + ], }, ) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/licensed_items.py b/packages/models-library/src/models_library/api_schemas_webserver/licensed_items.py index 3455e8a81ac..5dafd9d5804 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/licensed_items.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/licensed_items.py @@ -11,6 +11,7 @@ class LicensedItemGet(OutputSchema): licensed_item_id: LicensedItemID name: str + license_key: str | None licensed_resource_type: LicensedResourceType pricing_plan_id: PricingPlanId created_at: datetime @@ -21,6 +22,7 @@ class LicensedItemGet(OutputSchema): { "licensed_item_id": "0362b88b-91f8-4b41-867c-35544ad1f7a1", "name": "best-model", + "license_key": "license-specific-key", "licensed_resource_type": f"{LicensedResourceType.VIP_MODEL}", "pricing_plan_id": "15", "created_at": "2024-12-12 09:59:26.422140", diff --git a/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_checkouts.py b/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_checkouts.py new file mode 100644 index 00000000000..c8fd22ce581 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_checkouts.py @@ -0,0 +1,48 @@ +from datetime import datetime +from typing import NamedTuple + +from pydantic import BaseModel, PositiveInt + +from ..licensed_items import LicensedItemID +from ..products import ProductName +from ..resource_tracker_licensed_items_checkouts import LicensedItemCheckoutID +from ..users import UserID +from ..wallets import WalletID +from ._base import OutputSchema + +# RPC + + +class LicensedItemCheckoutRpcGet(BaseModel): + licensed_item_checkout_id: LicensedItemCheckoutID + licensed_item_id: LicensedItemID + wallet_id: WalletID + user_id: UserID + product_name: ProductName + started_at: datetime + stopped_at: datetime | None + num_of_seats: int + + +class LicensedItemCheckoutRpcGetPage(NamedTuple): + items: list[LicensedItemCheckoutRpcGet] + total: PositiveInt + + +# Rest + + +class LicensedItemCheckoutRestGet(OutputSchema): + licensed_item_checkout_id: LicensedItemCheckoutID + licensed_item_id: LicensedItemID + wallet_id: WalletID + user_id: UserID + product_name: ProductName + started_at: datetime + stopped_at: datetime | None +
num_of_seats: int + + +class LicensedItemCheckoutRestGetPage(NamedTuple): + items: list[LicensedItemCheckoutRestGet] + total: PositiveInt diff --git a/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_purchases.py b/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_purchases.py index 0264e713256..2f413f3d10f 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_purchases.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_purchases.py @@ -2,16 +2,14 @@ from decimal import Decimal from typing import NamedTuple -from models_library.licensed_items import LicensedItemID -from models_library.products import ProductName -from models_library.resource_tracker import PricingUnitCostId -from models_library.resource_tracker_licensed_items_purchases import ( - LicensedItemPurchaseID, -) -from models_library.users import UserID -from models_library.wallets import WalletID from pydantic import PositiveInt +from ..licensed_items import LicensedItemID +from ..products import ProductName +from ..resource_tracker import PricingUnitCostId +from ..resource_tracker_licensed_items_purchases import LicensedItemPurchaseID +from ..users import UserID +from ..wallets import WalletID from ._base import OutputSchema diff --git a/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py b/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py index 3eea55c6d67..506db2aee3c 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py @@ -11,12 +11,12 @@ PricingPlanId, PricingUnitCostUpdate, PricingUnitId, - ServiceRunId, ServiceRunStatus, SpecificInfo, UnitExtraInfo, ) from ..services import ServiceKey, ServiceVersion +from ..services_types import ServiceRunID from ..users import UserID from ..wallets import WalletID from ._base import InputSchema, OutputSchema @@ -27,7 +27,7 @@ class ServiceRunGet( BaseModel ): # NOTE: this is already in use so I didnt modidy inheritance from OutputSchema - service_run_id: ServiceRunId + service_run_id: ServiceRunID wallet_id: WalletID | None wallet_name: str | None user_id: UserID diff --git a/packages/models-library/src/models_library/api_schemas_webserver/users.py b/packages/models-library/src/models_library/api_schemas_webserver/users.py index f0dd3d8bcfb..f5f49bf726c 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/users.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/users.py @@ -1,16 +1,48 @@ import re from datetime import date from enum import Enum -from typing import Annotated, Literal +from typing import Annotated, Any, Literal, Self -from models_library.api_schemas_webserver.groups import MyGroupsGet -from models_library.api_schemas_webserver.users_preferences import AggregatedPreferences -from models_library.basic_types import IDStr -from models_library.emails import LowerCaseEmailStr -from models_library.users import FirstNameStr, LastNameStr, UserID -from pydantic import BaseModel, ConfigDict, Field, field_validator +import annotated_types +from common_library.basic_types import DEFAULT_FACTORY +from common_library.dict_tools import remap_keys +from common_library.users_enums import UserStatus +from models_library.groups import AccessRightsDict +from pydantic import ( + ConfigDict, + EmailStr, + Field, + 
StringConstraints, + ValidationInfo, + field_validator, +) -from ._base import InputSchema, OutputSchema +from ..basic_types import IDStr +from ..emails import LowerCaseEmailStr +from ..groups import AccessRightsDict, Group, GroupID, GroupsByTypeTuple +from ..products import ProductName +from ..rest_base import RequestParameters +from ..users import ( + FirstNameStr, + LastNameStr, + MyProfile, + UserID, + UserNameID, + UserPermission, + UserThirdPartyToken, +) +from ._base import ( + InputSchema, + InputSchemaWithoutCamelCase, + OutputSchema, + OutputSchemaWithoutCamelCase, +) +from .groups import MyGroupsGet +from .users_preferences import AggregatedPreferences + +# +# MY PROFILE +# class MyProfilePrivacyGet(OutputSchema): @@ -23,8 +55,7 @@ class MyProfilePrivacyPatch(InputSchema): hide_email: bool | None = None -class MyProfileGet(BaseModel): - # WARNING: do not use InputSchema until front-end is updated! +class MyProfileGet(OutputSchemaWithoutCamelCase): id: UserID user_name: Annotated[ IDStr, Field(description="Unique username identifier", alias="userName") @@ -76,9 +107,38 @@ def _to_upper_string(cls, v): return v.name.upper() return v + @classmethod + def from_model( + cls, + my_profile: MyProfile, + my_groups_by_type: GroupsByTypeTuple, + my_product_group: tuple[Group, AccessRightsDict] | None, + my_preferences: AggregatedPreferences, + ) -> Self: + data = remap_keys( + my_profile.model_dump( + include={ + "id", + "user_name", + "first_name", + "last_name", + "email", + "role", + "privacy", + "expiration_date", + }, + exclude_unset=True, + ), + rename={"email": "login"}, + ) + return cls( + **data, + groups=MyGroupsGet.from_model(my_groups_by_type, my_product_group), + preferences=my_preferences, + ) + -class MyProfilePatch(BaseModel): - # WARNING: do not use InputSchema until front-end is updated! +class MyProfilePatch(InputSchemaWithoutCamelCase): first_name: FirstNameStr | None = None last_name: LastNameStr | None = None user_name: Annotated[IDStr | None, Field(alias="userName")] = None @@ -128,3 +188,144 @@ def _validate_user_name(cls, value: str): raise ValueError(msg) return value + + +# +# USER +# + + +class UsersGetParams(RequestParameters): + user_id: UserID + + +class UsersSearch(InputSchema): + match_: Annotated[ + str, + StringConstraints(strip_whitespace=True, min_length=1, max_length=80), + Field( + description="Search string to match with usernames and public profiles (e.g. 
emails, first/last name)", + alias="match", + ), + ] + limit: Annotated[int, annotated_types.Interval(ge=1, le=50)] = 10 + + +class UserGet(OutputSchema): + # Public profile of a user subject to its privacy settings + user_id: UserID + group_id: GroupID + user_name: UserNameID + first_name: str | None = None + last_name: str | None = None + email: EmailStr | None = None + + @classmethod + def from_model(cls, data): + return cls.model_validate(data, from_attributes=True) + + +class UsersForAdminSearchQueryParams(RequestParameters): + email: Annotated[ + str, + Field( + min_length=3, + max_length=200, + description="complete or glob pattern for an email", + ), + ] + + +class UserForAdminGet(OutputSchema): + # ONLY for admins + first_name: str | None + last_name: str | None + email: LowerCaseEmailStr + institution: str | None + phone: str | None + address: str | None + city: str | None + state: Annotated[str | None, Field(description="State, province, canton, ...")] + postal_code: str | None + country: str | None + extras: Annotated[ + dict[str, Any], + Field( + default_factory=dict, + description="Keeps extra information provided in the request form", + ), + ] = DEFAULT_FACTORY + + # authorization + invited_by: str | None = None + + # user status + registered: bool + status: UserStatus | None + products: Annotated[ + list[ProductName] | None, + Field( + description="List of products this user is included in, or None if the field is unset", + ), + ] = None + + @field_validator("status") + @classmethod + def _consistency_check(cls, v, info: ValidationInfo): + registered = info.data["registered"] + status = v + if not registered and status is not None: + msg = f"{registered=} and {status=} is not allowed" + raise ValueError(msg) + return v + + +# +# THIRD-PARTY TOKENS +# + + +class MyTokenCreate(InputSchemaWithoutCamelCase): + service: Annotated[ + IDStr, + Field(description="uniquely identifies the service where this token is used"), + ] + token_key: IDStr + token_secret: IDStr + + def to_model(self) -> UserThirdPartyToken: + return UserThirdPartyToken( + service=self.service, + token_key=self.token_key, + token_secret=self.token_secret, + ) + + +class MyTokenGet(OutputSchemaWithoutCamelCase): + service: IDStr + token_key: IDStr + token_secret: Annotated[ + IDStr | None, Field(deprecated=True, description="Will be removed") + ] = None + + @classmethod + def from_model(cls, token: UserThirdPartyToken) -> Self: + return cls( + service=token.service, # type: ignore[arg-type] + token_key=token.token_key, # type: ignore[arg-type] + token_secret=None, + ) + + +# +# PERMISSIONS +# + + +class MyPermissionGet(OutputSchema): + name: str + allowed: bool + + @classmethod + def from_model(cls, permission: UserPermission) -> Self: + return cls(name=permission.name, allowed=permission.allowed) diff --git a/packages/models-library/src/models_library/docker.py b/packages/models-library/src/models_library/docker.py index 6e87f06b62e..db5f51ef359 100644 --- a/packages/models-library/src/models_library/docker.py +++ b/packages/models-library/src/models_library/docker.py @@ -37,7 +37,15 @@ def from_key(cls, key: str) -> "DockerLabelKey": str, StringConstraints(pattern=DOCKER_GENERIC_TAG_KEY_RE) ] -DockerPlacementConstraint: TypeAlias = Annotated[str, StringConstraints(strip_whitespace = True, pattern = re.compile(r"^(?!-)(?![.])(?!.*--)(?!.*[.][.])[a-zA-Z0-9.-]*(?
"StandardSimcoreDockerLabels": ] }, ) + + +DockerNodeID: TypeAlias = Annotated[ + str, StringConstraints(strip_whitespace=True, pattern=re.compile(r"[a-zA-Z0-9]")) +] diff --git a/packages/models-library/src/models_library/groups.py b/packages/models-library/src/models_library/groups.py index 797453922f9..c0d8692b2e7 100644 --- a/packages/models-library/src/models_library/groups.py +++ b/packages/models-library/src/models_library/groups.py @@ -3,8 +3,11 @@ from common_library.basic_types import DEFAULT_FACTORY from common_library.groups_enums import GroupType as GroupType from pydantic import BaseModel, ConfigDict, EmailStr, Field, field_validator +from pydantic.config import JsonDict from pydantic.types import PositiveInt -from typing_extensions import TypedDict +from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict + TypedDict, +) from .basic_types import IDStr from .users import UserID @@ -35,7 +38,47 @@ class Group(BaseModel): create_enums_pre_validator(GroupType) ) - model_config = ConfigDict(populate_by_name=True) + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "gid": 1, + "name": "Everyone", + "type": "everyone", + "description": "all users", + "thumbnail": None, + }, + { + "gid": 2, + "name": "User", + "description": "primary group", + "type": "primary", + "thumbnail": None, + }, + { + "gid": 3, + "name": "Organization", + "description": "standard group", + "type": "standard", + "thumbnail": None, + "inclusionRules": {}, + }, + { + "gid": 4, + "name": "Product", + "description": "standard group for products", + "type": "standard", + "thumbnail": None, + }, + ] + } + ) + + model_config = ConfigDict( + populate_by_name=True, json_schema_extra=_update_json_schema_extra + ) class AccessRightsDict(TypedDict): @@ -65,7 +108,7 @@ class GroupMember(BaseModel): last_name: str | None # group access - access_rights: AccessRightsDict + access_rights: AccessRightsDict | None = None model_config = ConfigDict(from_attributes=True) diff --git a/packages/models-library/src/models_library/licensed_items.py b/packages/models-library/src/models_library/licensed_items.py index 021cf214ce5..79cd4fa87e0 100644 --- a/packages/models-library/src/models_library/licensed_items.py +++ b/packages/models-library/src/models_library/licensed_items.py @@ -24,6 +24,7 @@ class LicensedResourceType(StrAutoEnum): class LicensedItemDB(BaseModel): licensed_item_id: LicensedItemID name: str + license_key: str | None licensed_resource_type: LicensedResourceType pricing_plan_id: PricingPlanId product_name: ProductName diff --git a/packages/models-library/src/models_library/rabbitmq_messages.py b/packages/models-library/src/models_library/rabbitmq_messages.py index dd891758603..0cd6bd0874b 100644 --- a/packages/models-library/src/models_library/rabbitmq_messages.py +++ b/packages/models-library/src/models_library/rabbitmq_messages.py @@ -15,6 +15,7 @@ from .projects_state import RunningState from .services import ServiceKey, ServiceType, ServiceVersion from .services_resources import ServiceResourcesDict +from .services_types import ServiceRunID from .users import UserID from .utils.enums import StrAutoEnum from .wallets import WalletID @@ -178,7 +179,7 @@ class RabbitResourceTrackingMessageType(StrAutoEnum): class RabbitResourceTrackingBaseMessage(RabbitMessageBase): channel_name: Literal["io.simcore.service.tracking"] = "io.simcore.service.tracking" - service_run_id: str = Field( + service_run_id: 
ServiceRunID = Field( ..., description="uniquely identitifies the service run" ) created_at: datetime.datetime = Field( diff --git a/packages/models-library/src/models_library/resource_tracker.py b/packages/models-library/src/models_library/resource_tracker.py index 20e35b7e614..629633aa8c8 100644 --- a/packages/models-library/src/models_library/resource_tracker.py +++ b/packages/models-library/src/models_library/resource_tracker.py @@ -20,7 +20,6 @@ _logger = logging.getLogger(__name__) -ServiceRunId: TypeAlias = str PricingPlanId: TypeAlias = PositiveInt PricingUnitId: TypeAlias = PositiveInt PricingUnitCostId: TypeAlias = PositiveInt diff --git a/packages/models-library/src/models_library/resource_tracker_licensed_items_checkouts.py b/packages/models-library/src/models_library/resource_tracker_licensed_items_checkouts.py new file mode 100644 index 00000000000..cd09440b822 --- /dev/null +++ b/packages/models-library/src/models_library/resource_tracker_licensed_items_checkouts.py @@ -0,0 +1,4 @@ +from typing import TypeAlias +from uuid import UUID + +LicensedItemCheckoutID: TypeAlias = UUID diff --git a/packages/models-library/src/models_library/rpc_pagination.py b/packages/models-library/src/models_library/rpc_pagination.py index 96d6308f66c..f1aecabab81 100644 --- a/packages/models-library/src/models_library/rpc_pagination.py +++ b/packages/models-library/src/models_library/rpc_pagination.py @@ -30,14 +30,14 @@ class PageRefsParams(PageRefs[PageQueryParameters]): @classmethod def create(cls, total: int, limit: int, offset: int) -> "PageRefsParams": - last_page = ceil(total / limit) - 1 + last_page = ceil(total / limit) - 1 if total > 0 else 0 return cls.model_validate( { "self": {"offset": offset, "limit": limit}, "first": {"offset": 0, "limit": limit}, "prev": ( {"offset": max(offset - limit, 0), "limit": limit} - if offset > 0 + if offset > 0 and total > 0 else None ), "next": ( @@ -45,7 +45,7 @@ def create(cls, total: int, limit: int, offset: int) -> "PageRefsParams": "offset": min(offset + limit, last_page * limit), "limit": limit, } - if offset < (last_page * limit) + if offset < (last_page * limit) and total > 0 else None ), "last": {"offset": last_page * limit, "limit": limit}, diff --git a/packages/models-library/src/models_library/services.py b/packages/models-library/src/models_library/services.py index 23874571f96..cd20682f52d 100644 --- a/packages/models-library/src/models_library/services.py +++ b/packages/models-library/src/models_library/services.py @@ -7,9 +7,9 @@ from .services_metadata_published import ServiceInputsDict, ServiceMetaDataPublished from .services_types import ( DynamicServiceKey, - RunID, ServiceKey, ServicePortKey, + ServiceRunID, ServiceVersion, ) @@ -21,7 +21,6 @@ "BootOptions", "DynamicServiceKey", "LATEST_INTEGRATION_VERSION", - "RunID", "ServiceInput", "ServiceInputsDict", "ServiceKey", @@ -29,6 +28,7 @@ "ServiceMetaDataPublished", "ServiceOutput", "ServicePortKey", + "ServiceRunID", "ServiceType", "ServiceVersion", ) diff --git a/packages/models-library/src/models_library/services_types.py b/packages/models-library/src/models_library/services_types.py index 03c0bb4bf5d..b6689fdf888 100644 --- a/packages/models-library/src/models_library/services_types.py +++ b/packages/models-library/src/models_library/services_types.py @@ -1,11 +1,17 @@ -from typing import Annotated, Any, TypeAlias +from typing import TYPE_CHECKING, Annotated, Any, Self, TypeAlias from uuid import uuid4 import arrow -from pydantic import GetCoreSchemaHandler, StringConstraints, 
ValidationInfo, +) from pydantic_core import CoreSchema, core_schema from .basic_regex import PROPERTY_KEY_RE, SIMPLE_VERSION_RE +from .projects_nodes_io import NodeID from .services_regex import ( COMPUTATIONAL_SERVICE_KEY_RE, DYNAMIC_SERVICE_KEY_RE, @@ -13,6 +19,10 @@ SERVICE_ENCODED_KEY_RE, SERVICE_KEY_RE, ) +from .users import UserID + +if TYPE_CHECKING: + from .projects import ProjectID ServicePortKey: TypeAlias = Annotated[str, StringConstraints(pattern=PROPERTY_KEY_RE)] @@ -35,7 +45,7 @@ ServiceVersion: TypeAlias = Annotated[str, StringConstraints(pattern=SIMPLE_VERSION_RE)] -class RunID(str): +class ServiceRunID(str): """ Used to assign a unique identifier to the run of a service. @@ -44,12 +54,15 @@ and old volumes for different runs. Avoids overwriting data that left dropped on the node (due to an error) and gives the osparc-agent an opportunity to back it up. + The resource-usage-tracker uses these RunIDs to keep track of + resource usage from computational and dynamic services. """ __slots__ = () @classmethod - def create(cls) -> "RunID": + def get_resource_tracking_run_id_for_dynamic(cls) -> Self: + """used for dynamic services""" # NOTE: there was a legacy version of this RunID # legacy version: # '0ac3ed64-665b-42d2-95f7-e59e0db34242' @@ -59,6 +72,17 @@ run_id_format = f"{utc_int_timestamp}_{uuid4()}" return cls(run_id_format) + @classmethod + def get_resource_tracking_run_id_for_computational( + cls, + user_id: UserID, + project_id: "ProjectID", + node_id: NodeID, + iteration: PositiveInt, + ) -> Self: + """used by computational services""" + return cls(f"comp_{user_id}_{project_id}_{node_id}_{iteration}") + @classmethod def __get_pydantic_core_schema__( cls, @@ -68,7 +92,7 @@ return core_schema.no_info_after_validator_function(cls, handler(str)) @classmethod - def validate(cls, v: "RunID | str", _: ValidationInfo) -> "RunID": + def validate(cls, v: "ServiceRunID | str", _: ValidationInfo) -> "ServiceRunID": if isinstance(v, cls): return v if isinstance(v, str): diff --git a/packages/models-library/src/models_library/users.py b/packages/models-library/src/models_library/users.py index af532978320..c8860171b64 100644 --- a/packages/models-library/src/models_library/users.py +++ b/packages/models-library/src/models_library/users.py @@ -1,7 +1,15 @@ +import datetime from typing import Annotated, TypeAlias +from common_library.users_enums import UserRole from models_library.basic_types import IDStr from pydantic import BaseModel, ConfigDict, Field, PositiveInt, StringConstraints +from pydantic.config import JsonDict +from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict + TypedDict, +) + +from .emails import LowerCaseEmailStr UserID: TypeAlias = PositiveInt UserNameID: TypeAlias = IDStr @@ -16,6 +24,40 @@ ] +class PrivacyDict(TypedDict): + hide_fullname: bool + hide_email: bool + + +class MyProfile(BaseModel): + id: UserID + user_name: IDStr + first_name: str | None + last_name: str | None + email: LowerCaseEmailStr + role: UserRole + privacy: PrivacyDict + expiration_date: datetime.date | None = None + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "example": { + "id": 1, + "email": "PtN5Ab0uv@guest-at-osparc.io", + "user_name": "PtN5Ab0uv", + "first_name": "PtN5Ab0uv", + "last_name": "", +
"role": "GUEST", + "privacy": {"hide_email": True, "hide_fullname": False}, + } + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) + + class UserBillingDetails(BaseModel): first_name: str | None last_name: str | None @@ -28,3 +70,37 @@ class UserBillingDetails(BaseModel): phone: str | None model_config = ConfigDict(from_attributes=True) + + +# +# THIRD-PARTY TOKENS +# + + +class UserThirdPartyToken(BaseModel): + """ + Tokens used to access third-party services connected to osparc (e.g. pennsieve, scicrunch, etc) + """ + + service: str + token_key: str + token_secret: str | None = None + + model_config = ConfigDict( + json_schema_extra={ + "example": { + "service": "github-api-v1", + "token_key": "5f21abf5-c596-47b7-bfd1-c0e436ef1107", + } + } + ) + + +# +# PERMISSIONS +# + + +class UserPermission(BaseModel): + name: str + allowed: bool diff --git a/packages/models-library/tests/test_rest_pagination.py b/packages/models-library/tests/test_rest_pagination.py index a32bec673bb..85669e4a6de 100644 --- a/packages/models-library/tests/test_rest_pagination.py +++ b/packages/models-library/tests/test_rest_pagination.py @@ -2,7 +2,7 @@ import pytest from models_library.rest_pagination import Page, PageMetaInfoLimitOffset -from pydantic.main import BaseModel +from pydantic import BaseModel, ValidationError from pytest_simcore.examples.models_library import PAGE_EXAMPLES @@ -26,7 +26,7 @@ def test_page_response_limit_offset_models(cls_model: BaseModel, examples: list[ def test_invalid_offset(): - with pytest.raises(ValueError): + with pytest.raises(ValidationError): PageMetaInfoLimitOffset(limit=6, total=5, offset=5, count=2) @@ -39,14 +39,14 @@ def test_invalid_offset(): ], ) def test_invalid_count(count: int, offset: int): - with pytest.raises(ValueError): + with pytest.raises(ValidationError): PageMetaInfoLimitOffset(limit=6, total=5, offset=offset, count=count) def test_data_size_does_not_fit_count(): example = deepcopy(PAGE_EXAMPLES[0]) example["_meta"]["count"] = len(example["data"]) - 1 - with pytest.raises(ValueError): + with pytest.raises(ValidationError): Page[str](**example) diff --git a/packages/models-library/tests/test_services_types.py b/packages/models-library/tests/test_services_types.py new file mode 100644 index 00000000000..206c531a78f --- /dev/null +++ b/packages/models-library/tests/test_services_types.py @@ -0,0 +1,40 @@ +import pytest +from models_library.projects import ProjectID +from models_library.projects_nodes import NodeID +from models_library.services_types import ServiceRunID +from models_library.users import UserID +from pydantic import PositiveInt + + +@pytest.mark.parametrize( + "user_id, project_id, node_id, iteration, expected_result", + [ + ( + 2, + ProjectID("e08356e4-eb74-49e9-b769-2c26e34c61d9"), + NodeID("a08356e4-eb74-49e9-b769-2c26e34c61d1"), + 5, + "comp_2_e08356e4-eb74-49e9-b769-2c26e34c61d9_a08356e4-eb74-49e9-b769-2c26e34c61d1_5", + ) + ], +) +def test_run_id_get_resource_tracking_run_id( + user_id: UserID, + project_id: ProjectID, + node_id: NodeID, + iteration: PositiveInt, + expected_result: str, +): + resource_tracking_service_run_id = ( + ServiceRunID.get_resource_tracking_run_id_for_computational( + user_id, project_id, node_id, iteration + ) + ) + assert isinstance(resource_tracking_service_run_id, ServiceRunID) + assert resource_tracking_service_run_id == expected_result + + +def test_get_resource_tracking_run_id_for_dynamic(): + assert isinstance( + ServiceRunID.get_resource_tracking_run_id_for_dynamic(), 
ServiceRunID + ) diff --git a/packages/models-library/tests/test_users.py b/packages/models-library/tests/test_users.py new file mode 100644 index 00000000000..97496e133a9 --- /dev/null +++ b/packages/models-library/tests/test_users.py @@ -0,0 +1,27 @@ +from models_library.api_schemas_webserver.users import MyProfileGet +from models_library.api_schemas_webserver.users_preferences import Preference +from models_library.groups import AccessRightsDict, Group, GroupsByTypeTuple +from models_library.users import MyProfile +from pydantic import TypeAdapter + + +def test_adapter_from_model_to_schema(): + my_profile = MyProfile.model_validate(MyProfile.model_json_schema()["example"]) + + groups = TypeAdapter(list[Group]).validate_python( + Group.model_json_schema()["examples"] + ) + + ar = AccessRightsDict(read=False, write=False, delete=False) + + my_groups_by_type = GroupsByTypeTuple( + primary=(groups[1], ar), standard=[(groups[2], ar)], everyone=(groups[0], ar) + ) + my_product_group = groups[-1], AccessRightsDict( + read=False, write=False, delete=False + ) + my_preferences = {"foo": Preference(default_value=3, value=1)} + + MyProfileGet.from_model( + my_profile, my_groups_by_type, my_product_group, my_preferences + ) diff --git a/packages/postgres-database/requirements/_test.in b/packages/postgres-database/requirements/_test.in index 3249c9c02b2..d0b7af019df 100644 --- a/packages/postgres-database/requirements/_test.in +++ b/packages/postgres-database/requirements/_test.in @@ -10,6 +10,7 @@ --constraint _migration.txt aiopg[sa] +arrow coverage faker pytest diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt index 49636a365c3..6b6c490ce72 100644 --- a/packages/postgres-database/requirements/_test.txt +++ b/packages/postgres-database/requirements/_test.txt @@ -1,5 +1,7 @@ aiopg==1.4.0 # via -r requirements/_test.in +arrow==1.3.0 + # via -r requirements/_test.in async-timeout==4.0.3 # via # -c requirements/_base.txt @@ -52,7 +54,9 @@ pytest-instafail==0.5.0 pytest-runner==6.0.1 # via -r requirements/_test.in python-dateutil==2.9.0.post0 - # via faker + # via + # arrow + # faker pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt @@ -72,6 +76,8 @@ types-docker==7.1.0.20240827 # via -r requirements/_test.in types-psycopg2==2.9.21.20241019 # via -r requirements/_test.in +types-python-dateutil==2.9.0.20241206 + # via arrow types-requests==2.32.0.20241016 # via types-docker typing-extensions==4.12.2 diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/1e3c9c804fec_set_privacy_hide_email_to_true.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/1e3c9c804fec_set_privacy_hide_email_to_true.py new file mode 100644 index 00000000000..58e1115a1bf --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/1e3c9c804fec_set_privacy_hide_email_to_true.py @@ -0,0 +1,33 @@ +"""set privacy_hide_email to true. Reverts "set privacy_hide_email to false temporarily" (5e27063c3ac9) + +Revision ID: 1e3c9c804fec +Revises: d31c23845017 +Create Date: 2025-01-03 10:16:58.531083+00:00 + +""" +from alembic import op +from sqlalchemy.sql import expression + +# revision identifiers, used by Alembic. 
+revision = "1e3c9c804fec" +down_revision = "d31c23845017" +branch_labels = None +depends_on = None + + +def upgrade(): + # server_default of privacy_hide_email to true + with op.batch_alter_table("users") as batch_op: + batch_op.alter_column("privacy_hide_email", server_default=expression.true()) + + # Reset all to default: Revert existing values in the database to true + op.execute("UPDATE users SET privacy_hide_email = true") + + +def downgrade(): + # Change the server_default of privacy_hide_email to false + with op.batch_alter_table("users") as batch_op: + batch_op.alter_column("privacy_hide_email", server_default=expression.false()) + + # Reset all to default: Update existing values in the database + op.execute("UPDATE users SET privacy_hide_email = false") diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/307017ee1a49_add_deprecated_submit_column.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/307017ee1a49_add_deprecated_submit_column.py new file mode 100644 index 00000000000..a93d032b8e8 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/307017ee1a49_add_deprecated_submit_column.py @@ -0,0 +1,28 @@ +"""add deprecated submit column + +Revision ID: 307017ee1a49 +Revises: 1e3c9c804fec +Create Date: 2025-01-06 12:53:51.604189+00:00 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '307017ee1a49' +down_revision = '1e3c9c804fec' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('comp_tasks', sa.Column('submit', sa.DateTime(timezone=True), server_default=sa.text("'1900-01-01T00:00:00Z'::timestamptz"), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('comp_tasks', 'submit') + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/52a0e8148dd5_remove_submit_timestamp.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/52a0e8148dd5_remove_submit_timestamp.py new file mode 100644 index 00000000000..8589578abe7 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/52a0e8148dd5_remove_submit_timestamp.py @@ -0,0 +1,28 @@ +"""remove submit timestamp + +Revision ID: 52a0e8148dd5 +Revises: 77ac824a77ff +Create Date: 2024-12-16 14:55:15.114923+00:00 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '52a0e8148dd5' +down_revision = '77ac824a77ff' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('comp_tasks', 'submit') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('comp_tasks', sa.Column('submit', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True)) + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/aa6da21a0055_rename_usages_to_checkouts.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/aa6da21a0055_rename_usages_to_checkouts.py new file mode 100644 index 00000000000..882be09dd2c --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/aa6da21a0055_rename_usages_to_checkouts.py @@ -0,0 +1,134 @@ +"""rename usages to checkouts + +Revision ID: aa6da21a0055 +Revises: 52a0e8148dd5 +Create Date: 2024-12-17 13:47:09.304574+00:00 + +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "aa6da21a0055" +down_revision = "52a0e8148dd5" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "resource_tracker_licensed_items_checkouts", + sa.Column( + "licensed_item_checkout_id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("licensed_item_id", postgresql.UUID(as_uuid=True), nullable=True), + sa.Column("wallet_id", sa.BigInteger(), nullable=False), + sa.Column("user_id", sa.BigInteger(), nullable=False), + sa.Column("user_email", sa.String(), nullable=True), + sa.Column("product_name", sa.String(), nullable=False), + sa.Column("service_run_id", sa.String(), nullable=True), + sa.Column("started_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("stopped_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("num_of_seats", sa.SmallInteger(), nullable=False), + sa.Column( + "modified", + sa.DateTime(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.ForeignKeyConstraint( + ["product_name", "service_run_id"], + [ + "resource_tracker_service_runs.product_name", + "resource_tracker_service_runs.service_run_id", + ], + name="resource_tracker_license_checkouts_service_run_id_fkey", + onupdate="CASCADE", + ondelete="RESTRICT", + ), + sa.PrimaryKeyConstraint("licensed_item_checkout_id"), + ) + op.create_index( + op.f("ix_resource_tracker_licensed_items_checkouts_wallet_id"), + "resource_tracker_licensed_items_checkouts", + ["wallet_id"], + unique=False, + ) + op.drop_index( + "ix_resource_tracker_licensed_items_usage_wallet_id", + table_name="resource_tracker_licensed_items_usage", + ) + op.drop_table("resource_tracker_licensed_items_usage") + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "resource_tracker_licensed_items_usage", + sa.Column( + "licensed_item_usage_id", + postgresql.UUID(), + server_default=sa.text("gen_random_uuid()"), + autoincrement=False, + nullable=False, + ), + sa.Column("wallet_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("user_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("user_email", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("product_name", sa.VARCHAR(), autoincrement=False, nullable=False), + sa.Column("service_run_id", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column( + "started_at", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=False, + ), + sa.Column( + "stopped_at", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.Column("num_of_seats", sa.SMALLINT(), autoincrement=False, nullable=False), + sa.Column( + "modified", + postgresql.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.Column( + "licensed_item_id", postgresql.UUID(), autoincrement=False, nullable=False + ), + sa.ForeignKeyConstraint( + ["product_name", "service_run_id"], + [ + "resource_tracker_service_runs.product_name", + "resource_tracker_service_runs.service_run_id", + ], + name="resource_tracker_license_checkouts_service_run_id_fkey", + onupdate="CASCADE", + ondelete="RESTRICT", + ), + sa.PrimaryKeyConstraint( + "licensed_item_usage_id", name="resource_tracker_licensed_items_usage_pkey" + ), + ) + op.create_index( + "ix_resource_tracker_licensed_items_usage_wallet_id", + "resource_tracker_licensed_items_usage", + ["wallet_id"], + unique=False, + ) + op.drop_index( + op.f("ix_resource_tracker_licensed_items_checkouts_wallet_id"), + table_name="resource_tracker_licensed_items_checkouts", + ) + op.drop_table("resource_tracker_licensed_items_checkouts") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/d31c23845017_add_license_key.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/d31c23845017_add_license_key.py new file mode 100644 index 00000000000..59856c49d52 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/d31c23845017_add_license_key.py @@ -0,0 +1,29 @@ +"""add license key + +Revision ID: d31c23845017 +Revises: aa6da21a0055 +Create Date: 2024-12-18 11:11:52.644534+00:00 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "d31c23845017" +down_revision = "aa6da21a0055" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "licensed_items", sa.Column("license_key", sa.String(), nullable=True) + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column("licensed_items", "license_key") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py b/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py index af5dc451cc3..096447f7366 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py @@ -1,6 +1,4 @@ -""" Computational Tasks Table - -""" +"""Computational Tasks Table""" import enum @@ -77,10 +75,6 @@ class NodeClass(enum.Enum): nullable=True, doc="current progress of the task if available", ), - # utc timestamps for submission/start/end - sa.Column( - "submit", sa.DateTime(timezone=True), doc="UTC timestamp for task submission" - ), sa.Column( "start", sa.DateTime(timezone=True), doc="UTC timestamp when task started" ), @@ -106,6 +100,14 @@ class NodeClass(enum.Enum): nullable=True, doc="Harware information of this task", ), + # deprecated columns must be kept due to legacy services + # utc timestamps for submission/start/end + sa.Column( + "submit", + sa.DateTime(timezone=True), + server_default=sa.text("'1900-01-01T00:00:00Z'::timestamptz"), + doc="[DEPRECATED] Unused, but kept for legacy services; must be filled with the default value of 1 January 1900", + ), # ------ sa.UniqueConstraint("project_id", "node_id", name="project_node_uniqueness"), ) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/licensed_items.py b/packages/postgres-database/src/simcore_postgres_database/models/licensed_items.py index 63301eb9c1d..a0ea136f4bb 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/licensed_items.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/licensed_items.py @@ -58,6 +58,12 @@ class LicensedResourceType(str, enum.Enum): nullable=False, doc="Product name", ), + sa.Column( + "license_key", + sa.String, + nullable=True, + doc="Purpose: Acts as a mapping key to the internal license server.
Usage: The Sim4Life base applications use this key to check out a seat from the internal license server.", + ), column_created_datetime(timezone=True), column_modified_datetime(timezone=True), ) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_usage.py b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_checkouts.py similarity index 91% rename from packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_usage.py rename to packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_checkouts.py index 27d6afe8250..e3cabb899f7 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_usage.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_checkouts.py @@ -7,11 +7,11 @@ from ._common import RefActions, column_modified_datetime from .base import metadata -resource_tracker_licensed_items_usage = sa.Table( - "resource_tracker_licensed_items_usage", +resource_tracker_licensed_items_checkouts = sa.Table( + "resource_tracker_licensed_items_checkouts", metadata, sa.Column( - "licensed_item_usage_id", + "licensed_item_checkout_id", UUID(as_uuid=True), nullable=False, primary_key=True, @@ -19,7 +19,7 @@ ), sa.Column( "licensed_item_id", - sa.String, + UUID(as_uuid=True), nullable=True, ), sa.Column( diff --git a/packages/postgres-database/src/simcore_postgres_database/models/users.py b/packages/postgres-database/src/simcore_postgres_database/models/users.py index bdff1293211..b8ff7a455cd 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/users.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/users.py @@ -1,69 +1,14 @@ -from enum import Enum -from functools import total_ordering - import sqlalchemy as sa +from common_library.users_enums import UserRole, UserStatus from sqlalchemy.sql import expression from ._common import RefActions from .base import metadata -_USER_ROLE_TO_LEVEL = { - "ANONYMOUS": 0, - "GUEST": 10, - "USER": 20, - "TESTER": 30, - "PRODUCT_OWNER": 40, - "ADMIN": 100, -} - - -@total_ordering -class UserRole(Enum): - """SORTED enumeration of user roles - - A role defines a set of privileges the user can perform - Roles are sorted from lower to highest privileges - USER is the role assigned by default A user with a higher/lower role is denoted super/infra user - - ANONYMOUS : The user is not logged in - GUEST : Temporary user with very limited access. Main used for demos and for a limited amount of time - USER : Registered user. Basic permissions to use the platform [default] - TESTER : Upgraded user. First level of super-user with privileges to test the framework. - Can use everything but does not have an effect in other users or actual data - ADMIN : Framework admin. - - See security_access.py - """ - - ANONYMOUS = "ANONYMOUS" - GUEST = "GUEST" - USER = "USER" - TESTER = "TESTER" - PRODUCT_OWNER = "PRODUCT_OWNER" - ADMIN = "ADMIN" - - @property - def privilege_level(self) -> int: - return _USER_ROLE_TO_LEVEL[self.name] - - def __lt__(self, other: "UserRole") -> bool: - if self.__class__ is other.__class__: - return self.privilege_level < other.privilege_level - return NotImplemented - - -class UserStatus(str, Enum): - # This is a transition state. The user is registered but not confirmed. 
NOTE that state is optional depending on LOGIN_REGISTRATION_CONFIRMATION_REQUIRED - CONFIRMATION_PENDING = "CONFIRMATION_PENDING" - # This user can now operate the platform - ACTIVE = "ACTIVE" - # This user is inactive because it expired after a trial period - EXPIRED = "EXPIRED" - # This user is inactive because he has been a bad boy - BANNED = "BANNED" - # This user is inactive because it was marked for deletion - DELETED = "DELETED" - +__all__: tuple[str, ...] = ( + "UserRole", + "UserStatus", +) users = sa.Table( "users", diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_groups_extra_properties.py b/packages/postgres-database/src/simcore_postgres_database/utils_groups_extra_properties.py index b6c25183a21..709096572c6 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_groups_extra_properties.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_groups_extra_properties.py @@ -1,18 +1,26 @@ import datetime import logging +import warnings from dataclasses import dataclass, fields -from typing import Any +from typing import Any, Callable import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy +from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine from .models.groups import GroupType, groups, user_to_groups from .models.groups_extra_properties import groups_extra_properties from .utils_models import FromRowMixin +from .utils_repos import pass_or_acquire_connection _logger = logging.getLogger(__name__) +_WARNING_FMSG = ( + f"{__name__}.{{}} uses aiopg which has been deprecated in this repo. Use {{}} instead. " + "SEE https://github.com/ITISFoundation/osparc-simcore/issues/4529" +) + class GroupExtraPropertiesError(Exception): ... 
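The hunks that follow migrate GroupExtraPropertiesRepo from aiopg to the asyncpg-backed SQLAlchemy engine while keeping the legacy entry points alive behind DeprecationWarnings, exactly the pattern `_WARNING_FMSG` above is built for. A minimal sketch of that shim pattern, with illustrative names (`fetch_props` / `fetch_props_v2` are not from the repo):

```python
# Minimal sketch of the deprecation-shim pattern used in this refactoring:
# the legacy entry point keeps working but warns callers to move to its
# "_v2" twin. fetch_props / fetch_props_v2 are illustrative names only.
import warnings

_WARNING_TEMPLATE = "{} uses aiopg which has been deprecated. Use {} instead."


def fetch_props_v2(gid: int) -> dict:
    # new implementation (asyncpg-based in the real repo)
    return {"group_id": gid}


def fetch_props(gid: int) -> dict:
    warnings.warn(
        _WARNING_TEMPLATE.format("fetch_props", "fetch_props_v2"),
        DeprecationWarning,
        stacklevel=2,  # point the warning at the caller, not this shim
    )
    return fetch_props_v2(gid)


print(fetch_props(42))  # emits a DeprecationWarning, returns {'group_id': 42}
```

Sharing the statement-builder (`_get_stmt`, `_list_table_entries_ordered_by_group_type_stmt`) between the old and new code paths keeps the two implementations from drifting apart while both exist.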
@@ -35,10 +43,8 @@ class GroupExtraProperties(FromRowMixin): enable_efs: bool -async def _list_table_entries_ordered_by_group_type( - connection: SAConnection, user_id: int, product_name: str -) -> list[RowProxy]: - list_stmt = ( +def _list_table_entries_ordered_by_group_type_stmt(user_id: int, product_name: str): + return ( sa.select( groups_extra_properties, groups.c.type, @@ -68,15 +74,6 @@ async def _list_table_entries_ordered_by_group_type( .alias() ) - result = await connection.execute( - sa.select(list_stmt).order_by(list_stmt.c.type_order) - ) - assert result # nosec - - rows: list[RowProxy] | None = await result.fetchall() - assert rows is not None # nosec - return rows - def _merge_extra_properties_booleans( instance1: GroupExtraProperties, instance2: GroupExtraProperties @@ -95,34 +92,55 @@ def _merge_extra_properties_booleans( @dataclass(frozen=True, slots=True, kw_only=True) class GroupExtraPropertiesRepo: + @staticmethod + def _get_stmt(gid: int, product_name: str): + return sa.select(groups_extra_properties).where( + (groups_extra_properties.c.group_id == gid) + & (groups_extra_properties.c.product_name == product_name) + ) + @staticmethod async def get( connection: SAConnection, *, gid: int, product_name: str ) -> GroupExtraProperties: - get_stmt = sa.select(groups_extra_properties).where( - (groups_extra_properties.c.group_id == gid) - & (groups_extra_properties.c.product_name == product_name) + warnings.warn( + _WARNING_FMSG.format("get", "get_v2"), + DeprecationWarning, + stacklevel=1, ) - result = await connection.execute(get_stmt) + + query = GroupExtraPropertiesRepo._get_stmt(gid, product_name) + result = await connection.execute(query) assert result # nosec if row := await result.first(): - return GroupExtraProperties.from_row(row) + return GroupExtraProperties.from_row_proxy(row) msg = f"Properties for group {gid} not found" raise GroupExtraPropertiesNotFoundError(msg) @staticmethod - async def get_aggregated_properties_for_user( - connection: SAConnection, + async def get_v2( + engine: AsyncEngine, + connection: AsyncConnection | None = None, *, - user_id: int, + gid: int, product_name: str, ) -> GroupExtraProperties: - rows = await _list_table_entries_ordered_by_group_type( - connection, user_id, product_name - ) + async with pass_or_acquire_connection(engine, connection) as conn: + query = GroupExtraPropertiesRepo._get_stmt(gid, product_name) + result = await conn.stream(query) + assert result # nosec + if row := await result.first(): + return GroupExtraProperties.from_row(row) + msg = f"Properties for group {gid} not found" + raise GroupExtraPropertiesNotFoundError(msg) + + @staticmethod + def _aggregate( + rows, user_id, product_name, from_row: Callable + ) -> GroupExtraProperties: merged_standard_extra_properties = None for row in rows: - group_extra_properties = GroupExtraProperties.from_row(row) + group_extra_properties: GroupExtraProperties = from_row(row) match row.type: case GroupType.PRIMARY: # this always has highest priority @@ -153,3 +171,56 @@ async def get_aggregated_properties_for_user( return merged_standard_extra_properties msg = f"Properties for user {user_id} in {product_name} not found" raise GroupExtraPropertiesNotFoundError(msg) + + @staticmethod + async def get_aggregated_properties_for_user( + connection: SAConnection, + *, + user_id: int, + product_name: str, + ) -> GroupExtraProperties: + warnings.warn( + _WARNING_FMSG.format( + "get_aggregated_properties_for_user", + "get_aggregated_properties_for_user_v2", + ), + DeprecationWarning, + 
stacklevel=1, + ) + + list_stmt = _list_table_entries_ordered_by_group_type_stmt( + user_id=user_id, product_name=product_name + ) + + result = await connection.execute( + sa.select(list_stmt).order_by(list_stmt.c.type_order) + ) + assert result # nosec + + rows: list[RowProxy] | None = await result.fetchall() + assert rows is not None # nosec + + return GroupExtraPropertiesRepo._aggregate( + rows, user_id, product_name, GroupExtraProperties.from_row_proxy + ) + + @staticmethod + async def get_aggregated_properties_for_user_v2( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + user_id: int, + product_name: str, + ) -> GroupExtraProperties: + async with pass_or_acquire_connection(engine, connection) as conn: + + list_stmt = _list_table_entries_ordered_by_group_type_stmt( + user_id=user_id, product_name=product_name + ) + result = await conn.stream( + sa.select(list_stmt).order_by(list_stmt.c.type_order) + ) + rows = [row async for row in result] + return GroupExtraPropertiesRepo._aggregate( + rows, user_id, product_name, GroupExtraProperties.from_row + ) diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_models.py b/packages/postgres-database/src/simcore_postgres_database/utils_models.py index 0fe50578aae..2cbf0e1d699 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_models.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_models.py @@ -2,6 +2,7 @@ from typing import TypeVar from aiopg.sa.result import RowProxy +from sqlalchemy.engine.row import Row ModelType = TypeVar("ModelType") @@ -10,7 +11,13 @@ class FromRowMixin: """Mixin to allow instance construction from aiopg.sa.result.RowProxy""" @classmethod - def from_row(cls: type[ModelType], row: RowProxy) -> ModelType: + def from_row_proxy(cls: type[ModelType], row: RowProxy) -> ModelType: assert is_dataclass(cls) # nosec field_names = [f.name for f in fields(cls)] return cls(**{k: v for k, v in row.items() if k in field_names}) # type: ignore[return-value] + + @classmethod + def from_row(cls: type[ModelType], row: Row) -> ModelType: + assert is_dataclass(cls) # nosec + field_names = [f.name for f in fields(cls)] + return cls(**{k: v for k, v in row._asdict().items() if k in field_names}) # type: ignore[return-value] diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_repos.py b/packages/postgres-database/src/simcore_postgres_database/utils_repos.py index e013a09b526..efbdebc48f2 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_repos.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_repos.py @@ -13,7 +13,13 @@ async def pass_or_acquire_connection( ) -> AsyncIterator[AsyncConnection]: """ When to use: For READ operations! - It ensures that a connection is available for use within the context, either by using an existing connection passed as a parameter or by acquiring a new one from the engine. The caller must manage the lifecycle of any connection explicitly passed in, but the function handles the cleanup for connections it creates itself. This function **does not open new transactions** and therefore is recommended only for read-only database operations. + It ensures that a connection is available for use within the context, + either by using an existing connection passed as a parameter or by acquiring a new one from the engine. 
+ + The caller must manage the lifecycle of any connection explicitly passed in, but the function handles the + cleanup for connections it creates itself. + + This function **does not open new transactions** and therefore is recommended only for read-only database operations. """ # NOTE: When connection is passed, the engine is actually not needed # NOTE: Creator is responsible of closing connection @@ -36,7 +42,8 @@ async def transaction_context( ): """ When to use: For WRITE operations! - This function manages the database connection and ensures that a transaction context is established for write operations. It supports both outer and nested transactions, providing flexibility for scenarios where transactions may already exist in the calling context. + This function manages the database connection and ensures that a transaction context is established for write operations. + It supports both outer and nested transactions, providing flexibility for scenarios where transactions may already exist in the calling context. """ async with pass_or_acquire_connection(engine, connection) as conn: if conn.in_transaction(): diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_users.py b/packages/postgres-database/src/simcore_postgres_database/utils_users.py index 9026cdd27b4..ac5426bafde 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_users.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_users.py @@ -10,6 +10,7 @@ import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy +from sqlalchemy import Column from .errors import UniqueViolation from .models.users import UserRole, UserStatus, users @@ -134,8 +135,8 @@ async def join_and_update_from_pre_registration_details( ) @staticmethod - async def get_billing_details(conn: SAConnection, user_id: int) -> RowProxy | None: - result = await conn.execute( + def get_billing_details_query(user_id: int): + return ( sa.select( users.c.first_name, users.c.last_name, @@ -155,6 +156,12 @@ async def get_billing_details(conn: SAConnection, user_id: int) -> RowProxy | No ) .where(users.c.id == user_id) ) + + @staticmethod + async def get_billing_details(conn: SAConnection, user_id: int) -> RowProxy | None: + result = await conn.execute( + UsersRepo.get_billing_details_query(user_id=user_id) + ) value: RowProxy | None = await result.fetchone() return value @@ -208,7 +215,44 @@ async def is_email_used(conn: SAConnection, email: str) -> bool: users_pre_registration_details.c.pre_email == email ) ) - if pre_registered: - return True - - return False + return bool(pre_registered) + + +# +# Privacy settings +# + + +def is_private(hide_attribute: Column, caller_id: int): + return hide_attribute.is_(True) & (users.c.id != caller_id) + + +def is_public(hide_attribute: Column, caller_id: int): + return hide_attribute.is_(False) | (users.c.id == caller_id) + + +def visible_user_profile_cols(caller_id: int): + """Returns user profile columns with visibility constraints applied based on privacy settings.""" + return ( + sa.case( + ( + is_private(users.c.privacy_hide_email, caller_id), + None, + ), + else_=users.c.email, + ).label("email"), + sa.case( + ( + is_private(users.c.privacy_hide_fullname, caller_id), + None, + ), + else_=users.c.first_name, + ).label("first_name"), + sa.case( + ( + is_private(users.c.privacy_hide_fullname, caller_id), + None, + ), + else_=users.c.last_name, + ).label("last_name"), + ) diff --git 
a/packages/postgres-database/tests/test_users.py b/packages/postgres-database/tests/test_users.py index 97bfa3b2f99..1c10636e772 100644 --- a/packages/postgres-database/tests/test_users.py +++ b/packages/postgres-database/tests/test_users.py @@ -12,12 +12,7 @@ from faker import Faker from pytest_simcore.helpers.faker_factories import random_user from simcore_postgres_database.errors import InvalidTextRepresentation, UniqueViolation -from simcore_postgres_database.models.users import ( - _USER_ROLE_TO_LEVEL, - UserRole, - UserStatus, - users, -) +from simcore_postgres_database.models.users import UserRole, UserStatus, users from simcore_postgres_database.utils_users import ( UsersRepo, _generate_random_chars, @@ -26,78 +21,6 @@ from sqlalchemy.sql import func -def test_user_role_to_level_map_in_sync(): - # If fails, then update _USER_ROLE_TO_LEVEL map - assert set(_USER_ROLE_TO_LEVEL.keys()) == set(UserRole.__members__.keys()) - - -def test_user_roles_compares_to_admin(): - assert UserRole.ANONYMOUS < UserRole.ADMIN - assert UserRole.GUEST < UserRole.ADMIN - assert UserRole.USER < UserRole.ADMIN - assert UserRole.TESTER < UserRole.ADMIN - assert UserRole.PRODUCT_OWNER < UserRole.ADMIN - assert UserRole.ADMIN == UserRole.ADMIN - - -def test_user_roles_compares_to_product_owner(): - assert UserRole.ANONYMOUS < UserRole.PRODUCT_OWNER - assert UserRole.GUEST < UserRole.PRODUCT_OWNER - assert UserRole.USER < UserRole.PRODUCT_OWNER - assert UserRole.TESTER < UserRole.PRODUCT_OWNER - assert UserRole.PRODUCT_OWNER == UserRole.PRODUCT_OWNER - assert UserRole.ADMIN > UserRole.PRODUCT_OWNER - - -def test_user_roles_compares_to_tester(): - assert UserRole.ANONYMOUS < UserRole.TESTER - assert UserRole.GUEST < UserRole.TESTER - assert UserRole.USER < UserRole.TESTER - assert UserRole.TESTER == UserRole.TESTER - assert UserRole.PRODUCT_OWNER > UserRole.TESTER - assert UserRole.ADMIN > UserRole.TESTER - - -def test_user_roles_compares_to_user(): - assert UserRole.ANONYMOUS < UserRole.USER - assert UserRole.GUEST < UserRole.USER - assert UserRole.USER == UserRole.USER - assert UserRole.TESTER > UserRole.USER - assert UserRole.PRODUCT_OWNER > UserRole.USER - assert UserRole.ADMIN > UserRole.USER - - -def test_user_roles_compares_to_guest(): - assert UserRole.ANONYMOUS < UserRole.GUEST - assert UserRole.GUEST == UserRole.GUEST - assert UserRole.USER > UserRole.GUEST - assert UserRole.TESTER > UserRole.GUEST - assert UserRole.PRODUCT_OWNER > UserRole.GUEST - assert UserRole.ADMIN > UserRole.GUEST - - -def test_user_roles_compares_to_anonymous(): - assert UserRole.ANONYMOUS == UserRole.ANONYMOUS - assert UserRole.GUEST > UserRole.ANONYMOUS - assert UserRole.USER > UserRole.ANONYMOUS - assert UserRole.TESTER > UserRole.ANONYMOUS - assert UserRole.PRODUCT_OWNER > UserRole.ANONYMOUS - assert UserRole.ADMIN > UserRole.ANONYMOUS - - -def test_user_roles_compares(): - # < and > - assert UserRole.TESTER < UserRole.ADMIN - assert UserRole.ADMIN > UserRole.TESTER - - # >=, == and <= - assert UserRole.TESTER <= UserRole.ADMIN - assert UserRole.ADMIN >= UserRole.TESTER - - assert UserRole.ADMIN <= UserRole.ADMIN - assert UserRole.ADMIN == UserRole.ADMIN - - @pytest.fixture async def clean_users_db_table(connection: SAConnection): yield diff --git a/packages/postgres-database/tests/test_utils_groups_extra_properties.py b/packages/postgres-database/tests/test_utils_groups_extra_properties.py index fafc97d1551..e7900de6082 100644 --- a/packages/postgres-database/tests/test_utils_groups_extra_properties.py +++ 
b/packages/postgres-database/tests/test_utils_groups_extra_properties.py @@ -21,6 +21,7 @@ GroupExtraPropertiesRepo, ) from sqlalchemy import literal_column +from sqlalchemy.ext.asyncio import AsyncEngine async def test_get_raises_if_not_found( @@ -64,7 +65,7 @@ async def _creator( assert result row = await result.first() assert row - properties = GroupExtraProperties.from_row(row) + properties = GroupExtraProperties.from_row_proxy(row) created_properties.append((properties.group_id, properties.product_name)) return properties @@ -101,6 +102,28 @@ async def test_get( assert created_extra_properties == received_extra_properties +async def test_get_v2( + asyncpg_engine: AsyncEngine, + registered_user: RowProxy, + product_name: str, + create_fake_product: Callable[..., Awaitable[RowProxy]], + create_fake_group_extra_properties: Callable[..., Awaitable[GroupExtraProperties]], +): + with pytest.raises(GroupExtraPropertiesNotFoundError): + await GroupExtraPropertiesRepo.get_v2( + asyncpg_engine, gid=registered_user.primary_gid, product_name=product_name + ) + + await create_fake_product(product_name) + created_extra_properties = await create_fake_group_extra_properties( + registered_user.primary_gid, product_name + ) + received_extra_properties = await GroupExtraPropertiesRepo.get_v2( + asyncpg_engine, gid=registered_user.primary_gid, product_name=product_name + ) + assert created_extra_properties == received_extra_properties + + @pytest.fixture async def everyone_group_id(connection: aiopg.sa.connection.SAConnection) -> int: result = await connection.scalar( @@ -355,3 +378,114 @@ async def test_get_aggregated_properties_for_user_returns_property_values_as_tru assert aggregated_group_properties.internet_access is False assert aggregated_group_properties.override_services_specifications is False assert aggregated_group_properties.use_on_demand_clusters is True + + +async def test_get_aggregated_properties_for_user_returns_property_values_as_truthy_if_one_of_them_is_v2( + asyncpg_engine: AsyncEngine, + connection: aiopg.sa.connection.SAConnection, + product_name: str, + registered_user: RowProxy, + create_fake_product: Callable[..., Awaitable[RowProxy]], + create_fake_group: Callable[..., Awaitable[RowProxy]], + create_fake_group_extra_properties: Callable[..., Awaitable[GroupExtraProperties]], + everyone_group_id: int, +): + await create_fake_product(product_name) + await create_fake_product(f"{product_name}_additional_just_for_fun") + + # create a specific extra properties for group that disallow everything + everyone_group_extra_properties = await create_fake_group_extra_properties( + everyone_group_id, + product_name, + internet_access=False, + override_services_specifications=False, + use_on_demand_clusters=False, + ) + # this should return the everyone group properties + aggregated_group_properties = ( + await GroupExtraPropertiesRepo.get_aggregated_properties_for_user_v2( + asyncpg_engine, user_id=registered_user.id, product_name=product_name + ) + ) + assert aggregated_group_properties == everyone_group_extra_properties + + # now we create some standard groups and add the user to them and make everything false for now + standard_groups = [await create_fake_group(connection) for _ in range(5)] + for group in standard_groups: + await create_fake_group_extra_properties( + group.gid, + product_name, + internet_access=False, + override_services_specifications=False, + use_on_demand_clusters=False, + ) + await _add_user_to_group( + connection, user_id=registered_user.id, group_id=group.gid + ) + 
+ # now none of these values should be truthy yet + aggregated_group_properties = ( + await GroupExtraPropertiesRepo.get_aggregated_properties_for_user_v2( + asyncpg_engine, user_id=registered_user.id, product_name=product_name + ) + ) + assert aggregated_group_properties.internet_access is False + assert aggregated_group_properties.override_services_specifications is False + assert aggregated_group_properties.use_on_demand_clusters is False + + # let's change one of these standard groups + random_standard_group = random.choice(standard_groups) # noqa: S311 + result = await connection.execute( + groups_extra_properties.update() + .where(groups_extra_properties.c.group_id == random_standard_group.gid) + .values(internet_access=True) + ) + assert result.rowcount == 1 + + # now we should have internet access + aggregated_group_properties = ( + await GroupExtraPropertiesRepo.get_aggregated_properties_for_user_v2( + asyncpg_engine, user_id=registered_user.id, product_name=product_name + ) + ) + assert aggregated_group_properties.internet_access is True + assert aggregated_group_properties.override_services_specifications is False + assert aggregated_group_properties.use_on_demand_clusters is False + + # let's change another one of these standard groups + random_standard_group = random.choice(standard_groups) # noqa: S311 + result = await connection.execute( + groups_extra_properties.update() + .where(groups_extra_properties.c.group_id == random_standard_group.gid) + .values(override_services_specifications=True) + ) + assert result.rowcount == 1 + + # now we should have internet access and service override + aggregated_group_properties = ( + await GroupExtraPropertiesRepo.get_aggregated_properties_for_user_v2( + asyncpg_engine, user_id=registered_user.id, product_name=product_name + ) + ) + assert aggregated_group_properties.internet_access is True + assert aggregated_group_properties.override_services_specifications is True + assert aggregated_group_properties.use_on_demand_clusters is False + + # and we can deny it again by setting a primary extra property + # now create some personal extra properties + personal_group_extra_properties = await create_fake_group_extra_properties( + registered_user.primary_gid, + product_name, + internet_access=False, + use_on_demand_clusters=True, + ) + assert personal_group_extra_properties + + aggregated_group_properties = ( + await GroupExtraPropertiesRepo.get_aggregated_properties_for_user_v2( + asyncpg_engine, user_id=registered_user.id, product_name=product_name + ) + ) + assert aggregated_group_properties.internet_access is False + assert aggregated_group_properties.override_services_specifications is False + assert aggregated_group_properties.use_on_demand_clusters is True diff --git a/packages/postgres-database/tests/test_utils_projects.py b/packages/postgres-database/tests/test_utils_projects.py index c0c00d271e6..c97c822090f 100644 --- a/packages/postgres-database/tests/test_utils_projects.py +++ b/packages/postgres-database/tests/test_utils_projects.py @@ -3,9 +3,9 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments import uuid -from collections.abc import Awaitable, Callable -from datetime import datetime, timezone -from typing import Any, AsyncIterator +from collections.abc import AsyncIterator, Awaitable, Callable +from datetime import UTC, datetime +from typing import Any import pytest import sqlalchemy as sa @@ -53,7 +53,7 @@ async def registered_project( await _delete_project(connection, project["uuid"])
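The v2 tests above pin down the aggregation precedence: a primary group's extra properties win outright, standard groups are OR-merged so any one of them can grant a flag, and the everyone group is only the fallback. A hedged sketch of that rule (a simplification for illustration, not the repo's `_aggregate` implementation):

```python
# Hedged sketch of the aggregation rule the tests above exercise:
# primary wins outright, standard groups OR-merge, everyone is the fallback.
from dataclasses import dataclass


@dataclass(frozen=True)
class Props:
    internet_access: bool
    override_services_specifications: bool
    use_on_demand_clusters: bool


def aggregate(primary: Props | None, standard: list[Props], everyone: Props) -> Props:
    if primary is not None:
        return primary  # highest priority, taken as-is
    if standard:
        # any standard group granting a flag grants it overall
        return Props(
            internet_access=any(p.internet_access for p in standard),
            override_services_specifications=any(
                p.override_services_specifications for p in standard
            ),
            use_on_demand_clusters=any(p.use_on_demand_clusters for p in standard),
        )
    return everyone
```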
-@pytest.mark.parametrize("expected", (datetime.now(tz=timezone.utc), None)) +@pytest.mark.parametrize("expected", (datetime.now(tz=UTC), None)) async def test_get_project_trashed_at_column_can_be_converted_to_datetime( asyncpg_engine: AsyncEngine, registered_project: dict, expected: datetime | None ): diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py index 579d9b52bca..e848ddc6df1 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py @@ -7,15 +7,16 @@ import json import logging import subprocess -from collections.abc import Iterator +from collections.abc import AsyncIterator, Awaitable, Callable, Iterator from contextlib import suppress from pathlib import Path -from typing import Any, AsyncIterator, Awaitable, Callable +from typing import Any import aiodocker import docker import pytest import yaml +from common_library.dict_tools import copy_from_dict from docker.errors import APIError from faker import Faker from tenacity import AsyncRetrying, Retrying, TryAgain, retry @@ -25,7 +26,6 @@ from tenacity.wait import wait_fixed, wait_random_exponential from .helpers.constants import HEADER_STR, MINUTE -from .helpers.dict_tools import copy_from_dict from .helpers.host import get_localhost_ip from .helpers.typing_env import EnvVarsDict @@ -222,7 +222,7 @@ def _deploy_stack(compose_file: Path, stack_name: str) -> None: f"{stack_name}", ] subprocess.run( - cmd, # noqa: S603 + cmd, check=True, cwd=compose_file.parent, capture_output=True, @@ -238,7 +238,7 @@ def _deploy_stack(compose_file: Path, stack_name: str) -> None: def _make_dask_sidecar_certificates(simcore_service_folder: Path) -> None: dask_sidecar_root_folder = simcore_service_folder / "dask-sidecar" subprocess.run( - ["make", "certificates"], # noqa: S603, S607 + ["make", "certificates"], # noqa: S607 cwd=dask_sidecar_root_folder, check=True, capture_output=True, diff --git a/packages/pytest-simcore/src/pytest_simcore/examples/models_library.py b/packages/pytest-simcore/src/pytest_simcore/examples/models_library.py index 8af09913f71..cbd0f18d8ff 100644 --- a/packages/pytest-simcore/src/pytest_simcore/examples/models_library.py +++ b/packages/pytest-simcore/src/pytest_simcore/examples/models_library.py @@ -25,6 +25,18 @@ }, "data": ["data 5", "data 6", "data 7"], }, + # empty page + { + "_meta": {"total": 0, "count": 0, "limit": 4, "offset": 0}, + "_links": { + "self": "https://osparc.io/v2/listing?offset=0&limit=4", + "first": "https://osparc.io/v2/listing?offset=0&limit=4", + "prev": None, + "next": None, + "last": "https://osparc.io/v2/listing?offset=0&limit=4", + }, + "data": [], + }, ] RPC_PAGE_EXAMPLES: Final[list[dict]] = [ @@ -52,4 +64,16 @@ }, "data": ["data 5", "data 6", "data 7"], }, + # empty page + { + "_meta": {"total": 0, "count": 0, "limit": 4, "offset": 0}, + "_links": { + "self": {"offset": 0, "limit": 4}, + "first": {"offset": 0, "limit": 4}, + "prev": None, + "next": None, + "last": {"offset": 0, "limit": 4}, + }, + "data": [], + }, ] diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/aws_ec2.py b/packages/pytest-simcore/src/pytest_simcore/helpers/aws_ec2.py index 1e992f4ee45..7bb826149fe 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/aws_ec2.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/aws_ec2.py @@ -42,7 +42,10 @@ async def assert_autoscaled_dynamic_ec2_instances( expected_instance_state: 
InstanceStateNameType, expected_additional_tag_keys: list[str], instance_filters: Sequence[FilterTypeDef] | None, + expected_user_data: list[str] | None = None, ) -> list[InstanceTypeDef]: + if expected_user_data is None: + expected_user_data = ["docker swarm join"] return await assert_ec2_instances( ec2_client, expected_num_reservations=expected_num_reservations, @@ -54,7 +57,7 @@ async def assert_autoscaled_dynamic_ec2_instances( "io.simcore.autoscaling.monitored_services_labels", *expected_additional_tag_keys, ], - expected_user_data=["docker swarm join"], + expected_user_data=expected_user_data, instance_filters=instance_filters, ) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py index d4418a5ef81..803987b3b8a 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py @@ -16,10 +16,11 @@ import json import random from collections.abc import Callable -from datetime import datetime, timedelta, timezone +from datetime import UTC, datetime, timedelta from typing import Any, Final from uuid import uuid4 +import arrow import faker from faker import Faker @@ -182,7 +183,7 @@ def fake_task_factory(first_internal_id=1) -> Callable: _index_in_sequence = itertools.count(start=first_internal_id) def fake_task(**overrides) -> dict[str, Any]: - t0 = datetime.utcnow() + t0 = arrow.utcnow().datetime data = { "project_id": uuid4(), "node_id": uuid4(), @@ -193,7 +194,6 @@ def fake_task(**overrides) -> dict[str, Any]: "outputs": json.dumps({}), "image": json.dumps({}), "state": random.choice(_get_comp_pipeline_test_states()), - "submit": t0, "start": t0 + timedelta(seconds=1), "end": t0 + timedelta(minutes=5), } @@ -251,7 +251,7 @@ def random_product( def utcnow() -> datetime: - return datetime.now(tz=timezone.utc) + return datetime.now(tz=UTC) def random_payment_method( diff --git a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py index 5b85a036d79..a2b46d06679 100644 --- a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py +++ b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py @@ -172,9 +172,6 @@ async def director_v2_service_mock( r"^http://[a-z\-_]*director-v2:[0-9]+/v2/computations/.*:stop$" ) delete_computation_pattern = get_computation_pattern - projects_networks_pattern = re.compile( - r"^http://[a-z\-_]*director-v2:[0-9]+/v2/dynamic_services/projects/.*/-/networks$" - ) get_services_pattern = re.compile( r"^http://[a-z\-_]*director-v2:[0-9]+/v2/dynamic_services.*$" @@ -202,7 +199,6 @@ async def director_v2_service_mock( repeat=True, ) aioresponses_mocker.delete(delete_computation_pattern, status=204, repeat=True) - aioresponses_mocker.patch(projects_networks_pattern, status=204, repeat=True) return aioresponses_mocker diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_groups_fixtures.py b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_groups_fixtures.py index 0c79aba5622..be032c8f6f4 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_groups_fixtures.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_groups_fixtures.py @@ -19,7 +19,7 @@ from models_library.groups import GroupsByTypeTuple, StandardGroupCreate from 
models_library.users import UserID from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict -from simcore_service_webserver.groups._groups_api import ( +from simcore_service_webserver.groups._groups_service import ( add_user_in_group, create_standard_group, delete_standard_group, @@ -29,7 +29,9 @@ def _groupget_model_dump(group, access_rights) -> dict[str, Any]: return GroupGet.from_model(group, access_rights).model_dump( - mode="json", by_alias=True + mode="json", + by_alias=True, + exclude_unset=True, ) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/containers.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/containers.py new file mode 100644 index 00000000000..2049f0a409f --- /dev/null +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/containers.py @@ -0,0 +1,37 @@ +import logging +from datetime import timedelta +from typing import Final + +from models_library.docker import DockerNodeID +from models_library.projects_nodes_io import NodeID +from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace +from pydantic import NonNegativeInt, TypeAdapter +from servicelib.logging_utils import log_decorator +from servicelib.rabbitmq import RabbitMQRPCClient + +_logger = logging.getLogger(__name__) + +_REQUEST_TIMEOUT: Final[NonNegativeInt] = int(timedelta(minutes=60).total_seconds()) + + +@log_decorator(_logger, level=logging.DEBUG) +async def force_container_cleanup( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + docker_node_id: DockerNodeID, + swarm_stack_name: str, + node_id: NodeID, +) -> None: + result = await rabbitmq_rpc_client.request( + RPCNamespace.from_entries( + { + "service": "agent", + "docker_node_id": docker_node_id, + "swarm_stack_name": swarm_stack_name, + } + ), + TypeAdapter(RPCMethodName).validate_python("force_container_cleanup"), + node_id=node_id, + timeout_s=_REQUEST_TIMEOUT, + ) + assert result is None # nosec diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/volumes.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/volumes.py index 043898dcb30..41cf2ffd8b8 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/volumes.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/volumes.py @@ -2,6 +2,7 @@ from datetime import timedelta from typing import Final +from models_library.docker import DockerNodeID from models_library.projects_nodes_io import NodeID from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace from pydantic import NonNegativeInt, TypeAdapter @@ -17,7 +18,7 @@ async def remove_volumes_without_backup_for_service( rabbitmq_rpc_client: RabbitMQRPCClient, *, - docker_node_id: str, + docker_node_id: DockerNodeID, swarm_stack_name: str, node_id: NodeID, ) -> None: @@ -42,7 +43,7 @@ async def remove_volumes_without_backup_for_service( async def backup_and_remove_volumes_for_all_services( rabbitmq_rpc_client: RabbitMQRPCClient, *, - docker_node_id: str, + docker_node_id: DockerNodeID, swarm_stack_name: str, ) -> None: result = await rabbitmq_rpc_client.request( diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py index d941f889bd7..fb3276ae670 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py +++ 
b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py @@ -1,7 +1,11 @@ import logging from typing import Final -from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_directorv2.dynamic_services import ( + DynamicServiceGet, + GetProjectInactivityResponse, + RetrieveDataOutEnveloped, +) from models_library.api_schemas_dynamic_scheduler import DYNAMIC_SCHEDULER_RPC_NAMESPACE from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( DynamicServiceStart, @@ -11,6 +15,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.rabbitmq_basic_types import RPCMethodName +from models_library.services_types import ServicePortKey from models_library.users import UserID from pydantic import NonNegativeInt, TypeAdapter from servicelib.logging_utils import log_decorator @@ -93,3 +98,69 @@ async def stop_dynamic_service( timeout_s=timeout_s, ) assert result is None # nosec + + +@log_decorator(_logger, level=logging.DEBUG) +async def get_project_inactivity( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + project_id: ProjectID, + max_inactivity_seconds: NonNegativeInt, +) -> GetProjectInactivityResponse: + result = await rabbitmq_rpc_client.request( + DYNAMIC_SCHEDULER_RPC_NAMESPACE, + _RPC_METHOD_NAME_ADAPTER.validate_python("get_project_inactivity"), + project_id=project_id, + max_inactivity_seconds=max_inactivity_seconds, + timeout_s=_RPC_DEFAULT_TIMEOUT_S, + ) + assert isinstance(result, GetProjectInactivityResponse) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def restart_user_services( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + timeout_s: NonNegativeInt, +) -> None: + result = await rabbitmq_rpc_client.request( + DYNAMIC_SCHEDULER_RPC_NAMESPACE, + _RPC_METHOD_NAME_ADAPTER.validate_python("restart_user_services"), + node_id=node_id, + timeout_s=timeout_s, + ) + assert result is None # nosec + + +@log_decorator(_logger, level=logging.DEBUG) +async def retrieve_inputs( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + port_keys: list[ServicePortKey], + timeout_s: NonNegativeInt, +) -> RetrieveDataOutEnveloped: + result = await rabbitmq_rpc_client.request( + DYNAMIC_SCHEDULER_RPC_NAMESPACE, + _RPC_METHOD_NAME_ADAPTER.validate_python("retrieve_inputs"), + node_id=node_id, + port_keys=port_keys, + timeout_s=timeout_s, + ) + assert isinstance(result, RetrieveDataOutEnveloped) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def update_projects_networks( + rabbitmq_rpc_client: RabbitMQRPCClient, *, project_id: ProjectID +) -> None: + result = await rabbitmq_rpc_client.request( + DYNAMIC_SCHEDULER_RPC_NAMESPACE, + _RPC_METHOD_NAME_ADAPTER.validate_python("update_projects_networks"), + project_id=project_id, + timeout_s=_RPC_DEFAULT_TIMEOUT_S, + ) + assert result is None # nosec diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py new file mode 100644 index 00000000000..42e578ee482 --- /dev/null +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py @@ -0,0 +1,29 @@ +from common_library.errors_classes import OsparcErrorMixin + + +class LicensesBaseError(OsparcErrorMixin, Exception): + ... 
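The license errors defined in this new module follow the `OsparcErrorMixin` pattern: each subclass declares a `msg_template` whose placeholders are filled from the keyword arguments passed when the error is raised. A hedged sketch of that mechanic (a simplified stand-in, not the `common_library` implementation):

```python
# Hedged sketch of the msg_template mechanic the error classes below rely on;
# a simplified stand-in, not the common_library implementation.
class _ErrorMixinSketch(Exception):
    msg_template: str = "unexpected error"

    def __init__(self, **ctx) -> None:
        self.ctx = ctx  # keep the raw context for structured logging
        super().__init__(self.msg_template.format(**ctx))


class _SeatsExhausted(_ErrorMixinSketch):
    msg_template = "only {available_num_of_seats} seats left for {licensed_item_id}"


try:
    raise _SeatsExhausted(available_num_of_seats=0, licensed_item_id="abc")
except _SeatsExhausted as err:
    print(err)  # -> only 0 seats left for abc
```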
+ + +class NotEnoughAvailableSeatsError(LicensesBaseError): + msg_template = "Not enough available seats: only {available_num_of_seats} seat(s) currently available for licensed item {license_item_id}" + + +class CanNotCheckoutNotEnoughAvailableSeatsError(LicensesBaseError): + msg_template = "Cannot checkout licensed item {licensed_item_id} with {num_of_seats} seat(s): only {available_num_of_seats} seat(s) currently available" + + +class CanNotCheckoutServiceIsNotRunningError(LicensesBaseError): + msg_template = "Cannot checkout licensed item {licensed_item_id} because the dynamic service is not running. Current service: {service_run}" + + +class LicensedItemCheckoutNotFoundError(LicensesBaseError): + msg_template = "Licensed item checkout {licensed_item_checkout_id} not found." + + +LICENSES_ERRORS = ( + NotEnoughAvailableSeatsError, + CanNotCheckoutNotEnoughAvailableSeatsError, + CanNotCheckoutServiceIsNotRunningError, + LicensedItemCheckoutNotFoundError, +) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/licensed_items_checkouts.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/licensed_items_checkouts.py new file mode 100644 index 00000000000..ed8c85dfd37 --- /dev/null +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/licensed_items_checkouts.py @@ -0,0 +1,126 @@ +import logging +from typing import Final + +from models_library.api_schemas_resource_usage_tracker import ( + RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, +) +from models_library.api_schemas_resource_usage_tracker.licensed_items_checkouts import ( + LicensedItemCheckoutGet, + LicensedItemsCheckoutsPage, +) +from models_library.basic_types import IDStr +from models_library.licensed_items import LicensedItemID +from models_library.products import ProductName +from models_library.rabbitmq_basic_types import RPCMethodName +from models_library.resource_tracker_licensed_items_checkouts import ( + LicensedItemCheckoutID, +) +from models_library.rest_ordering import OrderBy +from models_library.services_types import ServiceRunID +from models_library.users import UserID +from models_library.wallets import WalletID +from pydantic import NonNegativeInt, TypeAdapter + +from ....logging_utils import log_decorator +from ...
import RabbitMQRPCClient + +_logger = logging.getLogger(__name__) + + +_DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 30 + +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + + +@log_decorator(_logger, level=logging.DEBUG) +async def get_licensed_item_checkout( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + product_name: ProductName, + licensed_item_checkout_id: LicensedItemCheckoutID, +) -> LicensedItemCheckoutGet: + result = await rabbitmq_rpc_client.request( + RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, + _RPC_METHOD_NAME_ADAPTER.validate_python("get_licensed_item_checkout"), + product_name=product_name, + licensed_item_checkout_id=licensed_item_checkout_id, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + assert isinstance(result, LicensedItemCheckoutGet) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def get_licensed_items_checkouts_page( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + product_name: ProductName, + filter_wallet_id: WalletID, + offset: int = 0, + limit: int = 20, + order_by: OrderBy | None = None, +) -> LicensedItemsCheckoutsPage: + """ + Default order_by field is "started_at" + """ + if order_by is None: + order_by = OrderBy(field=IDStr("started_at")) + + result = await rabbitmq_rpc_client.request( + RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, + _RPC_METHOD_NAME_ADAPTER.validate_python("get_licensed_items_checkouts_page"), + product_name=product_name, + filter_wallet_id=filter_wallet_id, + limit=limit, + offset=offset, + order_by=order_by, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + assert isinstance(result, LicensedItemsCheckoutsPage) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def checkout_licensed_item( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + licensed_item_id: LicensedItemID, + wallet_id: WalletID, + product_name: ProductName, + num_of_seats: int, + service_run_id: ServiceRunID, + user_id: UserID, + user_email: str, +) -> LicensedItemCheckoutGet: + result: LicensedItemCheckoutGet = await rabbitmq_rpc_client.request( + RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, + _RPC_METHOD_NAME_ADAPTER.validate_python("checkout_licensed_item"), + licensed_item_id=licensed_item_id, + wallet_id=wallet_id, + product_name=product_name, + num_of_seats=num_of_seats, + service_run_id=service_run_id, + user_id=user_id, + user_email=user_email, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + assert isinstance(result, LicensedItemCheckoutGet) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def release_licensed_item( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + licensed_item_checkout_id: LicensedItemCheckoutID, + product_name: ProductName, +) -> LicensedItemCheckoutGet: + result: LicensedItemCheckoutGet = await rabbitmq_rpc_client.request( + RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, + _RPC_METHOD_NAME_ADAPTER.validate_python("release_licensed_item"), + licensed_item_checkout_id=licensed_item_checkout_id, + product_name=product_name, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + assert isinstance(result, LicensedItemCheckoutGet) # nosec + return result diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/licensed_items_purchases.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/licensed_items_purchases.py index a9463271d75..125dbe655a0 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/licensed_items_purchases.py +++ 
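
The checkout/release helpers above are symmetric: `checkout_licensed_item` returns a `LicensedItemCheckoutGet`, and releasing the seat later requires the checkout's id. A hypothetical round-trip sketch follows; the product name and email are placeholders, and the returned model is assumed to expose a `licensed_item_checkout_id` attribute:

```python
from servicelib.rabbitmq import RabbitMQRPCClient
from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import (
    CanNotCheckoutNotEnoughAvailableSeatsError,
)
from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.licensed_items_checkouts import (
    checkout_licensed_item,
    release_licensed_item,
)


async def use_one_seat(
    rpc_client: RabbitMQRPCClient, *, licensed_item_id, wallet_id, user_id, service_run_id
) -> None:
    try:
        checkout = await checkout_licensed_item(
            rpc_client,
            licensed_item_id=licensed_item_id,
            wallet_id=wallet_id,
            product_name="osparc",  # placeholder product name
            num_of_seats=1,
            service_run_id=service_run_id,
            user_id=user_id,
            user_email="user@example.com",  # placeholder
        )
    except CanNotCheckoutNotEnoughAvailableSeatsError:
        return  # no free seats: nothing was checked out
    # ... run the licensed service, then free the seat again
    await release_licensed_item(
        rpc_client,
        # assumed attribute on LicensedItemCheckoutGet
        licensed_item_checkout_id=checkout.licensed_item_checkout_id,
        product_name="osparc",
    )
```

Note also that `get_licensed_items_checkouts_page` above takes `order_by: OrderBy | None = None` and builds the default inside the function body; the same pattern replaces the mutable `OrderBy(...)` default argument in `get_licensed_items_purchases_page` in the next hunk.
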
b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/licensed_items_purchases.py @@ -17,7 +17,7 @@ ) from models_library.rest_ordering import OrderBy from models_library.wallets import WalletID -from pydantic import AnyUrl, NonNegativeInt, TypeAdapter +from pydantic import NonNegativeInt, TypeAdapter from ....logging_utils import log_decorator from ....rabbitmq import RabbitMQRPCClient @@ -38,8 +38,14 @@ async def get_licensed_items_purchases_page( wallet_id: WalletID, offset: int = 0, limit: int = 20, - order_by: OrderBy = OrderBy(field=IDStr("purchased_at")), + order_by: OrderBy | None = None, ) -> LicensedItemsPurchasesPage: + """ + Default order_by field is "purchased_at" + """ + if order_by is None: + order_by = OrderBy(field=IDStr("purchased_at")) + result = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, _RPC_METHOD_NAME_ADAPTER.validate_python("get_licensed_items_purchases_page"), @@ -76,7 +82,7 @@ async def get_licensed_item_purchase( async def create_licensed_item_purchase( rabbitmq_rpc_client: RabbitMQRPCClient, *, data: LicensedItemsPurchasesCreate ) -> LicensedItemPurchaseGet: - result: AnyUrl = await rabbitmq_rpc_client.request( + result = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, _RPC_METHOD_NAME_ADAPTER.validate_python("create_licensed_item_purchase"), data=data, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/licenses/licensed_items.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/licenses/licensed_items.py index e212854bae5..0f86ab63d79 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/licenses/licensed_items.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/licenses/licensed_items.py @@ -5,10 +5,16 @@ LicensedItemGet, LicensedItemGetPage, ) +from models_library.api_schemas_webserver.licensed_items_checkouts import ( + LicensedItemCheckoutRpcGet, +) from models_library.licensed_items import LicensedItemID from models_library.products import ProductName from models_library.rabbitmq_basic_types import RPCMethodName -from models_library.resource_tracker import ServiceRunId +from models_library.resource_tracker_licensed_items_checkouts import ( + LicensedItemCheckoutID, +) +from models_library.services_types import ServiceRunID from models_library.users import UserID from models_library.wallets import WalletID from pydantic import TypeAdapter @@ -23,8 +29,8 @@ async def get_licensed_items( rabbitmq_rpc_client: RabbitMQRPCClient, *, product_name: str, - offset: int, - limit: int, + offset: int = 0, + limit: int = 20, ) -> LicensedItemGetPage: result: LicensedItemGetPage = await rabbitmq_rpc_client.request( WEBSERVER_RPC_NAMESPACE, @@ -33,26 +39,32 @@ async def get_licensed_items( offset=offset, limit=limit, ) - assert isinstance(result, LicensedItemGetPage) + assert isinstance(result, LicensedItemGetPage) # nosec return result @log_decorator(_logger, level=logging.DEBUG) -async def get_licensed_items_for_wallet( +async def get_available_licensed_items_for_wallet( rabbitmq_rpc_client: RabbitMQRPCClient, *, - user_id: UserID, product_name: ProductName, wallet_id: WalletID, -) -> LicensedItemGet: + user_id: UserID, + offset: int = 0, + limit: int = 20, +) -> LicensedItemGetPage: result: LicensedItemGet = await rabbitmq_rpc_client.request( WEBSERVER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("get_licensed_items_for_wallet"), - 
user_id=user_id, + TypeAdapter(RPCMethodName).validate_python( + "get_available_licensed_items_for_wallet" + ), product_name=product_name, + user_id=user_id, wallet_id=wallet_id, + offset=offset, + limit=limit, ) - assert isinstance(result, LicensedItemGet) # nosec + assert isinstance(result, LicensedItemGetPage) # nosec return result @@ -60,45 +72,41 @@ async def checkout_licensed_item_for_wallet( rabbitmq_rpc_client: RabbitMQRPCClient, *, - user_id: UserID, product_name: ProductName, + user_id: UserID, wallet_id: WalletID, licensed_item_id: LicensedItemID, num_of_seats: int, - service_run_id: ServiceRunId, -) -> None: + service_run_id: ServiceRunID, +) -> LicensedItemCheckoutRpcGet: result = await rabbitmq_rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("checkout_licensed_item_for_wallet"), - user_id=user_id, product_name=product_name, + user_id=user_id, wallet_id=wallet_id, licensed_item_id=licensed_item_id, num_of_seats=num_of_seats, service_run_id=service_run_id, ) - assert result is None # nosec + assert isinstance(result, LicensedItemCheckoutRpcGet) # nosec + return result @log_decorator(_logger, level=logging.DEBUG) async def release_licensed_item_for_wallet( rabbitmq_rpc_client: RabbitMQRPCClient, *, - user_id: UserID, product_name: ProductName, - wallet_id: WalletID, - licensed_item_id: LicensedItemID, - num_of_seats: int, - service_run_id: ServiceRunId, -) -> None: + user_id: UserID, + licensed_item_checkout_id: LicensedItemCheckoutID, +) -> LicensedItemCheckoutRpcGet: result = await rabbitmq_rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("release_licensed_item_for_wallet"), - user_id=user_id, product_name=product_name, - wallet_id=wallet_id, - licensed_item_id=licensed_item_id, - num_of_seats=num_of_seats, - service_run_id=service_run_id, + user_id=user_id, + licensed_item_checkout_id=licensed_item_checkout_id, ) - assert result is None # nosec + assert isinstance(result, LicensedItemCheckoutRpcGet) # nosec + return result diff --git a/services/agent/src/simcore_service_agent/api/rpc/_containers.py b/services/agent/src/simcore_service_agent/api/rpc/_containers.py new file mode 100644 index 00000000000..e7d651d6ede --- /dev/null +++ b/services/agent/src/simcore_service_agent/api/rpc/_containers.py @@ -0,0 +1,20 @@ +import logging + +from fastapi import FastAPI +from models_library.projects_nodes_io import NodeID +from servicelib.logging_utils import log_context +from servicelib.rabbitmq import RPCRouter + +from ...services.containers_manager import ContainersManager + +_logger = logging.getLogger(__name__) + +router = RPCRouter() + + +@router.expose() +async def force_container_cleanup(app: FastAPI, *, node_id: NodeID) -> None: + with log_context( + _logger, logging.INFO, f"removing all orphan containers for {node_id=}" + ): + await ContainersManager.get_from_app_state(app).force_container_cleanup(node_id) diff --git a/services/agent/src/simcore_service_agent/api/rpc/_volumes.py b/services/agent/src/simcore_service_agent/api/rpc/_volumes.py index 96edb817e62..9d2433a19af 100644 --- a/services/agent/src/simcore_service_agent/api/rpc/_volumes.py +++ b/services/agent/src/simcore_service_agent/api/rpc/_volumes.py @@ -7,7 +7,8 @@ from servicelib.rabbitmq.rpc_interfaces.agent.errors import ( NoServiceVolumesFoundRPCError, ) -from simcore_service_agent.services.volumes_manager import VolumesManager + +from ...services.volumes_manager import VolumesManager _logger =
logging.getLogger(__name__) diff --git a/services/agent/src/simcore_service_agent/api/rpc/routes.py b/services/agent/src/simcore_service_agent/api/rpc/routes.py index 7a658ae5280..e8b0cea8f4c 100644 --- a/services/agent/src/simcore_service_agent/api/rpc/routes.py +++ b/services/agent/src/simcore_service_agent/api/rpc/routes.py @@ -4,9 +4,10 @@ from simcore_service_agent.core.settings import ApplicationSettings from ...services.rabbitmq import get_rabbitmq_rpc_server -from . import _volumes +from . import _containers, _volumes ROUTERS: list[RPCRouter] = [ + _containers.router, _volumes.router, ] diff --git a/services/agent/src/simcore_service_agent/core/application.py b/services/agent/src/simcore_service_agent/core/application.py index fe226a33558..b0cfa8720e4 100644 --- a/services/agent/src/simcore_service_agent/core/application.py +++ b/services/agent/src/simcore_service_agent/core/application.py @@ -18,6 +18,7 @@ ) from ..api.rest.routes import setup_rest_api from ..api.rpc.routes import setup_rpc_api_routes +from ..services.containers_manager import setup_containers_manager from ..services.instrumentation import setup_instrumentation from ..services.rabbitmq import setup_rabbitmq from ..services.volumes_manager import setup_volume_manager @@ -28,8 +29,8 @@ def _setup_logger(settings: ApplicationSettings): # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 - logging.basicConfig(level=settings.LOGLEVEL.value) # NOSONAR - logging.root.setLevel(settings.LOGLEVEL.value) + logging.basicConfig(level=settings.LOG_LEVEL.value) # NOSONAR + logging.root.setLevel(settings.LOG_LEVEL.value) config_all_loggers( log_format_local_dev_enabled=settings.AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.AGENT_VOLUMES_LOG_FILTER_MAPPING, @@ -58,6 +59,7 @@ def create_app() -> FastAPI: setup_rabbitmq(app) setup_volume_manager(app) + setup_containers_manager(app) setup_rest_api(app) setup_rpc_api_routes(app) diff --git a/services/agent/src/simcore_service_agent/core/settings.py b/services/agent/src/simcore_service_agent/core/settings.py index f37d7c8d263..742d3bf02d1 100644 --- a/services/agent/src/simcore_service_agent/core/settings.py +++ b/services/agent/src/simcore_service_agent/core/settings.py @@ -1,6 +1,7 @@ from datetime import timedelta from models_library.basic_types import BootModeEnum, LogLevel +from models_library.docker import DockerNodeID from pydantic import AliasChoices, AnyHttpUrl, Field, field_validator from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.base import BaseCustomSettings @@ -11,7 +12,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): - LOGLEVEL: LogLevel = Field( + LOG_LEVEL: LogLevel = Field( LogLevel.WARNING, validation_alias=AliasChoices( "AGENT_LOGLEVEL", @@ -79,7 +80,9 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): AGENT_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True - AGENT_DOCKER_NODE_ID: str = Field(..., description="used by the rabbitmq module") + AGENT_DOCKER_NODE_ID: DockerNodeID = Field( + ..., description="used by the rabbitmq module" + ) AGENT_RABBITMQ: RabbitSettings = Field( description="settings for service/rabbitmq", @@ -91,7 +94,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): json_schema_extra={"auto_default_from_env": True}, ) - @field_validator("LOGLEVEL") + @field_validator("LOG_LEVEL") @classmethod def valid_log_level(cls, value) -> LogLevel: return LogLevel(cls.validate_log_level(value)) diff 
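
Typing `AGENT_DOCKER_NODE_ID` as `DockerNodeID` in the settings hunk above moves validation to settings-parsing time instead of letting a malformed id surface later inside the RPC-namespace composition. A small sketch of the effect; the exact constraint lives in `models_library.docker`, and the test-fixture change further below (from `"test-node-id"` to `"testnodeid"`) suggests hyphenated values no longer pass:

```python
from models_library.docker import DockerNodeID
from pydantic import TypeAdapter, ValidationError

adapter = TypeAdapter(DockerNodeID)
print(adapter.validate_python("testnodeid"))  # accepted, as in the test fixtures

try:
    adapter.validate_python("test-node-id")  # the old fixture value
except ValidationError:
    print("rejected before any RPC namespace is composed")
```
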
--git a/services/agent/src/simcore_service_agent/models/volumes.py b/services/agent/src/simcore_service_agent/models/volumes.py index cf227bf69e9..68f20cae559 100644 --- a/services/agent/src/simcore_service_agent/models/volumes.py +++ b/services/agent/src/simcore_service_agent/models/volumes.py @@ -6,14 +6,14 @@ ) from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID -from models_library.services_types import RunID +from models_library.services_types import ServiceRunID from models_library.users import UserID from pydantic import BaseModel, ConfigDict, Field, TypeAdapter class DynamicServiceVolumeLabels(BaseModel): node_uuid: NodeID - run_id: RunID + run_id: ServiceRunID source: str study_id: ProjectID swarm_stack_name: str diff --git a/services/agent/src/simcore_service_agent/services/containers_manager.py b/services/agent/src/simcore_service_agent/services/containers_manager.py new file mode 100644 index 00000000000..ca2317e156e --- /dev/null +++ b/services/agent/src/simcore_service_agent/services/containers_manager.py @@ -0,0 +1,71 @@ +import logging +from dataclasses import dataclass, field + +from aiodocker import Docker +from fastapi import FastAPI +from models_library.api_schemas_directorv2.services import ( + DYNAMIC_PROXY_SERVICE_PREFIX, + DYNAMIC_SIDECAR_SERVICE_PREFIX, +) +from models_library.projects_nodes_io import NodeID +from servicelib.fastapi.app_state import SingletonInAppStateMixin + +from .docker_utils import get_containers_with_prefixes, remove_container_forcefully + +_logger = logging.getLogger(__name__) + + +@dataclass +class ContainersManager(SingletonInAppStateMixin): + app_state_name: str = "containers_manager" + + docker: Docker = field(default_factory=Docker) + + async def force_container_cleanup(self, node_id: NodeID) -> None: + # compose all possible used container prefixes + proxy_prefix = f"{DYNAMIC_PROXY_SERVICE_PREFIX}_{node_id}" + dy_sidecar_prefix = f"{DYNAMIC_SIDECAR_SERVICE_PREFIX}_{node_id}" + user_service_prefix = f"{DYNAMIC_SIDECAR_SERVICE_PREFIX}-{node_id}" + + orphan_containers = await get_containers_with_prefixes( + self.docker, {proxy_prefix, dy_sidecar_prefix, user_service_prefix} + ) + _logger.debug( + "Detected orphan containers for node_id='%s': %s", + node_id, + orphan_containers, + ) + + unexpected_orphans = { + orphan + for orphan in orphan_containers + if orphan.startswith(user_service_prefix) + } + if unexpected_orphans: + _logger.warning( + "Unexpected orphans detected for node_id='%s': %s", + node_id, + unexpected_orphans, + ) + + # avoids parallel requests to docker engine + for container in orphan_containers: + await remove_container_forcefully(self.docker, container) + + async def shutdown(self) -> None: + await self.docker.close() + + +def get_containers_manager(app: FastAPI) -> ContainersManager: + return ContainersManager.get_from_app_state(app) + + +def setup_containers_manager(app: FastAPI) -> None: + async def _on_startup() -> None: + ContainersManager().set_to_app_state(app) + + async def _on_shutdown() -> None: + await ContainersManager.get_from_app_state(app).shutdown() + + app.add_event_handler("startup", _on_startup) + app.add_event_handler("shutdown", _on_shutdown) diff --git a/services/agent/src/simcore_service_agent/services/docker_utils.py b/services/agent/src/simcore_service_agent/services/docker_utils.py index 83656783b55..1390a5b12df 100644 --- a/services/agent/src/simcore_service_agent/services/docker_utils.py +++ 
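
The `ContainersManager` above can be exercised on its own, mirroring the unit test added later in this diff: set it up on a bare FastAPI app and trigger a cleanup. A minimal lifecycle sketch, assuming a reachable local Docker daemon and a placeholder node id:

```python
import asyncio
from uuid import uuid4

from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from simcore_service_agent.services.containers_manager import (
    get_containers_manager,
    setup_containers_manager,
)


async def main() -> None:
    app = FastAPI()
    setup_containers_manager(app)
    async with LifespanManager(app):  # runs the registered startup/shutdown handlers
        await get_containers_manager(app).force_container_cleanup(uuid4())


asyncio.run(main())
```

Containers are matched purely by name prefix, so the cleanup also catches user-service containers (the hyphen-separated prefix); those are additionally logged as a warning because their presence indicates an unexpected state.
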
b/services/agent/src/simcore_service_agent/services/docker_utils.py @@ -106,3 +106,27 @@ async def remove_volume( get_instrumentation(app).agent_metrics.remove_volumes( settings.AGENT_DOCKER_NODE_ID ) + + +async def get_containers_with_prefixes(docker: Docker, prefixes: set[str]) -> set[str]: + """Returns a set of container names matching any of the given prefixes""" + all_containers = await docker.containers.list(all=True) + + result: set[str] = set() + for container in all_containers: + container_info = await container.show() + container_name = container_info.get("Name", "").lstrip("/") + if any(container_name.startswith(prefix) for prefix in prefixes): + result.add(container_name) + + return result + + +async def remove_container_forcefully(docker: Docker, container_id: str) -> None: + """Removes a container regardless of its state""" + try: + container = await docker.containers.get(container_id) + await container.delete(force=True) + except DockerError as e: + if e.status != status.HTTP_404_NOT_FOUND: + raise diff --git a/services/agent/src/simcore_service_agent/services/instrumentation/_models.py b/services/agent/src/simcore_service_agent/services/instrumentation/_models.py index bf554374595..2c49859e897 100644 --- a/services/agent/src/simcore_service_agent/services/instrumentation/_models.py +++ b/services/agent/src/simcore_service_agent/services/instrumentation/_models.py @@ -1,6 +1,7 @@ from dataclasses import dataclass, field from typing import Final +from models_library.docker import DockerNodeID from prometheus_client import CollectorRegistry, Counter from servicelib.instrumentation import MetricsBase, get_metrics_namespace @@ -34,10 +35,10 @@ def __post_init__(self) -> None: registry=self.registry, ) - def remove_volumes(self, docker_node_id: str) -> None: + def remove_volumes(self, docker_node_id: DockerNodeID) -> None: self.volumes_removed.labels(docker_node_id=docker_node_id).inc() - def backedup_volumes(self, docker_node_id: str) -> None: + def backedup_volumes(self, docker_node_id: DockerNodeID) -> None: self.volumes_backedup.labels(docker_node_id=docker_node_id).inc() diff --git a/services/agent/tests/conftest.py b/services/agent/tests/conftest.py index 14e8cd1d9e3..97df58d4e5a 100644 --- a/services/agent/tests/conftest.py +++ b/services/agent/tests/conftest.py @@ -5,6 +5,7 @@ import pytest from faker import Faker from models_library.basic_types import BootModeEnum +from models_library.docker import DockerNodeID from moto.server import ThreadedMotoServer from pydantic import HttpUrl, TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict @@ -25,8 +26,8 @@ def swarm_stack_name() -> str: @pytest.fixture -def docker_node_id() -> str: - return "test-node-id" +def docker_node_id() -> DockerNodeID: + return TypeAdapter(DockerNodeID).validate_python("testnodeid") @pytest.fixture @@ -40,7 +41,7 @@ def mock_environment( mocked_s3_server_url: HttpUrl, bucket: str, swarm_stack_name: str, - docker_node_id: str, + docker_node_id: DockerNodeID, ) -> EnvVarsDict: return setenvs_from_dict( monkeypatch, diff --git a/services/agent/tests/unit/conftest.py b/services/agent/tests/unit/conftest.py index 1a49ce6ba57..4b23619f5a0 100644 --- a/services/agent/tests/unit/conftest.py +++ b/services/agent/tests/unit/conftest.py @@ -15,7 +15,7 @@ from fastapi.testclient import TestClient from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID -from models_library.services_types import RunID +from
models_library.services_types import ServiceRunID from models_library.users import UserID from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from settings_library.rabbit import RabbitSettings @@ -56,8 +56,8 @@ def test_client(initialized_app: FastAPI) -> TestClient: @pytest.fixture -def run_id() -> RunID: - return RunID.create() +def service_run_id() -> ServiceRunID: + return ServiceRunID.get_resource_tracking_run_id_for_dynamic() @pytest.fixture @@ -77,7 +77,7 @@ def volumes_path(tmp_path: Path) -> Path: @pytest.fixture async def create_dynamic_sidecar_volume( - run_id: RunID, + service_run_id: ServiceRunID, project_id: ProjectID, swarm_stack_name: str, user_id: UserID, @@ -89,13 +89,13 @@ async def create_dynamic_sidecar_volume( async with aiodocker.Docker() as docker_client: async def _(node_id: NodeID, in_use: bool, volume_name: str) -> str: - source = get_source(run_id, node_id, volumes_path / volume_name) + source = get_source(service_run_id, node_id, volumes_path / volume_name) volume = await docker_client.volumes.create( { "Name": source, "Labels": { "node_uuid": f"{node_id}", - "run_id": run_id, + "run_id": service_run_id, "source": source, "study_id": f"{project_id}", "swarm_stack_name": swarm_stack_name, diff --git a/services/agent/tests/unit/test_api_rpc__containers.py b/services/agent/tests/unit/test_api_rpc__containers.py new file mode 100644 index 00000000000..201acf5d218 --- /dev/null +++ b/services/agent/tests/unit/test_api_rpc__containers.py @@ -0,0 +1,55 @@ +# pylint:disable=redefined-outer-name +# pylint:disable=unused-argument + +from collections.abc import Awaitable, Callable +from unittest.mock import AsyncMock + +import pytest +import pytest_mock +from faker import Faker +from fastapi import FastAPI +from models_library.docker import DockerNodeID +from models_library.projects_nodes_io import NodeID +from servicelib.rabbitmq import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.agent import containers + +pytest_simcore_core_services_selection = [ + "rabbit", +] + + +@pytest.fixture +def node_id(faker: Faker) -> NodeID: + return faker.uuid4(cast_to=None) + + +@pytest.fixture +async def rpc_client( + initialized_app: FastAPI, + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], +) -> RabbitMQRPCClient: + return await rabbitmq_rpc_client("client") + + +@pytest.fixture +def mocked_force_container_cleanup(mocker: pytest_mock.MockerFixture) -> AsyncMock: + return mocker.patch( + "simcore_service_agent.services.containers_manager.ContainersManager.force_container_cleanup" + ) + + +async def test_force_container_cleanup( + rpc_client: RabbitMQRPCClient, + swarm_stack_name: str, + docker_node_id: DockerNodeID, + node_id: NodeID, + mocked_force_container_cleanup: AsyncMock, +): + assert mocked_force_container_cleanup.call_count == 0 + await containers.force_container_cleanup( + rpc_client, + docker_node_id=docker_node_id, + swarm_stack_name=swarm_stack_name, + node_id=node_id, + ) + assert mocked_force_container_cleanup.call_count == 1 diff --git a/services/agent/tests/unit/test_api_rpc__volumes.py b/services/agent/tests/unit/test_api_rpc__volumes.py index df7121d1418..6e7eeb76485 100644 --- a/services/agent/tests/unit/test_api_rpc__volumes.py +++ b/services/agent/tests/unit/test_api_rpc__volumes.py @@ -8,6 +8,7 @@ import pytest import pytest_mock from fastapi import FastAPI +from models_library.docker import DockerNodeID from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq.rpc_interfaces.agent 
import volumes @@ -41,7 +42,7 @@ def mocked_remove_all_volumes(mocker: pytest_mock.MockerFixture) -> AsyncMock: async def test_backup_and_remove_volumes_for_all_services( rpc_client: RabbitMQRPCClient, swarm_stack_name: str, - docker_node_id: str, + docker_node_id: DockerNodeID, mocked_remove_all_volumes: AsyncMock, ): assert mocked_remove_all_volumes.call_count == 0 diff --git a/services/agent/tests/unit/test_services_backup.py b/services/agent/tests/unit/test_services_backup.py index c986550da51..d544a25dfa5 100644 --- a/services/agent/tests/unit/test_services_backup.py +++ b/services/agent/tests/unit/test_services_backup.py @@ -11,7 +11,7 @@ from fastapi import FastAPI from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID -from models_library.services_types import RunID +from models_library.services_types import ServiceRunID from pydantic import NonNegativeInt from simcore_service_agent.core.settings import ApplicationSettings from simcore_service_agent.services.backup import backup_volume @@ -48,7 +48,7 @@ async def test_backup_volume( volume_content: Path, project_id: ProjectID, swarm_stack_name: str, - run_id: RunID, + service_run_id: ServiceRunID, downlaoded_from_s3: Path, create_dynamic_sidecar_volumes: Callable[[NodeID, bool], Awaitable[set[str]]], initialized_app: FastAPI, @@ -80,7 +80,7 @@ async def test_backup_volume( async with session.client("s3", endpoint_url=f"{settings.AGENT_VOLUMES_CLEANUP_S3_ENDPOINT}") as s3_client: # type: ignore list_response = await s3_client.list_objects_v2( Bucket=settings.AGENT_VOLUMES_CLEANUP_S3_BUCKET, - Prefix=f"{swarm_stack_name}/{project_id}/{node_id}/{run_id}", + Prefix=f"{swarm_stack_name}/{project_id}/{node_id}/{service_run_id}", ) synced_keys: list[str] = [o["Key"] for o in list_response["Contents"]] diff --git a/services/agent/tests/unit/test_services_containers_manager.py b/services/agent/tests/unit/test_services_containers_manager.py new file mode 100644 index 00000000000..4489d975ab3 --- /dev/null +++ b/services/agent/tests/unit/test_services_containers_manager.py @@ -0,0 +1,107 @@ +# pylint: disable=redefined-outer-name + + +import logging +from collections.abc import AsyncIterable, Awaitable, Callable +from enum import Enum + +import pytest +from aiodocker import Docker, DockerError +from asgi_lifespan import LifespanManager +from faker import Faker +from fastapi import FastAPI, status +from models_library.api_schemas_directorv2.services import ( + DYNAMIC_PROXY_SERVICE_PREFIX, + DYNAMIC_SIDECAR_SERVICE_PREFIX, +) +from models_library.projects_nodes_io import NodeID +from simcore_service_agent.services.containers_manager import ( + get_containers_manager, + setup_containers_manager, +) + + +@pytest.fixture +async def app() -> AsyncIterable[FastAPI]: + app = FastAPI() + setup_containers_manager(app) + + async with LifespanManager(app): + yield app + + +@pytest.fixture +def node_id(faker: Faker) -> NodeID: + return faker.uuid4(cast_to=None) + + +@pytest.fixture +async def docker() -> AsyncIterable[Docker]: + async with Docker() as docker: + yield docker + + +class _ContainerMode(Enum): + CREATED = "CREATED" + RUNNING = "RUNNING" + STOPPED = "STOPPED" + + +@pytest.fixture +async def create_container( + docker: Docker, +) -> AsyncIterable[Callable[[str, _ContainerMode], Awaitable[str]]]: + created_containers: set[str] = set() + + async def _(name: str, container_mode: _ContainerMode) -> str: + container = await docker.containers.create( + config={ + "Image": "alpine", + "Cmd": ["sh", "-c", 
"while true; do sleep 1; done"], + }, + name=name, + ) + + if container_mode in (_ContainerMode.RUNNING, _ContainerMode.STOPPED): + await container.start() + if container_mode == _ContainerMode.STOPPED: + await container.stop() + + created_containers.add(container.id) + return container.id + + yield _ + + # cleanup containers + for container_id in created_containers: + try: + container = await docker.containers.get(container_id) + await container.delete(force=True) + except DockerError as e: + if e.status != status.HTTP_404_NOT_FOUND: + raise + + +async def test_force_container_cleanup( + app: FastAPI, + node_id: NodeID, + create_container: Callable[[str, _ContainerMode], Awaitable[str]], + faker: Faker, + caplog: pytest.LogCaptureFixture, +): + caplog.set_level(logging.DEBUG) + caplog.clear() + + proxy_name = f"{DYNAMIC_PROXY_SERVICE_PREFIX}_{node_id}{faker.pystr()}" + dynamic_sidecar_name = f"{DYNAMIC_SIDECAR_SERVICE_PREFIX}-{node_id}{faker.pystr()}" + user_service_name = f"{DYNAMIC_SIDECAR_SERVICE_PREFIX}_{node_id}{faker.pystr()}" + + await create_container(proxy_name, _ContainerMode.CREATED) + await create_container(dynamic_sidecar_name, _ContainerMode.RUNNING) + await create_container(user_service_name, _ContainerMode.STOPPED) + + await get_containers_manager(app).force_container_cleanup(node_id) + + assert proxy_name in caplog.text + assert dynamic_sidecar_name in caplog.text + assert user_service_name in caplog.text diff --git a/services/agent/tests/unit/test_services_docker_utils.py b/services/agent/tests/unit/test_services_docker_utils.py index 40f86529edb..f4a19c9b9aa 100644 --- a/services/agent/tests/unit/test_services_docker_utils.py +++ b/services/agent/tests/unit/test_services_docker_utils.py @@ -10,7 +10,7 @@ from aiodocker.docker import Docker from fastapi import FastAPI from models_library.projects_nodes_io import NodeID -from models_library.services_types import RunID +from models_library.services_types import ServiceRunID from pytest_mock import MockerFixture from servicelib.docker_constants import PREFIX_DYNAMIC_SIDECAR_VOLUMES from simcore_service_agent.services.docker_utils import ( @@ -43,9 +43,9 @@ def test__reverse_string(): ], ) def test__does_volume_require_backup( - run_id: RunID, volume_path_part: str, expected: bool + service_run_id: ServiceRunID, volume_path_part: str, expected: bool ) -> None: - volume_name = get_source(run_id, uuid4(), Path("/apath") / volume_path_part) + volume_name = get_source(service_run_id, uuid4(), Path("/apath") / volume_path_part) print(volume_name) assert _does_volume_require_backup(volume_name) is expected diff --git a/services/agent/tests/unit/test_services_volumes_manager.py b/services/agent/tests/unit/test_services_volumes_manager.py index 0dfc29ceb83..4ac429aeca9 100644 --- a/services/agent/tests/unit/test_services_volumes_manager.py +++ b/services/agent/tests/unit/test_services_volumes_manager.py @@ -14,7 +14,7 @@ from aiodocker.docker import Docker from fastapi import FastAPI from models_library.projects_nodes_io import NodeID -from models_library.services_types import RunID +from models_library.services_types import ServiceRunID from servicelib.rabbitmq.rpc_interfaces.agent.errors import ( NoServiceVolumesFoundRPCError, ) @@ -30,12 +30,14 @@ @dataclass class MockedVolumesProxy: - run_id: RunID + service_run_id: ServiceRunID volumes: set[str] = field(default_factory=set) def add_unused_volumes_for_service(self, node_id: NodeID) -> None: for folder_name in VOLUMES_TO_CREATE: - volume_name = get_source(self.run_id, node_id, 
Path("/apath") / folder_name) + volume_name = get_source( + self.service_run_id, node_id, Path("/apath") / folder_name + ) self.volumes.add(volume_name) def remove_volume(self, volume_name: str) -> None: @@ -47,9 +49,9 @@ def get_unused_dynamc_sidecar_volumes(self) -> set[str]: @pytest.fixture async def mock_docker_utils( - mocker: pytest_mock.MockerFixture, run_id: RunID + mocker: pytest_mock.MockerFixture, service_run_id: ServiceRunID ) -> MockedVolumesProxy: - proxy = MockedVolumesProxy(run_id) + proxy = MockedVolumesProxy(service_run_id) async def _remove_volume( app: FastAPI, docker: Docker, *, volume_name: str, requires_backup: bool diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_core.py b/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_core.py index e2212195aed..9c45de0524b 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_core.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_core.py @@ -418,15 +418,43 @@ async def _activate_drained_nodes( ) -async def _start_buffer_instances( +async def _start_warm_buffer_instances( app: FastAPI, cluster: Cluster, auto_scaling_mode: BaseAutoscaling ) -> Cluster: + """starts warm buffer if there are assigned tasks, or if a hot buffer of the same type is needed""" + + app_settings = get_application_settings(app) + assert app_settings.AUTOSCALING_EC2_INSTANCES # nosec + instances_to_start = [ i.ec2_instance for i in cluster.buffer_ec2s if i.assigned_tasks ] + + if ( + len(cluster.buffer_drained_nodes) + < app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER + ): + # check if we can migrate warm buffers to hot buffers + hot_buffer_instance_type = cast( + InstanceTypeType, + next( + iter(app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_ALLOWED_TYPES) + ), + ) + free_startable_warm_buffers_to_replace_hot_buffers = [ + warm_buffer.ec2_instance + for warm_buffer in cluster.buffer_ec2s + if (warm_buffer.ec2_instance.type == hot_buffer_instance_type) + and not warm_buffer.assigned_tasks + ] + instances_to_start += free_startable_warm_buffers_to_replace_hot_buffers[ + : app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER + - len(cluster.buffer_drained_nodes) + ] + if not instances_to_start: return cluster - # change the buffer machine to an active one + with log_context( _logger, logging.INFO, f"start {len(instances_to_start)} buffer machines" ): @@ -1187,8 +1215,8 @@ async def _autoscale_cluster( # 2. activate available drained nodes to cover some of the tasks cluster = await _activate_drained_nodes(app, cluster, auto_scaling_mode) - # 3. start buffer instances to cover the remaining tasks - cluster = await _start_buffer_instances(app, cluster, auto_scaling_mode) + # 3. start warm buffer instances to cover the remaining tasks + cluster = await _start_warm_buffer_instances(app, cluster, auto_scaling_mode) # 4. 
scale down unused instances cluster = await _scale_down_unused_cluster_instances( diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/dask.py b/services/autoscaling/src/simcore_service_autoscaling/modules/dask.py index 4c5ee00f86c..d57508babf8 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/dask.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/dask.py @@ -273,7 +273,7 @@ def _list_processing_tasks_on_worker( async with _scheduler_client(scheduler_url, authentication) as client: worker_url, _ = _dask_worker_from_ec2_instance(client, ec2_instance) - _logger.debug("looking for processing tasksfor %s", f"{worker_url=}") + _logger.debug("looking for processing tasks for %s", f"{worker_url=}") # now get the used resources worker_processing_tasks: list[ diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py index 65caa0f40b1..4c5b5e6f79f 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py @@ -521,8 +521,14 @@ async def tag_node( tags: dict[DockerLabelKey, str], available: bool, ) -> Node: + assert node.spec # nosec + if (node.spec.labels == tags) and ( + (node.spec.availability is Availability.active) == available + ): + # nothing to do + return node with log_context( - logger, logging.DEBUG, msg=f"tagging {node.id=} with {tags=} and {available=}" + logger, logging.DEBUG, msg=f"tag {node.id=} with {tags=} and {available=}" ): assert node.id # nosec diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py index 4a48f2776b6..9b7489268e6 100644 --- a/services/autoscaling/tests/unit/conftest.py +++ b/services/autoscaling/tests/unit/conftest.py @@ -28,11 +28,16 @@ EC2InstanceType, Resources, ) +from common_library.json_serialization import json_dumps from deepdiff import DeepDiff from faker import Faker from fakeredis.aioredis import FakeRedis from fastapi import FastAPI -from models_library.docker import DockerLabelKey, StandardSimcoreDockerLabels +from models_library.docker import ( + DockerGenericTag, + DockerLabelKey, + StandardSimcoreDockerLabels, +) from models_library.generated_models.docker_rest_api import Availability from models_library.generated_models.docker_rest_api import Node as DockerNode from models_library.generated_models.docker_rest_api import ( @@ -45,7 +50,7 @@ Service, TaskSpec, ) -from pydantic import ByteSize, PositiveInt, TypeAdapter +from pydantic import ByteSize, NonNegativeInt, PositiveInt, TypeAdapter from pytest_mock import MockType from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.host import get_localhost_ip @@ -57,6 +62,7 @@ ) from settings_library.rabbit import RabbitSettings from settings_library.ssm import SSMSettings +from simcore_service_autoscaling.constants import PRE_PULLED_IMAGES_EC2_TAG_KEY from simcore_service_autoscaling.core.application import create_app from simcore_service_autoscaling.core.settings import ( AUTOSCALING_ENV_PREFIX, @@ -71,8 +77,14 @@ DaskTaskResources, ) from simcore_service_autoscaling.modules import auto_scaling_core +from simcore_service_autoscaling.modules.auto_scaling_mode_dynamic import ( + DynamicAutoscaling, +) from simcore_service_autoscaling.modules.docker import AutoscalingDocker from simcore_service_autoscaling.modules.ec2 import SimcoreEC2API +from 
simcore_service_autoscaling.utils.buffer_machines_pool_core import ( + get_deactivated_buffer_ec2_tags, +) from simcore_service_autoscaling.utils.utils_docker import ( _OSPARC_SERVICE_READY_LABEL_KEY, _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY, @@ -81,7 +93,9 @@ from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed -from types_aiobotocore_ec2.literals import InstanceTypeType +from types_aiobotocore_ec2 import EC2Client +from types_aiobotocore_ec2.literals import InstanceStateNameType, InstanceTypeType +from types_aiobotocore_ec2.type_defs import TagTypeDef pytest_plugins = [ "pytest_simcore.aws_server", @@ -991,10 +1005,22 @@ def _creator( @pytest.fixture -def mock_machines_buffer(monkeypatch: pytest.MonkeyPatch) -> int: - num_machines_in_buffer = 5 - monkeypatch.setenv("EC2_INSTANCES_MACHINES_BUFFER", f"{num_machines_in_buffer}") - return num_machines_in_buffer +def num_hot_buffer() -> NonNegativeInt: + return 5 + + +@pytest.fixture +def with_instances_machines_hot_buffer( + num_hot_buffer: int, + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, + { + "EC2_INSTANCES_MACHINES_BUFFER": f"{num_hot_buffer}", + }, + ) @pytest.fixture @@ -1042,3 +1068,165 @@ async def _( autospec=True, side_effect=_, ) + + +@pytest.fixture +def fake_pre_pull_images() -> list[DockerGenericTag]: + return TypeAdapter(list[DockerGenericTag]).validate_python( + [ + "nginx:latest", + "itisfoundation/my-very-nice-service:latest", + "simcore/services/dynamic/another-nice-one:2.4.5", + "asd", + ] + ) + + +@pytest.fixture +def ec2_instances_allowed_types_with_only_1_buffered( + faker: Faker, + fake_pre_pull_images: list[DockerGenericTag], + external_ec2_instances_allowed_types: None | dict[str, EC2InstanceBootSpecific], +) -> dict[InstanceTypeType, EC2InstanceBootSpecific]: + if not external_ec2_instances_allowed_types: + return { + "t2.micro": EC2InstanceBootSpecific( + ami_id=faker.pystr(), + pre_pull_images=fake_pre_pull_images, + buffer_count=faker.pyint(min_value=1, max_value=10), + ) + } + + allowed_ec2_types = external_ec2_instances_allowed_types + allowed_ec2_types_with_buffer_defined = dict( + filter( + lambda instance_type_and_settings: instance_type_and_settings[ + 1 + ].buffer_count + > 0, + allowed_ec2_types.items(), + ) + ) + assert ( + allowed_ec2_types_with_buffer_defined + ), "one type with buffer is needed for the tests!" + assert ( + len(allowed_ec2_types_with_buffer_defined) == 1 + ), "more than one type with buffer is disallowed in this test!" + return { + TypeAdapter(InstanceTypeType).validate_python(k): v + for k, v in allowed_ec2_types_with_buffer_defined.items() + } + + +@pytest.fixture +def buffer_count( + ec2_instances_allowed_types_with_only_1_buffered: dict[ + InstanceTypeType, EC2InstanceBootSpecific + ], +) -> int: + def _by_buffer_count( + instance_type_and_settings: tuple[InstanceTypeType, EC2InstanceBootSpecific] + ) -> bool: + _, boot_specific = instance_type_and_settings + return boot_specific.buffer_count > 0 + + allowed_ec2_types = ec2_instances_allowed_types_with_only_1_buffered + allowed_ec2_types_with_buffer_defined = dict( + filter(_by_buffer_count, allowed_ec2_types.items()) + ) + assert allowed_ec2_types_with_buffer_defined, "you need one type with buffer" + assert ( + len(allowed_ec2_types_with_buffer_defined) == 1 + ), "more than one type with buffer is disallowed in this test!" 
+ return next(iter(allowed_ec2_types_with_buffer_defined.values())).buffer_count + + +@pytest.fixture +async def create_buffer_machines( + ec2_client: EC2Client, + aws_ami_id: str, + app_settings: ApplicationSettings, + initialized_app: FastAPI, +) -> Callable[ + [int, InstanceTypeType, InstanceStateNameType, list[DockerGenericTag] | None], + Awaitable[list[str]], +]: + async def _do( + num: int, + instance_type: InstanceTypeType, + instance_state_name: InstanceStateNameType, + pre_pull_images: list[DockerGenericTag] | None, + ) -> list[str]: + assert app_settings.AUTOSCALING_EC2_INSTANCES + + assert instance_state_name in [ + "running", + "stopped", + ], "only 'running' and 'stopped' are supported for testing" + + resource_tags: list[TagTypeDef] = [ + {"Key": tag_key, "Value": tag_value} + for tag_key, tag_value in get_deactivated_buffer_ec2_tags( + initialized_app, DynamicAutoscaling() + ).items() + ] + if pre_pull_images is not None and instance_state_name == "stopped": + resource_tags.append( + { + "Key": PRE_PULLED_IMAGES_EC2_TAG_KEY, + "Value": f"{json_dumps(pre_pull_images)}", + } + ) + with log_context( + logging.INFO, f"creating {num} buffer machines of {instance_type}" + ): + instances = await ec2_client.run_instances( + ImageId=aws_ami_id, + MaxCount=num, + MinCount=num, + InstanceType=instance_type, + KeyName=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_KEY_NAME, + SecurityGroupIds=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SECURITY_GROUP_IDS, + SubnetId=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SUBNET_ID, + IamInstanceProfile={ + "Arn": app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_ATTACHED_IAM_PROFILE + }, + TagSpecifications=[ + {"ResourceType": "instance", "Tags": resource_tags}, + {"ResourceType": "volume", "Tags": resource_tags}, + {"ResourceType": "network-interface", "Tags": resource_tags}, + ], + UserData="echo 'I am pytest'", + ) + instance_ids = [ + i["InstanceId"] for i in instances["Instances"] if "InstanceId" in i + ] + + waiter = ec2_client.get_waiter("instance_exists") + await waiter.wait(InstanceIds=instance_ids) + instances = await ec2_client.describe_instances(InstanceIds=instance_ids) + assert "Reservations" in instances + assert instances["Reservations"] + assert "Instances" in instances["Reservations"][0] + assert len(instances["Reservations"][0]["Instances"]) == num + for instance in instances["Reservations"][0]["Instances"]: + assert "State" in instance + assert "Name" in instance["State"] + assert instance["State"]["Name"] == "running" + + if instance_state_name == "stopped": + await ec2_client.stop_instances(InstanceIds=instance_ids) + instances = await ec2_client.describe_instances(InstanceIds=instance_ids) + assert "Reservations" in instances + assert instances["Reservations"] + assert "Instances" in instances["Reservations"][0] + assert len(instances["Reservations"][0]["Instances"]) == num + for instance in instances["Reservations"][0]["Instances"]: + assert "State" in instance + assert "Name" in instance["State"] + assert instance["State"]["Name"] == "stopped" + + return instance_ids + + return _do diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py index 6e7a0d7c828..bad4215a65e 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py +++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py @@ -305,6 +305,18 @@ async def _(scale_up_params: 
_ScaleUpParams) -> list[distributed.Future]: return _ +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["with_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["without_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) async def test_cluster_scaling_with_no_tasks_does_nothing( minimal_configuration: None, app_settings: ApplicationSettings, @@ -330,6 +342,18 @@ async def test_cluster_scaling_with_no_tasks_does_nothing( @pytest.mark.acceptance_test( "Ensure this does not happen https://github.com/ITISFoundation/osparc-simcore/issues/6227" ) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["with_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["without_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) async def test_cluster_scaling_with_disabled_ssm_does_not_block_autoscaling( minimal_configuration: None, disabled_ssm: None, @@ -353,6 +377,18 @@ async def test_cluster_scaling_with_disabled_ssm_does_not_block_autoscaling( ) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["with_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["without_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) async def test_cluster_scaling_with_task_with_too_much_resources_starts_nothing( minimal_configuration: None, app_settings: ApplicationSettings, @@ -800,6 +836,18 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 mock_docker_compute_node_used_resources.assert_not_called() +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["with_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["without_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) async def test_cluster_does_not_scale_up_if_defined_instance_is_not_allowed( minimal_configuration: None, app_settings: ApplicationSettings, @@ -839,6 +887,18 @@ async def test_cluster_does_not_scale_up_if_defined_instance_is_not_allowed( assert "Unexpected error:" in error_messages[0] +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["with_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["without_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) async def test_cluster_does_not_scale_up_if_defined_instance_is_not_fitting_resources( minimal_configuration: None, app_settings: ApplicationSettings, @@ -878,6 +938,18 @@ async def test_cluster_does_not_scale_up_if_defined_instance_is_not_fitting_reso assert 
"Unexpected error:" in error_messages[0] +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["with_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["without_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) @pytest.mark.parametrize( "scale_up_params", [ @@ -948,6 +1020,18 @@ async def test_cluster_scaling_up_starts_multiple_instances( mock_rabbitmq_post_message.reset_mock() +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["with_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["without_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) @pytest.mark.parametrize( "scale_up_params", [ @@ -1044,6 +1128,18 @@ async def test_cluster_scaling_up_more_than_allowed_max_starts_max_instances_and ) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["with_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["without_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) async def test_cluster_scaling_up_more_than_allowed_with_multiple_types_max_starts_max_instances_and_not_more( patch_ec2_client_launch_instances_min_number_of_instances: mock.Mock, minimal_configuration: None, @@ -1141,6 +1237,18 @@ async def test_cluster_scaling_up_more_than_allowed_with_multiple_types_max_star ) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["with_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["without_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) @pytest.mark.parametrize( "scale_up_params", [ @@ -1305,11 +1413,15 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted( @pytest.mark.parametrize( - "with_docker_join_drained", ["with_AUTOSCALING_DOCKER_JOIN_DRAINED"], indirect=True + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["with_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, ) @pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options "with_drain_nodes_labelled", - ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + ["without_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], indirect=True, ) @pytest.mark.parametrize( diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py index ccdb2461c04..afd3c01e4a3 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py +++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py @@ -24,6 +24,7 @@ from fastapi import FastAPI from models_library.docker import ( 
DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY, + DockerGenericTag, DockerLabelKey, StandardSimcoreDockerLabels, ) @@ -43,9 +44,13 @@ assert_cluster_state, create_fake_association, ) -from pytest_simcore.helpers.aws_ec2 import assert_autoscaled_dynamic_ec2_instances +from pytest_simcore.helpers.aws_ec2 import ( + assert_autoscaled_dynamic_ec2_instances, + assert_autoscaled_dynamic_warm_pools_ec2_instances, +) from pytest_simcore.helpers.logging_tools import log_context from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict +from simcore_service_autoscaling.constants import BUFFER_MACHINE_TAG_KEY from simcore_service_autoscaling.core.settings import ApplicationSettings from simcore_service_autoscaling.models import AssociatedInstance, Cluster from simcore_service_autoscaling.modules.auto_scaling_core import ( @@ -68,7 +73,7 @@ _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY, ) from types_aiobotocore_ec2.client import EC2Client -from types_aiobotocore_ec2.literals import InstanceTypeType +from types_aiobotocore_ec2.literals import InstanceStateNameType, InstanceTypeType from types_aiobotocore_ec2.type_defs import FilterTypeDef, InstanceTypeDef @@ -286,6 +291,18 @@ async def _(scale_up_params: _ScaleUpParams) -> list[Service]: return _ +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) async def test_cluster_scaling_with_no_services_does_nothing( minimal_configuration: None, app_settings: ApplicationSettings, @@ -304,10 +321,22 @@ async def test_cluster_scaling_with_no_services_does_nothing( ) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) async def test_cluster_scaling_with_no_services_and_machine_buffer_starts_expected_machines( patch_ec2_client_launch_instances_min_number_of_instances: mock.Mock, minimal_configuration: None, - mock_machines_buffer: int, + with_instances_machines_hot_buffer: EnvVarsDict, app_settings: ApplicationSettings, initialized_app: FastAPI, aws_allowed_ec2_instance_type_names_env: list[str], @@ -321,17 +350,13 @@ async def test_cluster_scaling_with_no_services_and_machine_buffer_starts_expect instance_type_filters: Sequence[FilterTypeDef], ): assert app_settings.AUTOSCALING_EC2_INSTANCES - assert ( - mock_machines_buffer - == app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER - ) await auto_scale_cluster( app=initialized_app, auto_scaling_mode=DynamicAutoscaling() ) await assert_autoscaled_dynamic_ec2_instances( ec2_client, expected_num_reservations=1, - expected_num_instances=mock_machines_buffer, + expected_num_instances=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER, expected_instance_type=cast( InstanceTypeType, next( @@ -346,7 +371,7 @@ async def test_cluster_scaling_with_no_services_and_machine_buffer_starts_expect mock_rabbitmq_post_message, app_settings, initialized_app, - 
instances_pending=mock_machines_buffer, + instances_pending=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER, ) mock_rabbitmq_post_message.reset_mock() # calling again should attach the new nodes to the reserve, but nothing should start @@ -356,7 +381,7 @@ async def test_cluster_scaling_with_no_services_and_machine_buffer_starts_expect await assert_autoscaled_dynamic_ec2_instances( ec2_client, expected_num_reservations=1, - expected_num_instances=mock_machines_buffer, + expected_num_instances=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER, expected_instance_type=cast( InstanceTypeType, next( @@ -375,14 +400,15 @@ async def test_cluster_scaling_with_no_services_and_machine_buffer_starts_expect mock_rabbitmq_post_message, app_settings, initialized_app, - nodes_total=mock_machines_buffer, - nodes_drained=mock_machines_buffer, - instances_running=mock_machines_buffer, + nodes_total=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER, + nodes_drained=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER, + instances_running=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER, cluster_total_resources={ - "cpus": mock_machines_buffer + "cpus": app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER * fake_node.description.resources.nano_cp_us / 1e9, - "ram": mock_machines_buffer * fake_node.description.resources.memory_bytes, + "ram": app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER + * fake_node.description.resources.memory_bytes, }, ) @@ -394,7 +420,7 @@ async def test_cluster_scaling_with_no_services_and_machine_buffer_starts_expect await assert_autoscaled_dynamic_ec2_instances( ec2_client, expected_num_reservations=1, - expected_num_instances=mock_machines_buffer, + expected_num_instances=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER, expected_instance_type=cast( InstanceTypeType, next( @@ -407,6 +433,18 @@ async def test_cluster_scaling_with_no_services_and_machine_buffer_starts_expect ) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) @pytest.mark.parametrize( "scale_up_params", [ @@ -990,6 +1028,18 @@ async def test_cluster_scaling_up_and_down( ) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) @pytest.mark.parametrize( "scale_up_params", [ @@ -1066,6 +1116,18 @@ async def test_cluster_scaling_up_and_down_against_aws( ) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], 
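+    # indirect=True routes this value to the fixture of the same name, so the
+    # fixture (not the test function) receives the option via request.param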
+ indirect=True, +) @pytest.mark.parametrize( "scale_up_params", [ @@ -1148,9 +1210,13 @@ async def test_cluster_scaling_up_starts_multiple_instances( @pytest.mark.parametrize( - "with_docker_join_drained", ["with_AUTOSCALING_DOCKER_JOIN_DRAINED"], indirect=True + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, ) @pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options "with_drain_nodes_labelled", ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], indirect=True, @@ -1445,6 +1511,18 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915 assert instance["InstanceType"] == scale_up_params2.expected_instance_type +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) @pytest.mark.parametrize( "scale_up_params", [ @@ -1606,6 +1684,18 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted( ) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) async def test__find_terminateable_nodes_with_no_hosts( minimal_configuration: None, initialized_app: FastAPI, @@ -1626,6 +1716,18 @@ async def test__find_terminateable_nodes_with_no_hosts( assert await _find_terminateable_instances(initialized_app, active_cluster) == [] +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) async def test__try_scale_down_cluster_with_no_nodes( minimal_configuration: None, with_valid_time_before_termination: datetime.timedelta, @@ -1650,6 +1752,18 @@ async def test__try_scale_down_cluster_with_no_nodes( mock_remove_nodes.assert_not_called() +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) async def test__activate_drained_nodes_with_no_tasks( minimal_configuration: None, with_valid_time_before_termination: datetime.timedelta, @@ -1683,6 +1797,18 @@ async def test__activate_drained_nodes_with_no_tasks( mock_docker_tag_node.assert_not_called() +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + 
"with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) async def test__activate_drained_nodes_with_no_drained_nodes( minimal_configuration: None, with_valid_time_before_termination: datetime.timedelta, @@ -1724,6 +1850,18 @@ async def test__activate_drained_nodes_with_no_drained_nodes( mock_docker_tag_node.assert_not_called() +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) async def test__activate_drained_nodes_with_drained_node( minimal_configuration: None, with_valid_time_before_termination: datetime.timedelta, @@ -1790,3 +1928,136 @@ async def test__activate_drained_nodes_with_drained_node( }, available=True, ) + + +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) +async def test_warm_buffers_are_started_to_replace_missing_hot_buffers( + patch_ec2_client_launch_instances_min_number_of_instances: mock.Mock, + minimal_configuration: None, + with_instances_machines_hot_buffer: EnvVarsDict, + ec2_client: EC2Client, + initialized_app: FastAPI, + app_settings: ApplicationSettings, + ec2_instance_custom_tags: dict[str, str], + buffer_count: int, + create_buffer_machines: Callable[ + [int, InstanceTypeType, InstanceStateNameType, list[DockerGenericTag] | None], + Awaitable[list[str]], + ], + spied_cluster_analysis: MockType, + instance_type_filters: Sequence[FilterTypeDef], + mock_find_node_with_name_returns_fake_node: mock.Mock, + mock_compute_node_used_resources: mock.Mock, + mock_docker_tag_node: mock.Mock, +): + # pre-requisites + assert app_settings.AUTOSCALING_EC2_INSTANCES + assert app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER > 0 + + # we have nothing running now + all_instances = await ec2_client.describe_instances() + assert not all_instances["Reservations"] + + # have a few warm buffers ready with the same type as the hot buffer machines + buffer_machines = await create_buffer_machines( + buffer_count, + cast( + InstanceTypeType, + next( + iter(app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_ALLOWED_TYPES) + ), + ), + "stopped", + None, + ) + await assert_autoscaled_dynamic_warm_pools_ec2_instances( + ec2_client, + expected_num_reservations=1, + expected_num_instances=buffer_count, + expected_instance_type=cast( + InstanceTypeType, + next( + iter(app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_ALLOWED_TYPES) + ), + ), + expected_instance_state="stopped", + expected_additional_tag_keys=list(ec2_instance_custom_tags), + expected_pre_pulled_images=None, + instance_filters=None, + ) + + # let's autoscale, this should move the warm buffers to hot buffers + await 
auto_scale_cluster( + app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + ) + mock_docker_tag_node.assert_not_called() + # at analysis time, we had no machines running + analyzed_cluster = assert_cluster_state( + spied_cluster_analysis, + expected_calls=1, + expected_num_machines=0, + ) + assert not analyzed_cluster.active_nodes + assert analyzed_cluster.buffer_ec2s + assert len(analyzed_cluster.buffer_ec2s) == len(buffer_machines) + + # now we should have a warm buffer moved to the hot buffer + await assert_autoscaled_dynamic_ec2_instances( + ec2_client, + expected_num_reservations=1, + expected_num_instances=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER, + expected_instance_type=cast( + InstanceTypeType, + next( + iter(app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_ALLOWED_TYPES) + ), + ), + expected_instance_state="running", + expected_additional_tag_keys=[ + *list(ec2_instance_custom_tags), + BUFFER_MACHINE_TAG_KEY, + ], + instance_filters=instance_type_filters, + expected_user_data=[], + ) + + # let's autoscale again, to check the cluster analysis and tag the nodes + await auto_scale_cluster( + app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + ) + mock_docker_tag_node.assert_called() + assert ( + mock_docker_tag_node.call_count + == app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER + ) + # at analysis time, the hot buffer machines exist but are still pending, none is active yet + analyzed_cluster = assert_cluster_state( + spied_cluster_analysis, + expected_calls=1, + expected_num_machines=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER, + ) + assert not analyzed_cluster.active_nodes + assert len(analyzed_cluster.buffer_ec2s) == max( + 0, + buffer_count + - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER, + ), ( + "the warm buffers were not used as expected: there should be" + f" {buffer_count - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER} remaining, " + f"found {len(analyzed_cluster.buffer_ec2s)}" + ) + assert ( + len(analyzed_cluster.pending_ec2s) + == app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER + ) diff --git a/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py b/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py index 24a552f342b..26375418417 100644 --- a/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py +++ b/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py @@ -16,9 +16,7 @@ import pytest import tenacity -from aws_library.ec2 import AWSTagKey, EC2InstanceBootSpecific -from common_library.json_serialization import json_dumps -from faker import Faker +from aws_library.ec2 import AWSTagKey from fastapi import FastAPI from fastapi.encoders import jsonable_encoder from models_library.docker import DockerGenericTag @@ -30,68 +28,15 @@ from pytest_simcore.helpers.logging_tools import log_context from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_autoscaling.constants import PRE_PULLED_IMAGES_EC2_TAG_KEY -from simcore_service_autoscaling.core.settings import ApplicationSettings from simcore_service_autoscaling.modules.auto_scaling_mode_dynamic import ( DynamicAutoscaling, ) from simcore_service_autoscaling.modules.buffer_machines_pool_core import ( monitor_buffer_machines, ) -from simcore_service_autoscaling.utils.buffer_machines_pool_core import ( - get_deactivated_buffer_ec2_tags, -) from types_aiobotocore_ec2 import EC2Client from

types_aiobotocore_ec2.literals import InstanceStateNameType, InstanceTypeType -from types_aiobotocore_ec2.type_defs import FilterTypeDef, TagTypeDef - - -@pytest.fixture -def fake_pre_pull_images() -> list[DockerGenericTag]: - return TypeAdapter(list[DockerGenericTag]).validate_python( - [ - "nginx:latest", - "itisfoundation/my-very-nice-service:latest", - "simcore/services/dynamic/another-nice-one:2.4.5", - "asd", - ] - ) - - -@pytest.fixture -def ec2_instances_allowed_types_with_only_1_buffered( - faker: Faker, - fake_pre_pull_images: list[DockerGenericTag], - external_ec2_instances_allowed_types: None | dict[str, EC2InstanceBootSpecific], -) -> dict[InstanceTypeType, EC2InstanceBootSpecific]: - if not external_ec2_instances_allowed_types: - return { - "t2.micro": EC2InstanceBootSpecific( - ami_id=faker.pystr(), - pre_pull_images=fake_pre_pull_images, - buffer_count=faker.pyint(min_value=1, max_value=10), - ) - } - - allowed_ec2_types = external_ec2_instances_allowed_types - allowed_ec2_types_with_buffer_defined = dict( - filter( - lambda instance_type_and_settings: instance_type_and_settings[ - 1 - ].buffer_count - > 0, - allowed_ec2_types.items(), - ) - ) - assert ( - allowed_ec2_types_with_buffer_defined - ), "one type with buffer is needed for the tests!" - assert ( - len(allowed_ec2_types_with_buffer_defined) == 1 - ), "more than one type with buffer is disallowed in this test!" - return { - TypeAdapter(InstanceTypeType).validate_python(k): v - for k, v in allowed_ec2_types_with_buffer_defined.items() - } +from types_aiobotocore_ec2.type_defs import FilterTypeDef @pytest.fixture @@ -345,96 +290,6 @@ async def test_monitor_buffer_machines( ) -@pytest.fixture -async def create_buffer_machines( - ec2_client: EC2Client, - aws_ami_id: str, - app_settings: ApplicationSettings, - initialized_app: FastAPI, -) -> Callable[ - [int, InstanceTypeType, InstanceStateNameType, list[DockerGenericTag]], - Awaitable[list[str]], -]: - async def _do( - num: int, - instance_type: InstanceTypeType, - instance_state_name: InstanceStateNameType, - pre_pull_images: list[DockerGenericTag], - ) -> list[str]: - assert app_settings.AUTOSCALING_EC2_INSTANCES - - assert instance_state_name in [ - "running", - "stopped", - ], "only 'running' and 'stopped' are supported for testing" - - resource_tags: list[TagTypeDef] = [ - {"Key": tag_key, "Value": tag_value} - for tag_key, tag_value in get_deactivated_buffer_ec2_tags( - initialized_app, DynamicAutoscaling() - ).items() - ] - if pre_pull_images is not None and instance_state_name == "stopped": - resource_tags.append( - { - "Key": PRE_PULLED_IMAGES_EC2_TAG_KEY, - "Value": f"{json_dumps(pre_pull_images)}", - } - ) - with log_context( - logging.INFO, f"creating {num} buffer machines of {instance_type}" - ): - instances = await ec2_client.run_instances( - ImageId=aws_ami_id, - MaxCount=num, - MinCount=num, - InstanceType=instance_type, - KeyName=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_KEY_NAME, - SecurityGroupIds=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SECURITY_GROUP_IDS, - SubnetId=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SUBNET_ID, - IamInstanceProfile={ - "Arn": app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_ATTACHED_IAM_PROFILE - }, - TagSpecifications=[ - {"ResourceType": "instance", "Tags": resource_tags}, - {"ResourceType": "volume", "Tags": resource_tags}, - {"ResourceType": "network-interface", "Tags": resource_tags}, - ], - UserData="echo 'I am pytest'", - ) - instance_ids = [ - i["InstanceId"] for i in 
instances["Instances"] if "InstanceId" in i - ] - - waiter = ec2_client.get_waiter("instance_exists") - await waiter.wait(InstanceIds=instance_ids) - instances = await ec2_client.describe_instances(InstanceIds=instance_ids) - assert "Reservations" in instances - assert instances["Reservations"] - assert "Instances" in instances["Reservations"][0] - assert len(instances["Reservations"][0]["Instances"]) == num - for instance in instances["Reservations"][0]["Instances"]: - assert "State" in instance - assert "Name" in instance["State"] - assert instance["State"]["Name"] == "running" - - if instance_state_name == "stopped": - await ec2_client.stop_instances(InstanceIds=instance_ids) - instances = await ec2_client.describe_instances(InstanceIds=instance_ids) - assert "Reservations" in instances - assert instances["Reservations"] - assert "Instances" in instances["Reservations"][0] - assert len(instances["Reservations"][0]["Instances"]) == num - for instance in instances["Reservations"][0]["Instances"]: - assert "State" in instance - assert "Name" in instance["State"] - assert instance["State"]["Name"] == "stopped" - - return instance_ids - - return _do - - @dataclass class _BufferMachineParams: instance_state_name: InstanceStateNameType @@ -652,29 +507,6 @@ async def test_monitor_buffer_machines_terminates_unneeded_pool( ) -@pytest.fixture -def buffer_count( - ec2_instances_allowed_types_with_only_1_buffered: dict[ - InstanceTypeType, EC2InstanceBootSpecific - ], -) -> int: - def _by_buffer_count( - instance_type_and_settings: tuple[InstanceTypeType, EC2InstanceBootSpecific] - ) -> bool: - _, boot_specific = instance_type_and_settings - return boot_specific.buffer_count > 0 - - allowed_ec2_types = ec2_instances_allowed_types_with_only_1_buffered - allowed_ec2_types_with_buffer_defined = dict( - filter(_by_buffer_count, allowed_ec2_types.items()) - ) - assert allowed_ec2_types_with_buffer_defined, "you need one type with buffer" - assert ( - len(allowed_ec2_types_with_buffer_defined) == 1 - ), "more than one type with buffer is disallowed in this test!" 
- return next(iter(allowed_ec2_types_with_buffer_defined.values())).buffer_count - - @pytest.fixture def pre_pull_images( ec2_instances_allowed_types_with_only_1_buffered: dict[InstanceTypeType, Any] diff --git a/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py b/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py index f576292ec6b..5a5a3240057 100644 --- a/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py +++ b/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py @@ -323,7 +323,7 @@ def test_sort_empty_drained_nodes( def test_sort_drained_nodes( - mock_machines_buffer: int, + with_instances_machines_hot_buffer: EnvVarsDict, minimal_configuration: None, app_settings: ApplicationSettings, random_fake_available_instances: list[EC2InstanceType], @@ -332,7 +332,9 @@ def test_sort_drained_nodes( ): machine_buffer_type = get_machine_buffer_type(random_fake_available_instances) _NUM_DRAINED_NODES = 20 - _NUM_NODE_WITH_TYPE_BUFFER = 3 * mock_machines_buffer + _NUM_NODE_WITH_TYPE_BUFFER = ( + 3 * app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER + ) _NUM_NODES_TERMINATING = 13 fake_drained_nodes = [] for _ in range(_NUM_DRAINED_NODES): @@ -388,10 +390,6 @@ def test_sort_drained_nodes( app_settings, fake_drained_nodes, random_fake_available_instances ) assert app_settings.AUTOSCALING_EC2_INSTANCES - assert ( - mock_machines_buffer - == app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER - ) assert len(sorted_drained_nodes) == ( _NUM_DRAINED_NODES + _NUM_NODE_WITH_TYPE_BUFFER diff --git a/services/catalog/src/simcore_service_catalog/api/rest/_services.py b/services/catalog/src/simcore_service_catalog/api/rest/_services.py index e2abc23d179..78362d63733 100644 --- a/services/catalog/src/simcore_service_catalog/api/rest/_services.py +++ b/services/catalog/src/simcore_service_catalog/api/rest/_services.py @@ -9,6 +9,7 @@ from fastapi import APIRouter, Depends, Header, HTTPException, status from models_library.api_schemas_catalog.services import ServiceGet, ServiceUpdate from models_library.services import ServiceKey, ServiceType, ServiceVersion +from models_library.services_authoring import Author from models_library.services_metadata_published import ServiceMetaDataPublished from pydantic import ValidationError from pydantic.types import PositiveInt @@ -127,7 +128,11 @@ async def list_services( name="nodetails", description="nodetails", type=ServiceType.COMPUTATIONAL, - authors=[{"name": "nodetails", "email": "nodetails@nodetails.com"}], + authors=[ + Author.model_construct( + name="nodetails", email="nodetails@nodetails.com" + ) + ], contact="nodetails@nodetails.com", inputs={}, outputs={}, diff --git a/services/catalog/tests/unit/with_dbs/test_api_rpc.py b/services/catalog/tests/unit/with_dbs/test_api_rpc.py index 16fb6adb4cb..3192eabbfe6 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_rpc.py +++ b/services/catalog/tests/unit/with_dbs/test_api_rpc.py @@ -108,6 +108,25 @@ async def background_sync_task_mocked( await services_db_tables_injector(fake_data_for_services) +async def test_rpc_catalog_with_no_services_returns_empty_page( + background_sync_task_mocked: None, + mocked_director_service_api: MockRouter, + rpc_client: RabbitMQRPCClient, + user_id: UserID, + app: FastAPI, +): + assert app + + page = await list_services_paginated( + rpc_client, product_name="not_existing_returns_no_services", user_id=user_id + ) + assert page.data == [] + assert page.links.next is None + assert page.links.prev is None 
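+    # an empty result still returns a complete pagination envelope:
+    # no next/prev links and zeroed counters in the meta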
+ assert page.meta.count == 0 + assert page.meta.total == 0 + + async def test_rpc_catalog_client( background_sync_task_mocked: None, mocked_director_service_api: MockRouter, diff --git a/services/director-v2/openapi.json b/services/director-v2/openapi.json index 63418baabe5..c769aff191a 100644 --- a/services/director-v2/openapi.json +++ b/services/director-v2/openapi.json @@ -2018,7 +2018,8 @@ "docker_node_id": { "anyOf": [ { - "type": "string" + "type": "string", + "pattern": "[a-zA-Z0-9]" }, { "type": "null" @@ -2057,7 +2058,10 @@ "required": [ "is_inactive" ], - "title": "GetProjectInactivityResponse" + "title": "GetProjectInactivityResponse", + "example": { + "is_inactive": "false" + } }, "HTTPValidationError": { "properties": { diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py index f0b6e635ac7..707d7a8cc1e 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py @@ -1,4 +1,4 @@ -""" CRUD operations on a "computation" resource +"""CRUD operations on a "computation" resource A computation is a resource that represents a running pipeline of computational services in a give project Therefore, @@ -15,7 +15,6 @@ # pylint: disable=too-many-arguments # pylint: disable=too-many-statements - import contextlib import logging from typing import Annotated, Any, Final @@ -75,7 +74,6 @@ compute_pipeline_details, compute_pipeline_started_timestamp, compute_pipeline_stopped_timestamp, - compute_pipeline_submitted_timestamp, create_complete_dag, create_complete_dag_from_tasks, create_minimal_computational_graph_based_on_selection, @@ -396,9 +394,7 @@ async def create_computation( # noqa: PLR0913 # pylint: disable=too-many-positi stopped=compute_pipeline_stopped_timestamp( minimal_computational_dag, comp_tasks ), - submitted=compute_pipeline_submitted_timestamp( - minimal_computational_dag, comp_tasks - ), + submitted=last_run.created if last_run else None, ) except ProjectNotFoundError as e: @@ -498,7 +494,7 @@ async def get_computation( result=None, started=compute_pipeline_started_timestamp(pipeline_dag, all_tasks), stopped=compute_pipeline_stopped_timestamp(pipeline_dag, all_tasks), - submitted=compute_pipeline_submitted_timestamp(pipeline_dag, all_tasks), + submitted=last_run.created if last_run else None, ) @@ -572,7 +568,7 @@ async def stop_computation( result=None, started=compute_pipeline_started_timestamp(pipeline_dag, tasks), stopped=compute_pipeline_stopped_timestamp(pipeline_dag, tasks), - submitted=compute_pipeline_submitted_timestamp(pipeline_dag, tasks), + submitted=last_run.created if last_run else None, ) except ProjectNotFoundError as e: diff --git a/services/director-v2/src/simcore_service_director_v2/constants.py b/services/director-v2/src/simcore_service_director_v2/constants.py index b84865745df..194425d0328 100644 --- a/services/director-v2/src/simcore_service_director_v2/constants.py +++ b/services/director-v2/src/simcore_service_director_v2/constants.py @@ -1,8 +1,11 @@ from typing import Final -# dynamic services -DYNAMIC_SIDECAR_SERVICE_PREFIX: Final[str] = "dy-sidecar" -DYNAMIC_PROXY_SERVICE_PREFIX: Final[str] = "dy-proxy" +from models_library.api_schemas_directorv2.services import ( + DYNAMIC_PROXY_SERVICE_PREFIX, + DYNAMIC_SIDECAR_SERVICE_PREFIX, +) + +# dynamic services # label storing scheduler_data to allow service # 
monitoring recovery after director-v2 reboots diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 4bf9e1e30af..5a6675e5f6f 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -128,7 +128,6 @@ class CompTaskAtDB(BaseModel): description="the hex digest of the resolved inputs +outputs hash at the time when the last outputs were generated", ) image: Image - submit: dt.datetime start: dt.datetime | None = None end: dt.datetime | None = None state: RunningState @@ -151,6 +150,10 @@ class CompTaskAtDB(BaseModel): pricing_info: dict | None hardware_info: HardwareInfo + submit: dt.datetime | None = Field( + default=None, deprecated=True, description="Required for legacy services" + ) + @field_validator("state", mode="before") @classmethod def _convert_state_from_state_type_enum_if_needed(cls, v): @@ -163,7 +166,7 @@ def _convert_state_from_state_type_enum_if_needed(cls, v): return RunningState(DB_TO_RUNNING_STATE[StateType(v)]) return v - @field_validator("start", "end", "submit") + @field_validator("start", "end") @classmethod def _ensure_utc(cls, v: dt.datetime | None) -> dt.datetime | None: if v is not None and v.tzinfo is None: @@ -228,7 +231,6 @@ def to_db_model(self, **exclusion_rules) -> dict[str, Any]: } }, "image": image_example, - "submit": "2021-03-01 13:07:34.19161", "node_class": "INTERACTIVE", "state": "NOT_STARTED", "progress": 0.44, @@ -240,7 +242,9 @@ def to_db_model(self, **exclusion_rules) -> dict[str, Any]: "pricing_unit_id": 1, "pricing_unit_cost_id": 1, }, - "hardware_info": next(iter(HardwareInfo.model_config["json_schema_extra"]["examples"])), # type: ignore + "hardware_info": next( + iter(HardwareInfo.model_config["json_schema_extra"]["examples"]) # type: ignore + ), } for image_example in Image.model_config["json_schema_extra"]["examples"] # type: ignore ] diff --git a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py index 7e9b3ebeac6..56c2a27c9c2 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py @@ -14,8 +14,13 @@ from models_library.api_schemas_directorv2.dynamic_services_service import ( CommonServiceDetails, ) +from models_library.api_schemas_directorv2.services import ( + DYNAMIC_PROXY_SERVICE_PREFIX, + DYNAMIC_SIDECAR_SERVICE_PREFIX, +) from models_library.basic_types import PortInt from models_library.callbacks_mapping import CallbacksMapping +from models_library.docker import DockerNodeID from models_library.generated_models.docker_rest_api import ContainerState, Status2 from models_library.projects_nodes_io import NodeID from models_library.resource_tracker import HardwareInfo, PricingInfo @@ -24,7 +29,7 @@ PathMappingsLabel, SimcoreServiceLabels, ) -from models_library.services import RunID +from models_library.services import ServiceRunID from models_library.services_resources import ServiceResourcesDict from models_library.wallets import WalletInfo from pydantic import ( @@ -39,9 +44,7 @@ from servicelib.exception_utils import DelayedExceptionHandler from ..constants import ( - DYNAMIC_PROXY_SERVICE_PREFIX, DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL, - 
DYNAMIC_SIDECAR_SERVICE_PREFIX, REGEX_DY_SERVICE_PROXY, REGEX_DY_SERVICE_SIDECAR, ) @@ -297,7 +300,7 @@ def compose_spec_submitted(self) -> bool: default=None, description="used for starting the proxy" ) - docker_node_id: str | None = Field( + docker_node_id: DockerNodeID | None = Field( default=None, description=( "contains node id of the docker node where all services " @@ -376,8 +379,8 @@ class SchedulerData(CommonServiceDetails, DynamicSidecarServiceLabels): ..., description="Name of the current dynamic-sidecar being observed", ) - run_id: RunID = Field( - default_factory=RunID.create, + run_id: ServiceRunID = Field( + default_factory=ServiceRunID.get_resource_tracking_run_id_for_dynamic, description=( "Uniquely identify the dynamic sidecar session (a.k.a. 2 " "subsequent exact same services will have a different run_id)" @@ -483,7 +486,7 @@ def from_http_request( request_scheme: str, request_simcore_user_agent: str, can_save: bool, - run_id: RunID | None = None, + run_id: ServiceRunID | None = None, ) -> "SchedulerData": # This constructor method sets current product names_helper = DynamicSidecarNamesHelper.make(service.node_uuid) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py index b959c9c8014..22b8677611f 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py @@ -26,6 +26,7 @@ from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.projects_state import RunningState from models_library.services import ServiceType +from models_library.services_types import ServiceRunID from models_library.users import UserID from networkx.classes.reportviews import InDegreeView from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE @@ -66,7 +67,6 @@ TASK_TO_START_STATES, WAITING_FOR_START_STATES, create_service_resources_from_task, - get_resource_tracking_run_id, ) _logger = logging.getLogger(__name__) @@ -295,7 +295,7 @@ def _need_heartbeat(task: CompTaskAtDB) -> bool: *( publish_service_resource_tracking_heartbeat( self.rabbitmq_client, - get_resource_tracking_run_id( + ServiceRunID.get_resource_tracking_run_id_for_computational( user_id, t.project_id, t.node_id, iteration ), ) @@ -348,7 +348,7 @@ async def _process_started_tasks( *( publish_service_resource_tracking_started( self.rabbitmq_client, - service_run_id=get_resource_tracking_run_id( + service_run_id=ServiceRunID.get_resource_tracking_run_id_for_computational( user_id, t.project_id, t.node_id, iteration ), wallet_id=run_metadata.get("wallet_id"), diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_dask.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_dask.py index 153378e9ee5..cc33c129f1b 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_dask.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_dask.py @@ -18,6 +18,7 @@ from models_library.projects_nodes_io import NodeID from models_library.projects_state import RunningState from models_library.rabbitmq_messages import SimcorePlatformStatus +from models_library.services_types import ServiceRunID from models_library.users import UserID from 
servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE from servicelib.logging_utils import log_catch @@ -48,7 +49,6 @@ from ..db.repositories.comp_runs import CompRunsRepository from ..db.repositories.comp_tasks import CompTasksRepository from ._scheduler_base import BaseCompScheduler -from ._utils import get_resource_tracking_run_id _logger = logging.getLogger(__name__) @@ -129,6 +129,9 @@ async def _start_tasks( hardware_info=task.hardware_info, callback=wake_up_callback, metadata=comp_run.metadata, + resource_tracking_run_id=ServiceRunID.get_resource_tracking_run_id_for_computational( + user_id, project_id, node_id, comp_run.iteration + ), ) for node_id, task in scheduled_tasks.items() ), @@ -319,7 +322,9 @@ async def _process_task_result( # resource tracking await publish_service_resource_tracking_stopped( self.rabbitmq_client, - get_resource_tracking_run_id(user_id, project_id, node_id, iteration), + ServiceRunID.get_resource_tracking_run_id_for_computational( + user_id, project_id, node_id, iteration + ), simcore_platform_status=simcore_platform_status, ) # instrumentation diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_utils.py index 9d2722e3b6c..dc414376db0 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_utils.py @@ -2,19 +2,15 @@ from fastapi import FastAPI from models_library.docker import DockerGenericTag -from models_library.projects import ProjectID -from models_library.projects_nodes_io import NodeID from models_library.projects_state import RunningState from models_library.services_resources import ( ResourceValue, ServiceResourcesDict, ServiceResourcesDictHelpers, ) -from models_library.users import UserID from servicelib.redis import RedisClientSDK from settings_library.redis import RedisDatabase -from ...models.comp_runs import Iteration from ...models.comp_tasks import CompTaskAtDB from ..redis import get_redis_client_manager @@ -55,12 +51,6 @@ } -def get_resource_tracking_run_id( - user_id: UserID, project_id: ProjectID, node_id: NodeID, iteration: Iteration -) -> str: - return f"comp_{user_id}_{project_id}_{node_id}_{iteration}" - - def create_service_resources_from_task(task: CompTaskAtDB) -> ServiceResourcesDict: assert task.image.node_requirements # nosec return ServiceResourcesDictHelpers.create_from_single_service( diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py index 181c6c22a6d..731388f8ae9 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py @@ -47,6 +47,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.resource_tracker import HardwareInfo +from models_library.services import ServiceRunID from models_library.users import UserID from pydantic import TypeAdapter, ValidationError from pydantic.networks import AnyUrl @@ -293,6 +294,7 @@ async def send_computation_tasks( remote_fct: ContainerRemoteFct | None = None, metadata: RunMetadataDict, hardware_info: HardwareInfo, + resource_tracking_run_id: ServiceRunID, ) -> list[PublishedComputationTask]: """actually sends the function 
remote_fct to be remotely executed. If None is given, the default function that runs the container will be started. @@ -396,6 +398,7 @@ async def send_computation_tasks( node_id=node_id, node_image=node_image, metadata=metadata, + resource_tracking_run_id=resource_tracking_run_id, ) task_owner = dask_utils.compute_task_owner( user_id, project_id, node_id, metadata.get("project_metadata", {}) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py index 2619d9ce98f..b703691926f 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py @@ -145,12 +145,12 @@ async def _get_node_infos( None, ) - result: tuple[ServiceMetaDataPublished, ServiceExtras, SimcoreServiceLabels] = ( - await asyncio.gather( - _get_service_details(catalog_client, user_id, product_name, node), - director_client.get_service_extras(node.key, node.version), - director_client.get_service_labels(node), - ) + result: tuple[ + ServiceMetaDataPublished, ServiceExtras, SimcoreServiceLabels + ] = await asyncio.gather( + _get_service_details(catalog_client, user_id, product_name, node), + director_client.get_service_extras(node.key, node.version), + director_client.get_service_labels(node), ) return result @@ -246,9 +246,9 @@ async def _get_pricing_and_hardware_infos( return pricing_info, hardware_info -_RAM_SAFE_MARGIN_RATIO: Final[float] = ( - 0.1 # NOTE: machines always have less available RAM than advertised -) +_RAM_SAFE_MARGIN_RATIO: Final[ + float +] = 0.1 # NOTE: machines always have less available RAM than advertised _CPUS_SAFE_MARGIN: Final[float] = 0.1 @@ -266,11 +266,11 @@ async def _update_project_node_resources_from_hardware_info( if not hardware_info.aws_ec2_instances: return try: - unordered_list_ec2_instance_types: list[EC2InstanceTypeGet] = ( - await get_instance_type_details( - rabbitmq_rpc_client, - instance_type_names=set(hardware_info.aws_ec2_instances), - ) + unordered_list_ec2_instance_types: list[ + EC2InstanceTypeGet + ] = await get_instance_type_details( + rabbitmq_rpc_client, + instance_type_names=set(hardware_info.aws_ec2_instances), ) assert unordered_list_ec2_instance_types # nosec @@ -439,7 +439,6 @@ async def generate_tasks_list_from_project( inputs=node.inputs, outputs=node.outputs, image=image, - submit=arrow.utcnow().datetime, state=task_state, internal_id=internal_id, node_class=to_node_class(node.key), diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/_namespace.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/_namespace.py index 32bb114f095..37dc914451c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/_namespace.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/_namespace.py @@ -1,7 +1,8 @@ +from models_library.api_schemas_directorv2.services import ( + DYNAMIC_SIDECAR_SERVICE_PREFIX, +) from models_library.projects_nodes_io import NodeID -from ...constants import DYNAMIC_SIDECAR_SERVICE_PREFIX - def get_compose_namespace(node_uuid: NodeID) -> str: # To avoid collisions for started docker resources a unique identifier is computed: diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_core.py
b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_core.py index 1e05524b48d..350c406c1eb 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_core.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_core.py @@ -9,7 +9,10 @@ from common_library.json_serialization import json_dumps from fastapi.encoders import jsonable_encoder from models_library.aiodocker_api import AioDockerServiceSpec -from models_library.docker import to_simcore_runtime_docker_label_key +from models_library.api_schemas_directorv2.services import ( + DYNAMIC_SIDECAR_SERVICE_PREFIX, +) +from models_library.docker import DockerNodeID, to_simcore_runtime_docker_label_key from models_library.projects import ProjectID from models_library.projects_networks import DockerNetworkName from models_library.projects_nodes_io import NodeID @@ -22,10 +25,7 @@ from tenacity.stop import stop_after_delay from tenacity.wait import wait_exponential, wait_random_exponential -from ....constants import ( - DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL, - DYNAMIC_SIDECAR_SERVICE_PREFIX, -) +from ....constants import DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL from ....core.dynamic_services_settings.scheduler import ( DynamicServicesSchedulerSettings, ) @@ -170,7 +170,7 @@ async def _get_service_latest_task(service_id: str) -> Mapping[str, Any]: async def get_dynamic_sidecar_placement( service_id: str, dynamic_services_scheduler_settings: DynamicServicesSchedulerSettings, -) -> str: +) -> DockerNodeID: """ Waits until the service has a task in `running` state and returns its `docker_node_id`. @@ -205,7 +205,7 @@ async def _get_task_data_when_service_running(service_id: str) -> Mapping[str, A task = await _get_task_data_when_service_running(service_id=service_id) - docker_node_id: None | str = task.get("NodeID", None) + docker_node_id: DockerNodeID | None = task.get("NodeID", None) if not docker_node_id: msg = f"Could not find an assigned NodeID for service_id={service_id}.
Last task inspect result: {task}" raise DynamicSidecarError(msg=msg) @@ -494,7 +494,9 @@ async def update_scheduler_data_label(scheduler_data: SchedulerData) -> None: ) -async def constrain_service_to_node(service_name: str, docker_node_id: str) -> None: +async def constrain_service_to_node( + service_name: str, docker_node_id: DockerNodeID +) -> None: await _update_service_spec( service_name, update_in_service_spec={ diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py index 98ba1ea2f40..54f791a0a53 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py @@ -20,6 +20,7 @@ ResourceValue, ServiceResourcesDict, ) +from models_library.services_types import ServiceRunID from models_library.users import UserID from models_library.utils.docker_compose import replace_env_vars_in_compose_spec from pydantic import ByteSize @@ -278,6 +279,7 @@ async def assemble_spec( # pylint: disable=too-many-arguments # noqa: PLR0913 node_id: NodeID, simcore_user_agent: str, swarm_stack_name: str, + service_run_id: ServiceRunID, ) -> str: """ returns a docker-compose spec used by @@ -350,6 +352,7 @@ async def assemble_spec( # pylint: disable=too-many-arguments # noqa: PLR0913 product_name=product_name, project_id=project_id, node_id=node_id, + service_run_id=service_run_id, ) add_egress_configuration( @@ -388,6 +391,7 @@ async def assemble_spec( # pylint: disable=too-many-arguments # noqa: PLR0913 product_name=product_name, project_id=project_id, node_id=node_id, + service_run_id=service_run_id, ) stringified_service_spec: str = replace_env_vars_in_compose_spec( diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py index 4ee83bee16f..35fa3ae9ae5 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py @@ -247,7 +247,7 @@ async def _get_mounts( DynamicSidecarVolumesPathsResolver.mount_shared_store( swarm_stack_name=dynamic_services_scheduler_settings.SWARM_STACK_NAME, node_uuid=scheduler_data.node_uuid, - run_id=scheduler_data.run_id, + service_run_id=scheduler_data.run_id, project_id=scheduler_data.project_id, user_id=scheduler_data.user_id, has_quota_support=has_quota_support, @@ -279,7 +279,7 @@ async def _get_mounts( swarm_stack_name=dynamic_services_scheduler_settings.SWARM_STACK_NAME, path=path_to_mount, node_uuid=scheduler_data.node_uuid, - run_id=scheduler_data.run_id, + service_run_id=scheduler_data.run_id, project_id=scheduler_data.project_id, user_id=scheduler_data.user_id, volume_size_limit=volume_size_limits.get(f"{path_to_mount}"), @@ -305,7 +305,7 @@ async def _get_mounts( swarm_stack_name=dynamic_services_scheduler_settings.SWARM_STACK_NAME, path=path_to_mount, node_uuid=scheduler_data.node_uuid, - run_id=scheduler_data.run_id, + service_run_id=scheduler_data.run_id, project_id=scheduler_data.project_id, user_id=scheduler_data.user_id, efs_settings=dynamic_sidecar_settings.DYNAMIC_SIDECAR_EFS_SETTINGS, @@ -319,7 +319,7 @@ 
async def _get_mounts( swarm_stack_name=dynamic_services_scheduler_settings.SWARM_STACK_NAME, path=path_to_mount, node_uuid=scheduler_data.node_uuid, - run_id=scheduler_data.run_id, + service_run_id=scheduler_data.run_id, project_id=scheduler_data.project_id, user_id=scheduler_data.user_id, r_clone_settings=dynamic_sidecar_settings.DYNAMIC_SIDECAR_R_CLONE_SETTINGS, @@ -331,7 +331,7 @@ async def _get_mounts( swarm_stack_name=dynamic_services_scheduler_settings.SWARM_STACK_NAME, path=path_to_mount, node_uuid=scheduler_data.node_uuid, - run_id=scheduler_data.run_id, + service_run_id=scheduler_data.run_id, project_id=scheduler_data.project_id, user_id=scheduler_data.user_id, volume_size_limit=volume_size_limits.get(f"{path_to_mount}"), @@ -372,7 +372,7 @@ async def _get_mounts( user_preferences_path=scheduler_data.user_preferences_path, swarm_stack_name=dynamic_services_scheduler_settings.SWARM_STACK_NAME, node_uuid=scheduler_data.node_uuid, - run_id=scheduler_data.run_id, + service_run_id=scheduler_data.run_id, project_id=scheduler_data.project_id, user_id=scheduler_data.user_id, has_quota_support=has_quota_support, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py index c6a4cba08f3..8352cab1f8a 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py @@ -17,7 +17,7 @@ ProgressType, ) from models_library.service_settings_labels import SimcoreServiceSettingsLabel -from models_library.services import RunID +from models_library.services import ServiceRunID from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient from simcore_postgres_database.models.comp_tasks import NodeClass @@ -237,7 +237,7 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: # Each time a new dynamic-sidecar service is created # generate a new `run_id` to avoid resource collisions - scheduler_data.run_id = RunID.create() + scheduler_data.run_id = ServiceRunID.get_resource_tracking_run_id_for_dynamic() rpc_client: RabbitMQRPCClient = app.state.rabbitmq_rpc_client diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py index a6976796560..289cfc162c9 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py @@ -103,6 +103,7 @@ async def submit_compose_sepc(app: FastAPI, scheduler_data: SchedulerData) -> No node_id=scheduler_data.node_uuid, simcore_user_agent=scheduler_data.request_simcore_user_agent, swarm_stack_name=dynamic_services_scheduler_settings.SWARM_STACK_NAME, + service_run_id=scheduler_data.run_id, ) _logger.debug( diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py index e3b6d024bf8..4a127e59e51 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py @@ -28,6 +28,7 @@ from servicelib.logging_utils import log_context from servicelib.rabbitmq import RabbitMQClient from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.agent.containers import force_container_cleanup from servicelib.rabbitmq.rpc_interfaces.agent.errors import ( NoServiceVolumesFoundRPCError, ) @@ -210,6 +211,7 @@ async def service_remove_sidecar_proxy_docker_networks_and_volumes( set_were_state_and_outputs_saved: bool | None = None, ) -> None: scheduler_data: SchedulerData = _get_scheduler_data(app, node_uuid) + rabbit_rpc_client: RabbitMQRPCClient = app.state.rabbitmq_rpc_client if set_were_state_and_outputs_saved is not None: scheduler_data.dynamic_sidecar.were_state_and_outputs_saved = True @@ -221,7 +223,14 @@ async def service_remove_sidecar_proxy_docker_networks_and_volumes( node_uuid=scheduler_data.node_uuid, swarm_stack_name=swarm_stack_name, ) - # remove network + if scheduler_data.dynamic_sidecar.docker_node_id: + await force_container_cleanup( + rabbit_rpc_client, + docker_node_id=scheduler_data.dynamic_sidecar.docker_node_id, + swarm_stack_name=swarm_stack_name, + node_id=scheduler_data.node_uuid, + ) + task_progress.update(message="removing network", percent=ProgressPercent(0.2)) await remove_dynamic_sidecar_network(scheduler_data.dynamic_sidecar_network_name) @@ -237,7 +246,6 @@ async def service_remove_sidecar_proxy_docker_networks_and_volumes( message="removing volumes", percent=ProgressPercent(0.3) ) with log_context(_logger, logging.DEBUG, f"removing volumes '{node_uuid}'"): - rabbit_rpc_client: RabbitMQRPCClient = app.state.rabbitmq_rpc_client try: await remove_volumes_without_backup_for_service( rabbit_rpc_client, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/volumes.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/volumes.py index 7f55dc68498..bf375b29eed 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/volumes.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/volumes.py @@ -7,7 +7,7 @@ ) from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID -from models_library.services import RunID +from models_library.services import ServiceRunID from models_library.users import UserID from servicelib.docker_constants import PREFIX_DYNAMIC_SIDECAR_VOLUMES from settings_library.efs import ( @@ -120,7 +120,7 @@ def volume_name(cls, path: Path) -> str: return f"{path}".replace(os.sep, "_") @classmethod - def source(cls, path: Path, node_uuid: NodeID, run_id: RunID) -> str: + def source(cls, path: Path, node_uuid: NodeID, service_run_id: ServiceRunID) -> str: """Returns a valid and unique volume name that is composed out of identifiers, namely - relative target path - node_uuid @@ -138,7 +138,7 @@ def source(cls, path: Path, node_uuid: NodeID, run_id: RunID) -> str: reversed_volume_name = cls.volume_name(path)[::-1] # ensure prefix size does not change - prefix = f"{PREFIX_DYNAMIC_SIDECAR_VOLUMES}_{run_id}_{node_uuid}" + prefix = f"{PREFIX_DYNAMIC_SIDECAR_VOLUMES}_{service_run_id}_{node_uuid}" assert len(prefix) == CHARS_IN_VOLUME_NAME_BEFORE_DIR_NAME - 1 # nosec unique_name = 
f"{prefix}_{reversed_volume_name}" @@ -150,7 +150,7 @@ def mount_entry( swarm_stack_name: str, path: Path, node_uuid: NodeID, - run_id: RunID, + service_run_id: ServiceRunID, project_id: ProjectID, user_id: UserID, volume_size_limit: str | None, @@ -159,13 +159,13 @@ def mount_entry( Creates specification for mount to be added to containers created as part of a service """ return { - "Source": cls.source(path, node_uuid, run_id), + "Source": cls.source(path, node_uuid, service_run_id), "Target": cls.target(path), "Type": "volume", "VolumeOptions": { "Labels": { - "source": cls.source(path, node_uuid, run_id), - "run_id": f"{run_id}", + "source": cls.source(path, node_uuid, service_run_id), + "run_id": f"{service_run_id}", "node_uuid": f"{node_uuid}", "study_id": f"{project_id}", "user_id": f"{user_id}", @@ -182,7 +182,7 @@ def mount_entry( @classmethod def mount_shared_store( cls, - run_id: RunID, + service_run_id: ServiceRunID, node_uuid: NodeID, project_id: ProjectID, user_id: UserID, @@ -194,7 +194,7 @@ def mount_shared_store( swarm_stack_name=swarm_stack_name, path=DY_SIDECAR_SHARED_STORE_PATH, node_uuid=node_uuid, - run_id=run_id, + service_run_id=service_run_id, project_id=project_id, user_id=user_id, volume_size_limit="1M" if has_quota_support else None, @@ -204,7 +204,7 @@ def mount_shared_store( def mount_user_preferences( cls, user_preferences_path: Path, - run_id: RunID, + service_run_id: ServiceRunID, node_uuid: NodeID, project_id: ProjectID, user_id: UserID, @@ -216,7 +216,7 @@ def mount_user_preferences( swarm_stack_name=swarm_stack_name, path=user_preferences_path, node_uuid=node_uuid, - run_id=run_id, + service_run_id=service_run_id, project_id=project_id, user_id=user_id, # NOTE: the contents of this volume will be zipped and much @@ -231,19 +231,19 @@ def mount_r_clone( swarm_stack_name: str, path: Path, node_uuid: NodeID, - run_id: RunID, + service_run_id: ServiceRunID, project_id: ProjectID, user_id: UserID, r_clone_settings: RCloneSettings, ) -> dict[str, Any]: return { - "Source": cls.source(path, node_uuid, run_id), + "Source": cls.source(path, node_uuid, service_run_id), "Target": cls.target(path), "Type": "volume", "VolumeOptions": { "Labels": { - "source": cls.source(path, node_uuid, run_id), - "run_id": f"{run_id}", + "source": cls.source(path, node_uuid, service_run_id), + "run_id": f"{service_run_id}", "node_uuid": f"{node_uuid}", "study_id": f"{project_id}", "user_id": f"{user_id}", @@ -264,20 +264,20 @@ def mount_efs( swarm_stack_name: str, path: Path, node_uuid: NodeID, - run_id: RunID, + service_run_id: ServiceRunID, project_id: ProjectID, user_id: UserID, efs_settings: AwsEfsSettings, storage_directory_name: str, ) -> dict[str, Any]: return { - "Source": cls.source(path, node_uuid, run_id), + "Source": cls.source(path, node_uuid, service_run_id), "Target": cls.target(path), "Type": "volume", "VolumeOptions": { "Labels": { - "source": cls.source(path, node_uuid, run_id), - "run_id": f"{run_id}", + "source": cls.source(path, node_uuid, service_run_id), + "run_id": f"{service_run_id}", "node_uuid": f"{node_uuid}", "study_id": f"{project_id}", "user_id": f"{user_id}", diff --git a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/substitutions.py b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/substitutions.py index a855295d9e4..2249937341d 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/substitutions.py +++ 
b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/substitutions.py @@ -18,6 +18,7 @@ from models_library.projects_nodes_io import NodeID from models_library.service_settings_labels import ComposeSpecLabelDict from models_library.services import ServiceKey, ServiceVersion +from models_library.services_types import ServiceRunID from models_library.users import UserID from models_library.utils.specs_substitution import SpecsSubstitutionsResolver from pydantic import BaseModel @@ -120,6 +121,7 @@ def create(cls, app: FastAPI): ("OSPARC_VARIABLE_NODE_ID", "node_id"), ("OSPARC_VARIABLE_PRODUCT_NAME", "product_name"), ("OSPARC_VARIABLE_STUDY_UUID", "project_id"), + ("OSPARC_VARIABLE_SERVICE_RUN_ID", "run_id"), ("OSPARC_VARIABLE_USER_ID", "user_id"), ("OSPARC_VARIABLE_API_HOST", "api_server_base_url"), ]: @@ -181,6 +183,7 @@ async def resolve_and_substitute_session_variables_in_model( product_name: str, project_id: ProjectID, node_id: NodeID, + service_run_id: ServiceRunID, ) -> TBaseModel: result: TBaseModel = model try: @@ -200,6 +203,7 @@ async def resolve_and_substitute_session_variables_in_model( product_name=product_name, project_id=project_id, node_id=node_id, + run_id=service_run_id, api_server_base_url=app.state.settings.DIRECTOR_V2_PUBLIC_API_BASE_URL, ), ) @@ -221,6 +225,7 @@ async def resolve_and_substitute_session_variables_in_specs( product_name: str, project_id: ProjectID, node_id: NodeID, + service_run_id: ServiceRunID, ) -> dict[str, Any]: table = OsparcSessionVariablesTable.get_from_app_state(app) resolver = SpecsSubstitutionsResolver(specs, upgrade=False) @@ -241,6 +246,7 @@ async def resolve_and_substitute_session_variables_in_specs( product_name=product_name, project_id=project_id, node_id=node_id, + run_id=service_run_id, api_server_base_url=app.state.settings.DIRECTOR_V2_PUBLIC_API_BASE_URL, ), ) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dags.py b/services/director-v2/src/simcore_service_director_v2/utils/dags.py index 07d60e82fd5..2b8593fce07 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dags.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dags.py @@ -217,17 +217,6 @@ def compute_pipeline_stopped_timestamp( return pipeline_stopped_at -def compute_pipeline_submitted_timestamp( - pipeline_dag: nx.DiGraph, comp_tasks: list[CompTaskAtDB] -) -> datetime.datetime | None: - if not pipeline_dag.nodes: - return None - node_id_to_comp_task: dict[NodeIDStr, CompTaskAtDB] = { - NodeIDStr(f"{task.node_id}"): task for task in comp_tasks - } - return max(node_id_to_comp_task[node_id].submit for node_id in pipeline_dag.nodes) - - async def compute_pipeline_details( complete_dag: nx.DiGraph, pipeline_dag: nx.DiGraph, comp_tasks: list[CompTaskAtDB] ) -> PipelineDetails: diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dask.py b/services/director-v2/src/simcore_service_director_v2/utils/dask.py index 13967b0c5da..be897b9f0d6 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dask.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dask.py @@ -26,6 +26,7 @@ from models_library.projects import ProjectID, ProjectIDStr from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.services import ServiceKey, ServiceVersion +from models_library.services_types import ServiceRunID from models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter, ValidationError from servicelib.logging_utils 
import log_catch, log_context @@ -342,6 +343,7 @@ async def compute_task_envs( node_id: NodeID, node_image: Image, metadata: RunMetadataDict, + resource_tracking_run_id: ServiceRunID, ) -> ContainerEnvsDict: product_name = metadata.get("product_name", UNDEFINED_DOCKER_LABEL) task_envs = node_image.envs @@ -360,6 +362,7 @@ async def compute_task_envs( product_name=product_name, project_id=project_id, node_id=node_id, + service_run_id=resource_tracking_run_id, ) # NOTE: see https://github.com/ITISFoundation/osparc-simcore/issues/3638 # we currently do not validate as we are using illegal docker key names with underscores diff --git a/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py b/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py index 2715858965b..28ce84c605d 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py @@ -25,7 +25,7 @@ def _get_or_raise(context: ContextDict) -> Any: try: return context[parameter_name] except KeyError as err: - msg = "Parameter {keyname} missing from substitution context" + msg = f"{parameter_name=} missing from substitution context" raise CaptureError(msg) from err # For context["foo"] -> return operator.methodcaller("__getitem__", keyname) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py b/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py index 70249d3c1da..6f6e1693193 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py @@ -15,6 +15,7 @@ ) from models_library.services import ServiceKey, ServiceType, ServiceVersion from models_library.services_resources import ServiceResourcesDict +from models_library.services_types import ServiceRunID from models_library.users import UserID from models_library.wallets import WalletID from pydantic import NonNegativeFloat @@ -70,7 +71,7 @@ async def publish_service_stopped_metrics( async def publish_service_resource_tracking_started( # pylint: disable=too-many-arguments # noqa: PLR0913 rabbitmq_client: RabbitMQClient, - service_run_id: str, + service_run_id: ServiceRunID, *, wallet_id: WalletID | None, wallet_name: str | None, @@ -127,7 +128,7 @@ async def publish_service_resource_tracking_started( # pylint: disable=too-many async def publish_service_resource_tracking_stopped( rabbitmq_client: RabbitMQClient, - service_run_id: str, + service_run_id: ServiceRunID, *, simcore_platform_status: SimcorePlatformStatus, ) -> None: @@ -138,7 +139,7 @@ async def publish_service_resource_tracking_stopped( async def publish_service_resource_tracking_heartbeat( - rabbitmq_client: RabbitMQClient, service_run_id: str + rabbitmq_client: RabbitMQClient, service_run_id: ServiceRunID ) -> None: message = RabbitResourceTrackingHeartbeatMessage(service_run_id=service_run_id) await rabbitmq_client.publish(message.channel_name, message) diff --git a/services/director-v2/tests/mocks/fake_task.json b/services/director-v2/tests/mocks/fake_task.json index 00a9dfe3501..57d7a4c2837 100644 --- a/services/director-v2/tests/mocks/fake_task.json +++ b/services/director-v2/tests/mocks/fake_task.json @@ -50,7 +50,6 @@ "requires_gpu": false, "requires_mpi": true }, - "submit": "1994-11-10T19:23:02.115Z", "state": "PUBLISHED", "internal_id": 21107840, "node_class": "COMPUTATIONAL", diff --git 
a/services/director-v2/tests/unit/conftest.py b/services/director-v2/tests/unit/conftest.py index 7f4bb33b47c..7a6ab8e439d 100644 --- a/services/director-v2/tests/unit/conftest.py +++ b/services/director-v2/tests/unit/conftest.py @@ -22,9 +22,17 @@ from models_library.generated_models.docker_rest_api import ( ServiceSpec as DockerServiceSpec, ) +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID from models_library.service_settings_labels import SimcoreServiceLabels -from models_library.services import RunID, ServiceKey, ServiceKeyVersion, ServiceVersion +from models_library.services import ( + ServiceKey, + ServiceKeyVersion, + ServiceRunID, + ServiceVersion, +) from models_library.services_enums import ServiceState +from models_library.users import UserID from models_library.utils._original_fastapi_encoders import jsonable_encoder from pydantic import TypeAdapter from pytest_mock.plugin import MockerFixture @@ -71,8 +79,17 @@ def dynamic_sidecar_port() -> PortInt: @pytest.fixture -def run_id() -> RunID: - return RunID.create() +def service_run_id() -> ServiceRunID: + return ServiceRunID.get_resource_tracking_run_id_for_dynamic() + + +@pytest.fixture +def resource_tracking_run_id( + user_id: UserID, project_id: ProjectID, node_id: NodeID +) -> ServiceRunID: + return ServiceRunID.get_resource_tracking_run_id_for_computational( + user_id, project_id, node_id, iteration=42 + ) @pytest.fixture @@ -104,7 +121,7 @@ def scheduler_data_from_http_request( request_scheme: str, request_simcore_user_agent: str, can_save: bool, - run_id: RunID, + service_run_id: ServiceRunID, ) -> SchedulerData: return SchedulerData.from_http_request( service=dynamic_service_create, @@ -114,7 +131,7 @@ def scheduler_data_from_http_request( request_scheme=request_scheme, request_simcore_user_agent=request_simcore_user_agent, can_save=can_save, - run_id=run_id, + run_id=service_run_id, ) diff --git a/services/director-v2/tests/unit/test_modules_dask_client.py b/services/director-v2/tests/unit/test_modules_dask_client.py index f8e1ccd6c61..2bb2787ddc3 100644 --- a/services/director-v2/tests/unit/test_modules_dask_client.py +++ b/services/director-v2/tests/unit/test_modules_dask_client.py @@ -45,6 +45,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.resource_tracker import HardwareInfo +from models_library.services_types import ServiceRunID from models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter from pytest_mock.plugin import MockerFixture @@ -442,6 +443,7 @@ async def test_send_computation_task( task_labels: ContainerLabelsDict, empty_hardware_info: HardwareInfo, faker: Faker, + resource_tracking_run_id: ServiceRunID, ): _DASK_EVENT_NAME = faker.pystr() @@ -503,6 +505,7 @@ def fake_sidecar_fct( ), metadata=comp_run_metadata, hardware_info=empty_hardware_info, + resource_tracking_run_id=resource_tracking_run_id, ) assert node_id_to_job_ids assert len(node_id_to_job_ids) == 1 @@ -559,6 +562,7 @@ async def test_computation_task_is_persisted_on_dask_scheduler( mocked_storage_service_api: respx.MockRouter, comp_run_metadata: RunMetadataDict, empty_hardware_info: HardwareInfo, + resource_tracking_run_id: ServiceRunID, ): """rationale: When a task is submitted to the dask backend, a dask future is returned. 
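# ---------------------------------------------------------------------------
# Editor's sketch (illustrative, not part of the diff): the conftest.py
# fixtures above replace RunID.create() with the two ServiceRunID factories
# introduced by this PR. The call shapes below are copied from those fixtures;
# the exact format of the computational id is an assumption carried over from
# the get_resource_tracking_run_id test removed further below in
# test_utils_comp_scheduler.py.
from models_library.projects import ProjectID
from models_library.projects_nodes_io import NodeID
from models_library.services_types import ServiceRunID

# dynamic services: a freshly generated, unique run id per service start
dynamic_run_id = ServiceRunID.get_resource_tracking_run_id_for_dynamic()

# computational services: a deterministic id derived from the task's
# identifiers, presumably "comp_{user_id}_{project_id}_{node_id}_{iteration}"
comp_run_id = ServiceRunID.get_resource_tracking_run_id_for_computational(
    2,
    ProjectID("e08356e4-eb74-49e9-b769-2c26e34c61d9"),
    NodeID("a08356e4-eb74-49e9-b769-2c26e34c61d1"),
    iteration=5,
)
# expected (assumption, format preserved from the removed helper):
# "comp_2_e08356e4-eb74-49e9-b769-2c26e34c61d9_a08356e4-eb74-49e9-b769-2c26e34c61d1_5"
# ---------------------------------------------------------------------------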
@@ -594,6 +598,7 @@ def fake_sidecar_fct( remote_fct=fake_sidecar_fct, metadata=comp_run_metadata, hardware_info=empty_hardware_info, + resource_tracking_run_id=resource_tracking_run_id, ) assert published_computation_task assert len(published_computation_task) == 1 @@ -649,6 +654,7 @@ async def test_abort_computation_tasks( faker: Faker, comp_run_metadata: RunMetadataDict, empty_hardware_info: HardwareInfo, + resource_tracking_run_id: ServiceRunID, ): _DASK_EVENT_NAME = faker.pystr() @@ -687,6 +693,7 @@ def fake_remote_fct( remote_fct=fake_remote_fct, metadata=comp_run_metadata, hardware_info=empty_hardware_info, + resource_tracking_run_id=resource_tracking_run_id, ) assert published_computation_task assert len(published_computation_task) == 1 @@ -738,6 +745,7 @@ async def test_failed_task_returns_exceptions( mocked_storage_service_api: respx.MockRouter, comp_run_metadata: RunMetadataDict, empty_hardware_info: HardwareInfo, + resource_tracking_run_id: ServiceRunID, ): # NOTE: this must be inlined so that the test works, # the dask-worker must be able to import the function @@ -758,6 +766,7 @@ def fake_failing_sidecar_fct( remote_fct=fake_failing_sidecar_fct, metadata=comp_run_metadata, hardware_info=empty_hardware_info, + resource_tracking_run_id=resource_tracking_run_id, ) assert published_computation_task assert len(published_computation_task) == 1 @@ -800,6 +809,7 @@ async def test_send_computation_task_with_missing_resources_raises( mocked_storage_service_api: respx.MockRouter, comp_run_metadata: RunMetadataDict, empty_hardware_info: HardwareInfo, + resource_tracking_run_id: ServiceRunID, ): # remove the workers that can handle gpu scheduler_info = dask_client.backend.client.scheduler_info() @@ -826,6 +836,7 @@ async def test_send_computation_task_with_missing_resources_raises( remote_fct=None, metadata=comp_run_metadata, hardware_info=empty_hardware_info, + resource_tracking_run_id=resource_tracking_run_id, ) mocked_user_completed_cb.assert_not_called() @@ -844,6 +855,7 @@ async def test_send_computation_task_with_hardware_info_raises( mocked_storage_service_api: respx.MockRouter, comp_run_metadata: RunMetadataDict, hardware_info: HardwareInfo, + resource_tracking_run_id: ServiceRunID, ): # NOTE: running on the default cluster will raise missing resources with pytest.raises(MissingComputationalResourcesError): @@ -855,6 +867,7 @@ async def test_send_computation_task_with_hardware_info_raises( remote_fct=None, metadata=comp_run_metadata, hardware_info=hardware_info, + resource_tracking_run_id=resource_tracking_run_id, ) mocked_user_completed_cb.assert_not_called() @@ -872,6 +885,7 @@ async def test_too_many_resources_send_computation_task( mocked_storage_service_api: respx.MockRouter, comp_run_metadata: RunMetadataDict, empty_hardware_info: HardwareInfo, + resource_tracking_run_id: ServiceRunID, ): # create an image that needs a huge amount of CPU image = Image( @@ -895,6 +909,7 @@ async def test_too_many_resources_send_computation_task( remote_fct=None, metadata=comp_run_metadata, hardware_info=empty_hardware_info, + resource_tracking_run_id=resource_tracking_run_id, ) mocked_user_completed_cb.assert_not_called() @@ -911,6 +926,7 @@ async def test_disconnected_backend_raises_exception( mocked_storage_service_api: respx.MockRouter, comp_run_metadata: RunMetadataDict, empty_hardware_info: HardwareInfo, + resource_tracking_run_id: ServiceRunID, ): # DISCONNECT THE CLUSTER await dask_spec_local_cluster.close() # type: ignore @@ -923,6 +939,7 @@ async def 
test_disconnected_backend_raises_exception( remote_fct=None, metadata=comp_run_metadata, hardware_info=empty_hardware_info, + resource_tracking_run_id=resource_tracking_run_id, ) mocked_user_completed_cb.assert_not_called() @@ -942,6 +959,7 @@ async def test_changed_scheduler_raises_exception( unused_tcp_port_factory: Callable, comp_run_metadata: RunMetadataDict, empty_hardware_info: HardwareInfo, + resource_tracking_run_id: ServiceRunID, ): # change the scheduler (stop the current one and start another at the same address) scheduler_address = URL(dask_spec_local_cluster.scheduler_address) @@ -971,6 +989,7 @@ async def test_changed_scheduler_raises_exception( remote_fct=None, metadata=comp_run_metadata, hardware_info=empty_hardware_info, + resource_tracking_run_id=resource_tracking_run_id, ) mocked_user_completed_cb.assert_not_called() @@ -988,6 +1007,7 @@ async def test_get_tasks_status( fail_remote_fct: bool, comp_run_metadata: RunMetadataDict, empty_hardware_info: HardwareInfo, + resource_tracking_run_id: ServiceRunID, ): # NOTE: this must be inlined so that the test works, # the dask-worker must be able to import the function @@ -1015,6 +1035,7 @@ def fake_remote_fct( remote_fct=fake_remote_fct, metadata=comp_run_metadata, hardware_info=empty_hardware_info, + resource_tracking_run_id=resource_tracking_run_id, ) assert published_computation_task assert len(published_computation_task) == 1 @@ -1069,6 +1090,7 @@ async def test_dask_sub_handlers( fake_task_handlers: TaskHandlers, comp_run_metadata: RunMetadataDict, empty_hardware_info: HardwareInfo, + resource_tracking_run_id: ServiceRunID, ): dask_client.register_handlers(fake_task_handlers) _DASK_START_EVENT = "start" @@ -1098,6 +1120,7 @@ def fake_remote_fct( remote_fct=fake_remote_fct, metadata=comp_run_metadata, hardware_info=empty_hardware_info, + resource_tracking_run_id=resource_tracking_run_id, ) assert published_computation_task assert len(published_computation_task) == 1 @@ -1142,6 +1165,7 @@ async def test_get_cluster_details( comp_run_metadata: RunMetadataDict, empty_hardware_info: HardwareInfo, faker: Faker, + resource_tracking_run_id: ServiceRunID, ): cluster_details = await dask_client.get_cluster_details() assert cluster_details @@ -1178,6 +1202,7 @@ def fake_sidecar_fct( ), metadata=comp_run_metadata, hardware_info=empty_hardware_info, + resource_tracking_run_id=resource_tracking_run_id, ) assert published_computation_task assert len(published_computation_task) == 1 diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_task.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_task.py index 5410c37f203..fd328bd66aa 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_task.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_task.py @@ -14,6 +14,8 @@ import respx from faker import Faker from fastapi import FastAPI +from models_library.docker import DockerNodeID +from pydantic import TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -72,7 +74,9 @@ def mock_env( @pytest.fixture def scheduler_data(scheduler_data_from_http_request: SchedulerData) -> SchedulerData: - scheduler_data_from_http_request.docker_node_id = "test_docker_node_id" + scheduler_data_from_http_request.dynamic_sidecar.docker_node_id = TypeAdapter( + DockerNodeID + ).validate_python("testdockernodeid") return 
scheduler_data_from_http_request @@ -211,8 +215,10 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: @pytest.fixture -def mock_remove_calls(mocker: MockerFixture) -> None: +def mock_rpc_calls(mocker: MockerFixture, minimal_app: FastAPI) -> None: + minimal_app.state.rabbitmq_rpc_client = AsyncMock() mocker.patch.object(_events_utils, "remove_volumes_without_backup_for_service") + mocker.patch.object(_events_utils, "force_container_cleanup") @pytest.fixture(params=[True, False]) @@ -241,8 +247,9 @@ async def test_skip_observation_cycle_after_error( mocked_dynamic_scheduler_events: ACounter, error_raised_by_saving_state: bool, use_case: UseCase, - mock_remove_calls: None, + mock_rpc_calls: None, ): + # add a task, emulate an error make sure no observation cycle is # being triggered again assert mocked_dynamic_scheduler_events.count == 0 diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_volumes_resolver.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_volumes_resolver.py index b617c3da637..4acacd3a4e4 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_volumes_resolver.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_volumes_resolver.py @@ -13,7 +13,7 @@ CHARS_IN_VOLUME_NAME_BEFORE_DIR_NAME, ) from models_library.projects import ProjectID -from models_library.services import RunID +from models_library.services import ServiceRunID from models_library.users import UserID from simcore_service_director_v2.modules.dynamic_sidecar.volumes import ( DynamicSidecarVolumesPathsResolver, @@ -36,8 +36,8 @@ def state_paths() -> list[Path]: @pytest.fixture -def run_id() -> RunID: - return RunID.create() +def service_run_id() -> ServiceRunID: + return ServiceRunID.get_resource_tracking_run_id_for_dynamic() @pytest.fixture @@ -49,7 +49,7 @@ def project_id(faker: Faker) -> ProjectID: def expected_volume_config( swarm_stack_name: str, node_uuid: UUID, - run_id: RunID, + service_run_id: ServiceRunID, project_id: ProjectID, user_id: UserID, ) -> Callable[[str, str], dict[str, Any]]: @@ -62,7 +62,7 @@ def _callable(source: str, target: str) -> dict[str, Any]: "DriverConfig": None, "Labels": { "source": source, - "run_id": f"{run_id}", + "run_id": service_run_id, "study_id": f"{project_id}", "user_id": f"{user_id}", "swarm_stack_name": swarm_stack_name, @@ -79,7 +79,7 @@ def test_expected_paths( node_uuid: UUID, state_paths: list[Path], expected_volume_config: Callable[[str, str], dict[str, Any]], - run_id: RunID, + service_run_id: ServiceRunID, project_id: ProjectID, user_id: UserID, ) -> None: @@ -87,26 +87,38 @@ def test_expected_paths( inputs_path = Path(fake.file_path(depth=3)).parent assert DynamicSidecarVolumesPathsResolver.mount_entry( - swarm_stack_name, inputs_path, node_uuid, run_id, project_id, user_id, None + swarm_stack_name, + inputs_path, + node_uuid, + service_run_id, + project_id, + user_id, + None, ) == expected_volume_config( - source=f"dyv_{run_id}_{node_uuid}_{f'{inputs_path}'.replace('/', '_')[::-1]}", + source=f"dyv_{service_run_id}_{node_uuid}_{f'{inputs_path}'.replace('/', '_')[::-1]}", target=str(Path("/dy-volumes") / inputs_path.relative_to("/")), ) outputs_path = Path(fake.file_path(depth=3)).parent assert DynamicSidecarVolumesPathsResolver.mount_entry( - swarm_stack_name, outputs_path, node_uuid, run_id, project_id, user_id, None + swarm_stack_name, + outputs_path, + node_uuid, + service_run_id, + project_id, + user_id, + None, ) == expected_volume_config( - 
source=f"dyv_{run_id}_{node_uuid}_{f'{outputs_path}'.replace('/', '_')[::-1]}", + source=f"dyv_{service_run_id}_{node_uuid}_{f'{outputs_path}'.replace('/', '_')[::-1]}", target=str(Path("/dy-volumes") / outputs_path.relative_to("/")), ) for path in state_paths: name_from_path = f"{path}".replace(os.sep, "_")[::-1] assert DynamicSidecarVolumesPathsResolver.mount_entry( - swarm_stack_name, path, node_uuid, run_id, project_id, user_id, None + swarm_stack_name, path, node_uuid, service_run_id, project_id, user_id, None ) == expected_volume_config( - source=f"dyv_{run_id}_{node_uuid}_{name_from_path}", + source=f"dyv_{service_run_id}_{node_uuid}_{name_from_path}", target=str(Path("/dy-volumes/") / path.relative_to("/")), ) @@ -130,7 +142,7 @@ async def test_unique_name_creation_and_removal(faker: Faker): unique_volume_name = DynamicSidecarVolumesPathsResolver.source( path=Path("/some/random/path/to/a/workspace/folder"), node_uuid=faker.uuid4(cast_to=None), - run_id=RunID.create(), + service_run_id=ServiceRunID.get_resource_tracking_run_id_for_dynamic(), ) await assert_creation_and_removal(unique_volume_name) @@ -138,20 +150,20 @@ async def test_unique_name_creation_and_removal(faker: Faker): def test_volumes_get_truncated_as_expected(faker: Faker): node_uuid = faker.uuid4(cast_to=None) - run_id = RunID.create() - assert node_uuid != run_id + service_run_id = ServiceRunID.get_resource_tracking_run_id_for_dynamic() + assert node_uuid != service_run_id unique_volume_name = DynamicSidecarVolumesPathsResolver.source( path=Path( f"/home/user/a-{'-'.join(['very' for _ in range(34)])}-long-home-path/workspace" ), node_uuid=node_uuid, - run_id=run_id, + service_run_id=service_run_id, ) # if below fails the agent will have issues please check constant_part = unique_volume_name[: CHARS_IN_VOLUME_NAME_BEFORE_DIR_NAME - 1] - assert constant_part == f"dyv_{run_id}_{node_uuid}" + assert constant_part == f"dyv_{service_run_id}_{node_uuid}" assert len(unique_volume_name) == 255 - assert f"{run_id}" in unique_volume_name + assert f"{service_run_id}" in unique_volume_name assert f"{node_uuid}" in unique_volume_name diff --git a/services/director-v2/tests/unit/test_modules_osparc_variables.py b/services/director-v2/tests/unit/test_modules_osparc_variables.py index 635904292b8..61427034a45 100644 --- a/services/director-v2/tests/unit/test_modules_osparc_variables.py +++ b/services/director-v2/tests/unit/test_modules_osparc_variables.py @@ -18,6 +18,7 @@ from fastapi import FastAPI from models_library.service_settings_labels import ComposeSpecLabelDict from models_library.services import ServiceKey, ServiceVersion +from models_library.services_types import ServiceRunID from models_library.users import UserID from models_library.utils.specs_substitution import SubstitutionValue from models_library.utils.string_substitution import OSPARC_IDENTIFIER_PREFIX @@ -48,7 +49,9 @@ def session_context(faker: Faker) -> ContextDict: return ContextDict( app=FastAPI(), - service_key=TypeAdapter(ServiceKey).validate_python("simcore/services/dynamic/foo"), + service_key=TypeAdapter(ServiceKey).validate_python( + "simcore/services/dynamic/foo" + ), service_version=TypeAdapter(ServiceVersion).validate_python("1.2.3"), compose_spec=generate_fake_docker_compose(faker), product_name=faker.word(), @@ -101,7 +104,8 @@ async def request_user_email(app: FastAPI, user_id: UserID) -> SubstitutionValue # All values extracted from the context MUST be SubstitutionValue assert { - key: TypeAdapter(SubstitutionValue).validate_python(value) for key, 
value in environs.items() + key: TypeAdapter(SubstitutionValue).validate_python(value) + for key, value in environs.items() } for osparc_variable_name, context_name in [ @@ -170,6 +174,7 @@ async def test_resolve_and_substitute_session_variables_in_specs( "user_role": "${OSPARC_VARIABLE_USER_ROLE}", "api_key": "${OSPARC_VARIABLE_API_KEY}", "api_secret": "${OSPARC_VARIABLE_API_SECRET}", + "service_run_id": "${OSPARC_VARIABLE_SERVICE_RUN_ID}", } print("SPECS\n", specs) @@ -180,6 +185,7 @@ async def test_resolve_and_substitute_session_variables_in_specs( product_name="a_product", project_id=faker.uuid4(cast_to=None), node_id=faker.uuid4(cast_to=None), + service_run_id=ServiceRunID("some_run_id"), ) print("REPLACED SPECS\n", replaced_specs) diff --git a/services/director-v2/tests/unit/test_utils_comp_scheduler.py b/services/director-v2/tests/unit/test_utils_comp_scheduler.py index 05c899a5e40..e589d4a933f 100644 --- a/services/director-v2/tests/unit/test_utils_comp_scheduler.py +++ b/services/director-v2/tests/unit/test_utils_comp_scheduler.py @@ -5,18 +5,13 @@ import pytest from models_library.docker import DockerGenericTag -from models_library.projects import ProjectID -from models_library.projects_nodes import NodeID from models_library.projects_state import RunningState -from models_library.users import UserID from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB from simcore_service_director_v2.modules.comp_scheduler._utils import ( COMPLETED_STATES, SCHEDULED_STATES, TASK_TO_START_STATES, - Iteration, create_service_resources_from_task, - get_resource_tracking_run_id, ) @@ -50,31 +45,6 @@ def test_scheduler_knows_all_the_states(): ) == set(RunningState) -@pytest.mark.parametrize( - "user_id, project_id, node_id, iteration, expected_result", - [ - ( - 2, - ProjectID("e08356e4-eb74-49e9-b769-2c26e34c61d9"), - NodeID("a08356e4-eb74-49e9-b769-2c26e34c61d1"), - 5, - "comp_2_e08356e4-eb74-49e9-b769-2c26e34c61d9_a08356e4-eb74-49e9-b769-2c26e34c61d1_5", - ) - ], -) -def test_get_resource_tracking_run_id( - user_id: UserID, - project_id: ProjectID, - node_id: NodeID, - iteration: Iteration, - expected_result: str, -): - assert ( - get_resource_tracking_run_id(user_id, project_id, node_id, iteration) - == expected_result - ) - - @pytest.mark.parametrize( "task", [ diff --git a/services/director-v2/tests/unit/test_utils_dags.py b/services/director-v2/tests/unit/test_utils_dags.py index 11975ac9e88..0fc17030a2d 100644 --- a/services/director-v2/tests/unit/test_utils_dags.py +++ b/services/director-v2/tests/unit/test_utils_dags.py @@ -485,9 +485,8 @@ def pipeline_test_params( state=RunningState.NOT_STARTED, internal_id=3, node_class=NodeClass.COMPUTATIONAL, - submit=datetime.datetime.now(tz=datetime.timezone.utc), - created=datetime.datetime.now(tz=datetime.timezone.utc), - modified=datetime.datetime.now(tz=datetime.timezone.utc), + created=datetime.datetime.now(tz=datetime.UTC), + modified=datetime.datetime.now(tz=datetime.UTC), last_heartbeat=None, progress=1.00, ) @@ -536,9 +535,8 @@ def pipeline_test_params( state=RunningState.NOT_STARTED, internal_id=3, node_class=NodeClass.COMPUTATIONAL, - submit=datetime.datetime.now(tz=datetime.timezone.utc), - created=datetime.datetime.now(tz=datetime.timezone.utc), - modified=datetime.datetime.now(tz=datetime.timezone.utc), + created=datetime.datetime.now(tz=datetime.UTC), + modified=datetime.datetime.now(tz=datetime.UTC), last_heartbeat=None, ), CompTaskAtDB.model_construct( @@ -550,9 +548,8 @@ def pipeline_test_params( 
state=RunningState.NOT_STARTED, internal_id=3, node_class=NodeClass.COMPUTATIONAL, - submit=datetime.datetime.now(tz=datetime.timezone.utc), - created=datetime.datetime.now(tz=datetime.timezone.utc), - modified=datetime.datetime.now(tz=datetime.timezone.utc), + created=datetime.datetime.now(tz=datetime.UTC), + modified=datetime.datetime.now(tz=datetime.UTC), last_heartbeat=None, ), CompTaskAtDB.model_construct( @@ -564,9 +561,8 @@ def pipeline_test_params( state=RunningState.NOT_STARTED, internal_id=3, node_class=NodeClass.COMPUTATIONAL, - submit=datetime.datetime.now(tz=datetime.timezone.utc), - created=datetime.datetime.now(tz=datetime.timezone.utc), - modified=datetime.datetime.now(tz=datetime.timezone.utc), + created=datetime.datetime.now(tz=datetime.UTC), + modified=datetime.datetime.now(tz=datetime.UTC), last_heartbeat=None, progress=1.00, ), diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations.py index 4c8ed5f4b78..2d076c3fdf0 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations.py @@ -258,7 +258,11 @@ def _mocked_services_details( ).isoformat() } - data = {**ServiceGet.model_config["json_schema_extra"]["examples"][0], **data_published, **deprecated} # type: ignore + data = { + **ServiceGet.model_config["json_schema_extra"]["examples"][0], + **data_published, + **deprecated, + } # type: ignore payload = ServiceGet.model_validate(data) @@ -354,7 +358,6 @@ def _mocked_get_pricing_unit(request, pricing_plan_id: int) -> httpx.Response: assert_all_called=False, assert_all_mocked=True, ) as respx_mock: - respx_mock.get( re.compile( r"services/(?Psimcore/services/(comp|dynamic|frontend)/[^/]+)/(?P[^\.]+.[^\.]+.[^/\?]+)/pricing-plan.+" @@ -915,13 +918,7 @@ async def test_get_computation_from_not_started_computation_task( stopped=None, submitted=None, ) - _CHANGED_FIELDS = {"submitted"} - assert returned_computation.model_dump( - exclude=_CHANGED_FIELDS - ) == expected_computation.model_dump(exclude=_CHANGED_FIELDS) - assert returned_computation.model_dump( - include=_CHANGED_FIELDS - ) != expected_computation.model_dump(include=_CHANGED_FIELDS) + assert returned_computation == expected_computation async def test_get_computation_from_published_computation_task( diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_scheduler_dask.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_scheduler_dask.py index d9559b6c75e..fb06b116c70 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_scheduler_dask.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_scheduler_dask.py @@ -255,6 +255,7 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[DaskClientTaskState] callback=mock.ANY, metadata=mock.ANY, hardware_info=mock.ANY, + resource_tracking_run_id=mock.ANY, ) for p in expected_pending_tasks ], @@ -654,6 +655,7 @@ async def _return_random_task_result(job_id) -> TaskOutputData: callback=mock.ANY, metadata=mock.ANY, hardware_info=mock.ANY, + resource_tracking_run_id=mock.ANY, ) mocked_dask_client.send_computation_tasks.reset_mock() mocked_dask_client.get_tasks_status.assert_has_calls( diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index 703686d2526..9b0d03e6eed 100644 --- 
a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -118,7 +118,6 @@ async def _( ), "node_class": to_node_class(node_data.key), "internal_id": internal_id + 1, - "submit": datetime.datetime.now(datetime.UTC), "job_id": generate_dask_job_id( service_key=node_data.key, service_version=node_data.version, diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index f7423b3944c..278b386eb86 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -14,11 +14,12 @@ import pytest from aiodocker.utils import clean_filters from faker import Faker -from models_library.docker import to_simcore_runtime_docker_label_key +from models_library.docker import DockerNodeID, to_simcore_runtime_docker_label_key from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.services_enums import ServiceState from models_library.users import UserID +from pydantic import TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_director_v2.constants import ( DYNAMIC_PROXY_SERVICE_PREFIX, @@ -763,16 +764,16 @@ async def test_regression_update_service_update_out_of_sequence( @pytest.fixture -async def target_node_id(async_docker_client: aiodocker.Docker) -> str: +async def target_node_id(async_docker_client: aiodocker.Docker) -> DockerNodeID: # get a node's ID docker_nodes = await async_docker_client.nodes.list() - return docker_nodes[0]["ID"] + return TypeAdapter(DockerNodeID).validate_python(docker_nodes[0]["ID"]) async def test_constrain_service_to_node( async_docker_client: aiodocker.Docker, mock_service: str, - target_node_id: str, + target_node_id: DockerNodeID, docker_swarm: None, ): await docker_api.constrain_service_to_node( diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py index 99596264831..6d62f1ca952 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py @@ -24,7 +24,7 @@ SimcoreServiceLabels, SimcoreServiceSettingsLabel, ) -from models_library.services import RunID, ServiceKeyVersion +from models_library.services import ServiceKeyVersion, ServiceRunID from models_library.wallets import WalletInfo from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -52,9 +52,7 @@ @pytest.fixture def mock_s3_settings() -> S3Settings: - return S3Settings.model_validate( - S3Settings.model_config["json_schema_extra"]["examples"][0] - ) + return S3Settings.model_validate(S3Settings.model_json_schema()["examples"][0]) @pytest.fixture @@ -127,14 +125,12 @@ def simcore_service_labels() -> SimcoreServiceLabels: @pytest.fixture def hardware_info() -> HardwareInfo: - return HardwareInfo.model_validate( - HardwareInfo.model_config["json_schema_extra"]["examples"][0] - ) + return HardwareInfo.model_validate(HardwareInfo.model_json_schema()["examples"][0]) @pytest.fixture def expected_dynamic_sidecar_spec( - run_id: RunID, + service_run_id: 
ServiceRunID, osparc_product_name: str, request_simcore_user_agent: str, hardware_info: HardwareInfo, @@ -157,7 +153,7 @@ def expected_dynamic_sidecar_spec( "container_http_entry": "rt-web", "hostname": "dy-sidecar_75c7f3f4-18f9-4678-8610-54a2ade78eaa", "port": 1222, - "run_id": run_id, + "run_id": service_run_id, "dynamic_sidecar": { "containers_inspect": [], "dynamic_sidecar_id": None, @@ -187,9 +183,9 @@ def expected_dynamic_sidecar_spec( "state_exclude": ["/tmp/strip_me/*"], # noqa: S108 "state_paths": ["/tmp/save_1", "/tmp_save_2"], # noqa: S108 }, - "callbacks_mapping": CallbacksMapping.model_config[ - "json_schema_extra" - ]["examples"][3], + "callbacks_mapping": CallbacksMapping.model_json_schema()[ + "examples" + ][3], "product_name": osparc_product_name, "project_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe", "proxy_service_name": "dy-proxy_75c7f3f4-18f9-4678-8610-54a2ade78eaa", @@ -197,12 +193,8 @@ def expected_dynamic_sidecar_spec( "request_scheme": "http", "request_simcore_user_agent": request_simcore_user_agent, "restart_policy": "on-inputs-downloaded", - "wallet_info": WalletInfo.model_config["json_schema_extra"][ - "examples" - ][0], - "pricing_info": PricingInfo.model_config["json_schema_extra"][ - "examples" - ][0], + "wallet_info": WalletInfo.model_json_schema()["examples"][0], + "pricing_info": PricingInfo.model_json_schema()["examples"][0], "hardware_info": hardware_info, "service_name": "dy-sidecar_75c7f3f4-18f9-4678-8610-54a2ade78eaa", "service_port": 65534, @@ -238,7 +230,7 @@ def expected_dynamic_sidecar_spec( "Env": { "DYNAMIC_SIDECAR_COMPOSE_NAMESPACE": "dy-sidecar_75c7f3f4-18f9-4678-8610-54a2ade78eaa", "DY_SIDECAR_NODE_ID": "75c7f3f4-18f9-4678-8610-54a2ade78eaa", - "DY_SIDECAR_RUN_ID": run_id, + "DY_SIDECAR_RUN_ID": service_run_id, "DY_SIDECAR_PATH_INPUTS": "/tmp/inputs", # noqa: S108 "DY_SIDECAR_PATH_OUTPUTS": "/tmp/outputs", # noqa: S108 "DY_SIDECAR_PROJECT_ID": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe", @@ -321,7 +313,7 @@ def expected_dynamic_sidecar_spec( "Type": "bind", }, { - "Source": f"dyv_{run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_erots-derahs_", + "Source": f"dyv_{service_run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_erots-derahs_", "Target": "/dy-volumes/shared-store", "Type": "volume", "VolumeOptions": { @@ -329,8 +321,8 @@ def expected_dynamic_sidecar_spec( "Labels": { "node_uuid": "75c7f3f4-18f9-4678-8610-54a2ade78eaa", "study_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe", - "run_id": run_id, - "source": f"dyv_{run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_erots-derahs_", + "run_id": service_run_id, + "source": f"dyv_{service_run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_erots-derahs_", "swarm_stack_name": "test_swarm_name", "user_id": "234", }, @@ -338,14 +330,14 @@ def expected_dynamic_sidecar_spec( }, { "Target": "/dy-volumes/tmp/inputs", - "Source": f"dyv_{run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_stupni_pmt_", + "Source": f"dyv_{service_run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_stupni_pmt_", "Type": "volume", "VolumeOptions": { "Labels": { "node_uuid": "75c7f3f4-18f9-4678-8610-54a2ade78eaa", "study_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe", - "run_id": run_id, - "source": f"dyv_{run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_stupni_pmt_", + "run_id": service_run_id, + "source": f"dyv_{service_run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_stupni_pmt_", "swarm_stack_name": "test_swarm_name", "user_id": "234", }, @@ -353,14 +345,14 @@ def expected_dynamic_sidecar_spec( }, { "Target": "/dy-volumes/tmp/outputs", - "Source": 
f"dyv_{run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_stuptuo_pmt_", + "Source": f"dyv_{service_run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_stuptuo_pmt_", "Type": "volume", "VolumeOptions": { "Labels": { "node_uuid": "75c7f3f4-18f9-4678-8610-54a2ade78eaa", "study_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe", - "run_id": run_id, - "source": f"dyv_{run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_stuptuo_pmt_", + "run_id": service_run_id, + "source": f"dyv_{service_run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_stuptuo_pmt_", "swarm_stack_name": "test_swarm_name", "user_id": "234", }, @@ -368,14 +360,14 @@ def expected_dynamic_sidecar_spec( }, { "Target": "/dy-volumes/tmp/save_1", - "Source": f"dyv_{run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_1_evas_pmt_", + "Source": f"dyv_{service_run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_1_evas_pmt_", "Type": "volume", "VolumeOptions": { "Labels": { "node_uuid": "75c7f3f4-18f9-4678-8610-54a2ade78eaa", "study_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe", - "run_id": run_id, - "source": f"dyv_{run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_1_evas_pmt_", + "run_id": service_run_id, + "source": f"dyv_{service_run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_1_evas_pmt_", "swarm_stack_name": "test_swarm_name", "user_id": "234", }, @@ -383,14 +375,14 @@ def expected_dynamic_sidecar_spec( }, { "Target": "/dy-volumes/tmp_save_2", - "Source": f"dyv_{run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_2_evas_pmt_", + "Source": f"dyv_{service_run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_2_evas_pmt_", "Type": "volume", "VolumeOptions": { "Labels": { "node_uuid": "75c7f3f4-18f9-4678-8610-54a2ade78eaa", "study_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe", - "run_id": run_id, - "source": f"dyv_{run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_2_evas_pmt_", + "run_id": service_run_id, + "source": f"dyv_{service_run_id}_75c7f3f4-18f9-4678-8610-54a2ade78eaa_2_evas_pmt_", "swarm_stack_name": "test_swarm_name", "user_id": "234", }, diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py index d02836de9e2..7fcaeca385d 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py +++ b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py @@ -34,6 +34,7 @@ from models_library.docker import to_simcore_runtime_docker_label_key from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimCoreFileLink, SimcoreS3FileID +from models_library.services import ServiceRunID from models_library.users import UserID from pydantic import ByteSize, TypeAdapter from pydantic.networks import AnyUrl @@ -647,6 +648,7 @@ async def test_compute_task_envs( run_metadata: RunMetadataDict, input_task_envs: ContainerEnvsDict, expected_computed_task_envs: ContainerEnvsDict, + resource_tracking_run_id: ServiceRunID, ): sleeper_task: CompTaskAtDB = published_project.tasks[1] sleeper_task.image.envs = input_task_envs @@ -658,5 +660,6 @@ async def test_compute_task_envs( node_id=sleeper_task.node_id, node_image=sleeper_task.image, metadata=run_metadata, + resource_tracking_run_id=resource_tracking_run_id, ) assert task_envs == expected_computed_task_envs diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 907e7a8e04e..81d1accf23d 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -1078,7 +1078,7 @@ async def 
_get_node_details( async def get_services_details( - app: FastAPI, user_id: str | None, study_id: str | None + app: FastAPI, user_id: str | None, project_id: str | None ) -> list[dict]: app_settings = get_application_settings(app) async with docker_utils.docker_client() as client: # pylint: disable=not-async-context-manager @@ -1091,9 +1091,10 @@ async def get_services_details( filters.append( f"{_to_simcore_runtime_docker_label_key('user_id')}=" + user_id ) - if study_id: + if project_id: filters.append( - f"{_to_simcore_runtime_docker_label_key('project_id')}=" + study_id + f"{_to_simcore_runtime_docker_label_key('project_id')}=" + + project_id ) list_running_services = await client.services.list( filters={"label": filters} @@ -1104,7 +1105,7 @@ async def get_services_details( for service in list_running_services ] except aiodocker.DockerError as err: - msg = f"Error while accessing container for {user_id=}, {study_id=}" + msg = f"Error while accessing container for {user_id=}, {project_id=}" raise GenericDockerError(err=msg) from err diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 265d29e56ed..766c117244d 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -568,6 +568,7 @@ services: DYNAMIC_SCHEDULER_PROFILING: ${DYNAMIC_SCHEDULER_PROFILING} DYNAMIC_SCHEDULER_TRACING: ${DYNAMIC_SCHEDULER_TRACING} DYNAMIC_SCHEDULER_UI_STORAGE_SECRET: ${DYNAMIC_SCHEDULER_UI_STORAGE_SECRET} + DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT: ${DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} static-webserver: diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py index 8cd90ddb8f0..b90ed821bfa 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py @@ -1,5 +1,9 @@ from fastapi import FastAPI -from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_directorv2.dynamic_services import ( + DynamicServiceGet, + GetProjectInactivityResponse, + RetrieveDataOutEnveloped, +) from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( DynamicServiceStart, DynamicServiceStop, @@ -7,7 +11,9 @@ from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID +from models_library.services_types import ServicePortKey from models_library.users import UserID +from pydantic import NonNegativeInt from servicelib.rabbitmq import RPCRouter from servicelib.rabbitmq.rpc_interfaces.dynamic_scheduler.errors import ( ServiceWaitingForManualInterventionError, @@ -56,3 +62,31 @@ async def stop_dynamic_service( return await scheduler_interface.stop_dynamic_service( app, dynamic_service_stop=dynamic_service_stop ) + + +@router.expose() +async def get_project_inactivity( + app: FastAPI, *, project_id: ProjectID, max_inactivity_seconds: NonNegativeInt +) -> GetProjectInactivityResponse: + return await scheduler_interface.get_project_inactivity( + app, project_id=project_id, max_inactivity_seconds=max_inactivity_seconds + ) + + +@router.expose() +async def 
restart_user_services(app: FastAPI, *, node_id: NodeID) -> None: + await scheduler_interface.restart_user_services(app, node_id=node_id) + + +@router.expose() +async def retrieve_inputs( + app: FastAPI, *, node_id: NodeID, port_keys: list[ServicePortKey] +) -> RetrieveDataOutEnveloped: + return await scheduler_interface.retrieve_inputs( + app, node_id=node_id, port_keys=port_keys + ) + + +@router.expose() +async def update_projects_networks(app: FastAPI, *, project_id: ProjectID) -> None: + await scheduler_interface.update_projects_networks(app, project_id=project_id) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py index 36be9f4b587..e44ec885ed3 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py @@ -62,12 +62,26 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT: datetime.timedelta = Field( default=datetime.timedelta(minutes=60), + validation_alias=AliasChoices( + "DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT", + "DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT", + ), description=( "Time to wait before timing out when stopping a dynamic service. " "Since services require data to be stopped, this operation is timed out after 1 hour" ), ) + DYNAMIC_SCHEDULER_SERVICE_UPLOAD_DOWNLOAD_TIMEOUT: datetime.timedelta = Field( + default=datetime.timedelta(minutes=60), + description=( + "When dynamic services upload and download data from storage, " + "very large payloads can be involved. Handling such payloads " + "requires long timeouts so that the service can finish the " + "operation."
+ ), + ) + DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER: bool = Field( default=False, description=( diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py index 4b49618d6df..cc1e8851684 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py @@ -2,15 +2,20 @@ from typing import Any from fastapi import FastAPI, status -from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_directorv2.dynamic_services import ( + DynamicServiceGet, + GetProjectInactivityResponse, + RetrieveDataOutEnveloped, +) from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( DynamicServiceStart, ) from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID +from models_library.services_types import ServicePortKey from models_library.users import UserID -from pydantic import TypeAdapter +from pydantic import NonNegativeInt, TypeAdapter from servicelib.fastapi.app_state import SingletonInAppStateMixin from servicelib.fastapi.http_client import AttachLifespanMixin, HasClientSetupInterface from servicelib.fastapi.http_client_thin import UnexpectedStatusError @@ -75,7 +80,7 @@ async def stop_dynamic_service( node_id: NodeID, simcore_user_agent: str, save_state: bool, - timeout: datetime.timedelta, # noqa: ASYNC109 + timeout: datetime.timedelta # noqa: ASYNC109 ) -> None: try: await self.thin_client.delete_dynamic_service( @@ -100,6 +105,19 @@ async def stop_dynamic_service( raise + async def retrieve_inputs( + self, + *, + node_id: NodeID, + port_keys: list[ServicePortKey], + timeout: datetime.timedelta # noqa: ASYNC109 + ) -> RetrieveDataOutEnveloped: + response = await self.thin_client.dynamic_service_retrieve( + node_id=node_id, port_keys=port_keys, timeout=timeout + ) + dict_response: dict[str, Any] = response.json() + return TypeAdapter(RetrieveDataOutEnveloped).validate_python(dict_response) + async def list_tracked_dynamic_services( self, *, user_id: UserID | None = None, project_id: ProjectID | None = None ) -> list[DynamicServiceGet]: @@ -108,6 +126,22 @@ async def list_tracked_dynamic_services( ) return TypeAdapter(list[DynamicServiceGet]).validate_python(response.json()) + async def get_project_inactivity( + self, *, project_id: ProjectID, max_inactivity_seconds: NonNegativeInt + ) -> GetProjectInactivityResponse: + response = await self.thin_client.get_projects_inactivity( + project_id=project_id, max_inactivity_seconds=max_inactivity_seconds + ) + return TypeAdapter(GetProjectInactivityResponse).validate_python( + response.json() + ) + + async def restart_user_services(self, *, node_id: NodeID) -> None: + await self.thin_client.post_restart(node_id=node_id) + + async def update_projects_networks(self, *, project_id: ProjectID) -> None: + await self.thin_client.patch_projects_networks(project_id=project_id) + def setup_director_v2(app: FastAPI) -> None: public_client = DirectorV2Client(app) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py 
b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py index bfb64e8839a..aef8823ce7d 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py @@ -1,8 +1,8 @@ import datetime from typing import cast +from common_library.exclude import as_dict_exclude_none from common_library.json_serialization import json_dumps -from common_library.unset import UnSet, as_dict_exclude_unset from fastapi import FastAPI, status from httpx import Response, Timeout from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( @@ -11,7 +11,9 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.services_resources import ServiceResourcesDictHelpers +from models_library.services_types import ServicePortKey from models_library.users import UserID +from pydantic import NonNegativeInt from servicelib.common_headers import ( X_DYNAMIC_SIDECAR_REQUEST_DNS, X_DYNAMIC_SIDECAR_REQUEST_SCHEME, @@ -91,7 +93,7 @@ async def delete_dynamic_service( node_id: NodeID, simcore_user_agent: str, save_state: bool, - timeout: datetime.timedelta, + timeout: datetime.timedelta, # noqa: ASYNC109 ) -> Response: @retry_on_errors(total_retry_timeout_overwrite=timeout.total_seconds()) @expect_status(status.HTTP_204_NO_CONTENT) @@ -114,13 +116,47 @@ async def _( @retry_on_errors() @expect_status(status.HTTP_200_OK) - async def get_dynamic_services( + async def dynamic_service_retrieve( self, *, - user_id: UserID | None | UnSet = UnSet.VALUE, - project_id: ProjectID | None | UnSet = UnSet.VALUE, + node_id: NodeID, + port_keys: list[ServicePortKey], + timeout: datetime.timedelta, # noqa: ASYNC109 + ) -> Response: + post_data = {"port_keys": port_keys} + return await self.client.post( + f"/dynamic_services/{node_id}:retrieve", + content=json_dumps(post_data), + timeout=timeout.total_seconds(), + ) + + @retry_on_errors() + @expect_status(status.HTTP_200_OK) + async def get_dynamic_services( + self, *, user_id: UserID | None = None, project_id: ProjectID | None = None ) -> Response: return await self.client.get( "/dynamic_services", - params=as_dict_exclude_unset(user_id=user_id, project_id=project_id), + params=as_dict_exclude_none(user_id=user_id, project_id=project_id), + ) + + @retry_on_errors() + @expect_status(status.HTTP_200_OK) + async def get_projects_inactivity( + self, *, project_id: ProjectID, max_inactivity_seconds: NonNegativeInt + ) -> Response: + return await self.client.get( + f"/dynamic_services/projects/{project_id}/inactivity", + params={"max_inactivity_seconds": max_inactivity_seconds}, + ) + + @expect_status(status.HTTP_204_NO_CONTENT) + async def post_restart(self, *, node_id: NodeID) -> Response: + return await self.client.post(f"/dynamic_services/{node_id}:restart") + + @retry_on_errors() + @expect_status(status.HTTP_204_NO_CONTENT) + async def patch_projects_networks(self, *, project_id: ProjectID) -> Response: + return await self.client.patch( + f"/dynamic_services/projects/{project_id}/-/networks" ) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/scheduler_interface.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/scheduler_interface.py index 6f655b544e2..ff279fb75c9 100644 --- 
a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/scheduler_interface.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/scheduler_interface.py @@ -1,5 +1,9 @@ from fastapi import FastAPI -from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_directorv2.dynamic_services import ( + DynamicServiceGet, + GetProjectInactivityResponse, + RetrieveDataOutEnveloped, +) from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( DynamicServiceStart, DynamicServiceStop, @@ -7,7 +11,9 @@ from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID +from models_library.services_types import ServicePortKey from models_library.users import UserID +from pydantic import NonNegativeInt from ..core.settings import ApplicationSettings from .director_v2 import DirectorV2Client @@ -73,3 +79,52 @@ async def stop_dynamic_service( ) await set_request_as_stopped(app, dynamic_service_stop) + + +async def get_project_inactivity( + app: FastAPI, *, project_id: ProjectID, max_inactivity_seconds: NonNegativeInt +) -> GetProjectInactivityResponse: + settings: ApplicationSettings = app.state.settings + if settings.DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER: + raise NotImplementedError + + director_v2_client = DirectorV2Client.get_from_app_state(app) + response: GetProjectInactivityResponse = ( + await director_v2_client.get_project_inactivity( + project_id=project_id, max_inactivity_seconds=max_inactivity_seconds + ) + ) + return response + + +async def restart_user_services(app: FastAPI, *, node_id: NodeID) -> None: + settings: ApplicationSettings = app.state.settings + if settings.DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER: + raise NotImplementedError + + director_v2_client = DirectorV2Client.get_from_app_state(app) + await director_v2_client.restart_user_services(node_id=node_id) + + +async def retrieve_inputs( + app: FastAPI, *, node_id: NodeID, port_keys: list[ServicePortKey] +) -> RetrieveDataOutEnveloped: + settings: ApplicationSettings = app.state.settings + if settings.DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER: + raise NotImplementedError + + director_v2_client = DirectorV2Client.get_from_app_state(app) + return await director_v2_client.retrieve_inputs( + node_id=node_id, + port_keys=port_keys, + timeout=settings.DYNAMIC_SCHEDULER_SERVICE_UPLOAD_DOWNLOAD_TIMEOUT, + ) + + +async def update_projects_networks(app: FastAPI, *, project_id: ProjectID) -> None: + settings: ApplicationSettings = app.state.settings + if settings.DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER: + raise NotImplementedError + + director_v2_client = DirectorV2Client.get_from_app_state(app) + await director_v2_client.update_projects_networks(project_id=project_id) diff --git a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py index 7c1665065ae..f3380bbb2f5 100644 --- a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py +++ b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py @@ -9,7 +9,11 @@ from faker import Faker from fastapi import FastAPI, status from fastapi.encoders import jsonable_encoder -from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_directorv2.dynamic_services import ( + 
DynamicServiceGet, + GetProjectInactivityResponse, + RetrieveDataOutEnveloped, +) from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( DynamicServiceStart, DynamicServiceStop, @@ -55,14 +59,14 @@ def node_not_found(faker: Faker) -> NodeID: @pytest.fixture def service_status_new_style() -> DynamicServiceGet: return TypeAdapter(DynamicServiceGet).validate_python( - DynamicServiceGet.model_config["json_schema_extra"]["examples"][1] + DynamicServiceGet.model_json_schema()["examples"][1] ) @pytest.fixture def service_status_legacy() -> NodeGet: return TypeAdapter(NodeGet).validate_python( - NodeGet.model_config["json_schema_extra"]["examples"][1] + NodeGet.model_json_schema()["examples"][1] ) @@ -112,9 +116,7 @@ def mock_director_v2_service_state( mock.get("/dynamic_services").respond( status.HTTP_200_OK, text=json.dumps( - jsonable_encoder( - DynamicServiceGet.model_config["json_schema_extra"]["examples"] - ) + jsonable_encoder(DynamicServiceGet.model_json_schema()["examples"]) ), ) @@ -193,7 +195,7 @@ async def test_list_tracked_dynamic_services(rpc_client: RabbitMQRPCClient): assert len(results) == 2 assert results == [ TypeAdapter(DynamicServiceGet).validate_python(x) - for x in DynamicServiceGet.model_config["json_schema_extra"]["examples"] + for x in DynamicServiceGet.model_json_schema()["examples"] ] @@ -223,7 +225,7 @@ async def test_get_state( def dynamic_service_start() -> DynamicServiceStart: # one for legacy and one for new style? return TypeAdapter(DynamicServiceStart).validate_python( - DynamicServiceStart.model_config["json_schema_extra"]["example"] + DynamicServiceStart.model_json_schema()["example"] ) @@ -490,3 +492,112 @@ async def test_stop_dynamic_service_serializes_generic_errors( ), timeout_s=5, ) + + +@pytest.fixture +def inactivity_response() -> GetProjectInactivityResponse: + return TypeAdapter(GetProjectInactivityResponse).validate_python( + GetProjectInactivityResponse.model_json_schema()["example"] + ) + + +@pytest.fixture +def mock_director_v2_get_project_inactivity( + project_id: ProjectID, inactivity_response: GetProjectInactivityResponse +) -> Iterator[None]: + with respx.mock( + base_url="http://director-v2:8000/v2", + assert_all_called=False, + assert_all_mocked=True, # IMPORTANT: KEEP always True! + ) as mock: + mock.get(f"/dynamic_services/projects/{project_id}/inactivity").respond( + status.HTTP_200_OK, text=inactivity_response.model_dump_json() + ) + yield None + + +async def test_get_project_inactivity( + mock_director_v2_get_project_inactivity: None, + rpc_client: RabbitMQRPCClient, + project_id: ProjectID, + inactivity_response: GetProjectInactivityResponse, +): + result = await services.get_project_inactivity( + rpc_client, project_id=project_id, max_inactivity_seconds=5 + ) + assert result == inactivity_response + + +@pytest.fixture +def mock_director_v2_restart_user_services(node_id: NodeID) -> Iterator[None]: + with respx.mock( + base_url="http://director-v2:8000/v2", + assert_all_called=False, + assert_all_mocked=True, # IMPORTANT: KEEP always True! 
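+        # (assert_all_mocked=True makes respx raise on any request that no route
+        # matches, so these tests can never silently reach a real director-v2)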
+ ) as mock: + mock.post(f"/dynamic_services/{node_id}:restart").respond( + status.HTTP_204_NO_CONTENT + ) + yield None + + +async def test_restart_user_services( + mock_director_v2_restart_user_services: None, + rpc_client: RabbitMQRPCClient, + node_id: NodeID, +): + await services.restart_user_services(rpc_client, node_id=node_id, timeout_s=5) + + +@pytest.fixture +def mock_director_v2_service_retrieve_inputs(node_id: NodeID) -> Iterator[None]: + with respx.mock( + base_url="http://director-v2:8000/v2", + assert_all_called=False, + assert_all_mocked=True, # IMPORTANT: KEEP always True! + ) as mock: + mock.post(f"/dynamic_services/{node_id}:retrieve").respond( + status.HTTP_200_OK, + text=TypeAdapter(RetrieveDataOutEnveloped) + .validate_python( + RetrieveDataOutEnveloped.model_json_schema()["examples"][0] + ) + .model_dump_json(), + ) + + yield None + + +async def test_retrieve_inputs( + mock_director_v2_service_retrieve_inputs: None, + rpc_client: RabbitMQRPCClient, + node_id: NodeID, +): + results = await services.retrieve_inputs( + rpc_client, node_id=node_id, port_keys=[], timeout_s=10 + ) + assert ( + results.model_dump(mode="python") + == RetrieveDataOutEnveloped.model_json_schema()["examples"][0] + ) + + +@pytest.fixture +def mock_director_v2_update_projects_networks(project_id: ProjectID) -> Iterator[None]: + with respx.mock( + base_url="http://director-v2:8000/v2", + assert_all_called=False, + assert_all_mocked=True, # IMPORTANT: KEEP always True! + ) as mock: + mock.patch(f"/dynamic_services/projects/{project_id}/-/networks").respond( + status.HTTP_204_NO_CONTENT + ) + yield None + + +async def test_update_projects_networks( + mock_director_v2_update_projects_networks: None, + rpc_client: RabbitMQRPCClient, + project_id: ProjectID, +): + await services.update_projects_networks(rpc_client, project_id=project_id) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py index de02572e238..eee04ad6be3 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py @@ -10,7 +10,7 @@ from models_library.docker import DockerGenericTag from models_library.generated_models.docker_rest_api import ContainerState from models_library.generated_models.docker_rest_api import Status2 as ContainerStatus -from models_library.services import RunID +from models_library.services import ServiceRunID from pydantic import PositiveInt from servicelib.utils import logged_gather from starlette import status as http_status @@ -40,9 +40,11 @@ async def docker_client() -> AsyncGenerator[aiodocker.Docker, None]: await docker.close() -async def get_volume_by_label(label: str, run_id: RunID) -> dict[str, Any]: +async def get_volume_by_label( + label: str, service_run_id: ServiceRunID +) -> dict[str, Any]: async with docker_client() as docker: - filters = {"label": [f"source={label}", f"run_id={run_id}"]} + filters = {"label": [f"source={label}", f"run_id={service_run_id}"]} params = {"filters": clean_filters(filters)} data = await docker._query_json( # pylint: disable=protected-access # noqa: SLF001 "volumes", method="GET", params=params @@ -53,7 +55,7 @@ async def get_volume_by_label(label: str, run_id: RunID) -> dict[str, Any]: raise VolumeNotFoundError( volume_count=len(volumes), source_label=label, - run_id=run_id, + service_run_id=service_run_id, volume_names=" 
".join(v.get("Name", "UNKNOWN") for v in volumes), status_code=http_status.HTTP_404_NOT_FOUND, ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py index b9a449ecb36..fc67b7072f8 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py @@ -8,7 +8,7 @@ class BaseDynamicSidecarError(OsparcErrorMixin, Exception): class VolumeNotFoundError(BaseDynamicSidecarError): msg_template = ( "Expected 1 got {volume_count} volumes labels with " - "source_label={source_label}, run_id={run_id}: Found {volume_names}" + "source_label={source_label}, service_run_id={service_run_id}: Found {volume_names}" ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py index 795015e1520..1f42b5b848e 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py @@ -10,7 +10,7 @@ from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID -from models_library.services import DynamicServiceKey, RunID, ServiceVersion +from models_library.services import DynamicServiceKey, ServiceRunID, ServiceVersion from models_library.users import UserID from pydantic import ( AliasChoices, @@ -159,7 +159,7 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): DY_SIDECAR_USER_ID: UserID DY_SIDECAR_PROJECT_ID: ProjectID DY_SIDECAR_NODE_ID: NodeID - DY_SIDECAR_RUN_ID: RunID + DY_SIDECAR_RUN_ID: ServiceRunID DY_SIDECAR_USER_SERVICES_HAVE_INTERNET_ACCESS: bool DY_SIDECAR_SERVICE_KEY: DynamicServiceKey | None = None diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/mounted_fs.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/mounted_fs.py index b07bfd87bc5..78ddbf41199 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/mounted_fs.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/mounted_fs.py @@ -5,7 +5,7 @@ from fastapi import FastAPI from models_library.projects_nodes_io import NodeID -from models_library.services import RunID +from models_library.services import ServiceRunID from servicelib.docker_constants import PREFIX_DYNAMIC_SIDECAR_VOLUMES from ..core.docker_utils import get_volume_by_label @@ -36,7 +36,7 @@ class MountedVolumes: def __init__( self, - run_id: RunID, + service_run_id: ServiceRunID, node_id: NodeID, inputs_path: Path, outputs_path: Path, @@ -46,7 +46,7 @@ def __init__( compose_namespace: str, dy_volumes: Path, ) -> None: - self.run_id: RunID = run_id + self.service_run_id: ServiceRunID = service_run_id self.node_id: NodeID = node_id self.inputs_path: Path = inputs_path self.outputs_path: Path = outputs_path @@ -62,14 +62,14 @@ def __init__( def volume_name_inputs(self) -> str: """Same name as the namespace, to easily track components""" return ( - f"{PREFIX_DYNAMIC_SIDECAR_VOLUMES}_{self.run_id}_{self.node_id}" + f"{PREFIX_DYNAMIC_SIDECAR_VOLUMES}_{self.service_run_id}_{self.node_id}" f"_{_name_from_full_path(self.inputs_path)[::-1]}" ) @cached_property def volume_name_outputs(self) -> str: return ( - 
f"{PREFIX_DYNAMIC_SIDECAR_VOLUMES}_{self.run_id}_{self.node_id}" + f"{PREFIX_DYNAMIC_SIDECAR_VOLUMES}_{self.service_run_id}_{self.node_id}" f"_{_name_from_full_path(self.outputs_path)[::-1]}" ) @@ -78,14 +78,14 @@ def volume_user_preferences(self) -> str | None: if self.user_preferences_path is None: return None return ( - f"{PREFIX_DYNAMIC_SIDECAR_VOLUMES}_{self.run_id}_{self.node_id}" + f"{PREFIX_DYNAMIC_SIDECAR_VOLUMES}_{self.service_run_id}_{self.node_id}" f"_{_name_from_full_path(self.user_preferences_path)[::-1]}" ) def volume_name_state_paths(self) -> Generator[str, None, None]: for state_path in self.state_paths: yield ( - f"{PREFIX_DYNAMIC_SIDECAR_VOLUMES}_{self.run_id}_{self.node_id}" + f"{PREFIX_DYNAMIC_SIDECAR_VOLUMES}_{self.service_run_id}_{self.node_id}" f"_{_name_from_full_path(state_path)[::-1]}" ) @@ -116,39 +116,45 @@ def _ensure_directories(self) -> None: _ensure_path(path) @staticmethod - async def _get_bind_path_from_label(label: str, run_id: RunID) -> Path: - volume_details = await get_volume_by_label(label=label, run_id=run_id) + async def _get_bind_path_from_label( + label: str, service_run_id: ServiceRunID + ) -> Path: + volume_details = await get_volume_by_label( + label=label, service_run_id=service_run_id + ) return Path(volume_details["Mountpoint"]) - async def get_inputs_docker_volume(self, run_id: RunID) -> str: + async def get_inputs_docker_volume(self, service_run_id: ServiceRunID) -> str: bind_path: Path = await self._get_bind_path_from_label( - self.volume_name_inputs, run_id + self.volume_name_inputs, service_run_id ) return f"{bind_path}:{self.inputs_path}" - async def get_outputs_docker_volume(self, run_id: RunID) -> str: + async def get_outputs_docker_volume(self, service_run_id: ServiceRunID) -> str: bind_path: Path = await self._get_bind_path_from_label( - self.volume_name_outputs, run_id + self.volume_name_outputs, service_run_id ) return f"{bind_path}:{self.outputs_path}" - async def get_user_preferences_path_volume(self, run_id: RunID) -> str | None: + async def get_user_preferences_path_volume( + self, service_run_id: ServiceRunID + ) -> str | None: if self.volume_user_preferences is None: return None bind_path: Path = await self._get_bind_path_from_label( - self.volume_user_preferences, run_id + self.volume_user_preferences, service_run_id ) return f"{bind_path}:{self.user_preferences_path}" async def iter_state_paths_to_docker_volumes( - self, run_id: RunID + self, service_run_id: ServiceRunID ) -> AsyncGenerator[str, None]: for volume_state_path, state_path in zip( self.volume_name_state_paths(), self.state_paths, strict=True ): bind_path: Path = await self._get_bind_path_from_label( - volume_state_path, run_id + volume_state_path, service_run_id ) yield f"{bind_path}:{state_path}" @@ -157,7 +163,7 @@ def setup_mounted_fs(app: FastAPI) -> MountedVolumes: settings: ApplicationSettings = app.state.settings app.state.mounted_volumes = MountedVolumes( - run_id=settings.DY_SIDECAR_RUN_ID, + service_run_id=settings.DY_SIDECAR_RUN_ID, node_id=settings.DY_SIDECAR_NODE_ID, inputs_path=settings.DY_SIDECAR_PATH_INPUTS, outputs_path=settings.DY_SIDECAR_PATH_OUTPUTS, diff --git a/services/dynamic-sidecar/tests/conftest.py b/services/dynamic-sidecar/tests/conftest.py index 62b93daa25c..5ad10622acd 100644 --- a/services/dynamic-sidecar/tests/conftest.py +++ b/services/dynamic-sidecar/tests/conftest.py @@ -18,7 +18,7 @@ from faker import Faker from models_library.projects import ProjectID from models_library.projects_nodes import NodeID -from 
models_library.services import RunID +from models_library.services import ServiceRunID from models_library.services_creation import CreateServiceMetricsAdditionalParams from models_library.users import UserID from pydantic import TypeAdapter @@ -124,8 +124,8 @@ def node_id(faker: Faker) -> NodeID: @pytest.fixture -def run_id() -> RunID: - return RunID.create() +def service_run_id() -> ServiceRunID: + return ServiceRunID.get_resource_tracking_run_id_for_dynamic() @pytest.fixture @@ -173,7 +173,7 @@ def base_mock_envs( state_paths_dirs: list[Path], state_exclude_dirs: list[Path], node_id: NodeID, - run_id: RunID, + service_run_id: ServiceRunID, ensure_shared_store_dir: None, ) -> EnvVarsDict: return { @@ -184,7 +184,7 @@ def base_mock_envs( "DYNAMIC_SIDECAR_SHARED_STORE_DIR": f"{shared_store_dir}", # envs on container "DYNAMIC_SIDECAR_COMPOSE_NAMESPACE": compose_namespace, - "DY_SIDECAR_RUN_ID": f"{run_id}", + "DY_SIDECAR_RUN_ID": service_run_id, "DY_SIDECAR_NODE_ID": f"{node_id}", "DY_SIDECAR_PATH_INPUTS": f"{inputs_dir}", "DY_SIDECAR_PATH_OUTPUTS": f"{outputs_dir}", @@ -216,7 +216,7 @@ def mock_environment( state_paths_dirs: list[Path], state_exclude_dirs: list[Path], node_id: NodeID, - run_id: RunID, + service_run_id: ServiceRunID, inputs_dir: Path, compose_namespace: str, outputs_dir: Path, @@ -242,7 +242,7 @@ def mock_environment( "DY_SIDECAR_PATH_INPUTS": f"{inputs_dir}", "DY_SIDECAR_PATH_OUTPUTS": f"{outputs_dir}", "DY_SIDECAR_PROJECT_ID": f"{project_id}", - "DY_SIDECAR_RUN_ID": run_id, + "DY_SIDECAR_RUN_ID": service_run_id, "DY_SIDECAR_STATE_EXCLUDE": json_dumps(state_exclude_dirs), "DY_SIDECAR_STATE_PATHS": json_dumps(state_paths_dirs), "DY_SIDECAR_USER_ID": f"{user_id}", diff --git a/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py b/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py index a7d2254425c..bf647faa5e4 100644 --- a/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py +++ b/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py @@ -10,7 +10,7 @@ from aiodocker.containers import DockerContainer from faker import Faker from models_library.generated_models.docker_rest_api import ContainerState -from models_library.services import RunID +from models_library.services import ServiceRunID from pydantic import PositiveInt from simcore_service_dynamic_sidecar.core.docker_utils import ( _get_containers_inspect_from_names, @@ -28,17 +28,19 @@ def volume_name() -> str: @pytest.fixture -def run_id() -> RunID: - return RunID.create() +def service_run_id() -> ServiceRunID: + return ServiceRunID.get_resource_tracking_run_id_for_dynamic() @pytest.fixture -async def volume_with_label(volume_name: str, run_id: RunID) -> AsyncIterable[None]: +async def volume_with_label( + volume_name: str, service_run_id: ServiceRunID +) -> AsyncIterable[None]: async with aiodocker.Docker() as docker_client: volume = await docker_client.volumes.create( { "Name": "test_volume_name_1", - "Labels": {"source": volume_name, "run_id": run_id}, + "Labels": {"source": volume_name, "run_id": service_run_id}, } ) @@ -77,17 +79,17 @@ async def started_services(container_names: list[str]) -> AsyncIterator[None]: async def test_volume_with_label( - volume_with_label: None, volume_name: str, run_id: RunID + volume_with_label: None, volume_name: str, service_run_id: ServiceRunID ) -> None: - assert await get_volume_by_label(volume_name, run_id) + assert await get_volume_by_label(volume_name, service_run_id) -async def test_volume_label_missing(run_id: RunID) -> None: +async def 
test_volume_label_missing(service_run_id: ServiceRunID) -> None: with pytest.raises(VolumeNotFoundError) as exc_info: - await get_volume_by_label("not_exist", run_id) + await get_volume_by_label("not_exist", service_run_id) error_msg = f"{exc_info.value}" - assert run_id in error_msg + assert service_run_id in error_msg assert "not_exist" in error_msg diff --git a/services/dynamic-sidecar/tests/unit/test_core_errors.py b/services/dynamic-sidecar/tests/unit/test_core_errors.py index 7b112878c9c..6b27efa3fcd 100644 --- a/services/dynamic-sidecar/tests/unit/test_core_errors.py +++ b/services/dynamic-sidecar/tests/unit/test_core_errors.py @@ -29,7 +29,7 @@ def test_legacy_interface_volume_not_found_error(): raise VolumeNotFoundError( # noqa: TRY301 volume_count=len(volumes), source_label="some", - run_id="run_id", + service_run_id="service_run_id", volume_names=volume_names, status_code=status.HTTP_404_NOT_FOUND, ) @@ -37,6 +37,6 @@ def test_legacy_interface_volume_not_found_error(): print(e) assert ( # noqa: PT017 e.message - == "Expected 1 got 2 volumes labels with source_label=some, run_id=run_id: Found UNKNOWN a_volume" + == "Expected 1 got 2 volumes labels with source_label=some, service_run_id=service_run_id: Found UNKNOWN a_volume" ) assert e.status_code == status.HTTP_404_NOT_FOUND # noqa: PT017 diff --git a/services/dynamic-sidecar/tests/unit/test_core_validation.py b/services/dynamic-sidecar/tests/unit/test_core_validation.py index 886f729dd30..f9af97453bd 100644 --- a/services/dynamic-sidecar/tests/unit/test_core_validation.py +++ b/services/dynamic-sidecar/tests/unit/test_core_validation.py @@ -7,7 +7,7 @@ import pytest from fastapi import FastAPI from models_library.projects_nodes_io import NodeID -from models_library.services_types import RunID +from models_library.services_types import ServiceRunID from pytest_mock import MockerFixture from servicelib.docker_constants import DEFAULT_USER_SERVICES_NETWORK_NAME from simcore_service_dynamic_sidecar.core.validation import ( @@ -156,7 +156,7 @@ def no_internet_spec(project_tests_dir: Path) -> str: @pytest.fixture def fake_mounted_volumes() -> MountedVolumes: return MountedVolumes( - run_id=RunID.create(), + service_run_id=ServiceRunID.get_resource_tracking_run_id_for_dynamic(), node_id=NodeID("a019b83f-7cce-46bf-90cf-d02f7f0f089a"), inputs_path=Path("/"), outputs_path=Path("/"), diff --git a/services/dynamic-sidecar/tests/unit/test_modules_mounted_fs.py b/services/dynamic-sidecar/tests/unit/test_modules_mounted_fs.py index d177b1aecf5..ac3114fc51b 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_mounted_fs.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_mounted_fs.py @@ -9,7 +9,7 @@ from aiodocker.volumes import DockerVolume from fastapi import FastAPI from models_library.projects_nodes_io import NodeID -from models_library.services import RunID +from models_library.services import ServiceRunID from simcore_service_dynamic_sidecar.core.application import AppState from simcore_service_dynamic_sidecar.models.shared_store import SharedStore from simcore_service_dynamic_sidecar.modules.mounted_fs import ( @@ -56,7 +56,7 @@ async def test_expected_paths_and_volumes( inputs_dir: Path, outputs_dir: Path, state_paths_dirs: list[Path], - run_id: RunID, + service_run_id: ServiceRunID, node_id: NodeID, ): assert ( @@ -65,7 +65,7 @@ async def test_expected_paths_and_volumes( { x async for x in mounted_volumes.iter_state_paths_to_docker_volumes( - run_id + service_run_id ) } ) @@ -89,15 +89,16 @@ async def 
test_expected_paths_and_volumes( # check volume mount point assert ( mounted_volumes.volume_name_outputs - == f"dyv_{run_id}_{node_id}_{_replace_slashes(outputs_dir)[::-1]}" + == f"dyv_{service_run_id}_{node_id}_{_replace_slashes(outputs_dir)[::-1]}" ) assert ( mounted_volumes.volume_name_inputs - == f"dyv_{run_id}_{node_id}_{_replace_slashes(inputs_dir)[::-1]}" + == f"dyv_{service_run_id}_{node_id}_{_replace_slashes(inputs_dir)[::-1]}" ) assert set(mounted_volumes.volume_name_state_paths()) == { - f"dyv_{run_id}_{node_id}_{_replace_slashes(x)[::-1]}" for x in state_paths_dirs + f"dyv_{service_run_id}_{node_id}_{_replace_slashes(x)[::-1]}" + for x in state_paths_dirs } def _get_container_mount(mount_path: str) -> str: @@ -105,15 +106,21 @@ def _get_container_mount(mount_path: str) -> str: # check docker_volume assert ( - _get_container_mount(await mounted_volumes.get_inputs_docker_volume(run_id)) + _get_container_mount( + await mounted_volumes.get_inputs_docker_volume(service_run_id) + ) == f"{mounted_volumes.inputs_path}" ) assert ( - _get_container_mount(await mounted_volumes.get_outputs_docker_volume(run_id)) + _get_container_mount( + await mounted_volumes.get_outputs_docker_volume(service_run_id) + ) == f"{mounted_volumes.outputs_path}" ) assert { _get_container_mount(x) - async for x in mounted_volumes.iter_state_paths_to_docker_volumes(run_id) + async for x in mounted_volumes.iter_state_paths_to_docker_volumes( + service_run_id + ) } == {f"{state_path}" for state_path in state_paths_dirs} diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py index a38658f222b..f4e5c2dd18c 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py @@ -14,7 +14,7 @@ from async_asgi_testclient import TestClient from faker import Faker from fastapi import FastAPI -from models_library.services import RunID +from models_library.services import ServiceRunID from pydantic import PositiveFloat from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict @@ -371,7 +371,7 @@ async def test_regression_io_log_redirect_cb( monkeypatch.setenv("RABBIT_SECURE", "false") mounted_volumes = MountedVolumes( - run_id=RunID.create(), + service_run_id=ServiceRunID.get_resource_tracking_run_id_for_dynamic(), node_id=faker.uuid4(cast_to=None), inputs_path=Path("/"), outputs_path=Path("/"), diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py index ffa4dfbef45..eeea009cc32 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py @@ -16,7 +16,7 @@ import pytest from aiofiles import os from faker import Faker -from models_library.services import RunID +from models_library.services import ServiceRunID from pydantic import ( ByteSize, NonNegativeFloat, @@ -63,7 +63,7 @@ @pytest.fixture def mounted_volumes(faker: Faker, tmp_path: Path) -> Iterator[MountedVolumes]: mounted_volumes = MountedVolumes( - run_id=RunID.create(), + service_run_id=ServiceRunID.get_resource_tracking_run_id_for_dynamic(), node_id=faker.uuid4(cast_to=None), inputs_path=tmp_path / "inputs", outputs_path=tmp_path / "outputs", diff --git a/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__disk_usage.py 
b/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__disk_usage.py index 06270e171ca..5cac0f59934 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__disk_usage.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__disk_usage.py @@ -16,7 +16,7 @@ MountPathCategory, ) from models_library.projects_nodes_io import NodeID -from models_library.services_types import RunID +from models_library.services_types import ServiceRunID from models_library.users import UserID from psutil._common import sdiskusage from pydantic import ByteSize, TypeAdapter @@ -42,7 +42,7 @@ def _( inputs: Path, outputs: Path, states: list[Path] ) -> dict[MountPathCategory, set[Path]]: mounted_volumes = MountedVolumes( - run_id=RunID.create(), + service_run_id=ServiceRunID.get_resource_tracking_run_id_for_dynamic(), node_id=node_id, inputs_path=dy_volumes / inputs, outputs_path=dy_volumes / outputs, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_licensed_items_checkouts.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_licensed_items_checkouts.py new file mode 100644 index 00000000000..c17ff34655b --- /dev/null +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_licensed_items_checkouts.py @@ -0,0 +1,94 @@ +from fastapi import FastAPI +from models_library.api_schemas_resource_usage_tracker.licensed_items_checkouts import ( + LicensedItemCheckoutGet, + LicensedItemsCheckoutsPage, +) +from models_library.licensed_items import LicensedItemID +from models_library.products import ProductName +from models_library.resource_tracker_licensed_items_checkouts import ( + LicensedItemCheckoutID, +) +from models_library.rest_ordering import OrderBy +from models_library.services_types import ServiceRunID +from models_library.users import UserID +from models_library.wallets import WalletID +from servicelib.rabbitmq import RPCRouter +from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import ( + LICENSES_ERRORS, +) + +from ...services import licensed_items_checkouts + +router = RPCRouter() + + +@router.expose(reraise_if_error_type=LICENSES_ERRORS) +async def get_licensed_item_checkout( + app: FastAPI, + *, + product_name: ProductName, + licensed_item_checkout_id: LicensedItemCheckoutID, +) -> LicensedItemCheckoutGet: + return await licensed_items_checkouts.get_licensed_item_checkout( + db_engine=app.state.engine, + product_name=product_name, + licensed_item_checkout_id=licensed_item_checkout_id, + ) + + +@router.expose(reraise_if_error_type=LICENSES_ERRORS) +async def get_licensed_items_checkouts_page( + app: FastAPI, + *, + product_name: ProductName, + filter_wallet_id: WalletID, + offset: int = 0, + limit: int = 20, + order_by: OrderBy, +) -> LicensedItemsCheckoutsPage: + return await licensed_items_checkouts.list_licensed_items_checkouts( + db_engine=app.state.engine, + product_name=product_name, + filter_wallet_id=filter_wallet_id, + offset=offset, + limit=limit, + order_by=order_by, + ) + + +@router.expose(reraise_if_error_type=LICENSES_ERRORS) +async def checkout_licensed_item( + app: FastAPI, + *, + licensed_item_id: LicensedItemID, + wallet_id: WalletID, + product_name: ProductName, + num_of_seats: int, + service_run_id: ServiceRunID, + user_id: UserID, + user_email: str, +) -> LicensedItemCheckoutGet: + return await licensed_items_checkouts.checkout_licensed_item( + db_engine=app.state.engine, + licensed_item_id=licensed_item_id, 
+ wallet_id=wallet_id, + product_name=product_name, + num_of_seats=num_of_seats, + service_run_id=service_run_id, + user_id=user_id, + user_email=user_email, + ) + + +@router.expose(reraise_if_error_type=LICENSES_ERRORS) +async def release_licensed_item( + app: FastAPI, + *, + licensed_item_checkout_id: LicensedItemCheckoutID, + product_name: ProductName, +) -> LicensedItemCheckoutGet: + return await licensed_items_checkouts.release_licensed_item( + db_engine=app.state.engine, + licensed_item_checkout_id=licensed_item_checkout_id, + product_name=product_name, + ) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/routes.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/routes.py index f1fd1276161..e5da8f44411 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/routes.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/routes.py @@ -8,7 +8,7 @@ from servicelib.rabbitmq import RPCRouter from ...services.modules.rabbitmq import get_rabbitmq_rpc_server -from . import _licensed_items_purchases, _resource_tracker +from . import _licensed_items_checkouts, _licensed_items_purchases, _resource_tracker _logger = logging.getLogger(__name__) @@ -16,6 +16,7 @@ ROUTERS: list[RPCRouter] = [ _resource_tracker.router, _licensed_items_purchases.router, + _licensed_items_checkouts.router, ] diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/credit_transactions.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/credit_transactions.py index b9fd942fee0..ac461b37c8c 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/credit_transactions.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/credit_transactions.py @@ -9,11 +9,11 @@ PricingPlanId, PricingUnitCostId, PricingUnitId, - ServiceRunId, ) from models_library.resource_tracker_licensed_items_purchases import ( LicensedItemPurchaseID, ) +from models_library.services_types import ServiceRunID from models_library.users import UserID from models_library.wallets import WalletID from pydantic import BaseModel, ConfigDict @@ -31,7 +31,7 @@ class CreditTransactionCreate(BaseModel): osparc_credits: Decimal transaction_status: CreditTransactionStatus transaction_classification: CreditClassification - service_run_id: ServiceRunId | None + service_run_id: ServiceRunID | None payment_transaction_id: str | None created_at: datetime last_heartbeat_at: datetime @@ -39,13 +39,13 @@ class CreditTransactionCreate(BaseModel): class CreditTransactionCreditsUpdate(BaseModel): - service_run_id: ServiceRunId + service_run_id: ServiceRunID osparc_credits: Decimal last_heartbeat_at: datetime class CreditTransactionCreditsAndStatusUpdate(BaseModel): - service_run_id: ServiceRunId + service_run_id: ServiceRunID osparc_credits: Decimal transaction_status: CreditTransactionStatus @@ -63,7 +63,7 @@ class CreditTransactionDB(BaseModel): osparc_credits: Decimal transaction_status: CreditTransactionStatus transaction_classification: CreditClassification - service_run_id: ServiceRunId | None + service_run_id: ServiceRunID | None payment_transaction_id: str | None created: datetime last_heartbeat_at: datetime diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/licensed_items_checkouts.py 
b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/licensed_items_checkouts.py new file mode 100644 index 00000000000..774e4505230 --- /dev/null +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/licensed_items_checkouts.py @@ -0,0 +1,46 @@ +from datetime import datetime + +from models_library.licensed_items import LicensedItemID +from models_library.products import ProductName +from models_library.resource_tracker_licensed_items_checkouts import ( + LicensedItemCheckoutID, +) +from models_library.services_types import ServiceRunID +from models_library.users import UserID +from models_library.wallets import WalletID +from pydantic import BaseModel, ConfigDict + + +class LicensedItemCheckoutDB(BaseModel): + licensed_item_checkout_id: LicensedItemCheckoutID + licensed_item_id: LicensedItemID + wallet_id: WalletID + user_id: UserID + user_email: str + product_name: ProductName + service_run_id: ServiceRunID + started_at: datetime + stopped_at: datetime | None + num_of_seats: int + modified: datetime + + model_config = ConfigDict(from_attributes=True) + + +class CreateLicensedItemCheckoutDB(BaseModel): + licensed_item_id: LicensedItemID + wallet_id: WalletID + user_id: UserID + user_email: str + product_name: ProductName + service_run_id: ServiceRunID + started_at: datetime + num_of_seats: int + + model_config = ConfigDict(from_attributes=True) + + +class UpdateLicensedItemCheckoutDB(BaseModel): + stopped_at: datetime + + model_config = ConfigDict(from_attributes=True) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/service_runs.py index f78662defef..638a0bcb918 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/service_runs.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/service_runs.py @@ -10,10 +10,10 @@ PricingUnitCostId, PricingUnitId, ResourceTrackerServiceType, - ServiceRunId, ServiceRunStatus, ) from models_library.services import ServiceKey, ServiceVersion +from models_library.services_types import ServiceRunID from models_library.users import UserID from models_library.wallets import WalletID from pydantic import BaseModel, ConfigDict, NonNegativeInt @@ -21,7 +21,7 @@ class ServiceRunCreate(BaseModel): product_name: ProductName - service_run_id: ServiceRunId + service_run_id: ServiceRunID wallet_id: WalletID | None wallet_name: str | None pricing_plan_id: PricingPlanId | None @@ -51,12 +51,12 @@ class ServiceRunCreate(BaseModel): class ServiceRunLastHeartbeatUpdate(BaseModel): - service_run_id: ServiceRunId + service_run_id: ServiceRunID last_heartbeat_at: datetime class ServiceRunStoppedAtUpdate(BaseModel): - service_run_id: ServiceRunId + service_run_id: ServiceRunID stopped_at: datetime service_run_status: ServiceRunStatus service_run_status_msg: str | None @@ -64,7 +64,7 @@ class ServiceRunStoppedAtUpdate(BaseModel): class ServiceRunDB(BaseModel): product_name: ProductName - service_run_id: ServiceRunId + service_run_id: ServiceRunID wallet_id: WalletID | None wallet_name: str | None pricing_plan_id: PricingPlanId | None @@ -113,7 +113,7 @@ class OsparcCreditsAggregatedByServiceKeyDB(BaseModel): class ServiceRunForCheckDB(BaseModel): - service_run_id: ServiceRunId + service_run_id: ServiceRunID last_heartbeat_at: datetime missed_heartbeat_counter: NonNegativeInt modified: datetime 
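For orientation, a minimal sketch of how the checkout models above are meant to be filled in. The field names come straight from CreateLicensedItemCheckoutDB above, and the ServiceRunID factory is the one the new test fixtures use; the concrete values (licensed item, wallet, user) are made up for illustration:

    from datetime import UTC, datetime

    from models_library.services_types import ServiceRunID
    from simcore_service_resource_usage_tracker.models.licensed_items_checkouts import (
        CreateLicensedItemCheckoutDB,
    )

    create_checkout = CreateLicensedItemCheckoutDB(
        licensed_item_id="beb16d18-d57d-44aa-a638-9727fa4a72ef",  # hypothetical item
        wallet_id=6,
        user_id=1,
        user_email="test@test.com",
        product_name="osparc",
        # same run id the dynamic sidecar receives via DY_SIDECAR_RUN_ID
        service_run_id=ServiceRunID.get_resource_tracking_run_id_for_dynamic(),
        started_at=datetime.now(tz=UTC),
        num_of_seats=3,
    )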
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/background_task_periodic_heartbeat_check.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/background_task_periodic_heartbeat_check.py index fba9332502e..98a18522e9e 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/background_task_periodic_heartbeat_check.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/background_task_periodic_heartbeat_check.py @@ -1,21 +1,25 @@ import asyncio import logging -from datetime import datetime, timedelta, timezone +from datetime import UTC, datetime, timedelta from fastapi import FastAPI from models_library.resource_tracker import ( CreditTransactionStatus, ResourceTrackerServiceType, - ServiceRunId, ServiceRunStatus, ) +from models_library.services_types import ServiceRunID from pydantic import NonNegativeInt, PositiveInt from sqlalchemy.ext.asyncio import AsyncEngine from ..core.settings import ApplicationSettings from ..models.credit_transactions import CreditTransactionCreditsAndStatusUpdate from ..models.service_runs import ServiceRunStoppedAtUpdate -from .modules.db import credit_transactions_db, service_runs_db +from .modules.db import ( + credit_transactions_db, + licensed_items_checkouts_db, + service_runs_db, +) from .utils import compute_service_run_credit_costs, make_negative _logger = logging.getLogger(__name__) @@ -28,7 +32,7 @@ async def _check_service_heartbeat( base_start_timestamp: datetime, resource_usage_tracker_missed_heartbeat_interval: timedelta, resource_usage_tracker_missed_heartbeat_counter_fail: NonNegativeInt, - service_run_id: ServiceRunId, + service_run_id: ServiceRunID, last_heartbeat_at: datetime, missed_heartbeat_counter: NonNegativeInt, modified_at: datetime, @@ -74,7 +78,7 @@ async def _check_service_heartbeat( async def _close_unhealthy_service( db_engine: AsyncEngine, - service_run_id: ServiceRunId, + service_run_id: ServiceRunID, base_start_timestamp: datetime, ): # 1. Close the service_run @@ -116,6 +120,11 @@ async def _close_unhealthy_service( db_engine, data=update_credit_transaction ) + # 3. Release license seats in case some were checked out but not properly released. 
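+        #    (e.g. the service crashed before calling release_licensed_item; the call
+        #    below closes only checkouts of this run whose stopped_at is still NULL)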
+ await licensed_items_checkouts_db.force_release_license_seats_by_run_id( + db_engine, service_run_id=service_run_id + ) + async def periodic_check_of_running_services_task(app: FastAPI) -> None: _logger.info("Periodic check started") @@ -124,7 +133,7 @@ async def periodic_check_of_running_services_task(app: FastAPI) -> None: app_settings: ApplicationSettings = app.state.settings _db_engine = app.state.engine - base_start_timestamp = datetime.now(tz=timezone.utc) + base_start_timestamp = datetime.now(tz=UTC) # Get all current running services (across all products) total_count: PositiveInt = await service_runs_db.total_service_runs_with_running_status_across_all_products( diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/licensed_items_checkouts.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/licensed_items_checkouts.py new file mode 100644 index 00000000000..753ea3f638f --- /dev/null +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/licensed_items_checkouts.py @@ -0,0 +1,208 @@ +from datetime import UTC, datetime +from typing import Annotated + +from fastapi import Depends +from models_library.api_schemas_resource_usage_tracker.licensed_items_checkouts import ( + LicensedItemCheckoutGet, + LicensedItemsCheckoutsPage, +) +from models_library.licensed_items import LicensedItemID +from models_library.products import ProductName +from models_library.resource_tracker import ServiceRunStatus +from models_library.resource_tracker_licensed_items_checkouts import ( + LicensedItemCheckoutID, +) +from models_library.rest_ordering import OrderBy +from models_library.services_types import ServiceRunID +from models_library.users import UserID +from models_library.wallets import WalletID +from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import ( + CanNotCheckoutNotEnoughAvailableSeatsError, + CanNotCheckoutServiceIsNotRunningError, + NotEnoughAvailableSeatsError, +) +from sqlalchemy.ext.asyncio import AsyncEngine + +from ..api.rest.dependencies import get_resource_tracker_db_engine +from ..models.licensed_items_checkouts import ( + CreateLicensedItemCheckoutDB, + LicensedItemCheckoutDB, +) +from .modules.db import ( + licensed_items_checkouts_db, + licensed_items_purchases_db, + service_runs_db, +) + + +async def list_licensed_items_checkouts( + db_engine: Annotated[AsyncEngine, Depends(get_resource_tracker_db_engine)], + *, + product_name: ProductName, + filter_wallet_id: WalletID, + offset: int, + limit: int, + order_by: OrderBy, +) -> LicensedItemsCheckoutsPage: + total, licensed_items_checkouts_list_db = await licensed_items_checkouts_db.list_( + db_engine, + product_name=product_name, + filter_wallet_id=filter_wallet_id, + offset=offset, + limit=limit, + order_by=order_by, + ) + return LicensedItemsCheckoutsPage( + total=total, + items=[ + LicensedItemCheckoutGet( + licensed_item_checkout_id=licensed_item_checkout_db.licensed_item_checkout_id, + licensed_item_id=licensed_item_checkout_db.licensed_item_id, + wallet_id=licensed_item_checkout_db.wallet_id, + user_id=licensed_item_checkout_db.user_id, + product_name=licensed_item_checkout_db.product_name, + service_run_id=licensed_item_checkout_db.service_run_id, + started_at=licensed_item_checkout_db.started_at, + stopped_at=licensed_item_checkout_db.stopped_at, + num_of_seats=licensed_item_checkout_db.num_of_seats, + ) + for licensed_item_checkout_db in licensed_items_checkouts_list_db + ], + ) + + 
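+
+# NOTE: the DB-row -> LicensedItemCheckoutGet mapping below is repeated in every
+# handler of this module. A small helper could factor it out; hypothetical sketch,
+# not part of this PR:
+#
+#   def _to_api_model(checkout_db: LicensedItemCheckoutDB) -> LicensedItemCheckoutGet:
+#       return LicensedItemCheckoutGet(
+#           **checkout_db.model_dump(include=set(LicensedItemCheckoutGet.model_fields))
+#       )
+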
+async def get_licensed_item_checkout( + db_engine: Annotated[AsyncEngine, Depends(get_resource_tracker_db_engine)], + *, + product_name: ProductName, + licensed_item_checkout_id: LicensedItemCheckoutID, +) -> LicensedItemCheckoutGet: + licensed_item_checkout_db: LicensedItemCheckoutDB = ( + await licensed_items_checkouts_db.get( + db_engine, + product_name=product_name, + licensed_item_checkout_id=licensed_item_checkout_id, + ) + ) + + return LicensedItemCheckoutGet( + licensed_item_checkout_id=licensed_item_checkout_db.licensed_item_checkout_id, + licensed_item_id=licensed_item_checkout_db.licensed_item_id, + wallet_id=licensed_item_checkout_db.wallet_id, + user_id=licensed_item_checkout_db.user_id, + product_name=licensed_item_checkout_db.product_name, + service_run_id=licensed_item_checkout_db.service_run_id, + started_at=licensed_item_checkout_db.started_at, + stopped_at=licensed_item_checkout_db.stopped_at, + num_of_seats=licensed_item_checkout_db.num_of_seats, + ) + + +async def checkout_licensed_item( + db_engine: Annotated[AsyncEngine, Depends(get_resource_tracker_db_engine)], + *, + licensed_item_id: LicensedItemID, + wallet_id: WalletID, + product_name: ProductName, + num_of_seats: int, + service_run_id: ServiceRunID, + user_id: UserID, + user_email: str, +) -> LicensedItemCheckoutGet: + + _active_purchased_seats: int = await licensed_items_purchases_db.get_active_purchased_seats_for_item_and_wallet( + db_engine, + licensed_item_id=licensed_item_id, + wallet_id=wallet_id, + product_name=product_name, + ) + + _currently_used_seats = ( + await licensed_items_checkouts_db.get_currently_used_seats_for_item_and_wallet( + db_engine, + licensed_item_id=licensed_item_id, + wallet_id=wallet_id, + product_name=product_name, + ) + ) + + available_seats = _active_purchased_seats - _currently_used_seats + if available_seats <= 0: + raise NotEnoughAvailableSeatsError( + license_item_id=licensed_item_id, available_num_of_seats=available_seats + ) + + if available_seats - num_of_seats < 0: + raise CanNotCheckoutNotEnoughAvailableSeatsError( + license_item_id=licensed_item_id, + available_num_of_seats=available_seats, + num_of_seats=num_of_seats, + ) + + # Check if the service run ID is currently running + service_run = await service_runs_db.get_service_run_by_id( + db_engine, service_run_id=service_run_id + ) + if ( + service_run is None + or service_run.service_run_status != ServiceRunStatus.RUNNING + ): + raise CanNotCheckoutServiceIsNotRunningError( + license_item_id=licensed_item_id, service_run=service_run + ) + + _create_item_checkout = CreateLicensedItemCheckoutDB( + licensed_item_id=licensed_item_id, + wallet_id=wallet_id, + user_id=user_id, + user_email=user_email, + product_name=product_name, + service_run_id=service_run_id, + started_at=datetime.now(tz=UTC), + num_of_seats=num_of_seats, + ) + licensed_item_checkout_db = await licensed_items_checkouts_db.create( + db_engine, data=_create_item_checkout + ) + + # Return checkout ID + return LicensedItemCheckoutGet( + licensed_item_checkout_id=licensed_item_checkout_db.licensed_item_checkout_id, + licensed_item_id=licensed_item_checkout_db.licensed_item_id, + wallet_id=licensed_item_checkout_db.wallet_id, + user_id=licensed_item_checkout_db.user_id, + product_name=licensed_item_checkout_db.product_name, + service_run_id=licensed_item_checkout_db.service_run_id, + started_at=licensed_item_checkout_db.started_at, + stopped_at=licensed_item_checkout_db.stopped_at, + num_of_seats=licensed_item_checkout_db.num_of_seats, + ) + + +async def 
release_licensed_item( + db_engine: Annotated[AsyncEngine, Depends(get_resource_tracker_db_engine)], + *, + licensed_item_checkout_id: LicensedItemCheckoutID, + product_name: ProductName, +) -> LicensedItemCheckoutGet: + + licensed_item_checkout_db: LicensedItemCheckoutDB = ( + await licensed_items_checkouts_db.update( + db_engine, + licensed_item_checkout_id=licensed_item_checkout_id, + product_name=product_name, + stopped_at=datetime.now(tz=UTC), + ) + ) + + return LicensedItemCheckoutGet( + licensed_item_checkout_id=licensed_item_checkout_db.licensed_item_checkout_id, + licensed_item_id=licensed_item_checkout_db.licensed_item_id, + wallet_id=licensed_item_checkout_db.wallet_id, + user_id=licensed_item_checkout_db.user_id, + product_name=licensed_item_checkout_db.product_name, + service_run_id=licensed_item_checkout_db.service_run_id, + started_at=licensed_item_checkout_db.started_at, + stopped_at=licensed_item_checkout_db.stopped_at, + num_of_seats=licensed_item_checkout_db.num_of_seats, + ) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_checkouts_db.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_checkouts_db.py new file mode 100644 index 00000000000..5035a637199 --- /dev/null +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_checkouts_db.py @@ -0,0 +1,259 @@ +import logging +from datetime import datetime +from typing import cast + +import sqlalchemy as sa +from models_library.licensed_items import LicensedItemID +from models_library.products import ProductName +from models_library.resource_tracker_licensed_items_checkouts import ( + LicensedItemCheckoutID, +) +from models_library.rest_ordering import OrderBy, OrderDirection +from models_library.services_types import ServiceRunID +from models_library.wallets import WalletID +from pydantic import NonNegativeInt +from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import ( + LicensedItemCheckoutNotFoundError, +) +from simcore_postgres_database.models.resource_tracker_licensed_items_checkouts import ( + resource_tracker_licensed_items_checkouts, +) +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) +from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine + +from ....models.licensed_items_checkouts import ( + CreateLicensedItemCheckoutDB, + LicensedItemCheckoutDB, +) + +_logger = logging.getLogger(__name__) + + +_SELECTION_ARGS = ( + resource_tracker_licensed_items_checkouts.c.licensed_item_checkout_id, + resource_tracker_licensed_items_checkouts.c.licensed_item_id, + resource_tracker_licensed_items_checkouts.c.wallet_id, + resource_tracker_licensed_items_checkouts.c.user_id, + resource_tracker_licensed_items_checkouts.c.user_email, + resource_tracker_licensed_items_checkouts.c.product_name, + resource_tracker_licensed_items_checkouts.c.service_run_id, + resource_tracker_licensed_items_checkouts.c.started_at, + resource_tracker_licensed_items_checkouts.c.stopped_at, + resource_tracker_licensed_items_checkouts.c.num_of_seats, + resource_tracker_licensed_items_checkouts.c.modified, +) + +assert set(LicensedItemCheckoutDB.model_fields) == { + c.name for c in _SELECTION_ARGS +} # nosec + + +async def create( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + data: CreateLicensedItemCheckoutDB, +) -> LicensedItemCheckoutDB: + async 
with transaction_context(engine, connection) as conn: + result = await conn.execute( + resource_tracker_licensed_items_checkouts.insert() + .values( + licensed_item_id=data.licensed_item_id, + wallet_id=data.wallet_id, + user_id=data.user_id, + user_email=data.user_email, + product_name=data.product_name, + service_run_id=data.service_run_id, + started_at=data.started_at, + stopped_at=None, + num_of_seats=data.num_of_seats, + modified=sa.func.now(), + ) + .returning(*_SELECTION_ARGS) + ) + row = result.first() + return LicensedItemCheckoutDB.model_validate(row) + + +async def list_( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + product_name: ProductName, + filter_wallet_id: WalletID, + offset: NonNegativeInt, + limit: NonNegativeInt, + order_by: OrderBy, +) -> tuple[int, list[LicensedItemCheckoutDB]]: + base_query = ( + sa.select(*_SELECTION_ARGS) + .select_from(resource_tracker_licensed_items_checkouts) + .where( + (resource_tracker_licensed_items_checkouts.c.product_name == product_name) + & ( + resource_tracker_licensed_items_checkouts.c.wallet_id + == filter_wallet_id + ) + ) + ) + + # Select total count from base_query + subquery = base_query.subquery() + count_query = sa.select(sa.func.count()).select_from(subquery) + + # Ordering and pagination + if order_by.direction == OrderDirection.ASC: + list_query = base_query.order_by( + sa.asc(getattr(resource_tracker_licensed_items_checkouts.c, order_by.field)) + ) + else: + list_query = base_query.order_by( + sa.desc( + getattr(resource_tracker_licensed_items_checkouts.c, order_by.field) + ) + ) + list_query = list_query.offset(offset).limit(limit) + + async with pass_or_acquire_connection(engine, connection) as conn: + total_count = await conn.scalar(count_query) + if total_count is None: + total_count = 0 + + result = await conn.stream(list_query) + items: list[LicensedItemCheckoutDB] = [ + LicensedItemCheckoutDB.model_validate(row) async for row in result + ] + + return cast(int, total_count), items + + +async def get( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + licensed_item_checkout_id: LicensedItemCheckoutID, + product_name: ProductName, +) -> LicensedItemCheckoutDB: + base_query = ( + sa.select(*_SELECTION_ARGS) + .select_from(resource_tracker_licensed_items_checkouts) + .where( + ( + resource_tracker_licensed_items_checkouts.c.licensed_item_checkout_id + == licensed_item_checkout_id + ) + & (resource_tracker_licensed_items_checkouts.c.product_name == product_name) + ) + ) + + async with pass_or_acquire_connection(engine, connection) as conn: + result = await conn.stream(base_query) + row = await result.first() + if row is None: + raise LicensedItemCheckoutNotFoundError( + licensed_item_checkout_id=licensed_item_checkout_id + ) + return LicensedItemCheckoutDB.model_validate(row) + + +async def update( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + licensed_item_checkout_id: LicensedItemCheckoutID, + product_name: ProductName, + stopped_at: datetime, +) -> LicensedItemCheckoutDB: + update_stmt = ( + resource_tracker_licensed_items_checkouts.update() + .values( + modified=sa.func.now(), + stopped_at=stopped_at, + ) + .where( + ( + resource_tracker_licensed_items_checkouts.c.licensed_item_checkout_id + == licensed_item_checkout_id + ) + & (resource_tracker_licensed_items_checkouts.c.product_name == product_name) + & (resource_tracker_licensed_items_checkouts.c.stopped_at.is_(None)) + ) + .returning(sa.literal_column("*")) + ) + + async with 
transaction_context(engine, connection) as conn:
+        result = await conn.execute(update_stmt)
+        row = result.first()
+        if row is None:
+            raise LicensedItemCheckoutNotFoundError(
+                licensed_item_checkout_id=licensed_item_checkout_id
+            )
+        return LicensedItemCheckoutDB.model_validate(row)
+
+
+async def get_currently_used_seats_for_item_and_wallet(
+    engine: AsyncEngine,
+    connection: AsyncConnection | None = None,
+    *,
+    licensed_item_id: LicensedItemID,
+    wallet_id: WalletID,
+    product_name: ProductName,
+) -> int:
+    sum_stmt = sa.select(
+        sa.func.sum(resource_tracker_licensed_items_checkouts.c.num_of_seats)
+    ).where(
+        (resource_tracker_licensed_items_checkouts.c.wallet_id == wallet_id)
+        & (
+            resource_tracker_licensed_items_checkouts.c.licensed_item_id
+            == licensed_item_id
+        )
+        & (resource_tracker_licensed_items_checkouts.c.product_name == product_name)
+        & (resource_tracker_licensed_items_checkouts.c.stopped_at.is_(None))
+    )
+
+    async with pass_or_acquire_connection(engine, connection) as conn:
+        total_sum = await conn.scalar(sum_stmt)
+        if total_sum is None:
+            return 0
+        return cast(int, total_sum)
+
+
+async def force_release_license_seats_by_run_id(
+    engine: AsyncEngine,
+    connection: AsyncConnection | None = None,
+    *,
+    service_run_id: ServiceRunID,
+) -> None:
+    """
+    Used by the periodic heartbeat-check task that monitors whether running services still send
+    heartbeat signals. When no heartbeat arrives within the configured interval and a service is
+    deemed unhealthy, this function releases any licensed seats that the service checked out but
+    never released properly. Releasing is idempotent: only checkouts of the given run whose
+    stopped_at is still NULL are closed.
+    Currently this is mainly used to release the single seat checked out for the VIP model.
+    """
+    update_stmt = (
+        resource_tracker_licensed_items_checkouts.update()
+        .values(
+            modified=sa.func.now(),
+            stopped_at=sa.func.now(),
+        )
+        .where(
+            (
+                resource_tracker_licensed_items_checkouts.c.service_run_id
+                == service_run_id
+            )
+            & (resource_tracker_licensed_items_checkouts.c.stopped_at.is_(None))
+        )
+        .returning(sa.literal_column("*"))
+    )
+
+    async with transaction_context(engine, connection) as conn:
+        result = await conn.execute(update_stmt)
+        released_checkouts = result.fetchall()
+        if released_checkouts:
+            _logger.error(
+                "Force-released %s licensed-item checkout(s): %s",
+                len(released_checkouts),
+                released_checkouts,
+            )
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_purchases_db.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_purchases_db.py
index e9951042ddc..2fd8718784e 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_purchases_db.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_purchases_db.py
@@ -1,6 +1,8 @@
+from datetime import UTC, datetime
 from typing import cast

 import sqlalchemy as sa
+from models_library.licensed_items import LicensedItemID
 from models_library.products import ProductName
 from models_library.resource_tracker_licensed_items_purchases import (
     LicensedItemPurchaseID,
@@ -114,6 +116,8 @@ async def list_(
     async with pass_or_acquire_connection(engine, connection) as conn:
         total_count = await conn.scalar(count_query)
+        if total_count is None:
+            total_count = 0

         result = await conn.stream(list_query)
         items: list[LicensedItemsPurchasesDB] = [
@@ -150,3 +154,36 @@ async def get(
                 licensed_item_purchase_id=licensed_item_purchase_id
             )
         return LicensedItemsPurchasesDB.model_validate(row)
+
+
+async def get_active_purchased_seats_for_item_and_wallet(
+    engine: AsyncEngine,
+    connection: AsyncConnection | None = None,
+    *,
+    licensed_item_id: LicensedItemID,
+    wallet_id: WalletID,
+    product_name: ProductName,
+) -> int:
+    """
+    Sums the purchased seats whose purchase period is currently active (expired purchases are excluded)
+    """
+    _current_time = datetime.now(tz=UTC)
+
+    sum_stmt = sa.select(
+        sa.func.sum(resource_tracker_licensed_items_purchases.c.num_of_seats)
+    ).where(
+        (resource_tracker_licensed_items_purchases.c.wallet_id == wallet_id)
+        & (
+            resource_tracker_licensed_items_purchases.c.licensed_item_id
+            == licensed_item_id
+        )
+        & (resource_tracker_licensed_items_purchases.c.product_name == product_name)
+        & (resource_tracker_licensed_items_purchases.c.start_at <= _current_time)
+        & (resource_tracker_licensed_items_purchases.c.expire_at >= _current_time)
+    )
+
+    async with pass_or_acquire_connection(engine, connection) as conn:
+        total_sum = await conn.scalar(sum_stmt)
+        if total_sum is None:
+            return 0
+        return cast(int, total_sum)
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/service_runs_db.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/service_runs_db.py
index a4ea563803d..c1bf23df530 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/service_runs_db.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/service_runs_db.py
@@ -9,10 +9,10 @@
 from models_library.resource_tracker import (
     CreditClassification,
     CreditTransactionStatus,
-    ServiceRunId,
ServiceRunStatus, ) from models_library.rest_ordering import OrderBy, OrderDirection +from models_library.services_types import ServiceRunID from models_library.users import UserID from models_library.wallets import WalletID from pydantic import PositiveInt @@ -46,7 +46,7 @@ async def create_service_run( connection: AsyncConnection | None = None, *, data: ServiceRunCreate, -) -> ServiceRunId: +) -> ServiceRunID: async with transaction_context(engine, connection) as conn: insert_stmt = ( resource_tracker_service_runs.insert() @@ -88,7 +88,7 @@ async def create_service_run( row = result.first() if row is None: raise ServiceRunNotCreatedDBError(data=data) - return cast(ServiceRunId, row[0]) + return cast(ServiceRunID, row[0]) async def update_service_run_last_heartbeat( @@ -160,7 +160,7 @@ async def get_service_run_by_id( engine: AsyncEngine, connection: AsyncConnection | None = None, *, - service_run_id: ServiceRunId, + service_run_id: ServiceRunID, ) -> ServiceRunDB | None: async with transaction_context(engine, connection) as conn: stmt = sa.select(resource_tracker_service_runs).where( @@ -376,7 +376,9 @@ async def get_osparc_credits_aggregated_by_service( subquery = base_query.subquery() count_query = sa.select(sa.func.count()).select_from(subquery) - count_result = await conn.execute(count_query) + count_result = await conn.scalar(count_query) + if count_result is None: + count_result = 0 # Default ordering and pagination list_query = ( @@ -387,7 +389,7 @@ async def get_osparc_credits_aggregated_by_service( list_result = await conn.execute(list_query) return ( - cast(int, count_result.scalar()), + cast(int, count_result), [ OsparcCreditsAggregatedByServiceKeyDB.model_validate(row) for row in list_result.fetchall() @@ -427,10 +429,7 @@ async def export_service_runs_table_to_s3( resource_tracker_service_runs.c.stopped_at, resource_tracker_credit_transactions.c.osparc_credits, resource_tracker_credit_transactions.c.transaction_status, - sa.func.coalesce( - _project_tags_subquery.c.project_tags, - sa.cast(sa.text("'{}'"), sa.ARRAY(sa.String)), - ).label("project_tags"), + _project_tags_subquery.c.project_tags.label("project_tags"), ) .select_from( resource_tracker_service_runs.join( @@ -590,7 +589,7 @@ async def update_service_missed_heartbeat_counter( engine: AsyncEngine, connection: AsyncConnection | None = None, *, - service_run_id: ServiceRunId, + service_run_id: ServiceRunID, last_heartbeat_at: datetime, missed_heartbeat_counter: int, ) -> ServiceRunDB | None: diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/process_message_running_service.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/process_message_running_service.py index e9234f65435..88553f51705 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/process_message_running_service.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/process_message_running_service.py @@ -33,7 +33,12 @@ ServiceRunLastHeartbeatUpdate, ServiceRunStoppedAtUpdate, ) -from .modules.db import credit_transactions_db, pricing_plans_db, service_runs_db +from .modules.db import ( + credit_transactions_db, + licensed_items_checkouts_db, + pricing_plans_db, + service_runs_db, +) from .modules.rabbitmq import RabbitMQClient, get_rabbitmq_client from .utils import ( compute_service_run_credit_costs, @@ -269,9 +274,15 @@ async def _process_stop_event( running_service = await 
service_runs_db.update_service_run_stopped_at(
        db_engine, data=update_service_run_stopped_at
    )
+    await licensed_items_checkouts_db.force_release_license_seats_by_run_id(
+        db_engine, service_run_id=msg.service_run_id
+    )

    if running_service is None:
-        _logger.error("Nothing to update. This should not happen investigate.")
+        _logger.error(
+            "Nothing to update; this should not happen. Please investigate. service_run_id: %s",
+            msg.service_run_id,
+        )
        return

    if running_service.wallet_id and running_service.pricing_unit_cost is not None:
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/utils.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/utils.py
index 6047ac2e904..2556322000e 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/utils.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/utils.py
@@ -14,7 +14,8 @@
    WalletCreditsLimitReachedMessage,
    WalletCreditsMessage,
 )
-from models_library.resource_tracker import ServiceRunId, ServiceRunStatus
+from models_library.resource_tracker import ServiceRunStatus
+from models_library.services_types import ServiceRunID
 from models_library.users import UserID
 from models_library.wallets import WalletID
 from pydantic import PositiveInt
@@ -58,7 +59,7 @@ async def sum_credit_transactions_and_publish_to_rabbitmq(

 async def _publish_to_rabbitmq_wallet_credits_limit_reached(
     rabbitmq_client: RabbitMQClient,
-    service_run_id: ServiceRunId,
+    service_run_id: ServiceRunID,
     user_id: UserID,
     project_id: ProjectID,
     node_id: NodeID,
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_licensed_items_checkouts.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_licensed_items_checkouts.py
new file mode 100644
index 00000000000..b1036c49aef
--- /dev/null
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_licensed_items_checkouts.py
@@ -0,0 +1,139 @@
+# pylint:disable=unused-variable
+# pylint:disable=unused-argument
+# pylint:disable=redefined-outer-name
+# pylint:disable=too-many-arguments
+
+
+from datetime import UTC, datetime, timedelta
+from decimal import Decimal
+from typing import Generator
+
+import pytest
+import sqlalchemy as sa
+from models_library.api_schemas_resource_usage_tracker.licensed_items_checkouts import (
+    LicensedItemCheckoutGet,
+    LicensedItemsCheckoutsPage,
+)
+from models_library.resource_tracker_licensed_items_purchases import (
+    LicensedItemsPurchasesCreate,
+)
+from servicelib.rabbitmq import RabbitMQRPCClient
+from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker import (
+    licensed_items_checkouts,
+    licensed_items_purchases,
+)
+from simcore_postgres_database.models.resource_tracker_licensed_items_checkouts import (
+    resource_tracker_licensed_items_checkouts,
+)
+from simcore_postgres_database.models.resource_tracker_service_runs import (
+    resource_tracker_service_runs,
+)
+
+pytest_simcore_core_services_selection = [
+    "postgres",
+    "rabbit",
+]
+pytest_simcore_ops_services_selection = [
+    "adminer",
+]
+
+
+_USER_ID_1 = 1
+_WALLET_ID = 6
+
+
+@pytest.fixture()
+def resource_tracker_service_run_id(
+    postgres_db: sa.engine.Engine, random_resource_tracker_service_run
+) -> Generator[str, None, None]:
+    with postgres_db.connect() as con:
+        result = con.execute(
+            resource_tracker_service_runs.insert()
+            .values(
+                **random_resource_tracker_service_run(
+                    user_id=_USER_ID_1, wallet_id=_WALLET_ID
+                )
+            )
.returning(resource_tracker_service_runs.c.service_run_id) + ) + row = result.first() + assert row + + yield row[0] + + con.execute(resource_tracker_licensed_items_checkouts.delete()) + con.execute(resource_tracker_service_runs.delete()) + + +async def test_rpc_licensed_items_checkouts_workflow( + mocked_redis_server: None, + resource_tracker_service_run_id: str, + rpc_client: RabbitMQRPCClient, +): + # List licensed items checkouts + output = await licensed_items_checkouts.get_licensed_items_checkouts_page( + rpc_client, + product_name="osparc", + filter_wallet_id=_WALLET_ID, + ) + assert output.total == 0 + assert output.items == [] + + # Purchase license item + _create_data = LicensedItemsPurchasesCreate( + product_name="osparc", + licensed_item_id="beb16d18-d57d-44aa-a638-9727fa4a72ef", + wallet_id=_WALLET_ID, + wallet_name="My Wallet", + pricing_plan_id=1, + pricing_unit_id=1, + pricing_unit_cost_id=1, + pricing_unit_cost=Decimal(10), + start_at=datetime.now(tz=UTC), + expire_at=datetime.now(tz=UTC) + timedelta(days=1), + num_of_seats=5, + purchased_by_user=_USER_ID_1, + user_email="test@test.com", + purchased_at=datetime.now(tz=UTC), + ) + created_item = await licensed_items_purchases.create_licensed_item_purchase( + rpc_client, data=_create_data + ) + + # Checkout with num of seats + checkout = await licensed_items_checkouts.checkout_licensed_item( + rpc_client, + licensed_item_id=created_item.licensed_item_id, + wallet_id=_WALLET_ID, + product_name="osparc", + num_of_seats=3, + service_run_id=resource_tracker_service_run_id, + user_id=_USER_ID_1, + user_email="test@test.com", + ) + + # List licensed items checkouts + output = await licensed_items_checkouts.get_licensed_items_checkouts_page( + rpc_client, + product_name="osparc", + filter_wallet_id=_WALLET_ID, + ) + assert output.total == 1 + assert isinstance(output, LicensedItemsCheckoutsPage) + + # Get licensed items checkouts + output = await licensed_items_checkouts.get_licensed_item_checkout( + rpc_client, + product_name="osparc", + licensed_item_checkout_id=output.items[0].licensed_item_checkout_id, + ) + assert isinstance(output, LicensedItemCheckoutGet) + + # Release num of seats + license_item_checkout = await licensed_items_checkouts.release_licensed_item( + rpc_client, + licensed_item_checkout_id=checkout.licensed_item_checkout_id, + product_name="osparc", + ) + assert license_item_checkout + assert isinstance(license_item_checkout.stopped_at, datetime) diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_licensed_items_checkouts_db.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_licensed_items_checkouts_db.py new file mode 100644 index 00000000000..5f0fc5a1f5b --- /dev/null +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_licensed_items_checkouts_db.py @@ -0,0 +1,139 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name +# pylint:disable=too-many-arguments + + +from datetime import UTC, datetime +from typing import Generator +from unittest import mock + +import pytest +import sqlalchemy as sa +from models_library.basic_types import IDStr +from models_library.rest_ordering import OrderBy +from simcore_postgres_database.models.resource_tracker_licensed_items_checkouts import ( + resource_tracker_licensed_items_checkouts, +) +from simcore_postgres_database.models.resource_tracker_service_runs import ( + resource_tracker_service_runs, +) +from simcore_service_resource_usage_tracker.models.licensed_items_checkouts import ( + 
CreateLicensedItemCheckoutDB, +) +from simcore_service_resource_usage_tracker.services.modules.db import ( + licensed_items_checkouts_db, +) + +pytest_simcore_core_services_selection = [ + "postgres", +] +pytest_simcore_ops_services_selection = [ + "adminer", +] + + +_USER_ID_1 = 1 +_WALLET_ID = 6 + + +@pytest.fixture() +def resource_tracker_service_run_id( + postgres_db: sa.engine.Engine, random_resource_tracker_service_run +) -> Generator[str, None, None]: + with postgres_db.connect() as con: + result = con.execute( + resource_tracker_service_runs.insert() + .values( + **random_resource_tracker_service_run( + user_id=_USER_ID_1, wallet_id=_WALLET_ID + ) + ) + .returning(resource_tracker_service_runs.c.service_run_id) + ) + row = result.first() + assert row + + yield row[0] + + con.execute(resource_tracker_licensed_items_checkouts.delete()) + con.execute(resource_tracker_service_runs.delete()) + + +async def test_licensed_items_checkouts_db__force_release_license_seats_by_run_id( + mocked_redis_server: None, + mocked_setup_rabbitmq: mock.Mock, + resource_tracker_service_run_id, + initialized_app, +): + engine = initialized_app.state.engine + + # SETUP + _create_license_item_checkout_db_1 = CreateLicensedItemCheckoutDB( + licensed_item_id="beb16d18-d57d-44aa-a638-9727fa4a72ef", + wallet_id=_WALLET_ID, + user_id=_USER_ID_1, + user_email="test@test.com", + product_name="osparc", + service_run_id=resource_tracker_service_run_id, + started_at=datetime.now(tz=UTC), + num_of_seats=1, + ) + await licensed_items_checkouts_db.create( + engine, data=_create_license_item_checkout_db_1 + ) + + _create_license_item_checkout_db_2 = _create_license_item_checkout_db_1.model_dump() + _create_license_item_checkout_db_2[ + "licensed_item_id" + ] = "b1b96583-333f-44d6-b1e0-5c0a8af555bf" + await licensed_items_checkouts_db.create( + engine, + data=CreateLicensedItemCheckoutDB.model_construct( + **_create_license_item_checkout_db_2 + ), + ) + + _create_license_item_checkout_db_3 = _create_license_item_checkout_db_1.model_dump() + _create_license_item_checkout_db_3[ + "licensed_item_id" + ] = "38a5ce59-876f-482a-ace1-d3b2636feac6" + checkout = await licensed_items_checkouts_db.create( + engine, + data=CreateLicensedItemCheckoutDB.model_construct( + **_create_license_item_checkout_db_3 + ), + ) + + _helper_time = datetime.now(UTC) + await licensed_items_checkouts_db.update( + engine, + licensed_item_checkout_id=checkout.licensed_item_checkout_id, + product_name="osparc", + stopped_at=_helper_time, + ) + + # TEST FORCE RELEASE LICENSE SEATS + await licensed_items_checkouts_db.force_release_license_seats_by_run_id( + engine, service_run_id=resource_tracker_service_run_id + ) + + # ASSERT + total, items = await licensed_items_checkouts_db.list_( + engine, + product_name="osparc", + filter_wallet_id=_WALLET_ID, + offset=0, + limit=5, + order_by=OrderBy(field=IDStr("started_at")), + ) + assert total == 3 + assert len(items) == 3 + + _helper_count = 0 + for item in items: + assert isinstance(item.stopped_at, datetime) + if item.stopped_at > _helper_time: + _helper_count += 1 + + assert _helper_count == 2 diff --git a/services/static-webserver/client/source/class/osparc/Application.js b/services/static-webserver/client/source/class/osparc/Application.js index 20750d2f941..463ddbd3492 100644 --- a/services/static-webserver/client/source/class/osparc/Application.js +++ b/services/static-webserver/client/source/class/osparc/Application.js @@ -462,7 +462,7 @@ qx.Class.define("osparc.Application", { if 
(osparc.auth.Data.getInstance().isGuest()) { const msg = osparc.utils.Utils.createAccountMessage(); osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); - } else if ("expirationDate" in profile) { + } else if (profile["expirationDate"]) { const now = new Date(); const today = new Date(now.toISOString().slice(0, 10)); const expirationDay = new Date(profile["expirationDate"]); diff --git a/services/static-webserver/client/source/class/osparc/auth/Data.js b/services/static-webserver/client/source/class/osparc/auth/Data.js index 306d3032558..2a4b27a9646 100644 --- a/services/static-webserver/client/source/class/osparc/auth/Data.js +++ b/services/static-webserver/client/source/class/osparc/auth/Data.js @@ -147,6 +147,17 @@ qx.Class.define("osparc.auth.Data", { return this.getUsername(); }, + getFullName: function() { + let name = ""; + if (this.getFirstName()) { + name += this.getFirstName(); + } + if (this.getLastName()) { + name += " " + this.getLastName(); + } + return name; + }, + getFriendlyRole: function() { const role = this.getRole(); let friendlyRole = role.replace(/_/g, " "); diff --git a/services/static-webserver/client/source/class/osparc/auth/Manager.js b/services/static-webserver/client/source/class/osparc/auth/Manager.js index ca497e5eabb..fdd082cff96 100644 --- a/services/static-webserver/client/source/class/osparc/auth/Manager.js +++ b/services/static-webserver/client/source/class/osparc/auth/Manager.js @@ -243,7 +243,7 @@ qx.Class.define("osparc.auth.Manager", { username: profile["userName"], firstName: profile["first_name"], lastName: profile["last_name"], - expirationDate: "expirationDate" in profile ? new Date(profile["expirationDate"]) : null + expirationDate: profile["expirationDate"] ? new Date(profile["expirationDate"]) : null }); }, diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js index 56972d7eb6f..fdbcebeaec2 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js @@ -140,6 +140,8 @@ qx.Class.define("osparc.auth.ui.LoginView", { `; } const disclaimer = osparc.announcement.AnnouncementUIFactory.createLoginAnnouncement(this.tr("Disclaimer"), text); + disclaimer.getChildren()[0].setFont("text-14"); // title + disclaimer.getChildren()[1].setFont("text-12"); // description this.add(disclaimer); this.add(new qx.ui.core.Spacer(), { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js index 0d058644bce..2be758253a0 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js @@ -16,7 +16,7 @@ ************************************************************************ */ qx.Class.define("osparc.dashboard.CardBase", { - extend: qx.ui.form.ToggleButton, + extend: qx.ui.core.Widget, implement: [qx.ui.form.IModel, osparc.filter.IFilterable], include: [qx.ui.form.MModelProperty, osparc.filter.MFilterable], type: "abstract", @@ -33,6 +33,8 @@ qx.Class.define("osparc.dashboard.CardBase", { "pointerout", "focusout" ].forEach(e => this.addListener(e, this._onPointerOut, this)); + + this.addListener("changeSelected", this.__evalSelectedButton, this); }, events: { @@ -83,7 +85,7 @@ qx.Class.define("osparc.dashboard.CardBase", { filterText: 
function(checks, text) {
       if (text) {
-        const includesSome = checks.some(check => check.toLowerCase().trim().includes(text.toLowerCase()));
+        const includesSome = checks.some(check => check && check.toLowerCase().trim().includes(text.toLowerCase()));
         return !includesSome;
       }
       return false;
@@ -146,54 +148,59 @@ qx.Class.define("osparc.dashboard.CardBase", {
       return false;
     },

-    // groups -> [orgMembs, orgs, [productEveryone], [everyone]];
-    setIconAndTooltip: function(shareIcon, accessRights, groups) {
-      shareIcon.setSource(osparc.dashboard.CardBase.SHARE_ICON);
-      if (osparc.data.model.Study.canIWrite(accessRights)) {
-        shareIcon.set({
-          toolTipText: qx.locale.Manager.tr("Share")
-        });
+    populateShareIcon: async function(shareIcon, accessRights) {
+      const gids = Object.keys(accessRights).map(key => parseInt(key));
+
+      const groupsStore = osparc.store.Groups.getInstance();
+
+      // Icon
+      const groupEveryone = groupsStore.getEveryoneGroup();
+      const groupProductEveryone = groupsStore.getEveryoneProductGroup();
+      const organizations = groupsStore.getOrganizations();
+      const organizationIds = Object.keys(organizations).map(key => parseInt(key));
+      if (gids.includes(groupEveryone.getGroupId()) || gids.includes(groupProductEveryone.getGroupId())) {
+        shareIcon.setSource(osparc.dashboard.CardBase.SHARED_ALL);
+      } else if (organizationIds.filter(value => gids.includes(value)).length) { // find intersection
+        shareIcon.setSource(osparc.dashboard.CardBase.SHARED_ORGS);
+      } else if (gids.length === 1) {
+        shareIcon.setSource(osparc.dashboard.CardBase.SHARE_ICON);
+      } else {
+        shareIcon.setSource(osparc.dashboard.CardBase.SHARED_USER);
       }
-      let sharedGrps = [];
-      const myGroupId = osparc.auth.Data.getInstance().getGroupId();
-      for (let i=0; i<groups.length; i++) {
+      // Tooltip
+      const sharedGrps = [];
+      const groups = [groupEveryone, groupProductEveryone, ...Object.values(organizations)];
+      groups.forEach(group => {
+        const idx = gids.indexOf(group.getGroupId());
+        if (idx > -1) {
+          sharedGrps.push(group);
+          gids.splice(idx, 1);
        }
-        const sharedGrp = [];
-        const gids = Object.keys(accessRights);
-        for (let j=0; j<gids.length; j++) {
-          const gid = parseInt(gids[j]);
-          const grp = groups[i].find(group => group.getGroupId() === gid);
-          if (grp) {
-            sharedGrp.push(grp);
+      });
+      // once the groups were removed, the remaining group ids are users' primary groups ids
+      const usersStore = osparc.store.Users.getInstance();
+      const myGroupId = groupsStore.getMyGroupId();
+      for (let i=0; i<gids.length; i++) {
+        const gid = gids[i];
+        if (gid === myGroupId) {
+          continue;
+        }
+        const user = await usersStore.getUser(gid);
+        if (user) {
+          sharedGrps.push(user);
+        }
+      }
+      if (sharedGrps.length === 0) {
+        return;
+      }
+      const hintText = sharedGrps.map(group => group.getLabel()).join("<br>");
+      const hint = new osparc.ui.hint.Hint(shareIcon, hintText);
       shareIcon.addListener("mouseover", () => hint.show(), this);
       shareIcon.addListener("mouseout", () => hint.exclude(), this);
     },
-
-    // groups -> [orgMembs, orgs, [productEveryone], [everyone]];
-    populateShareIcon: function(shareIcon, accessRights) {
-      const groupsStore = osparc.store.Groups.getInstance();
-      const orgMembs = Object.values(groupsStore.getReachableUsers());
-      const orgs = Object.values(groupsStore.getOrganizations());
-      const productEveryone = [groupsStore.getEveryoneProductGroup()];
-      const everyone = [groupsStore.getEveryoneGroup()];
-      const groups = [orgMembs, orgs, productEveryone, everyone];
-      osparc.dashboard.CardBase.setIconAndTooltip(shareIcon, accessRights, groups);
-    },
   },

   properties: {
@@ -237,6 +233,20 @@ qx.Class.define("osparc.dashboard.CardBase", {
       nullable: true
     },

+    selected: {
+      check: "Boolean",
+      init: false,
+      nullable: false,
+      event: "changeSelected",
+    },
+
+    icon: {
+      check: "String",
+      init: null,
+      nullable: true,
+      apply: "_applyIcon",
+    },
+
     resourceData: {
       check: "Object",
       nullable: false,
@@ -246,7 +256,8 @@

     resourceType: {
       check: ["study", "template", "service"],
-      nullable: false,
+      init: true,
+      nullable: true,
       event: "changeResourceType"
     },

@@ -365,7 +376,7 @@
       check: "Boolean",
       init: false,
nullable: false, - apply: "_applyMultiSelectionMode" + apply: "__applyMultiSelectionMode" }, fetching: { @@ -444,6 +455,35 @@ qx.Class.define("osparc.dashboard.CardBase", { }); }, + __applyMultiSelectionMode: function(value) { + if (!value) { + this.setSelected(false); + } + this.__evalSelectedButton(); + }, + + __evalSelectedButton: function() { + if ( + this.hasChildControl("menu-button") && + this.hasChildControl("tick-selected") && + this.hasChildControl("tick-unselected") + ) { + const menuButton = this.getChildControl("menu-button"); + const tick = this.getChildControl("tick-selected"); + const untick = this.getChildControl("tick-unselected"); + if (this.isResourceType("study") && this.isMultiSelectionMode()) { + const selected = this.getSelected(); + menuButton.setVisibility("excluded"); + tick.setVisibility(selected ? "visible" : "excluded"); + untick.setVisibility(selected ? "excluded" : "visible"); + } else { + menuButton.setVisibility("visible"); + tick.setVisibility("excluded"); + untick.setVisibility("excluded"); + } + } + }, + __applyUuid: function(value, old) { const resourceType = this.getResourceType() || "study"; osparc.utils.Utils.setIdToWidget(this, resourceType + "BrowserListItem_" + value); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ToggleButtonContainer.js b/services/static-webserver/client/source/class/osparc/dashboard/CardContainer.js similarity index 67% rename from services/static-webserver/client/source/class/osparc/dashboard/ToggleButtonContainer.js rename to services/static-webserver/client/source/class/osparc/dashboard/CardContainer.js index bbabe433161..047b047e8f7 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ToggleButtonContainer.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/CardContainer.js @@ -6,9 +6,9 @@ */ /** - * Container for GridButtonItems and ListButtonItems (ToggleButtons), with some convenient methods. + * Container for GridButtons and ListButtons (CardBase, FolderButtonBase and WorkspaceButtonBase), with some convenient methods. 
*/
-qx.Class.define("osparc.dashboard.ToggleButtonContainer", {
+qx.Class.define("osparc.dashboard.CardContainer", {
   extend: qx.ui.container.Composite,

   construct: function() {
@@ -22,20 +22,30 @@ qx.Class.define("osparc.dashboard.ToggleButtonContainer", {
     "changeVisibility": "qx.event.type.Data"
   },

+  statics: {
+    isValidCard: function(widget) {
+      return (
+        widget instanceof osparc.dashboard.CardBase ||
+        widget instanceof osparc.dashboard.FolderButtonBase ||
+        widget instanceof osparc.dashboard.WorkspaceButtonBase
+      );
+    },
+  },
+
   members: {
     __lastSelectedIdx: null,

     // overridden
     add: function(child, options) {
-      if (child instanceof qx.ui.form.ToggleButton) {
+      if (this.self().isValidCard(child)) {
         if (osparc.dashboard.ResourceContainerManager.cardExists(this, child)) {
           return;
         }
         this.base(arguments, child, options);
-        child.addListener("changeValue", () => this.fireDataEvent("changeSelection", this.getSelection()), this);
+        child.addListener("changeSelected", () => this.fireDataEvent("changeSelection", this.getSelection()), this);
         child.addListener("changeVisibility", () => this.fireDataEvent("changeVisibility", this.__getVisibles()), this);
       } else {
-        console.error("ToggleButtonContainer only allows ToggleButton as its children.");
+        console.error("CardContainer only allows CardBase as its children.");
       }
     },

@@ -43,7 +53,7 @@
     /**
      * Resets the selection so no toggle button is checked.
      */
     resetSelection: function() {
-      this.getChildren().map(button => button.setValue(false));
+      this.getChildren().map(button => button.setSelected(false));
       this.__lastSelectedIdx = null;
       this.fireDataEvent("changeSelection", this.getSelection());
     },

@@ -52,7 +62,7 @@
     /**
      * Returns an array that contains all buttons that are checked.
      */
     getSelection: function() {
-      return this.getChildren().filter(button => button.getValue());
+      return this.getChildren().filter(button => button.getSelected());
     },

     /**
@@ -63,18 +73,18 @@
     },

     /**
-     * Sets the given button's value to true (checks it) and unchecks all other buttons. If the given button is not present,
-     * every button in the container will get a false value (unchecked).
-     * @param {qx.ui.form.ToggleButton} child Button that will be checked
+     * Sets the given button's selected property to true (checks it) and unchecks all other buttons. If the given button is not present,
+     * every button in the container will be unselected (unchecked).
+     * @param {osparc.dashboard.CardBase} child Button that will be checked
      */
     selectOne: function(child) {
-      this.getChildren().map(button => button.setValue(button === child));
+      this.getChildren().map(button => button.setSelected(button === child));
       this.setLastSelectedIndex(this.getIndex(child));
     },

     /**
      * Gets the index in the container of the given button.
- * @param {qx.ui.form.ToggleButton} child Button that will be checked + * @param {qx.ui.form.CardBase} child Button that will be checked */ getIndex: function(child) { return this.getChildren().findIndex(button => button === child); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/DragDropHelpers.js b/services/static-webserver/client/source/class/osparc/dashboard/DragDropHelpers.js new file mode 100644 index 00000000000..b67d7669de2 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/dashboard/DragDropHelpers.js @@ -0,0 +1,254 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.dashboard.DragDropHelpers", { + type: "static", + + statics: { + moveStudy: { + dragStart: function(event, studyItem, studyDataOrigin) { + event.addAction("move"); + event.addType("osparc-moveStudy"); + event.addData("osparc-moveStudy", { + "studyDataOrigin": studyDataOrigin, + }); + + // init drag indicator + const dragWidget = osparc.dashboard.DragWidget.getInstance(); + dragWidget.getChildControl("dragged-resource").set({ + label: studyDataOrigin["name"], + icon: "@FontAwesome5Solid/file/16", + }); + dragWidget.start(); + + // make it semi transparent while being dragged + studyItem.setOpacity(0.2); + }, + + dragOver: function(event, folderItem, workspaceDestId) { + let compatible = false; + const studyDataOrigin = event.getData("osparc-moveStudy")["studyDataOrigin"]; + const workspaceIdOrigin = studyDataOrigin["workspaceId"]; + const workspaceOrigin = osparc.store.Workspaces.getInstance().getWorkspace(workspaceIdOrigin); + const workspaceDest = osparc.store.Workspaces.getInstance().getWorkspace(workspaceDestId); + // Compatibility checks: + // - Drag over "Shared Workspaces" (0) + // - No + // - My Workspace -> My Workspace (1) + // - Yes + // - My Workspace -> Shared Workspace (2) + // - Delete on Study + // - Write on dest Workspace + // - Shared Workspace -> My Workspace (3) + // - Delete on origin Workspace + // - Shared Workspace -> Shared Workspace (4) + // - Delete on origin Workspace + // - Write on dest Workspace + if (workspaceDestId === -1) { // (0) + compatible = false; + } else if (workspaceIdOrigin === null && workspaceDestId === null) { // (1) + compatible = true; + } else if (workspaceIdOrigin === null && workspaceDest) { // (2) + compatible = osparc.data.model.Study.canIDelete(studyDataOrigin["accessRights"]) && workspaceDest.getMyAccessRights()["write"]; + } else if (workspaceOrigin && workspaceDestId === null) { // (3) + compatible = workspaceOrigin.getMyAccessRights()["delete"]; + } else if (workspaceOrigin && workspaceDest) { // (4) + compatible = workspaceOrigin.getMyAccessRights()["delete"] && workspaceDest.getMyAccessRights()["write"]; + } + + if (!compatible) { + // do not allow + event.preventDefault(); + } + + const dragWidget = osparc.dashboard.DragWidget.getInstance(); + dragWidget.setDropAllowed(compatible); + + folderItem.getChildControl("icon").setTextColor(compatible ? 
"strong-main" : "text"); + }, + + drop: function(event, folderItem, destWorkspaceId, destFolderId) { + const studyData = event.getData("osparc-moveStudy")["studyDataOrigin"]; + const studyToFolderData = { + studyData, + destWorkspaceId, + destFolderId, + }; + folderItem.getChildControl("icon").resetTextColor(); + return studyToFolderData; + }, + }, + + moveFolder: { + dragStart: function(event, folderItem, folderOrigin) { + event.addAction("move"); + event.addType("osparc-moveFolder"); + event.addData("osparc-moveFolder", { + "folderOrigin": folderOrigin, + }); + + // init drag indicator + const dragWidget = osparc.dashboard.DragWidget.getInstance(); + dragWidget.getChildControl("dragged-resource").set({ + label: folderOrigin.getName(), + icon: "@FontAwesome5Solid/folder/16", + }); + dragWidget.start(); + + // make it semi transparent while being dragged + folderItem.setOpacity(0.2); + }, + + dragOver: function(event, folderItem, workspaceDestId, folderDestId) { + let compatible = false; + const folderOrigin = event.getData("osparc-moveFolder")["folderOrigin"]; + const workspaceIdOrigin = folderOrigin.getWorkspaceId(); + const workspaceOrigin = osparc.store.Workspaces.getInstance().getWorkspace(workspaceIdOrigin); + const workspaceDest = osparc.store.Workspaces.getInstance().getWorkspace(workspaceDestId); + // Compatibility checks: + // - Drag over "Shared Workspaces" (0) + // - No + // - My Workspace -> My Workspace (1) + // - Yes + // - My Workspace -> Shared Workspace (2) + // - ~~Delete on Study~~ + // - Write on dest Workspace + // - Shared Workspace -> My Workspace (3) + // - Delete on origin Workspace + // - Shared Workspace -> Shared Workspace (4) + // - Delete on origin Workspace + // - Write on dest Workspace + if (workspaceDestId === -1) { // (0) + compatible = false; + } else if (folderOrigin.getFolderId() === folderDestId) { + compatible = false; + } else if (workspaceIdOrigin === null && workspaceDestId === null) { // (1) + compatible = true; + } else if (workspaceIdOrigin === null && workspaceDest) { // (2) + compatible = workspaceDest.getMyAccessRights()["write"]; + } else if (workspaceOrigin && workspaceDestId === null) { // (3) + compatible = workspaceOrigin.getMyAccessRights()["delete"]; + } else if (workspaceOrigin && workspaceDest) { // (4) + compatible = workspaceOrigin.getMyAccessRights()["delete"] && workspaceDest.getMyAccessRights()["write"]; + } + + if (!compatible) { + // do not allow + event.preventDefault(); + } + + const dragWidget = osparc.dashboard.DragWidget.getInstance(); + dragWidget.setDropAllowed(compatible); + + folderItem.getChildControl("icon").setTextColor(compatible ? 
"strong-main" : "text"); + }, + + drop: function(event, folderItem, destWorkspaceId, destFolderId) { + const folderOrigin = event.getData("osparc-moveFolder")["folderOrigin"]; + const folderToFolderData = { + folderId: folderOrigin.getFolderId(), + workspaceId: folderOrigin.getWorkspaceId(), + destWorkspaceId, + destFolderId, + }; + folderItem.getChildControl("icon").resetTextColor(); + return folderToFolderData; + }, + }, + + trashStudy: { + dragOver: function(event) { + let compatible = false; + const studyDataOrigin = event.getData("osparc-moveStudy")["studyDataOrigin"]; + const workspaceIdOrigin = studyDataOrigin["workspaceId"]; + const workspaceOrigin = osparc.store.Workspaces.getInstance().getWorkspace(workspaceIdOrigin); + // Compatibility checks: + // - My Workspace -> Trash (0) + // - Delete on Study + // - Shared Workspace -> Trash (1) + // - Delete on Shared Workspace + if (workspaceIdOrigin === null) { // (0) + compatible = osparc.data.model.Study.canIDelete(studyDataOrigin["accessRights"]); + } else if (workspaceIdOrigin !== null) { // (1) + compatible = workspaceOrigin.getMyAccessRights()["delete"]; + } + + if (!compatible) { + // do not allow + event.preventDefault(); + } + + const dragWidget = osparc.dashboard.DragWidget.getInstance(); + dragWidget.setDropAllowed(compatible); + }, + + drop: function(event) { + return event.getData("osparc-moveStudy")["studyDataOrigin"]; + }, + }, + + trashFolder: { + dragOver: function(event) { + let compatible = false; + const folderOrigin = event.getData("osparc-moveFolder")["folderOrigin"]; + const workspaceIdOrigin = folderOrigin.getWorkspaceId(); + const workspaceOrigin = osparc.store.Workspaces.getInstance().getWorkspace(workspaceIdOrigin); + // Compatibility checks: + // - My Workspace -> Trash (0) + // - Yes + // - Shared Workspace -> Trash (1) + // - Delete on Shared Workspace + if (workspaceIdOrigin === null) { // (0) + compatible = true; + } else if (workspaceIdOrigin !== null) { // (1) + compatible = workspaceOrigin.getMyAccessRights()["delete"]; + } + + if (!compatible) { + // do not allow + event.preventDefault(); + } + + const dragWidget = osparc.dashboard.DragWidget.getInstance(); + dragWidget.setDropAllowed(compatible); + }, + + drop: function(event) { + const folderOrigin = event.getData("osparc-moveFolder")["folderOrigin"]; + return folderOrigin.getFolderId(); + }, + }, + + dragLeave: function(item) { + const dragWidget = osparc.dashboard.DragWidget.getInstance(); + dragWidget.setDropAllowed(false); + + if (item) { + item.getChildControl("icon").resetTextColor(); + } + }, + + dragEnd: function(draggedItem) { + // bring back opacity after drag + draggedItem.setOpacity(1); + + // hide drag indicator + const dragWidget = osparc.dashboard.DragWidget.getInstance(); + dragWidget.end(); + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/DragWidget.js b/services/static-webserver/client/source/class/osparc/dashboard/DragWidget.js new file mode 100644 index 00000000000..4685f93caaf --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/dashboard/DragWidget.js @@ -0,0 +1,103 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + 
+qx.Class.define("osparc.dashboard.DragWidget", { + extend: qx.ui.core.Widget, + type: "singleton", + + construct: function() { + this.base(arguments); + + this._setLayout(new qx.ui.layout.HBox(10).set({ + alignY: "middle", + })); + + this.set({ + appearance: "strong-ui", + opacity: 0.9, + padding: 10, + zIndex: 1000, + decorator: "rounded", + visibility: "excluded", + }); + + const root = qx.core.Init.getApplication().getRoot(); + root.add(this); + + this.initDropAllowed(); + }, + + properties: { + dropAllowed: { + check: "Boolean", + nullable: false, + init: null, + apply: "__dropAllowed", + }, + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "allowed-icon": + control = new qx.ui.basic.Image(); + this._add(control); + break; + case "dragged-resource": + control = new qx.ui.basic.Atom().set({ + font: "text-14", + }); + this._add(control); + break; + } + return control || this.base(arguments, id); + }, + + __dropAllowed: function(allowed) { + this.getChildControl("allowed-icon").set({ + source: allowed ? "@FontAwesome5Solid/check/14" : "@FontAwesome5Solid/times/14", + textColor: allowed ? "default-button-text" : "danger-red", + }); + }, + + __onMouseMoveDragging: function(e) { + if (this.getContentElement()) { + // place it next to the "dragdrop-own-cursor" indicator + const domEl = this.getContentElement().getDomElement(); + domEl.style.left = `${e.pageX + 15}px`; + domEl.style.top = `${e.pageY + 5}px`; + } + }, + + start: function() { + this.show(); + document.addEventListener("mousemove", this.__onMouseMoveDragging.bind(this), false); + + const cursor = qx.ui.core.DragDropCursor.getInstance(); + cursor.setAppearance("dragdrop-no-cursor"); + }, + + end: function() { + this.exclude(); + document.removeEventListener("mousemove", this.__onMouseMoveDragging.bind(this), false); + + const cursor = qx.ui.core.DragDropCursor.getInstance(); + cursor.setAppearance("dragdrop-cursor"); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonBase.js b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonBase.js index ff567a659cb..435e63b2129 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonBase.js @@ -16,7 +16,7 @@ ************************************************************************ */ qx.Class.define("osparc.dashboard.FolderButtonBase", { - extend: qx.ui.form.ToggleButton, + extend: qx.ui.core.Widget, implement: [qx.ui.form.IModel, osparc.filter.IFilterable], include: [qx.ui.form.MModelProperty, osparc.filter.MFilterable], type: "abstract", diff --git a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js index ac919b73579..f496b14501a 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js @@ -33,7 +33,7 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { appearance: "pb-study" }); - this.addListener("changeValue", e => this.__itemSelected(e.getData()), this); + this.addListener("tap", this.__itemSelected, this); this.setPriority(osparc.dashboard.CardBase.CARD_PRIORITY.ITEM); @@ -50,6 +50,8 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { "untrashFolderRequested": "qx.event.type.Data", 
"deleteFolderRequested": "qx.event.type.Data", "changeContext": "qx.event.type.Data", + "studyToFolderRequested": "qx.event.type.Data", + "folderToFolderRequested": "qx.event.type.Data", }, properties: { @@ -152,6 +154,54 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { osparc.utils.Utils.setIdToWidget(this, "folderItem_" + folder.getFolderId()); this.__addMenuButton(); + + this.__attachDragHandlers(); + this.__attachDropHandlers(); + }, + + __attachDragHandlers: function() { + this.setDraggable(true); + + this.addListener("dragstart", e => { + const folderOrigin = this.getFolder(); + osparc.dashboard.DragDropHelpers.moveFolder.dragStart(e, this, folderOrigin); + }); + + this.addListener("dragend", () => { + osparc.dashboard.DragDropHelpers.dragEnd(this); + }); + }, + + __attachDropHandlers: function() { + this.setDroppable(true); + + this.addListener("dragover", e => { + const folderDest = this.getFolder(); + if (e.supportsType("osparc-moveStudy")) { + osparc.dashboard.DragDropHelpers.moveStudy.dragOver(e, this, folderDest.getWorkspaceId(), folderDest.getFolderId()); + } else if (e.supportsType("osparc-moveFolder")) { + osparc.dashboard.DragDropHelpers.moveFolder.dragOver(e, this, folderDest.getWorkspaceId(), folderDest.getFolderId()); + } + }); + + this.addListener("dragleave", () => { + osparc.dashboard.DragDropHelpers.dragLeave(this); + }); + + this.addListener("dragend", () => { + osparc.dashboard.DragDropHelpers.dragLeave(this); + }); + + this.addListener("drop", e => { + const folderDest = this.getFolder(); + if (e.supportsType("osparc-moveStudy")) { + const studyToFolderData = osparc.dashboard.DragDropHelpers.moveStudy.drop(e, this, folderDest.getWorkspaceId(), folderDest.getFolderId()); + this.fireDataEvent("studyToFolderRequested", studyToFolderData); + } else if (e.supportsType("osparc-moveFolder")) { + const folderToFolderData = osparc.dashboard.DragDropHelpers.moveFolder.drop(e, this, folderDest.getWorkspaceId(), folderDest.getFolderId()); + this.fireDataEvent("folderToFolderRequested", folderToFolderData); + } + }); }, __applyWorkspaceId: function(workspaceId) { @@ -188,9 +238,9 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { const menuButton = this.getChildControl("menu-button"); menuButton.setVisibility("visible"); - const menu = new qx.ui.menu.Menu().set({ - position: "bottom-right" - }); + const menu = new qx.ui.menu.Menu(); + menu.setPosition("bottom-right"); + osparc.utils.Utils.prettifyMenu(menu); const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext(); if ( @@ -222,7 +272,7 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { menu.addSeparator(); const trashButton = new qx.ui.menu.Button(this.tr("Trash"), "@FontAwesome5Solid/trash/12"); - trashButton.addListener("execute", () => this.__trashFolderRequested(), this); + trashButton.addListener("execute", () => this.fireDataEvent("trashFolderRequested", this.getFolderId()), this); menu.add(trashButton); } else if (studyBrowserContext === "trash") { const restoreButton = new qx.ui.menu.Button(this.tr("Restore"), "@MaterialIcons/restore_from_trash/16"); @@ -240,13 +290,12 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { menuButton.setMenu(menu); }, - __itemSelected: function(newVal) { + __itemSelected: function() { const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext(); // do not allow selecting workspace - if (studyBrowserContext !== "trash" && newVal) { + if (studyBrowserContext !== "trash") { 
this.fireDataEvent("folderSelected", this.getFolderId()); } - this.setValue(false); }, __editFolder: function() { @@ -276,24 +325,6 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { folderEditor.addListener("cancel", () => win.close()); }, - __trashFolderRequested: function() { - const trashDays = osparc.store.StaticInfo.getInstance().getTrashRetentionDays(); - let msg = this.tr("Are you sure you want to move the Folder and all its content to the trash?"); - msg += "
<br>
" + this.tr("It will be permanently deleted after ") + trashDays + " days."; - const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ - caption: this.tr("Move to Trash"), - confirmText: this.tr("Move to Trash"), - confirmAction: "delete" - }); - confirmationWin.center(); - confirmationWin.open(); - confirmationWin.addListener("close", () => { - if (confirmationWin.getConfirmed()) { - this.fireDataEvent("trashFolderRequested", this.getFolderId()); - } - }, this); - }, - __deleteFolderRequested: function() { const msg = this.tr("Are you sure you want to delete") + " " + this.getTitle() + "?"; const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ diff --git a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonNew.js b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonNew.js index 6fe4c7d9bba..42bdb7128b4 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonNew.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonNew.js @@ -30,7 +30,7 @@ qx.Class.define("osparc.dashboard.FolderButtonNew", { appearance: "pb-new" }); - this.addListener("changeValue", e => this.__itemSelected(e.getData()), this); + this.addListener("tap", this.__itemSelected, this); this.setPriority(osparc.dashboard.CardBase.CARD_PRIORITY.NEW); @@ -77,22 +77,19 @@ qx.Class.define("osparc.dashboard.FolderButtonNew", { this.getChildControl("title"); }, - __itemSelected: function(newVal) { - if (newVal) { - const newFolder = true; - const folderEditor = new osparc.editor.FolderEditor(newFolder); - const title = this.tr("New Folder"); - const win = osparc.ui.window.Window.popUpInWindow(folderEditor, title, 300, 120); - folderEditor.addListener("createFolder", () => { - const name = folderEditor.getLabel(); - this.fireDataEvent("createFolder", { - name, - }); - win.close(); + __itemSelected: function() { + const newFolder = true; + const folderEditor = new osparc.editor.FolderEditor(newFolder); + const title = this.tr("New Folder"); + const win = osparc.ui.window.Window.popUpInWindow(folderEditor, title, 300, 120); + folderEditor.addListener("createFolder", () => { + const name = folderEditor.getLabel(); + this.fireDataEvent("createFolder", { + name, }); - folderEditor.addListener("cancel", () => win.close()); - } - this.setValue(false); + win.close(); + }); + folderEditor.addListener("cancel", () => win.close()); } } }); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js index ad0a78c20c1..e1b7c72ff71 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js @@ -28,6 +28,8 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { construct: function() { this.base(arguments); + this._setLayout(new qx.ui.layout.Canvas()); + this.set({ width: this.self().ITEM_WIDTH, height: this.self().ITEM_HEIGHT, @@ -35,8 +37,6 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { allowGrowX: false }); - this._setLayout(new qx.ui.layout.Canvas()); - this.getChildControl("main-layout"); }, @@ -107,7 +107,6 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { }, members: { - // overridden _createChildControlImpl: function(id) { let layout; diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js 
b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js index e9019262342..003648f7629 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js @@ -31,8 +31,6 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { this.base(arguments); this.setPriority(osparc.dashboard.CardBase.CARD_PRIORITY.ITEM); - - this.addListener("changeValue", this.__itemSelected, this); }, statics: { @@ -176,45 +174,6 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { return control || this.base(arguments, id); }, - // overridden - _applyMultiSelectionMode: function(value) { - if (value) { - const menuButton = this.getChildControl("menu-button"); - menuButton.setVisibility("excluded"); - this.__itemSelected(); - } else { - this.__showMenuOnly(); - } - }, - - __itemSelected: function() { - if (this.isItemNotClickable()) { - this.setValue(false); - return; - } - - if (this.isResourceType("study") && this.isMultiSelectionMode()) { - const selected = this.getValue(); - - const tick = this.getChildControl("tick-selected"); - tick.setVisibility(selected ? "visible" : "excluded"); - - const untick = this.getChildControl("tick-unselected"); - untick.setVisibility(selected ? "excluded" : "visible"); - } else { - this.__showMenuOnly(); - } - }, - - __showMenuOnly: function() { - const menuButton = this.getChildControl("menu-button"); - menuButton.setVisibility("visible"); - const tick = this.getChildControl("tick-selected"); - tick.setVisibility("excluded"); - const untick = this.getChildControl("tick-unselected"); - untick.setVisibility("excluded"); - }, - // overridden _applyLastChangeDate: function(value, old) { if (value && (this.isResourceType("study") || this.isResourceType("template"))) { @@ -277,7 +236,7 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { const menuButton = this.getChildControl("menu-button"); if (menu) { menuButton.setMenu(menu); - menu.setPosition("top-left"); + menu.setPosition("bottom-left"); osparc.utils.Utils.prettifyMenu(menu); osparc.utils.Utils.setIdToWidget(menu, "studyItemMenuMenu"); this.evaluateMenuButtons(); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonLoadMore.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonLoadMore.js index a10d57dcaa2..af3bf1ae666 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonLoadMore.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonLoadMore.js @@ -50,10 +50,6 @@ qx.Class.define("osparc.dashboard.GridButtonLoadMore", { this.setEnabled(!value); }, - _onToggleChange: function(e) { - this.setValue(false); - }, - _shouldApplyFilter: function() { return false; }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonNew.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonNew.js index 4a2a3577e31..3cb8a8c92b7 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonNew.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonNew.js @@ -67,10 +67,6 @@ qx.Class.define("osparc.dashboard.GridButtonNew", { }, members: { - _onToggleChange: function(e) { - this.setValue(false); - }, - _shouldApplyFilter: function(data) { return false; }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonPlaceholder.js 
b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonPlaceholder.js index 89f9c94270a..b6eb9906ef7 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonPlaceholder.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonPlaceholder.js @@ -23,9 +23,6 @@ qx.Class.define("osparc.dashboard.GridButtonPlaceholder", { this.setPriority(osparc.dashboard.CardBase.CARD_PRIORITY.PLACEHOLDER); - // make unselectable - this.addListener("changeValue", () => this.setValue(false), this); - this.set({ cursor: "not-allowed" }); @@ -122,10 +119,6 @@ qx.Class.define("osparc.dashboard.GridButtonPlaceholder", { return true; }, - _onToggleChange: function() { - this.setValue(false); - }, - _shouldApplyFilter: function(data) { if (data.text) { const checks = [ diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GroupedToggleButtonContainer.js b/services/static-webserver/client/source/class/osparc/dashboard/GroupedCardContainer.js similarity index 95% rename from services/static-webserver/client/source/class/osparc/dashboard/GroupedToggleButtonContainer.js rename to services/static-webserver/client/source/class/osparc/dashboard/GroupedCardContainer.js index d5dc5505d09..2223517302c 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GroupedToggleButtonContainer.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GroupedCardContainer.js @@ -15,7 +15,7 @@ ************************************************************************ */ -qx.Class.define("osparc.dashboard.GroupedToggleButtonContainer", { +qx.Class.define("osparc.dashboard.GroupedCardContainer", { extend: qx.ui.core.Widget, construct: function() { @@ -118,7 +118,7 @@ qx.Class.define("osparc.dashboard.GroupedToggleButtonContainer", { const expanded = this.isExpanded(); const showAllBtn = this.__showAllButton; if (expanded) { - contentContainer = new osparc.dashboard.ToggleButtonContainer(); + contentContainer = new osparc.dashboard.CardContainer(); showAllBtn.show(); } else { const spacing = osparc.dashboard.GridButtonBase.SPACING; @@ -176,7 +176,7 @@ qx.Class.define("osparc.dashboard.GroupedToggleButtonContainer", { // overridden add: function(child, idx) { - if (child instanceof qx.ui.form.ToggleButton) { + if (osparc.dashboard.CardContainer.isValidCard(child)) { const container = this.getContentContainer(); if (osparc.dashboard.ResourceContainerManager.cardExists(container, child)) { return; @@ -189,7 +189,7 @@ qx.Class.define("osparc.dashboard.GroupedToggleButtonContainer", { } this.__childVisibilityChanged(); } else { - console.error("ToggleButtonContainer only allows ToggleButton as its children."); + console.error("CardContainer only allows CardBase as its children."); } }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonBase.js index 86decb00157..d99d33f6608 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonBase.js @@ -27,16 +27,17 @@ qx.Class.define("osparc.dashboard.ListButtonBase", { construct: function() { this.base(arguments); - this.set({ - minHeight: osparc.dashboard.ListButtonBase.ITEM_HEIGHT, - allowGrowX: true - }); const layout = new qx.ui.layout.Grid(); layout.setSpacing(10); layout.setColumnFlex(osparc.dashboard.ListButtonBase.POS.SPACER, 1); 
this._setLayout(layout); + this.set({ + minHeight: osparc.dashboard.ListButtonBase.ITEM_HEIGHT, + allowGrowX: true + }); + this.getChildControl("spacer"); }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js index 9c433550185..5a80947d803 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js @@ -28,8 +28,6 @@ qx.Class.define("osparc.dashboard.ListButtonItem", { this.base(arguments); this.setPriority(osparc.dashboard.CardBase.CARD_PRIORITY.ITEM); - - this.addListener("changeValue", this.__itemSelected, this); }, statics: { @@ -262,48 +260,16 @@ qx.Class.define("osparc.dashboard.ListButtonItem", { }); }, - // overridden - _applyMultiSelectionMode: function(value) { - if (value) { - const menuButton = this.getChildControl("menu-button"); - menuButton.setVisibility("excluded"); - this.__itemSelected(); - } else { - this.__showMenuOnly(); - } - }, - - __itemSelected: function() { - if (this.isItemNotClickable()) { - this.setValue(false); - return; - } - - if (this.isResourceType("study") && this.isMultiSelectionMode()) { - const selected = this.getValue(); - - const tick = this.getChildControl("tick-selected"); - tick.setVisibility(selected ? "visible" : "excluded"); - - const untick = this.getChildControl("tick-unselected"); - untick.setVisibility(selected ? "excluded" : "visible"); - } else { - this.__showMenuOnly(); - } - }, - - __showMenuOnly: function() { - const menu = this.getChildControl("menu-button"); - this.getChildControl("menu-selection-stack").setSelection([menu]); - }, - - _applyMenu: function(value, old) { + _applyMenu: function(menu, old) { const menuButton = this.getChildControl("menu-button"); - if (value) { - menuButton.setMenu(value); - osparc.utils.Utils.setIdToWidget(value, "studyItemMenuMenu"); + if (menu) { + menuButton.setMenu(menu); + menu.setPosition("bottom-left"); + osparc.utils.Utils.prettifyMenu(menu); + osparc.utils.Utils.setIdToWidget(menu, "studyItemMenuMenu"); + this.evaluateMenuButtons(); } - menuButton.setVisibility(value ? "visible" : "excluded"); + menuButton.setVisibility(menu ? 
"visible" : "excluded"); } } }); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonLoadMore.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonLoadMore.js index cbf818c8cdc..1f0fad3e4a6 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonLoadMore.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonLoadMore.js @@ -49,10 +49,6 @@ qx.Class.define("osparc.dashboard.ListButtonLoadMore", { this.setEnabled(!value); }, - _onToggleChange: function(e) { - this.setValue(false); - }, - _shouldApplyFilter: function() { return false; }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonNew.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonNew.js index d9bb0679f46..7ae28a96cf4 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonNew.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonNew.js @@ -51,10 +51,6 @@ qx.Class.define("osparc.dashboard.ListButtonNew", { }, members: { - _onToggleChange: function(e) { - this.setValue(false); - }, - _shouldApplyFilter: function(data) { return false; }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonPlaceholder.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonPlaceholder.js index 7074ded3194..d813261ef3c 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonPlaceholder.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonPlaceholder.js @@ -23,9 +23,6 @@ qx.Class.define("osparc.dashboard.ListButtonPlaceholder", { this.setPriority(osparc.dashboard.CardBase.CARD_PRIORITY.PLACEHOLDER); - // make unselectable - this.addListener("changeValue", () => this.setValue(false), this); - this.__layout = this.getChildControl("progress-layout") this.set({ appearance: "pb-new", @@ -108,10 +105,6 @@ qx.Class.define("osparc.dashboard.ListButtonPlaceholder", { return true; }, - _onToggleChange: function() { - this.setValue(false); - }, - _shouldApplyFilter: function(data) { if (data.text) { const checks = [ diff --git a/services/static-webserver/client/source/class/osparc/dashboard/MoveResourceTo.js b/services/static-webserver/client/source/class/osparc/dashboard/MoveResourceTo.js index ecd26def377..aefa81ef810 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/MoveResourceTo.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/MoveResourceTo.js @@ -73,7 +73,7 @@ qx.Class.define("osparc.dashboard.MoveResourceTo", { switch (id) { case "current-location": { control = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); - const intro = new qx.ui.basic.Label(this.tr("Current location")); + const intro = new qx.ui.basic.Label(this.tr("Current location:")); control.add(intro); const workspace = osparc.store.Workspaces.getInstance().getWorkspace(this.__currentWorkspaceId); const workspaceText = workspace ? 
workspace.getName() : "My Workspace"; diff --git a/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js b/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js index 7c01ff5c74d..4f1c9c9d736 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js @@ -27,7 +27,7 @@ qx.Class.define("osparc.dashboard.NewStudies", { this._setLayout(new qx.ui.layout.VBox(10)); - const flatList = this.__flatList = new osparc.dashboard.ToggleButtonContainer(); + const flatList = this.__flatList = new osparc.dashboard.CardContainer(); [ "changeSelection", "changeVisibility" @@ -86,7 +86,7 @@ qx.Class.define("osparc.dashboard.NewStudies", { this._add(groupContainer); }); } else { - const flatList = this.__flatList = new osparc.dashboard.ToggleButtonContainer(); + const flatList = this.__flatList = new osparc.dashboard.CardContainer(); osparc.utils.Utils.setIdToWidget(flatList, listId); [ "changeSelection", @@ -138,7 +138,7 @@ qx.Class.define("osparc.dashboard.NewStudies", { }, __createGroupContainer: function(groupId, headerLabel, headerColor = "text") { - const groupContainer = new osparc.dashboard.GroupedToggleButtonContainer().set({ + const groupContainer = new osparc.dashboard.GroupedCardContainer().set({ groupId: groupId.toString(), headerLabel, headerIcon: "", @@ -171,7 +171,7 @@ qx.Class.define("osparc.dashboard.NewStudies", { const newPlanButton = new osparc.dashboard.GridButtonNew(title, desc); newPlanButton.setCardKey(templateInfo.idToWidget); osparc.utils.Utils.setIdToWidget(newPlanButton, templateInfo.idToWidget); - newPlanButton.addListener("execute", () => newStudyClicked()); + newPlanButton.addListener("tap", () => newStudyClicked()); return newPlanButton; }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js index c007ca05f7e..8c3cfd23637 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js @@ -279,6 +279,8 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { resourcesContainer.addListener("trashFolderRequested", e => this._trashFolderRequested(e.getData())); resourcesContainer.addListener("untrashFolderRequested", e => this._untrashFolderRequested(e.getData())); resourcesContainer.addListener("deleteFolderRequested", e => this._deleteFolderRequested(e.getData())); + resourcesContainer.addListener("studyToFolderRequested", e => this._studyToFolderRequested(e.getData())); + resourcesContainer.addListener("folderToFolderRequested", e => this._folderToFolderRequested(e.getData())); resourcesContainer.addListener("folderSelected", e => { const folderId = e.getData(); this._folderSelected(folderId); @@ -524,6 +526,14 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { throw new Error("Abstract method called!"); }, + _studyToFolderRequested: function(studyId) { + throw new Error("Abstract method called!"); + }, + + _folderToFolderRequested: function(folderId) { + throw new Error("Abstract method called!"); + }, + _workspaceSelected: function(workspaceId) { throw new Error("Abstract method called!"); }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js 
b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js index 679c2b45cf1..55ac1f85697 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js @@ -33,10 +33,10 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { this.__groupedContainersList = []; if (resourceType === "study") { - const workspacesContainer = this.__workspacesContainer = new osparc.dashboard.ToggleButtonContainer(); + const workspacesContainer = this.__workspacesContainer = new osparc.dashboard.CardContainer(); this._add(workspacesContainer); - const foldersContainer = this.__foldersContainer = new osparc.dashboard.ToggleButtonContainer(); + const foldersContainer = this.__foldersContainer = new osparc.dashboard.CardContainer(); this._add(foldersContainer); } @@ -83,6 +83,8 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { "untrashWorkspaceRequested": "qx.event.type.Data", "deleteWorkspaceRequested": "qx.event.type.Data", "changeContext": "qx.event.type.Data", + "studyToFolderRequested": "qx.event.type.Data", + "folderToFolderRequested": "qx.event.type.Data", }, statics: { @@ -118,7 +120,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { __groupedContainers: null, addNonResourceCard: function(card) { - if (card instanceof qx.ui.form.ToggleButton) { + if (osparc.dashboard.CardContainer.isValidCard(card)) { if (this.getGroupBy()) { // it will always go to the no-group group const noGroupContainer = this.__getGroupContainer("no-group"); @@ -129,12 +131,12 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { this.self().sortListByPriority(this.__nonGroupedContainer); } } else { - console.error("ToggleButtonContainer only allows ToggleButton as its children."); + console.error("CardContainer only allows CardBase as its children."); } }, removeNonResourceCard: function(card) { - if (card instanceof qx.ui.form.ToggleButton) { + if (osparc.dashboard.CardContainer.isValidCard(card)) { if (this.getGroupBy()) { const noGroupContainer = this.__getGroupContainer("no-group"); if (noGroupContainer.getContentContainer().getChildren().indexOf(card) > -1) { @@ -144,7 +146,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { this.__nonGroupedContainer.remove(card); } } else { - console.error("ToggleButtonContainer only allows ToggleButton as its children."); + console.error("CardContainer only allows CardBase as its children."); } }, @@ -161,7 +163,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { }, __createGroupContainer: function(groupId, headerLabel, headerColor = "text") { - const groupContainer = new osparc.dashboard.GroupedToggleButtonContainer().set({ + const groupContainer = new osparc.dashboard.GroupedCardContainer().set({ groupId: groupId.toString(), headerLabel, headerIcon: "", @@ -317,7 +319,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { }, __createFlatList: function() { - const flatList = new osparc.dashboard.ToggleButtonContainer(); + const flatList = new osparc.dashboard.CardContainer(); const setContainerSpacing = () => { const spacing = this.getMode() === "grid" ? 
osparc.dashboard.GridButtonBase.SPACING : osparc.dashboard.ListButtonBase.SPACING; flatList.getLayout().set({ @@ -429,6 +431,8 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { "untrashFolderRequested", "deleteFolderRequested", "changeContext", + "studyToFolderRequested", + "folderToFolderRequested", ].forEach(eName => card.addListener(eName, e => this.fireDataEvent(eName, e.getData()))); return card; }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js index 097247cc847..ad896f44cfd 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js @@ -35,6 +35,8 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { events: { "trashContext": "qx.event.type.Event", + "trashStudyRequested": "qx.event.type.Data", + "trashFolderRequested": "qx.event.type.Data", "changeSharedWith": "qx.event.type.Data", "changeSelectedTags": "qx.event.type.Data", "changeServiceType": "qx.event.type.Data" @@ -114,9 +116,36 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { } }); this.evaluateTrashEmpty(); + this.__attachDropHandlers(trashButton); return trashButton; }, + __attachDropHandlers: function(trashButton) { + trashButton.setDroppable(true); + + trashButton.addListener("dragover", e => { + if (e.supportsType("osparc-moveStudy")) { + osparc.dashboard.DragDropHelpers.trashStudy.dragOver(e); + } else if (e.supportsType("osparc-moveFolder")) { + osparc.dashboard.DragDropHelpers.trashFolder.dragOver(e); + } + }); + + trashButton.addListener("dragleave", () => { + osparc.dashboard.DragDropHelpers.dragLeave(); + }); + + trashButton.addListener("drop", e => { + if (e.supportsType("osparc-moveStudy")) { + const studyData = osparc.dashboard.DragDropHelpers.trashStudy.drop(e); + this.fireDataEvent("trashStudyRequested", studyData); + } else if (e.supportsType("osparc-moveFolder")) { + const folderId = osparc.dashboard.DragDropHelpers.trashFolder.drop(e); + this.fireDataEvent("trashFolderRequested", folderId); + } + }); + }, + evaluateTrashEmpty: function() { const studiesParams = { url: { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js index 5fbaa4ebaf7..7ae65ff0bd1 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js @@ -86,7 +86,7 @@ qx.Class.define("osparc.dashboard.ServiceBrowser", { const cards = this._resourcesContainer.reloadCards("services"); cards.forEach(card => { card.setMultiSelectionMode(this.getMultiSelection()); - card.addListener("execute", () => this.__itemClicked(card), this); + card.addListener("tap", () => this.__itemClicked(card), this); this._populateCardMenu(card); }); osparc.filter.UIFilterController.dispatch("searchBarFilter"); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index 0a25257f247..397f2682acb 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -48,7 +48,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, 
events: { - "publishTemplate": "qx.event.type.Data" + "publishTemplate": "qx.event.type.Data", }, properties: { @@ -100,6 +100,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __foldersList: null, __loadingFolders: null, __loadingWorkspaces: null, + __dragWidget: null, // overridden initResources: function() { @@ -554,7 +555,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.__reloadFolders(); }, - __showMoveToWorkspaceWarningMessage: function() { + __showMoveToDifferentWorkspaceWarningMessage: function() { const msg = this.tr("The permissions will be taken from the new workspace."); const win = new osparc.ui.window.Confirmation(msg).set({ caption: this.tr("Move"), @@ -575,27 +576,35 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const data = e.getData(); const destWorkspaceId = data["workspaceId"]; const destFolderId = data["folderId"]; - const moveFolder = () => { - osparc.store.Folders.getInstance().moveFolderToWorkspace(folderId, destWorkspaceId) // first move to workspace - .then(() => osparc.store.Folders.getInstance().moveFolderToFolder(folderId, destFolderId)) // then move to folder - .then(() => this.__reloadFolders()) - .catch(err => console.error(err)); - } - if (destWorkspaceId === currentWorkspaceId) { - moveFolder(); - } else { - const confirmationWin = this.__showMoveToWorkspaceWarningMessage(); - confirmationWin.addListener("close", () => { - if (confirmationWin.getConfirmed()) { - moveFolder(); - } - }, this); - } + this._folderToFolderRequested(folderId, currentWorkspaceId, destWorkspaceId, destFolderId); }); moveFolderTo.addListener("cancel", () => win.close()); }, - _trashFolderRequested: function(folderId) { + __doMoveFolder: function(folderId, destWorkspaceId, destFolderId) { + osparc.store.Folders.getInstance().moveFolderToWorkspace(folderId, destWorkspaceId) // first move to workspace + .then(() => osparc.store.Folders.getInstance().moveFolderToFolder(folderId, destFolderId)) // then move to folder + .then(() => this.__reloadFolders()) + .catch(err => { + console.error(err); + osparc.FlashMessenger.getInstance().logAs(err, "ERROR"); + }); + }, + + _folderToFolderRequested: function(folderId, workspaceId, destWorkspaceId, destFolderId) { + if (destWorkspaceId === workspaceId) { + this.__doMoveFolder(folderId, destWorkspaceId, destFolderId); + } else { + const confirmationWin = this.__showMoveToDifferentWorkspaceWarningMessage(); + confirmationWin.addListener("close", () => { + if (confirmationWin.getConfirmed()) { + this.__doMoveFolder(folderId, destWorkspaceId, destFolderId); + } + }, this); + } + }, + + __folderToTrash: function(folderId) { osparc.store.Folders.getInstance().trashFolder(folderId, this.getCurrentWorkspaceId()) .then(() => { this.__reloadFolders(); @@ -606,7 +615,25 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { .catch(err => { console.error(err); osparc.FlashMessenger.getInstance().logAs(err, "ERROR"); - }) + }); + }, + + _trashFolderRequested: function(folderId) { + const trashDays = osparc.store.StaticInfo.getInstance().getTrashRetentionDays(); + let msg = this.tr("Are you sure you want to move the Folder and all its content to the trash?"); + msg += "
<br>
" + this.tr("It will be permanently deleted after ") + trashDays + " days."; + const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ + caption: this.tr("Move to Trash"), + confirmText: this.tr("Move to Trash"), + confirmAction: "warning", + }); + confirmationWin.center(); + confirmationWin.open(); + confirmationWin.addListener("close", () => { + if (confirmationWin.getConfirmed()) { + this.__folderToTrash(folderId); + } + }, this); }, _untrashFolderRequested: function(folder) { @@ -638,34 +665,48 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __configureStudyCards: function(cards) { cards.forEach(card => { card.setMultiSelectionMode(this.getMultiSelection()); - card.addListener("tap", e => { - if (card.isItemNotClickable()) { - card.setValue(false); - } else { - this.__itemClicked(card, e.getNativeEvent().shiftKey); - } - }, this); + card.addListener("tap", e => this.__studyCardClicked(card, e.getNativeEvent().shiftKey), this); this._populateCardMenu(card); + + this.__attachDragHandlers(card); }); }, - __itemClicked: function(item, isShiftPressed) { - const studiesCont = this._resourcesContainer.getFlatList(); + __attachDragHandlers: function(card) { + card.setDraggable(true); - if (isShiftPressed) { - const lastIdx = studiesCont.getLastSelectedIndex(); - const currentIdx = studiesCont.getIndex(item); - const minMax = [Math.min(lastIdx, currentIdx), Math.max(lastIdx, currentIdx)]; - for (let i=minMax[0]; i<=minMax[1]; i++) { - const card = studiesCont.getChildren()[i]; - if (card.isVisible()) { - card.setValue(true); - } - } + card.addListener("dragstart", e => { + const studyDataOrigin = card.getResourceData(); + osparc.dashboard.DragDropHelpers.moveStudy.dragStart(e, card, studyDataOrigin); + }); + + card.addListener("dragend", () => { + osparc.dashboard.DragDropHelpers.dragEnd(card); + }); + }, + + __studyCardClicked: function(item, isShiftPressed) { + if (item.isItemNotClickable()) { + item.setSelected(false); + return; } - studiesCont.setLastSelectedIndex(studiesCont.getIndex(item)); - if (!item.isMultiSelectionMode()) { + if (item.isMultiSelectionMode()) { + item.setSelected(!item.getSelected()); + const studiesCont = this._resourcesContainer.getFlatList(); + if (isShiftPressed) { + const lastIdx = studiesCont.getLastSelectedIndex(); + const currentIdx = studiesCont.getIndex(item); + const minMax = [Math.min(lastIdx, currentIdx), Math.max(lastIdx, currentIdx)]; + for (let i=minMax[0]; i<=minMax[1]; i++) { + const card = studiesCont.getChildren()[i]; + if (card.isVisible()) { + card.setSelected(true); + } + } + } + studiesCont.setLastSelectedIndex(studiesCont.getIndex(item)); + } else { const studyData = this.__getStudyData(item.getUuid(), false); this._openResourceDetails(studyData); this.resetSelection(); @@ -860,7 +901,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { newStudyBtn.setCardKey("new-study"); newStudyBtn.subscribeToFilterGroup("searchBarFilter"); osparc.utils.Utils.setIdToWidget(newStudyBtn, "newStudyBtn"); - newStudyBtn.addListener("execute", () => this.__newStudyBtnClicked(newStudyBtn)); + newStudyBtn.addListener("tap", () => this.__newStudyBtnClicked(newStudyBtn)); this._resourcesContainer.addNonResourceCard(newStudyBtn); }, @@ -880,8 +921,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (product in newStudiesData) { newStudyBtn.setEnabled(true); - newStudyBtn.addListener("execute", () => { - newStudyBtn.setValue(false); + newStudyBtn.addListener("tap", () => { osparc.data.Resources.get("templates") .then(templates => { if 
(templates) { @@ -930,7 +970,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const newStudyFromServiceButton = (mode === "grid") ? new osparc.dashboard.GridButtonNew(title, desc) : new osparc.dashboard.ListButtonNew(title, desc); newStudyFromServiceButton.setCardKey("new-"+key); osparc.utils.Utils.setIdToWidget(newStudyFromServiceButton, newButtonInfo.idToWidget); - newStudyFromServiceButton.addListener("execute", () => this.__newStudyFromServiceBtnClicked(newStudyFromServiceButton, latestMetadata["key"], latestMetadata["version"], newButtonInfo.newStudyLabel)); + newStudyFromServiceButton.addListener("tap", () => this.__newStudyFromServiceBtnClicked(newStudyFromServiceButton, latestMetadata["key"], latestMetadata["version"], newButtonInfo.newStudyLabel)); this._resourcesContainer.addNonResourceCard(newStudyFromServiceButton); }) } @@ -987,6 +1027,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this._addResourceFilter(); this.__connectContexts(); + this.__connectDropHandlers(); this.__addNewStudyButtons(); @@ -1134,6 +1175,34 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this._resourceFilter.contextChanged(context, workspaceId, folderId); }, + __connectDropHandlers: function() { + const workspacesAndFoldersTree = this._resourceFilter.getWorkspacesAndFoldersTree(); + workspacesAndFoldersTree.addListener("studyToFolderRequested", e => { + const { + studyData, + destWorkspaceId, + destFolderId, + } = e.getData(); + this._moveStudyToFolderRequested(studyData, destWorkspaceId, destFolderId); + }); + workspacesAndFoldersTree.addListener("folderToFolderRequested", e => { + const { + folderId, + workspaceId, + destWorkspaceId, + destFolderId, + } = e.getData(); + this._folderToFolderRequested(folderId, workspaceId, destWorkspaceId, destFolderId); + }); + + this._resourceFilter.addListener("trashStudyRequested", e => { + this.__trashStudyRequested(e.getData()); + }); + this._resourceFilter.addListener("trashFolderRequested", e => { + this._trashFolderRequested(e.getData()); + }); + }, + __addSortByButton: function() { const sortByButton = new osparc.dashboard.SortedByMenuButton(); sortByButton.set({ @@ -1153,8 +1222,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const loadMoreBtn = this._loadingResourcesBtn = (mode === "grid") ?
new osparc.dashboard.GridButtonLoadMore() : new osparc.dashboard.ListButtonLoadMore(); loadMoreBtn.setCardKey("load-more"); osparc.utils.Utils.setIdToWidget(loadMoreBtn, "studiesLoading"); - loadMoreBtn.addListener("execute", () => { - loadMoreBtn.setValue(false); + loadMoreBtn.addListener("tap", () => { this._moreResourcesRequired(); }); return loadMoreBtn; @@ -1209,13 +1277,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const selection = this._resourcesContainer.getSelection(); selection.forEach(button => { const studyData = button.getResourceData(); - this.__moveStudyToWorkspace(studyData, destWorkspaceId) // first move to workspace - .then(() => this.__moveStudyToFolder(studyData, destFolderId)) // then move to folder - .then(() => this.__removeFromStudyList(studyData["uuid"])) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }); + this.__doMoveStudy(studyData, destWorkspaceId, destFolderId); }); this.resetSelection(); this.setMultiSelection(false); @@ -1223,7 +1285,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (destWorkspaceId === currentWorkspaceId) { moveStudies(); } else { - const confirmationWin = this.__showMoveToWorkspaceWarningMessage(); + const confirmationWin = this.__showMoveToDifferentWorkspaceWarningMessage(); confirmationWin.addListener("close", () => { if (confirmationWin.getConfirmed()) { moveStudies(); @@ -1324,7 +1386,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (osparc.dashboard.ResourceBrowserBase.isCardButtonItem(studyItem)) { studyItem.setMultiSelectionMode(value); if (value === false) { - studyItem.setValue(false); + studyItem.setSelected(false); } } }); @@ -1347,7 +1409,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __newStudyBtnClicked: function(button) { - button.setValue(false); const minStudyData = osparc.data.model.Study.createMinStudyObject(); const existingNames = this._resourcesList.map(study => study["name"]); const title = osparc.utils.Utils.getUniqueName(minStudyData.name, existingNames); @@ -1388,7 +1449,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __newStudyFromServiceBtnClicked: function(button, key, version, newStudyLabel) { - button.setValue(false); this._showLoadingPage(this.tr("Creating ") + osparc.product.Utils.getStudyAlias()); const contextProps = { workspaceId: this.getCurrentWorkspaceId(), @@ -1616,6 +1676,29 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return studyBillingSettingsButton; }, + __doMoveStudy: function(studyData, destWorkspaceId, destFolderId) { + this.__moveStudyToWorkspace(studyData, destWorkspaceId) // first move to workspace + .then(() => this.__moveStudyToFolder(studyData, destFolderId)) // then move to folder + .then(() => this.__removeFromStudyList(studyData["uuid"])) + .catch(err => { + console.error(err); + osparc.FlashMessenger.logAs(err.message, "ERROR"); + }); + }, + + _moveStudyToFolderRequested: function(studyData, destWorkspaceId, destFolderId) { + if (studyData["workspaceId"] === destWorkspaceId) { + this.__doMoveStudy(studyData, destWorkspaceId, destFolderId); + } else { + const confirmationWin = this.__showMoveToDifferentWorkspaceWarningMessage(); + confirmationWin.addListener("close", () => { + if (confirmationWin.getConfirmed()) { + this.__doMoveStudy(studyData, destWorkspaceId, destFolderId); + } + }, this); + } + }, + + __getMoveStudyToMenuButton: function(studyData) { const moveToButton = new qx.ui.menu.Button(this.tr("Move to..."), "@FontAwesome5Solid/folder/12");
moveToButton["moveToButton"] = true; @@ -1630,25 +1713,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const data = e.getData(); const destWorkspaceId = data["workspaceId"]; const destFolderId = data["folderId"]; - const moveStudy = () => { - this.__moveStudyToWorkspace(studyData, destWorkspaceId) // first move to workspace - .then(() => this.__moveStudyToFolder(studyData, destFolderId)) // then move to folder - .then(() => this.__removeFromStudyList(studyData["uuid"])) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }); - }; - if (destWorkspaceId === currentWorkspaceId) { - moveStudy(); - } else { - const confirmationWin = this.__showMoveToWorkspaceWarningMessage(); - confirmationWin.addListener("close", () => { - if (confirmationWin.getConfirmed()) { - moveStudy(); - } - }, this); - } + this._moveStudyToFolderRequested(studyData, destWorkspaceId, destFolderId); }, this); moveStudyTo.addListener("cancel", () => win.close()); }, this); @@ -1667,11 +1732,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } }; return osparc.data.Resources.fetch("studies", "moveToWorkspace", params) - .then(() => studyData["workspaceId"] = destWorkspaceId) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }); + .then(() => studyData["workspaceId"] = destWorkspaceId); }, __moveStudyToFolder: function(studyData, destFolderId) { @@ -1686,11 +1747,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } }; return osparc.data.Resources.fetch("studies", "moveToFolder", params) - .then(() => studyData["folderId"] = destFolderId) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }); + .then(() => studyData["folderId"] = destFolderId); }, __getDuplicateMenuButton: function(studyData) { @@ -2017,7 +2074,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ caption: this.tr("Move to Trash"), confirmText: this.tr("Move to Trash"), - confirmAction: "delete" + confirmAction: "warning", }); osparc.utils.Utils.setIdToWidget(confirmationWin.getConfirmButton(), "confirmDeleteStudyBtn"); return confirmationWin; diff --git a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js index 0b6fc8ccd26..fab2dc1eb94 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js @@ -125,7 +125,6 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", { __itemClicked: function(card) { if (!card.getBlocked()) { - card.setValue(false); const templateData = this.__getTemplateData(card.getUuid()); this._openResourceDetails(templateData); } diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonBase.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonBase.js index c0c93cc9508..a6fb451fc2d 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonBase.js @@ -16,7 +16,7 @@ ************************************************************************ */ qx.Class.define("osparc.dashboard.WorkspaceButtonBase", { - extend: qx.ui.form.ToggleButton, + extend: qx.ui.core.Widget, implement: [qx.ui.form.IModel,
osparc.filter.IFilterable], include: [qx.ui.form.MModelProperty, osparc.filter.MFilterable], type: "abstract", @@ -24,14 +24,14 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonBase", { construct: function() { this.base(arguments); + this._setLayout(new qx.ui.layout.Canvas()); + this.set({ width: this.self().ITEM_WIDTH, height: this.self().ITEM_HEIGHT, padding: 0 }); - this._setLayout(new qx.ui.layout.Canvas()); - this.getChildControl("main-layout"); [ @@ -51,6 +51,13 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonBase", { nullable: true }, + icon: { + check: "String", + init: null, + nullable: true, + apply: "_applyIcon", + }, + resourceType: { check: ["workspace"], init: "workspace", diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js index eb777ca5dd7..91ab3a26233 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js @@ -33,7 +33,7 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", { appearance: "pb-listitem" }); - this.addListener("changeValue", e => this.__itemSelected(e.getData()), this); + this.addListener("tap", this.__itemSelected, this); this.setPriority(osparc.dashboard.CardBase.CARD_PRIORITY.ITEM); @@ -181,9 +181,9 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", { const menuButton = this.getChildControl("menu-button"); menuButton.setVisibility("visible"); - const menu = new qx.ui.menu.Menu().set({ - position: "bottom-right" - }); + const menu = new qx.ui.menu.Menu(); + menu.setPosition("bottom-right"); + osparc.utils.Utils.prettifyMenu(menu); const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext(); if ( @@ -253,13 +253,12 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", { }) }, - __itemSelected: function(newVal) { + __itemSelected: function() { const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext(); // do not allow selecting workspace - if (studyBrowserContext !== "trash" && newVal) { + if (studyBrowserContext !== "trash") { this.fireDataEvent("workspaceSelected", this.getWorkspaceId()); } - this.setValue(false); }, __openShareWith: function() { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js index dd65702503b..aa8425858a8 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js @@ -30,7 +30,7 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonNew", { appearance: "pb-new" }); - this.addListener("changeValue", e => this.__itemSelected(e.getData()), this); + this.addListener("tap", this.__itemSelected, this); this.setPriority(osparc.dashboard.CardBase.CARD_PRIORITY.NEW); @@ -54,25 +54,22 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonNew", { }, members: { - __itemSelected: function(newVal) { - if (newVal) { - const workspaceEditor = new osparc.editor.WorkspaceEditor(); - const title = this.tr("New Workspace"); - const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 500, 500).set({ - modal: true, - clickAwayClose: false, - }); - workspaceEditor.addListener("workspaceCreated", () => this.fireEvent("workspaceCreated")); - 
workspaceEditor.addListener("workspaceDeleted", () => this.fireEvent("workspaceDeleted")); - workspaceEditor.addListener("workspaceUpdated", () => { - win.close(); - this.fireEvent("workspaceUpdated"); - }, this); - workspaceEditor.addListener("updateAccessRights", () => this.fireEvent("workspaceUpdated")); - win.getChildControl("close-button").addListener("tap", () => workspaceEditor.cancel()); - workspaceEditor.addListener("cancel", () => win.close()); - } - this.setValue(false); + __itemSelected: function() { + const workspaceEditor = new osparc.editor.WorkspaceEditor(); + const title = this.tr("New Workspace"); + const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 500, 500).set({ + modal: true, + clickAwayClose: false, + }); + workspaceEditor.addListener("workspaceCreated", () => this.fireEvent("workspaceCreated")); + workspaceEditor.addListener("workspaceDeleted", () => this.fireEvent("workspaceDeleted")); + workspaceEditor.addListener("workspaceUpdated", () => { + win.close(); + this.fireEvent("workspaceUpdated"); + }, this); + workspaceEditor.addListener("updateAccessRights", () => this.fireEvent("workspaceUpdated")); + win.getChildControl("close-button").addListener("tap", () => workspaceEditor.cancel()); + workspaceEditor.addListener("cancel", () => win.close()); } } }); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js index 01cea4d878c..604d5e2e7b0 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js @@ -85,6 +85,8 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTree", { events: { "openChanged": "qx.event.type.Event", "locationChanged": "qx.event.type.Data", + "studyToFolderRequested": "qx.event.type.Data", + "folderToFolderRequested": "qx.event.type.Data", }, properties: { @@ -133,7 +135,13 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTree", { item.addListener("changeModel", e => { const model = e.getData(); osparc.utils.Utils.setIdToWidget(item, `workspacesAndFoldersTreeItem_${model.getWorkspaceId()}_${model.getFolderId()}`); - }) + }); + [ + "studyToFolderRequested", + "folderToFolderRequested", + ].forEach(ev => { + item.addListener(ev, e => this.fireDataEvent(ev, e.getData())); + }); } }); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTreeItem.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTreeItem.js index 75f120a86c5..05a4e44a9c0 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTreeItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTreeItem.js @@ -30,6 +30,14 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTreeItem", { this.setNotHoveredStyle(); this.__attachEventHandlers(); + + this.__attachDragHandlers(); + this.__attachDropHandlers(); + }, + + events: { + "studyToFolderRequested": "qx.event.type.Data", + "folderToFolderRequested": "qx.event.type.Data", }, members: { @@ -48,6 +56,77 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTreeItem", { setNotHoveredStyle: function() { osparc.utils.Utils.hideBorder(this); - } + }, + + __getFolder: function() { + const folderId = this.getModel().getFolderId(); + if (folderId === null) { + return null; 
+ } + return osparc.store.Folders.getInstance().getFolder(folderId); + }, + + __attachDragHandlers: function() { + this.setDraggable(true); + + this.addListener("dragstart", e => { + const folderOrigin = this.__getFolder(); + // only folders can be dragged + if (folderOrigin == null) { + e.preventDefault(); + return; + } + osparc.dashboard.DragDropHelpers.moveFolder.dragStart(e, this, folderOrigin); + }); + + this.addListener("dragend", () => { + osparc.dashboard.DragDropHelpers.dragEnd(this); + }); + }, + + __attachDropHandlers: function() { + this.setDroppable(true); + + let draggingOver = false; + this.addListener("dragover", e => { + const workspaceDestId = this.getModel().getWorkspaceId(); + const folderDestId = this.getModel().getFolderId(); + if (e.supportsType("osparc-moveStudy")) { + osparc.dashboard.DragDropHelpers.moveStudy.dragOver(e, this, workspaceDestId, folderDestId); + } else if (e.supportsType("osparc-moveFolder")) { + osparc.dashboard.DragDropHelpers.moveFolder.dragOver(e, this, workspaceDestId, folderDestId); + } + + draggingOver = true; + setTimeout(() => { + if (draggingOver) { + this.setOpen(true); + draggingOver = false; + } + }, 1000); + }); + + this.addListener("dragleave", () => { + osparc.dashboard.DragDropHelpers.dragLeave(this); + draggingOver = false; + }); + this.addListener("dragend", () => { + osparc.dashboard.DragDropHelpers.dragLeave(this); + draggingOver = false; + }); + + this.addListener("drop", e => { + const workspaceDestId = this.getModel().getWorkspaceId(); + const folderDestId = this.getModel().getFolderId(); + if (e.supportsType("osparc-moveStudy")) { + const studyToFolderData = osparc.dashboard.DragDropHelpers.moveStudy.drop(e, this, workspaceDestId, folderDestId); + this.fireDataEvent("studyToFolderRequested", studyToFolderData); + } else if (e.supportsType("osparc-moveFolder")) { + const folderToFolderData = osparc.dashboard.DragDropHelpers.moveFolder.drop(e, this, workspaceDestId, folderDestId); + this.fireDataEvent("folderToFolderRequested", folderToFolderData); + } + draggingOver = false; + }); + }, }, }); diff --git a/services/static-webserver/client/source/class/osparc/data/Resources.js b/services/static-webserver/client/source/class/osparc/data/Resources.js index 534e9bd723a..bc9068bca8c 100644 --- a/services/static-webserver/client/source/class/osparc/data/Resources.js +++ b/services/static-webserver/client/source/class/osparc/data/Resources.js @@ -418,11 +418,11 @@ qx.Class.define("osparc.data.Resources", { }, getWithWallet: { method: "GET", - url: statics.API + "/services/-/resource-usages?wallet_id={walletId}&offset={offset}&limit={limit}&filters={filters}&order_by={orderBy}" + url: statics.API + "/services/-/resource-usages?wallet_id={walletId}&offset={offset}&limit={limit}" }, - getWithWallet2: { + getWithWalletFiltered: { method: "GET", - url: statics.API + "/services/-/resource-usages?wallet_id={walletId}&offset={offset}&limit={limit}" + url: statics.API + "/services/-/resource-usages?wallet_id={walletId}&offset={offset}&limit={limit}&filters={filters}&order_by={orderBy}" }, getUsagePerService: { method: "GET", @@ -878,6 +878,22 @@ qx.Class.define("osparc.data.Resources", { } } }, + /* + * USERS + */ + "users": { + useCache: false, // osparc.store.Groups handles the cache + endpoints: { + get: { + method: "GET", + url: statics.API + "/groups/{gid}/users" + }, + search: { + method: "POST", + url: statics.API + "/users:search" + } + } + }, /* * WALLETS */ @@ -958,15 +974,15 @@ qx.Class.define("osparc.data.Resources", { } } }, - 
"users": { + "poUsers": { endpoints: { search: { method: "GET", - url: statics.API + "/users:search?email={email}" + url: statics.API + "/admin/users:search?email={email}" }, preRegister: { method: "POST", - url: statics.API + "/users:pre-register" + url: statics.API + "/admin/users:pre-register" } } }, diff --git a/services/static-webserver/client/source/class/osparc/data/Roles.js b/services/static-webserver/client/source/class/osparc/data/Roles.js index b3a87e6c1c4..6b172bf80f1 100644 --- a/services/static-webserver/client/source/class/osparc/data/Roles.js +++ b/services/static-webserver/client/source/class/osparc/data/Roles.js @@ -165,7 +165,7 @@ qx.Class.define("osparc.data.Roles", { } }, - __createIntoFromRoles: function(roles, showWording = true) { + __createRolesLayout: function(roles, showWording = true) { const rolesLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)).set({ alignY: "middle", paddingRight: 10 @@ -202,23 +202,34 @@ qx.Class.define("osparc.data.Roles", { }, createRolesOrgInfo: function() { - return this.__createIntoFromRoles(osparc.data.Roles.ORG); + return this.__createRolesLayout(osparc.data.Roles.ORG); }, createRolesWalletInfo: function() { - return this.__createIntoFromRoles(osparc.data.Roles.WALLET); + return this.__createRolesLayout(osparc.data.Roles.WALLET); }, createRolesStudyInfo: function() { - return this.__createIntoFromRoles(osparc.data.Roles.STUDY); + return this.__createRolesLayout(osparc.data.Roles.STUDY); }, createRolesServicesInfo: function() { - return this.__createIntoFromRoles(osparc.data.Roles.SERVICES); + return this.__createRolesLayout(osparc.data.Roles.SERVICES); }, createRolesWorkspaceInfo: function(showWording = true) { - return this.__createIntoFromRoles(osparc.data.Roles.WORKSPACE, showWording); - } + return this.__createRolesLayout(osparc.data.Roles.WORKSPACE, showWording); + }, + + replaceSpacerWithWidget: function(rolesLayout, widget) { + if (rolesLayout && rolesLayout.getChildren()) { + // remove spacer + rolesLayout.remove(rolesLayout.getChildren()[0]); + // add widget + rolesLayout.addAt(widget, 0, { + flex: 1 + }); + } + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js index 583b31f979e..2ca74c47274 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js +++ b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js @@ -378,7 +378,7 @@ qx.Class.define("osparc.data.model.IframeHandler", { const node = this.getNode(); if (node.getServiceUrl() !== null) { // restart button pushed - if (this.getIFrame().getSource().includes(node.getServiceUrl())) { + if (this.getIFrame() && this.getIFrame().getSource().includes(node.getServiceUrl())) { this.__loadIframe(); } diff --git a/services/static-webserver/client/source/class/osparc/data/model/Service.js b/services/static-webserver/client/source/class/osparc/data/model/Service.js index 203796d2bd0..d7f37db2e00 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Service.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Service.js @@ -43,7 +43,8 @@ qx.Class.define("osparc.data.model.Service", { bootOptions: serviceData.bootOptions, classifiers: serviceData.classifiers || [], quality: serviceData.quality || null, - hits: serviceData.hits || 0 + xType: serviceData.xType || null, + hits: serviceData.hits || 0, }); }, @@ -147,6 +148,13 @@ 
qx.Class.define("osparc.data.model.Service", { }, // ------ ignore for serializing ------ + xType: { + check: "String", + nullable: true, + init: null, + event: "changeXType", + }, + hits: { check: "Number", init: 0, @@ -158,7 +166,8 @@ qx.Class.define("osparc.data.model.Service", { statics: { IgnoreSerializationProps: [ - "hits" + "xType", + "hits", ] }, diff --git a/services/static-webserver/client/source/class/osparc/data/model/Study.js b/services/static-webserver/client/source/class/osparc/data/model/Study.js index af4b639cd44..f03a01ff741 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Study.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Study.js @@ -274,10 +274,9 @@ qx.Class.define("osparc.data.model.Study", { }, canIWrite: function(studyAccessRights) { - const myGroupId = osparc.auth.Data.getInstance().getGroupId(); const groupsStore = osparc.store.Groups.getInstance(); const orgIDs = groupsStore.getOrganizationIds(); - orgIDs.push(myGroupId); + orgIDs.push(groupsStore.getMyGroupId()); if (orgIDs.length) { return osparc.share.CollaboratorsStudy.canGroupsWrite(studyAccessRights, (orgIDs)); } @@ -285,10 +284,9 @@ qx.Class.define("osparc.data.model.Study", { }, canIDelete: function(studyAccessRights) { - const myGroupId = osparc.auth.Data.getInstance().getGroupId(); const groupsStore = osparc.store.Groups.getInstance(); const orgIDs = groupsStore.getOrganizationIds(); - orgIDs.push(myGroupId); + orgIDs.push(groupsStore.getMyGroupId()); if (orgIDs.length) { return osparc.share.CollaboratorsStudy.canGroupsDelete(studyAccessRights, (orgIDs)); } diff --git a/services/static-webserver/client/source/class/osparc/data/model/User.js b/services/static-webserver/client/source/class/osparc/data/model/User.js index fbdc80c6adf..7294987345c 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/User.js +++ b/services/static-webserver/client/source/class/osparc/data/model/User.js @@ -28,28 +28,30 @@ qx.Class.define("osparc.data.model.User", { construct: function(userData) { this.base(arguments); - let description = ""; - if (userData["first_name"]) { - description = userData["first_name"]; - if (userData["last_name"]) { - description += " " + userData["last_name"]; + const userId = ("id" in userData) ? parseInt(userData["id"]) : parseInt(userData["userId"]); + const groupId = ("gid" in userData) ? parseInt(userData["gid"]) : parseInt(userData["groupId"]); + const username = userData["userName"]; + const email = ("login" in userData) ? userData["login"] : userData["email"]; + const firstName = ("first_name" in userData) ? userData["first_name"] : userData["firstName"]; + const lastName = ("last_name" in userData) ? 
userData["last_name"] : userData["lastName"]; + let description = [firstName, lastName].join(" ").trim(); // the null values will be replaced by empty strings + if (email) { + if (description) { + description += " - " } - description += " - "; + description += email; } - if (userData["login"]) { - description += userData["login"]; - } - const thumbnail = osparc.utils.Avatar.emailToThumbnail(userData["login"], userData["userName"]); + const thumbnail = osparc.utils.Avatar.emailToThumbnail(email, username); this.set({ - userId: parseInt(userData["id"]), - groupId: parseInt(userData["gid"]), - username: userData["userName"], - firstName: userData["first_name"], - lastName: userData["last_name"], - email: userData["login"], - label: userData["userName"], - description, + userId, + groupId, + username, + firstName, + lastName, + email, thumbnail, + label: username, + description, }); }, diff --git a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js index 4dfad42c6e9..9d70f9a85e0 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js +++ b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js @@ -356,9 +356,6 @@ qx.Class.define("osparc.desktop.WorkbenchView", { this.__workbenchUI.openServiceCatalog({ x: 50, y: 50 - }, { - x: 50, - y: 50 }); }); homeAndNodesTree.add(addNewNodeBtn); diff --git a/services/static-webserver/client/source/class/osparc/desktop/account/MyAccount.js b/services/static-webserver/client/source/class/osparc/desktop/account/MyAccount.js index 40a3e5b5918..0df1dc1d8be 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/account/MyAccount.js +++ b/services/static-webserver/client/source/class/osparc/desktop/account/MyAccount.js @@ -64,16 +64,16 @@ qx.Class.define("osparc.desktop.account.MyAccount", { authData.bind("username", usernameLabel, "value"); layout.add(usernameLabel); - const name = new qx.ui.basic.Label().set({ + const fullNameLabel = new qx.ui.basic.Label().set({ font: "text-13", alignX: "center" }); - layout.add(name); - authData.bind("firstName", name, "value", { - converter: firstName => firstName + " " + authData.getLastName() + layout.add(fullNameLabel); + authData.bind("firstName", fullNameLabel, "value", { + converter: () => authData.getFullName() }); - authData.bind("lastName", name, "value", { - converter: lastName => authData.getFirstName() + " " + lastName + authData.bind("lastName", fullNameLabel, "value", { + converter: () => authData.getFullName() }); if (authData.getRole() !== "user") { diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/CurrentUsage.js b/services/static-webserver/client/source/class/osparc/desktop/credits/CurrentUsage.js index 9f03b4314bc..48597eb99ef 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/CurrentUsage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/CurrentUsage.js @@ -66,7 +66,7 @@ qx.Class.define("osparc.desktop.credits.CurrentUsage", { limit: 10 } }; - osparc.data.Resources.fetch("resourceUsage", "getWithWallet2", params) + osparc.data.Resources.fetch("resourceUsage", "getWithWallet", params) .then(data => { const currentTasks = data.filter(d => (d.project_id === currentStudy.getUuid()) && d.service_run_status === "RUNNING"); let cost = 0; diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTable.js 
b/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTable.js index 9e81f2c8541..324f4dcb419 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTable.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTable.js @@ -114,7 +114,13 @@ qx.Class.define("osparc.desktop.credits.UsageTable", { column: 7, label: qx.locale.Manager.tr("User"), width: 140 - } + }, + TAGS: { + id: "tags", + column: 8, + label: qx.locale.Manager.tr("Tags"), + width: 140 + }, } } }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTableModel.js b/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTableModel.js index 95b6128c086..c7811a984d9 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTableModel.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/UsageTableModel.js @@ -60,7 +60,8 @@ qx.Class.define("osparc.desktop.credits.UsageTableModel", { // 4: (not used) SORTING BY DURATION 5: "service_run_status", 6: "credit_cost", - 7: "user_email" + 7: "user_email", + 8: "projects_tags", } }, @@ -76,7 +77,7 @@ qx.Class.define("osparc.desktop.credits.UsageTableModel", { // overridden _loadRowCount() { - const endpoint = this.getWalletId() == null ? "get" : "getWithWallet" + const endpoint = this.getWalletId() == null ? "get" : "getWithWalletFiltered" const params = { url: { walletId: this.getWalletId(), @@ -109,7 +110,7 @@ qx.Class.define("osparc.desktop.credits.UsageTableModel", { const lastRow = Math.min(qxLastRow, this._rowCount - 1) // Returns a request promise with given offset and limit const getFetchPromise = (offset, limit=this.self().SERVER_MAX_LIMIT) => { - const endpoint = this.getWalletId() == null ? "get" : "getWithWallet" + const endpoint = this.getWalletId() == null ? "get" : "getWithWalletFiltered" return osparc.data.Resources.fetch("resourceUsage", endpoint, { url: { walletId: this.getWalletId(), @@ -149,7 +150,8 @@ qx.Class.define("osparc.desktop.credits.UsageTableModel", { [usageCols.DURATION.id]: duration, [usageCols.STATUS.id]: qx.lang.String.firstUp(rawRow["service_run_status"].toLowerCase()), [usageCols.COST.id]: rawRow["credit_cost"] ?
parseFloat(rawRow["credit_cost"]).toFixed(2) : "", - [usageCols.USER.id]: rawRow["user_email"] + [usageCols.USER.id]: rawRow["user_email"], + [usageCols.TAGS.id]: rawRow["project_tags"], }) }) return data diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js index acd68c25680..91ef4845139 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js @@ -23,13 +23,8 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { this._setLayout(new qx.ui.layout.VBox(10)); - this._add(this.__createIntroText()); - this._add(this.__getMemberInvitation()); - this._add(this.__getRolesToolbar()); - this._add(this.__getMembersFilter()); - this._add(this.__getMembersList(), { - flex: 1 - }); + this.__createNewMemberLayout(); + this.__createMembersList(); }, statics: { @@ -80,7 +75,7 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { members: { __currentOrg: null, __introLabel: null, - __memberInvitation: null, + __addMembersButton: null, __membersModel: null, setCurrentOrg: function(orgModel) { @@ -91,7 +86,29 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { this.__reloadOrgMembers(); }, - __createIntroText: function() { + __createNewMemberLayout: function() { + const vBox = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); + vBox.add(this.__createAddMembersText()); + vBox.add(this.__getMemberInvitation()); + this._add(vBox); + }, + + __createMembersList: function() { + const vBox = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); + const rolesLayout = this.__getRolesToolbar(); + const membersFilter = this.__getMembersFilter(); + membersFilter.setPaddingRight(10); + osparc.data.Roles.replaceSpacerWithWidget(rolesLayout, membersFilter); + vBox.add(rolesLayout); + vBox.add(this.__getMembersList(), { + flex: 1 + }); + this._add(vBox, { + flex: 1 + }); + }, + + __createAddMembersText: function() { const intro = this.__introLabel = new qx.ui.basic.Label().set({ alignX: "left", rich: true, @@ -101,25 +118,39 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { }, __getMemberInvitation: function() { - const hBox = this.__memberInvitation = new qx.ui.container.Composite(new qx.ui.layout.HBox(10).set({ - alignY: "middle" - })); - - const newMemberUserName = new qx.ui.form.TextField().set({ - required: true, - placeholder: this.tr(" New Member's username") - }); - hBox.add(newMemberUserName, { - flex: 1 + const addBtn = this.__addMembersButton = new qx.ui.form.Button().set({ + appearance: "strong-button", + label: this.tr("Add Members..."), + allowGrowX: false, }); - - const addBtn = new qx.ui.form.Button(this.tr("Add")); addBtn.addListener("execute", function() { - this.__addMember(newMemberUserName.getValue()); + const serializedData = this.__currentOrg.serialize(); + serializedData["resourceType"] = "organization"; + const showOrganizations = false; + const collaboratorsManager = new osparc.share.NewCollaboratorsManager(serializedData, showOrganizations); + collaboratorsManager.setCaption("Add Members"); + collaboratorsManager.getActionButton().setLabel(this.tr("Add")); + collaboratorsManager.addListener("addCollaborators", e => { + const selectedMembers = e.getData(); + if (selectedMembers.length) { + const promises = []; + const usersStore = 
osparc.store.Users.getInstance(); + selectedMembers.forEach(selectedMemberGId => promises.push(usersStore.getUser(selectedMemberGId))); + Promise.all(promises) + .then(users => { + users.forEach(user => this.__addMember(user.getUsername())); + }) + .catch(err => { + console.error(err); + }) + .finally(() => collaboratorsManager.close()); + } else { + collaboratorsManager.close(); + } + }, this); }, this); - hBox.add(addBtn); - return hBox; + return addBtn; }, __getRolesToolbar: function() { @@ -127,10 +158,8 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { }, __getMembersFilter: function() { - const filter = new osparc.filter.TextFilter("text", "organizationMembersList").set({ - allowStretchX: true, - margin: [0, 10, 5, 10] - }); + const filter = new osparc.filter.TextFilter("text", "organizationMembersList"); + filter.setCompact(true); return filter; }, @@ -212,11 +241,11 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { const canIDelete = organization.getAccessRights()["delete"]; const introText = canIWrite ? - this.tr("You can add new members and promote or demote existing ones.<br>
In order to add new members, type their username or email if this is public.") : + this.tr("You can add new members and change their roles.") : this.tr("You can't add new members to this Organization. Please contact an Administrator or Manager."); this.__introLabel.setValue(introText); - this.__memberInvitation.set({ + this.__addMembersButton.set({ enabled: canIWrite }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js index ff773341ff0..c86917fed36 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js @@ -37,8 +37,12 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { }); this._add(intro); - this._add(this.__getOrganizationsFilter()); - this._add(osparc.data.Roles.createRolesOrgInfo()); + const rolesLayout = osparc.data.Roles.createRolesOrgInfo(); + const orgsFilter = this.__getOrganizationsFilter(); + orgsFilter.setPaddingRight(10); + osparc.data.Roles.replaceSpacerWithWidget(rolesLayout, orgsFilter); + this._add(rolesLayout); + this._add(this.__getOrganizationsList(), { flex: 1 }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/wallets/MembersList.js b/services/static-webserver/client/source/class/osparc/desktop/wallets/MembersList.js index dc27a0cfee3..a55b47e9ec4 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/wallets/MembersList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/wallets/MembersList.js @@ -120,9 +120,6 @@ qx.Class.define("osparc.desktop.wallets.MembersList", { const vBox = this.__memberInvitation = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); vBox.exclude(); - const label = new qx.ui.basic.Label(this.tr("Select from the list below and click Share")); - vBox.add(label); - const addMemberBtn = new qx.ui.form.Button(this.tr("Add Members...")).set({ appearance: "strong-button", allowGrowX: false diff --git a/services/static-webserver/client/source/class/osparc/file/FileLabelWithActions.js b/services/static-webserver/client/source/class/osparc/file/FileLabelWithActions.js index 15eec413914..35837fff2c7 100644 --- a/services/static-webserver/client/source/class/osparc/file/FileLabelWithActions.js +++ b/services/static-webserver/client/source/class/osparc/file/FileLabelWithActions.js @@ -198,7 +198,7 @@ qx.Class.define("osparc.file.FileLabelWithActions", { request .then(data => { this.fireDataEvent("fileDeleted", data); - osparc.FlashMessenger.getInstance().logAs(this.tr("File successfully deleted"), "ERROR"); + osparc.FlashMessenger.getInstance().logAs(this.tr("File successfully deleted"), "INFO"); }); } } diff --git a/services/static-webserver/client/source/class/osparc/filter/NodeTypeFilter.js b/services/static-webserver/client/source/class/osparc/filter/NodeTypeFilter.js index 80becd17ada..a87a0e2d513 100644 --- a/services/static-webserver/client/source/class/osparc/filter/NodeTypeFilter.js +++ b/services/static-webserver/client/source/class/osparc/filter/NodeTypeFilter.js @@ -27,7 +27,7 @@ qx.Class.define("osparc.filter.NodeTypeFilter", { * @extends osparc.filter.TagsFilter */ construct: function(filterId, filterGroupId) { - this.base(arguments, this.tr("Service types"), filterId, filterGroupId); + this.base(arguments, this.tr("Type"), filterId, 
filterGroupId); this._setLayout(new qx.ui.layout.HBox()); this.__buildMenu(); diff --git a/services/static-webserver/client/source/class/osparc/filter/TextFilter.js b/services/static-webserver/client/source/class/osparc/filter/TextFilter.js index 4566bb86f5d..7e31f4740af 100644 --- a/services/static-webserver/client/source/class/osparc/filter/TextFilter.js +++ b/services/static-webserver/client/source/class/osparc/filter/TextFilter.js @@ -35,8 +35,7 @@ qx.Class.define("osparc.filter.TextFilter", { allowStretchY: false }); - this.__textField = this.getChildControl("textfield"); - + this.getChildControl("textfield"); this.getChildControl("clearbutton"); this.__attachEventHandlers(); @@ -46,18 +45,23 @@ qx.Class.define("osparc.filter.TextFilter", { appearance: { refine: true, init: "textfilter" - } + }, + + compact: { + check: "Boolean", + init: false, + apply: "__applyCompact", + }, }, members: { - __textField: null, - /** * Function that resets the field and dispatches the update. */ reset: function() { - this.__textField.resetValue(); - this.__textField.fireDataEvent("input", ""); + const textField = this.getChildControl("textfield"); + textField.resetValue(); + textField.fireDataEvent("input", ""); }, _createChildControlImpl: function(id) { @@ -78,7 +82,7 @@ qx.Class.define("osparc.filter.TextFilter", { case "clearbutton": control = new osparc.ui.basic.IconButton("@MaterialIcons/close/12", () => { this.reset(); - this.__textField.focus(); + this.getChildControl("textfield").focus(); }); this._add(control, { right: 0, @@ -89,8 +93,21 @@ qx.Class.define("osparc.filter.TextFilter", { return control || this.base(arguments, id); }, + __applyCompact: function(compact) { + this.set({ + allowStretchX: compact, + allowGrowX: compact, + maxHeight: compact ? 30 : null, + margin: compact ? 0 : null, + }); + + this.getChildControl("textfield").set({ + margin: compact ? 
0 : null, + }); + }, + __attachEventHandlers: function() { - this.__textField.addListener("input", evt => { + this.getChildControl("textfield").addListener("input", evt => { this._filterChange(evt.getData().trim().toLowerCase()); }); } diff --git a/services/static-webserver/client/source/class/osparc/filter/group/ServiceFilterGroup.js b/services/static-webserver/client/source/class/osparc/filter/group/ServiceFilterGroup.js index 721333e8945..e5a35318f73 100644 --- a/services/static-webserver/client/source/class/osparc/filter/group/ServiceFilterGroup.js +++ b/services/static-webserver/client/source/class/osparc/filter/group/ServiceFilterGroup.js @@ -27,7 +27,7 @@ qx.Class.define("osparc.filter.group.ServiceFilterGroup", { */ construct: function(filterGroupId) { this.base(arguments); - this._setLayout(new qx.ui.layout.HBox(5)); + this._setLayout(new qx.ui.layout.HBox(10)); this.__filterGroupId = filterGroupId; const textFilter = this.__textFilter = new osparc.filter.TextFilter("text", filterGroupId); osparc.utils.Utils.setIdToWidget(textFilter, "serviceFiltersTextFld"); diff --git a/services/static-webserver/client/source/class/osparc/po/PreRegistration.js b/services/static-webserver/client/source/class/osparc/po/PreRegistration.js index f2fe853b1df..8a1f0e767df 100644 --- a/services/static-webserver/client/source/class/osparc/po/PreRegistration.js +++ b/services/static-webserver/client/source/class/osparc/po/PreRegistration.js @@ -99,7 +99,7 @@ qx.Class.define("osparc.po.PreRegistration", { return } - osparc.data.Resources.fetch("users", "preRegister", params) + osparc.data.Resources.fetch("poUsers", "preRegister", params) .then(data => { if (data.length) { findingStatus.setValue(this.tr("Pre-Registered as:")); diff --git a/services/static-webserver/client/source/class/osparc/po/Users.js b/services/static-webserver/client/source/class/osparc/po/Users.js index feef74218f1..eb011712b42 100644 --- a/services/static-webserver/client/source/class/osparc/po/Users.js +++ b/services/static-webserver/client/source/class/osparc/po/Users.js @@ -83,7 +83,7 @@ qx.Class.define("osparc.po.Users", { email: userEmail.getValue() } }; - osparc.data.Resources.fetch("users", "search", params) + osparc.data.Resources.fetch("poUsers", "search", params) .then(data => { findingStatus.setValue(data.length + this.tr(" user(s) found")); this.__populateFoundUsersLayout(data); diff --git a/services/static-webserver/client/source/class/osparc/pricing/UnitEditor.js b/services/static-webserver/client/source/class/osparc/pricing/UnitEditor.js index 26469666570..38f9022172e 100644 --- a/services/static-webserver/client/source/class/osparc/pricing/UnitEditor.js +++ b/services/static-webserver/client/source/class/osparc/pricing/UnitEditor.js @@ -36,7 +36,6 @@ qx.Class.define("osparc.pricing.UnitEditor", { const manager = this.__validator = new qx.ui.form.validation.Manager(); unitName.setRequired(true); costPerUnit.setRequired(true); - specificInfo.setRequired(true); unitExtraInfoCPU.setRequired(true); unitExtraInfoRAM.setRequired(true); unitExtraInfoVRAM.setRequired(true); @@ -114,8 +113,8 @@ qx.Class.define("osparc.pricing.UnitEditor", { specificInfo: { check: "String", - init: "t2.medium", - nullable: false, + init: null, + nullable: true, event: "changeSpecificInfo" }, @@ -307,7 +306,11 @@ qx.Class.define("osparc.pricing.UnitEditor", { const unitName = this.getUnitName(); const costPerUnit = this.getCostPerUnit(); const comment = this.getComment(); + const awsEc2Instances = []; const specificInfo = this.getSpecificInfo(); 
+      if (specificInfo) {
+        awsEc2Instances.push(specificInfo);
+      }
       const extraInfo = {};
       extraInfo["CPU"] = this.getUnitExtraInfoCPU();
       extraInfo["RAM"] = this.getUnitExtraInfoRAM();
@@ -323,7 +326,7 @@ qx.Class.define("osparc.pricing.UnitEditor", {
         "costPerUnit": costPerUnit,
         "comment": comment,
         "specificInfo": {
-          "aws_ec2_instances": [specificInfo]
+          "aws_ec2_instances": awsEc2Instances
         },
         "unitExtraInfo": extraInfo,
         "default": isDefault
diff --git a/services/static-webserver/client/source/class/osparc/service/ServiceList.js b/services/static-webserver/client/source/class/osparc/service/ServiceList.js
index 06ca9bca1e7..d216effd964 100644
--- a/services/static-webserver/client/source/class/osparc/service/ServiceList.js
+++ b/services/static-webserver/client/source/class/osparc/service/ServiceList.js
@@ -29,14 +29,14 @@ qx.Class.define("osparc.service.ServiceList", {
    */
   construct: function(filterGroupId) {
     this.base(arguments);
-    this._setLayout(new qx.ui.layout.Flow(5, 5));
+    this._setLayout(new qx.ui.layout.VBox(5));
     if (filterGroupId) {
       this.__filterGroup = filterGroupId;
     }
   },

   events: {
-    "changeValue": "qx.event.type.Data",
+    "changeSelected": "qx.event.type.Data",
     "serviceAdd": "qx.event.type.Data"
   },

@@ -53,33 +53,26 @@
   },

   members: {
-    __buttonGroup: null,
     __filterGroup: null,

     _applyModel: function(model) {
       this._removeAll();
-      const group = this.__buttonGroup = new qx.ui.form.RadioGroup().set({
-        allowEmptySelection: true
-      });
+      this.__serviceListItem = [];
       model.toArray().forEach(service => {
-        const button = new osparc.service.ServiceListItem(service);
+        const item = new osparc.service.ServiceListItem(service);
         if (this.__filterGroup !== null) {
-          button.subscribeToFilterGroup(this.__filterGroup);
+          item.subscribeToFilterGroup(this.__filterGroup);
         }
-        group.add(button);
-        this._add(button);
-        button.addListener("dbltap", () => {
-          this.fireDataEvent("serviceAdd", button.getService());
-        }, this);
-        button.addListener("keypress", e => {
+        this._add(item);
+        item.addListener("tap", () => this.__setSelected(item));
+        item.addListener("dbltap", () => this.fireDataEvent("serviceAdd", item.getService()), this);
+        item.addListener("keypress", e => {
           if (e.getKeyIdentifier() === "Enter") {
-            this.fireDataEvent("serviceAdd", button.getService());
+            this.fireDataEvent("serviceAdd", item.getService());
           }
         }, this);
       });
-
-      group.addListener("changeValue", e => this.dispatchEvent(e.clone()), this);
     },

     /**
@@ -88,37 +81,41 @@
      * @return Returns the model of the selected service or null if selection is empty.
      */
     getSelected: function() {
-      if (this.__buttonGroup && this.__buttonGroup.getSelection().length) {
-        return this.__buttonGroup.getSelection()[0].getService();
+      const items = this._getChildren();
+      for (let i=0; i<items.length; i++) {
+        const item = items[i];
+        if (item.getSelected()) {
+          return item.getService();
+        }
       }
+      return null;
     },

+    __setSelected: function(selectedItem) {
+      this._getChildren().forEach(item => item.setSelected(item === selectedItem));
+      this.fireDataEvent("changeSelected", selectedItem);
+    },
+
     /**
      * Function checking if the selection is empty or not
      *
      * @return True if no item is selected, false if one or more items are selected.
      */
     isSelectionEmpty: function() {
-      if (this.__buttonGroup == null) {
-        return true;
-      }
-      return this.__buttonGroup.getSelection().length === 0;
+      const selectedItems = this._getChildren().filter(item => item.getSelected());
+      return selectedItems.length === 0;
     },

     /**
      * Function that selects the first visible button.
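For context, consumers now track the selection through the changeSelected event instead of the former RadioGroup's changeValue (ServiceCatalog further below is adapted accordingly). A minimal usage sketch, assuming serviceList is a ready osparc.service.ServiceList instance:

    // sketch only: listen to the new selection event
    serviceList.addListener("changeSelected", e => {
      const item = e.getData();  // the tapped ServiceListItem passed to __setSelected
      if (item && item.getService()) {
        console.log("selected service:", item.getService().getKey());
      }
    });
    // double-tap (or Enter) still requests adding the service
    serviceList.addListener("serviceAdd", e => console.log("add service:", e.getData().getKey()));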
*/ selectFirstVisible: function() { - if (this._hasChildren()) { - const buttons = this._getChildren(); - let current = buttons[0]; - let i = 1; - while (i this.__itemSelected(e.getData())); + this.bind("selected", this, "backgroundColor", { + converter: selected => selected ? "strong-main" : "info" + }); }, properties: { @@ -64,33 +61,6 @@ qx.Class.define("osparc.service.ServiceListItem", { __versionsBox: null, __infoBtn: null, - _createChildControlImpl: function(id) { - let control; - switch (id) { - case "extended-layout": - control = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); - this._add(control, { - row: 1, - column: 0, - colSpan: osparc.dashboard.ListButtonBase.POS.HITS - }); - break; - case "version-layout": { - control = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); - this.getChildControl("extended-layout").add(control); - const versionLabel = new qx.ui.basic.Label(this.tr("Version")); - control.add(versionLabel); - const selectBox = this.__versionsBox = new qx.ui.form.SelectBox(); - control.add(selectBox); - const infoBtn = this.__infoBtn = new qx.ui.form.Button(null, "@MaterialIcons/info_outline/16"); - infoBtn.addListener("execute", () => this.__showServiceDetails(), this); - control.add(infoBtn); - break; - } - } - return control || this.base(arguments, id); - }, - __applyService: function(service) { // BASE if (service.getThumbnail()) { @@ -127,44 +97,6 @@ qx.Class.define("osparc.service.ServiceListItem", { }); }, - __itemSelected: function(selected) { - this.setHeight(selected ? 70 : 35); - const extendedLayout = this.getChildControl("extended-layout"); - const versionLayout = this.getChildControl("version-layout"); - extendedLayout.setVisibility(selected ? "visible" : "excluded"); - versionLayout.setVisibility(selected ? 
"visible" : "excluded"); - this.__populateVersions(); - }, - - __populateVersions: function() { - const serviceKey = this.getService().getKey(); - const selectBox = this.__versionsBox; - selectBox.removeAll(); - const versions = osparc.service.Utils.getVersions(serviceKey); - const latest = new qx.ui.form.ListItem(this.self().LATEST); - latest.version = this.self().LATEST; - selectBox.add(latest); - versions.forEach(version => { - const listItem = osparc.service.Utils.versionToListItem(serviceKey, version); - selectBox.add(listItem); - }); - osparc.utils.Utils.growSelectBox(selectBox, 200); - selectBox.setSelection([latest]); - }, - - __showServiceDetails: function() { - const key = this.getService().getKey(); - let version = this.__versionsBox.getSelection()[0].version; - if (version === this.self().LATEST) { - version = this.__versionsBox.getChildrenContainer().getSelectables()[1].version; - } - osparc.store.Services.getService(key, version) - .then(serviceMetadata => { - const serviceDetails = new osparc.info.ServiceLarge(serviceMetadata); - osparc.info.ServiceLarge.popUpInWindow(serviceDetails); - }); - }, - _filterText: function(text) { const checks = [ this.getService().getName(), diff --git a/services/static-webserver/client/source/class/osparc/share/AddCollaborators.js b/services/static-webserver/client/source/class/osparc/share/AddCollaborators.js index da0394cd010..788650a1af3 100644 --- a/services/static-webserver/client/source/class/osparc/share/AddCollaborators.js +++ b/services/static-webserver/client/source/class/osparc/share/AddCollaborators.js @@ -51,10 +51,6 @@ qx.Class.define("osparc.share.AddCollaborators", { _createChildControlImpl: function(id) { let control; switch (id) { - case "intro-text": - control = new qx.ui.basic.Label(this.tr("Select from the list below and click Share")); - this._add(control); - break; case "buttons-layout": control = new qx.ui.container.Composite(new qx.ui.layout.HBox()); this._add(control); @@ -89,8 +85,6 @@ qx.Class.define("osparc.share.AddCollaborators", { }, __buildLayout: function() { - this.getChildControl("intro-text"); - const addCollaboratorBtn = this.getChildControl("share-with"); addCollaboratorBtn.addListener("execute", () => { const collaboratorsManager = new osparc.share.NewCollaboratorsManager(this.__serializedDataCopy); diff --git a/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js b/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js index fcbe5befff5..ee149c77422 100644 --- a/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js +++ b/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js @@ -11,7 +11,7 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { construct: function(resourceData, showOrganizations = true) { this.base(arguments, "collaboratorsManager", this.tr("Share with")); this.set({ - layout: new qx.ui.layout.VBox(), + layout: new qx.ui.layout.VBox(5), allowMinimize: false, allowMaximize: false, showMinimize: false, @@ -29,8 +29,8 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { this.__renderLayout(); this.__selectedCollaborators = []; - this.__visibleCollaborators = {}; - this.__reloadCollaborators(); + this.__potentialCollaborators = {}; + this.__reloadPotentialCollaborators(); this.center(); this.open(); @@ -43,34 +43,43 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { members: { __resourceData: null, __showOrganizations: null, - __introLabel: 
null,
     __textFilter: null,
+    __searchButton: null,
     __collabButtonsContainer: null,
-    __orgsButton: null,
     __shareButton: null,
     __selectedCollaborators: null,
-    __visibleCollaborators: null,
+    __potentialCollaborators: null,

     getActionButton: function() {
       return this.__shareButton;
     },

     __renderLayout: function() {
-      const introText = this.tr("In order to start Sharing with other members, you first need to belong to an Organization.");
-      const introLabel = this.__introLabel = new qx.ui.basic.Label(introText).set({
+      const introLabel = new qx.ui.basic.Label().set({
+        value: this.tr("Select users or organizations from the list below. Search them if they aren't listed."),
         rich: true,
         wrap: true,
-        visibility: "excluded",
-        padding: 8
+        paddingBottom: 5
       });
       this.add(introLabel);

-      const filter = this.__textFilter = new osparc.filter.TextFilter("name", "collaboratorsManager").set({
-        allowStretchX: true,
-        margin: [0, 10, 5, 10]
-      });
+      const toolbar = new qx.ui.container.Composite(new qx.ui.layout.HBox(10).set({
+        alignY: "middle",
+      }));
+      const filter = this.__textFilter = new osparc.filter.TextFilter("name", "collaboratorsManager");
+      filter.setCompact(true);
       this.addListener("appear", () => filter.getChildControl("textfield").focus());
-      this.add(filter);
+      toolbar.add(filter, {
+        flex: 1
+      });
+      const searchButton = this.__searchButton = new osparc.ui.form.FetchButton(this.tr("Search"), "@FontAwesome5Solid/search/12").set({
+        maxHeight: 30,
+      });
+      const command = new qx.ui.command.Command("Enter");
+      searchButton.setCommand(command);
+      searchButton.addListener("execute", () => this.__searchUsers(), this);
+      toolbar.add(searchButton);
+      this.add(toolbar);

       const collabButtonsContainer = this.__collabButtonsContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox());
       const scrollContainer = new qx.ui.container.Scroll();
@@ -82,13 +91,6 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", {
       const buttons = new qx.ui.container.Composite(new qx.ui.layout.HBox().set({
         alignX: "right"
       }));
-      // Quick access for users that still don't belong to any organization
-      const orgsButton = this.__orgsButton = new qx.ui.form.Button(this.tr("My Organizations...")).set({
-        appearance: "form-button",
-        visibility: "excluded",
-      });
-      orgsButton.addListener("execute", () => osparc.desktop.organizations.OrganizationsWindow.openWindow(), this);
-      buttons.add(orgsButton);
       const shareButton = this.__shareButton = new osparc.ui.form.FetchButton(this.tr("Share")).set({
         appearance: "form-button",
         enabled: false,
@@ -98,42 +100,57 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", {
       this.add(buttons);
     },

-    __reloadCollaborators: function() {
-      let includeProductEveryone = false;
+    __searchUsers: function() {
+      const text = this.__textFilter.getChildControl("textfield").getValue();
+      this.__searchButton.setFetching(true);
+      osparc.store.Users.getInstance().searchUsers(text)
+        .then(users => {
+          users.forEach(user => user["collabType"] = 2);
+          this.__addPotentialCollaborators(users);
+        })
+        .catch(err => {
+          console.error(err);
+          osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR");
+        })
+        .finally(() => this.__searchButton.setFetching(false));
+    },
+
+    __showProductEveryone: function() {
+      let showProductEveryone = false;
       if (this.__showOrganizations === false) {
-        includeProductEveryone = false;
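The search flow above pairs an osparc.ui.form.FetchButton with a qx.ui.command.Command bound to Enter, so typing a query and pressing Enter triggers the same handler as clicking Search, while the button shows its fetching state. A condensed sketch of that wiring, under the assumption that searchUsers() resolves with the matching users:

    // sketch only
    const searchButton = new osparc.ui.form.FetchButton("Search");
    searchButton.setCommand(new qx.ui.command.Command("Enter"));
    searchButton.addListener("execute", () => {
      searchButton.setFetching(true);
      osparc.store.Users.getInstance().searchUsers("alice")   // "alice" is a placeholder query
        .then(users => console.log(users.length + " user(s) found"))
        .finally(() => searchButton.setFetching(false));
    });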
+ showProductEveryone = false; } else if (this.__resourceData && this.__resourceData["resourceType"] === "template") { // only users with permissions can share templates with ProductEveryone - includeProductEveryone = osparc.data.Permissions.getInstance().canDo("study.everyone.share"); + showProductEveryone = osparc.data.Permissions.getInstance().canDo("study.everyone.share"); } else if (this.__resourceData && this.__resourceData["resourceType"] === "service") { // all users can share services with ProductEveryone - includeProductEveryone = true; + showProductEveryone = true; } - const potentialCollaborators = osparc.store.Groups.getInstance().getPotentialCollaborators(false, includeProductEveryone) - this.__visibleCollaborators = potentialCollaborators; - const anyCollaborator = Object.keys(potentialCollaborators).length; - // tell the user that belonging to an organization is required to start sharing - this.__introLabel.setVisibility(anyCollaborator ? "excluded" : "visible"); - this.__orgsButton.setVisibility(anyCollaborator ? "excluded" : "visible"); - - // or start sharing - this.__textFilter.setVisibility(anyCollaborator ? "visible" : "excluded"); - this.__collabButtonsContainer.setVisibility(anyCollaborator ? "visible" : "excluded"); - this.__shareButton.setVisibility(anyCollaborator ? "visible" : "excluded"); - this.__addEditors(); + return showProductEveryone; + }, + + __reloadPotentialCollaborators: function() { + const includeProductEveryone = this.__showProductEveryone(); + this.__potentialCollaborators = osparc.store.Groups.getInstance().getPotentialCollaborators(false, includeProductEveryone); + const potentialCollaborators = Object.values(this.__potentialCollaborators); + this.__addPotentialCollaborators(potentialCollaborators); }, __collaboratorButton: function(collaborator) { const collaboratorButton = new osparc.filter.CollaboratorToggleButton(collaborator); + collaboratorButton.groupId = collaborator.getGroupId(); collaboratorButton.addListener("changeValue", e => { const selected = e.getData(); if (selected) { this.__selectedCollaborators.push(collaborator.getGroupId()); } else { - this.__selectedCollaborators.remove(collaborator.getGroupId()); + const idx = this.__selectedCollaborators.indexOf(collaborator.getGroupId()); + if (idx > -1) { + this.__selectedCollaborators.splice(idx, 1); + } } this.__shareButton.setEnabled(Boolean(this.__selectedCollaborators.length)); }, this); @@ -141,11 +158,9 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { return collaboratorButton; }, - __addEditors: function() { - const visibleCollaborators = Object.values(this.__visibleCollaborators); - + __addPotentialCollaborators: function(potentialCollaborators) { // sort them first - visibleCollaborators.sort((a, b) => { + potentialCollaborators.sort((a, b) => { if (a["collabType"] > b["collabType"]) { return 1; } @@ -159,27 +174,35 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { }); let existingCollabs = []; - if (this.__resourceData && this.__resourceData["accessRights"]) { - // study/template/service/wallet - if (this.__resourceData["resourceType"] === "wallet") { + if (this.__resourceData) { + if (this.__resourceData["groupMembers"] && this.__resourceData["resourceType"] === "organization") { + // organization + existingCollabs = Object.keys(this.__resourceData["groupMembers"]); + } else if (this.__resourceData["accessRights"] && this.__resourceData["resourceType"] === "wallet") { + // wallet // array of objects existingCollabs = 
this.__resourceData["accessRights"].map(collab => collab["gid"]); - } else { + } else if (this.__resourceData["accessRights"]) { + // study/template/service/ // object existingCollabs = Object.keys(this.__resourceData["accessRights"]); } } const existingCollaborators = existingCollabs.map(c => parseInt(c)); - visibleCollaborators.forEach(visibleCollaborator => { - // do not list the visibleCollaborators that are already collaborators - if (existingCollaborators.includes(visibleCollaborator.getGroupId())) { + potentialCollaborators.forEach(potentialCollaborator => { + // do not list the potentialCollaborators that are already collaborators + if (existingCollaborators.includes(potentialCollaborator.getGroupId())) { + return; + } + // do not list those that were already listed + if (this.__collabButtonsContainer.getChildren().find(c => "groupId" in c && c["groupId"] === potentialCollaborator.getGroupId())) { return; } - if (this.__showOrganizations === false && visibleCollaborator["collabType"] !== 2) { + if (this.__showOrganizations === false && potentialCollaborator["collabType"] !== 2) { return; } - this.__collabButtonsContainer.add(this.__collaboratorButton(visibleCollaborator)); + this.__collabButtonsContainer.add(this.__collaboratorButton(potentialCollaborator)); }); }, diff --git a/services/static-webserver/client/source/class/osparc/store/Groups.js b/services/static-webserver/client/source/class/osparc/store/Groups.js index e954de7aba6..e897d6b4285 100644 --- a/services/static-webserver/client/source/class/osparc/store/Groups.js +++ b/services/static-webserver/client/source/class/osparc/store/Groups.js @@ -45,11 +45,6 @@ qx.Class.define("osparc.store.Groups", { check: "osparc.data.model.Group", init: {} }, - - reachableUsers: { - check: "Object", - init: {} - }, }, events: { @@ -115,8 +110,7 @@ qx.Class.define("osparc.store.Groups", { // reset group's group members group.setGroupMembers({}); orgMembers.forEach(orgMember => { - const user = new osparc.data.model.UserMember(orgMember); - this.__addToUsersCache(user, groupId); + this.__addMemberToCache(orgMember, groupId); }); } }); @@ -126,8 +120,9 @@ qx.Class.define("osparc.store.Groups", { return new Promise(resolve => { this.__fetchGroups() .then(orgs => { - // reset Reachable Users - this.resetReachableUsers(); + // reset Users + const usersStore = osparc.store.Users.getInstance(); + usersStore.resetUsers(); const promises = Object.keys(orgs).map(orgId => this.__fetchGroupMembers(orgId)); Promise.all(promises) .then(() => resolve()) @@ -152,8 +147,9 @@ qx.Class.define("osparc.store.Groups", { allGroupsAndUsers[organization.getGroupId()] = organization; }); - Object.values(this.getReachableUsers()).forEach(reachableUser => { - allGroupsAndUsers[reachableUser.getGroupId()] = reachableUser; + const users = osparc.store.Users.getInstance().getUsers(); + users.forEach(user => { + allGroupsAndUsers[user.getGroupId()] = user; }); return allGroupsAndUsers; @@ -174,9 +170,11 @@ qx.Class.define("osparc.store.Groups", { groupMe["collabType"] = 2; groups.push(groupMe); - Object.values(this.getReachableUsers()).forEach(member => { - member["collabType"] = 2; - groups.push(member); + const usersStore = osparc.store.Users.getInstance(); + const users = usersStore.getUsers(); + users.forEach(user => { + user["collabType"] = 2; + groups.push(user); }); Object.values(this.getOrganizations()).forEach(org => { @@ -202,6 +200,12 @@ qx.Class.define("osparc.store.Groups", { const potentialCollaborators = {}; const orgs = this.getOrganizations(); const 
productEveryone = this.getEveryoneProductGroup(); + + if (includeProductEveryone && productEveryone) { + productEveryone["collabType"] = 0; + potentialCollaborators[productEveryone.getGroupId()] = productEveryone; + } + Object.values(orgs).forEach(org => { if (org.getAccessRights()["read"]) { // maybe because of migration script, some users have access to the product everyone group @@ -213,20 +217,20 @@ qx.Class.define("osparc.store.Groups", { potentialCollaborators[org.getGroupId()] = org; } }); - const members = this.getReachableUsers(); - for (const gid of Object.keys(members)) { - members[gid]["collabType"] = 2; - potentialCollaborators[gid] = members[gid]; - } + if (includeMe) { const myGroup = this.getGroupMe(); myGroup["collabType"] = 2; potentialCollaborators[myGroup.getGroupId()] = myGroup; } - if (includeProductEveryone && productEveryone) { - productEveryone["collabType"] = 0; - potentialCollaborators[productEveryone.getGroupId()] = productEveryone; - } + + const usersStore = osparc.store.Users.getInstance(); + const users = usersStore.getUsers(); + users.forEach(user => { + user["collabType"] = 2; + potentialCollaborators[user.getGroupId()] = user; + }); + return potentialCollaborators; }, @@ -240,16 +244,18 @@ qx.Class.define("osparc.store.Groups", { getUserByUserId: function(userId) { if (userId) { - const visibleMembers = this.getReachableUsers(); - return Object.values(visibleMembers).find(member => member.getUserId() === userId); + const usersStore = osparc.store.Users.getInstance(); + const users = usersStore.getUsers(); + return users.find(user => user.getUserId() === userId); } return null; }, getUserByGroupId: function(groupId) { if (groupId) { - const visibleMembers = this.getReachableUsers(); - return Object.values(visibleMembers).find(member => member.getGroupId() === groupId); + const usersStore = osparc.store.Users.getInstance(); + const users = usersStore.getUsers(); + return users.find(user => user.getGroupId() === groupId); } return null; }, @@ -419,14 +425,15 @@ qx.Class.define("osparc.store.Groups", { delete this.getOrganizations()[groupId]; }, - __addToUsersCache: function(user, orgId = null) { + __addMemberToCache: function(orgMember, orgId = null) { + const userMember = new osparc.data.model.UserMember(orgMember); if (orgId) { const organization = this.getOrganization(orgId); if (organization) { - organization.addGroupMember(user); + organization.addGroupMember(userMember); } } - this.getReachableUsers()[user.getGroupId()] = user; + osparc.store.Users.getInstance().addUser(orgMember); }, __removeUserFromCache: function(userId, orgId) { diff --git a/services/static-webserver/client/source/class/osparc/store/Users.js b/services/static-webserver/client/source/class/osparc/store/Users.js new file mode 100644 index 00000000000..59da1a2fb9a --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/store/Users.js @@ -0,0 +1,77 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.store.Users", { + extend: qx.core.Object, + type: "singleton", + + properties: { + users: { + check: "Array", + init: [], + nullable: false, + }, + }, + + members: { + fetchUser: function(groupId) { + const params = { + url: { + gid: 
groupId
+        }
+      };
+      return osparc.data.Resources.fetch("users", "get", params)
+        .then(userData => {
+          const user = this.addUser(userData[0]);
+          return user;
+        });
+    },
+
+    getUser: function(groupId, fetchIfNotFound = true) {
+      const userFound = this.getUsers().find(user => user.getGroupId() === groupId);
+      if (userFound) {
+        return Promise.resolve(userFound);
+      } else if (fetchIfNotFound) {
+        return this.fetchUser(groupId);
+      }
+      return Promise.reject(new Error("user not found"));
+    },
+
+    addUser: function(userData) {
+      const user = new osparc.data.model.User(userData);
+      const userFound = this.getUsers().find(usr => usr.getGroupId() === user.getGroupId());
+      if (!userFound) {
+        this.getUsers().push(user);
+      }
+      return user;
+    },
+
+    searchUsers: function(text) {
+      const params = {
+        data: {
+          match: text
+        }
+      };
+      return osparc.data.Resources.fetch("users", "search", params)
+        .then(usersData => {
+          const users = [];
+          usersData.forEach(userData => users.push(this.addUser(userData)));
+          return users;
+        });
+    },
+  }
+});
diff --git a/services/static-webserver/client/source/class/osparc/theme/Appearance.js b/services/static-webserver/client/source/class/osparc/theme/Appearance.js
index 16facaa2949..82b0c9b5666 100644
--- a/services/static-webserver/client/source/class/osparc/theme/Appearance.js
+++ b/services/static-webserver/client/source/class/osparc/theme/Appearance.js
@@ -19,6 +19,39 @@ qx.Theme.define("osparc.theme.Appearance", {
   extend: osparc.theme.common.Appearance,

   appearances: {
+    "strong-ui": {
+      style: () => {
+        return {
+          textColor: "default-button-text",
+          backgroundColor: "product-color",
+        };
+      }
+    },
+
+    "dragdrop-no-cursor": {
+      style: () => {
+        return {
+          source: "",
+        }
+      }
+    },
+
+    "dragdrop-own-cursor": {
+      style: states => {
+        let icon = "";
+        if (states.move) {
+          icon = "@FontAwesome5Solid/check/14";
+        } else {
+          icon = "@FontAwesome5Solid/times/14";
+        }
+        return {
+          source: icon,
+          position: "right-top",
+          offset: [12, 0, 0, 12],
+        }
+      }
+    },
+
     "material-button-invalid": {},
     "pb-list": {
       include: "list",
@@ -537,13 +570,12 @@ qx.Theme.define("osparc.theme.Appearance", {
     "widget/reset-button": {},

     "form-button": {
+      include: "strong-ui",
       style: function(states) {
         const style = {
           decorator: "form-button",
           cursor: "pointer",
-          textColor: "default-button-text",
           padding: 5,
-          backgroundColor: "default-button"
         };
         if (states.hovered) {
           style.decorator = "form-button-hovered";
@@ -929,11 +961,19 @@ qx.Theme.define("osparc.theme.Appearance", {
       }
     },

-    // override in product
     "strong-button": {
       include: "form-button"
     },

+    "warning-button": {
+      include: "form-button",
+      style: state => ({
+        decorator: state.hovered || state.focused ? "form-button-warning-hover" : "form-button-warning",
+        backgroundColor: state.hovered || state.focused ? "default-button-hover-background" : "warning",
+        textColor: state.hovered || state.focused ?
"default-button-text" : "black", + }) + }, + "danger-button": { include: "form-button", style: state => ({ diff --git a/services/static-webserver/client/source/class/osparc/theme/Decoration.js b/services/static-webserver/client/source/class/osparc/theme/Decoration.js index 7eae08b8f89..a1381421494 100644 --- a/services/static-webserver/client/source/class/osparc/theme/Decoration.js +++ b/services/static-webserver/client/source/class/osparc/theme/Decoration.js @@ -554,6 +554,24 @@ qx.Theme.define("osparc.theme.Decoration", { } }, + // Warning button + "form-button-warning": { + include:"form-button-outlined", + style: { + color: "warning", + width: 1, + style: "solid" + } + }, + "form-button-warning-hover": { + include:"form-button-outlined", + style: { + color: "warning", + width: 1, + style: "solid" + } + }, + // Delete button "form-button-danger": { include:"form-button-outlined", diff --git a/services/static-webserver/client/source/class/osparc/theme/products/osparc/Appearance.js b/services/static-webserver/client/source/class/osparc/theme/products/osparc/Appearance.js deleted file mode 100644 index 4aabe3dfec2..00000000000 --- a/services/static-webserver/client/source/class/osparc/theme/products/osparc/Appearance.js +++ /dev/null @@ -1,26 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2023 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -qx.Theme.define("osparc.theme.products.osparc.Appearance", { - extend: osparc.theme.Appearance, - - appearances: { - "strong-button": { - include: "form-button" - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/theme/products/osparc/ThemeDark.js b/services/static-webserver/client/source/class/osparc/theme/products/osparc/ThemeDark.js index 896d57c93ad..d9fa660e1fc 100644 --- a/services/static-webserver/client/source/class/osparc/theme/products/osparc/ThemeDark.js +++ b/services/static-webserver/client/source/class/osparc/theme/products/osparc/ThemeDark.js @@ -21,6 +21,6 @@ qx.Theme.define("osparc.theme.products.osparc.ThemeDark", { decoration: osparc.theme.Decoration, font: osparc.theme.Font, icon: qx.theme.icon.Oxygen, - appearance: osparc.theme.products.osparc.Appearance + appearance: osparc.theme.Appearance, } }); diff --git a/services/static-webserver/client/source/class/osparc/theme/products/osparc/ThemeLight.js b/services/static-webserver/client/source/class/osparc/theme/products/osparc/ThemeLight.js index 1db9fae0543..8b145f000a9 100644 --- a/services/static-webserver/client/source/class/osparc/theme/products/osparc/ThemeLight.js +++ b/services/static-webserver/client/source/class/osparc/theme/products/osparc/ThemeLight.js @@ -21,6 +21,6 @@ qx.Theme.define("osparc.theme.products.osparc.ThemeLight", { decoration: osparc.theme.Decoration, font: osparc.theme.Font, icon: qx.theme.icon.Oxygen, - appearance: osparc.theme.products.osparc.Appearance + appearance: osparc.theme.Appearance, } }); diff --git a/services/static-webserver/client/source/class/osparc/theme/products/s4l/Appearance.js b/services/static-webserver/client/source/class/osparc/theme/products/s4l/Appearance.js deleted file mode 100644 index fe74eb92aa3..00000000000 --- a/services/static-webserver/client/source/class/osparc/theme/products/s4l/Appearance.js +++ /dev/null @@ -1,26 +0,0 @@ 
-/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2023 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -qx.Theme.define("osparc.theme.products.s4l.Appearance", { - extend: osparc.theme.Appearance, - - appearances: { - "strong-button": { - include: "form-button" - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/theme/products/s4l/ThemeDark.js b/services/static-webserver/client/source/class/osparc/theme/products/s4l/ThemeDark.js index 50364a0ee31..8f067a59936 100644 --- a/services/static-webserver/client/source/class/osparc/theme/products/s4l/ThemeDark.js +++ b/services/static-webserver/client/source/class/osparc/theme/products/s4l/ThemeDark.js @@ -21,6 +21,6 @@ qx.Theme.define("osparc.theme.products.s4l.ThemeDark", { decoration: osparc.theme.Decoration, font: osparc.theme.Font, icon: qx.theme.icon.Oxygen, - appearance: osparc.theme.products.s4l.Appearance + appearance: osparc.theme.Appearance, } }); diff --git a/services/static-webserver/client/source/class/osparc/theme/products/s4l/ThemeLight.js b/services/static-webserver/client/source/class/osparc/theme/products/s4l/ThemeLight.js index 08c99c6d382..6988ca53103 100644 --- a/services/static-webserver/client/source/class/osparc/theme/products/s4l/ThemeLight.js +++ b/services/static-webserver/client/source/class/osparc/theme/products/s4l/ThemeLight.js @@ -21,6 +21,6 @@ qx.Theme.define("osparc.theme.products.s4l.ThemeLight", { decoration: osparc.theme.Decoration, font: osparc.theme.Font, icon: qx.theme.icon.Oxygen, - appearance: osparc.theme.products.s4l.Appearance + appearance: osparc.theme.Appearance, } }); diff --git a/services/static-webserver/client/source/class/osparc/theme/products/tis/Appearance.js b/services/static-webserver/client/source/class/osparc/theme/products/tis/Appearance.js deleted file mode 100644 index b259a72698a..00000000000 --- a/services/static-webserver/client/source/class/osparc/theme/products/tis/Appearance.js +++ /dev/null @@ -1,26 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2023 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -qx.Theme.define("osparc.theme.products.tis.Appearance", { - extend: osparc.theme.Appearance, - - appearances: { - "strong-button": { - include: "form-button" - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/theme/products/tis/ThemeDark.js b/services/static-webserver/client/source/class/osparc/theme/products/tis/ThemeDark.js index 438a284d651..439239fd325 100644 --- a/services/static-webserver/client/source/class/osparc/theme/products/tis/ThemeDark.js +++ b/services/static-webserver/client/source/class/osparc/theme/products/tis/ThemeDark.js @@ -21,6 +21,6 @@ qx.Theme.define("osparc.theme.products.tis.ThemeDark", { decoration: osparc.theme.Decoration, font: osparc.theme.Font, icon: qx.theme.icon.Oxygen, - appearance: osparc.theme.products.tis.Appearance + appearance: osparc.theme.Appearance, } }); diff --git a/services/static-webserver/client/source/class/osparc/theme/products/tis/ThemeLight.js 
b/services/static-webserver/client/source/class/osparc/theme/products/tis/ThemeLight.js index 10dc3c9a079..f4cb4252cf2 100644 --- a/services/static-webserver/client/source/class/osparc/theme/products/tis/ThemeLight.js +++ b/services/static-webserver/client/source/class/osparc/theme/products/tis/ThemeLight.js @@ -21,6 +21,6 @@ qx.Theme.define("osparc.theme.products.tis.ThemeLight", { decoration: osparc.theme.Decoration, font: osparc.theme.Font, icon: qx.theme.icon.Oxygen, - appearance: osparc.theme.products.tis.Appearance + appearance: osparc.theme.Appearance, } }); diff --git a/services/static-webserver/client/source/class/osparc/ui/window/Confirmation.js b/services/static-webserver/client/source/class/osparc/ui/window/Confirmation.js index 0fe65ef4f74..f8e42362405 100644 --- a/services/static-webserver/client/source/class/osparc/ui/window/Confirmation.js +++ b/services/static-webserver/client/source/class/osparc/ui/window/Confirmation.js @@ -47,7 +47,7 @@ qx.Class.define("osparc.ui.window.Confirmation", { }, confirmAction: { - check: [null, "create", "delete"], + check: [null, "create", "warning", "delete"], init: null, nullable: true, apply: "__applyConfirmAppearance" @@ -74,7 +74,10 @@ qx.Class.define("osparc.ui.window.Confirmation", { const confBtn = this.__confirmButton; switch (confirmationAction) { case "create": - confBtn.setAppearance("form-button"); + confBtn.setAppearance("strong-button"); + break; + case "warning": + confBtn.setAppearance("warning-button"); break; case "delete": confBtn.setAppearance("danger-button"); diff --git a/services/static-webserver/client/source/class/osparc/vipMarket/AnatomicalModelListItem.js b/services/static-webserver/client/source/class/osparc/vipMarket/AnatomicalModelListItem.js index 75b33a3229d..ea3897841fa 100644 --- a/services/static-webserver/client/source/class/osparc/vipMarket/AnatomicalModelListItem.js +++ b/services/static-webserver/client/source/class/osparc/vipMarket/AnatomicalModelListItem.js @@ -163,7 +163,10 @@ qx.Class.define("osparc.vipMarket.AnatomicalModelListItem", { __applyPurchases: function(purchases) { if (purchases.length) { - this.setBackgroundColor("strong-main"); + this.set({ + textColor: "default-button-text", + backgroundColor: "strong-main", + }) } }, diff --git a/services/static-webserver/client/source/class/osparc/vipMarket/Market.js b/services/static-webserver/client/source/class/osparc/vipMarket/Market.js index dbffeefed8e..dd4f9567f4d 100644 --- a/services/static-webserver/client/source/class/osparc/vipMarket/Market.js +++ b/services/static-webserver/client/source/class/osparc/vipMarket/Market.js @@ -22,7 +22,8 @@ qx.Class.define("osparc.vipMarket.Market", { this.base(arguments); const miniWallet = osparc.desktop.credits.BillingCenter.createMiniWalletView().set({ - paddingRight: 10 + paddingRight: 10, + minWidth: 150, }); this.addWidgetOnTopOfTheTabs(miniWallet); diff --git a/services/static-webserver/client/source/class/osparc/vipMarket/VipMarket.js b/services/static-webserver/client/source/class/osparc/vipMarket/VipMarket.js index f7a0125f3c0..79b2626f260 100644 --- a/services/static-webserver/client/source/class/osparc/vipMarket/VipMarket.js +++ b/services/static-webserver/client/source/class/osparc/vipMarket/VipMarket.js @@ -126,7 +126,8 @@ qx.Class.define("osparc.vipMarket.VipMarket", { decorator: "no-border", spacing: 5, minWidth: 250, - maxWidth: 250 + maxWidth: 250, + backgroundColor: "transparent", }); this.getChildControl("left-side").add(control, { flex: 1 @@ -296,7 +297,7 @@ 
qx.Class.define("osparc.vipMarket.VipMarket", { const found = this.__anatomicalModels.find(model => model["modelId"] === modelId); if (found) { found["purchases"].push(purchaseData); - this.__populateModels(); + this.__populateModels(modelId); anatomicModelDetails.setAnatomicalModelsData(found); } }) @@ -317,7 +318,7 @@ qx.Class.define("osparc.vipMarket.VipMarket", { .catch(err => console.error(err)); }, - __populateModels: function() { + __populateModels: function(selectModelId) { const models = this.__anatomicalModels; this.__anatomicalModelsModel.removeAll(); @@ -358,6 +359,18 @@ qx.Class.define("osparc.vipMarket.VipMarket", { sortModel(sortBy); models.forEach(model => this.__anatomicalModelsModel.append(qx.data.marshal.Json.createModel(model))); }, this); + + // select model after timeout, there is something that changes the selection to empty after populating the list + setTimeout(() => { + const modelsUIList = this.getChildControl("models-list"); + if (selectModelId) { + const entryFound = modelsUIList.getSelectables().find(entry => "getModelId" in entry && entry.getModelId() === selectModelId); + modelsUIList.setSelection([entryFound]); + } else if (modelsUIList.getSelectables().length) { + // select first + modelsUIList.setSelection([modelsUIList.getSelectables()[0]]); + } + }, 100); }, __sendImportModelMessage: function(modelId) { diff --git a/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js b/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js index b9dd0867a4c..d14212aa20f 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js +++ b/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js @@ -54,18 +54,18 @@ qx.Class.define("osparc.workbench.ServiceCatalog", { this.__sortBy = osparc.service.SortServicesButtons.DefaultSorting; - let catalogLayout = new qx.ui.layout.VBox(); + const catalogLayout = new qx.ui.layout.VBox(); this.setLayout(catalogLayout); - let filterLayout = this.__createFilterLayout(); + const filterLayout = this.__createFilterLayout(); this.add(filterLayout); - let list = this.__createListLayout(); + const list = this.__createListLayout(); this.add(list, { flex: 1 }); - let btnLayout = this.__createButtonsLayout(); + const btnLayout = this.__createButtonsLayout(); this.add(btnLayout); this.__createEvents(); @@ -103,9 +103,11 @@ qx.Class.define("osparc.workbench.ServiceCatalog", { }); const filters = new osparc.filter.group.ServiceFilterGroup("serviceCatalog").set({ - maxHeight: 30 + maxHeight: 30, + }); + this.__textFilter = filters.getTextFilter().getChildControl("textfield", true).set({ + minWidth: 150, }); - this.__textFilter = filters.getTextFilter().getChildControl("textfield", true); layout.add(filters); layout.add(new qx.ui.core.Spacer(), { @@ -131,12 +133,10 @@ qx.Class.define("osparc.workbench.ServiceCatalog", { width: 568, backgroundColor: "background-main" }); - const scrolledServices = new qx.ui.container.Scroll().set({ - height: 260 - }); + const scrolledServices = new qx.ui.container.Scroll(); scrolledServices.add(serviceList); - this.__serviceList.addListener("changeValue", e => { + this.__serviceList.addListener("changeSelected", e => { if (e.getData() && e.getData().getService()) { const selectedService = e.getData().getService(); this.__changedSelection(selectedService.getKey()); diff --git a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js 
b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js index 0bde129f15e..bf42bde89f7 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js +++ b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js @@ -290,12 +290,11 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { }, __openServiceCatalog: function(e) { - const winPos = this.__pointerEventToScreenPos(e); const nodePos = this.__pointerEventToWorkbenchPos(e); - this.openServiceCatalog(winPos, nodePos); + this.openServiceCatalog(nodePos); }, - openServiceCatalog: function(winPos, nodePos) { + openServiceCatalog: function(nodePos) { if (this.getStudy().isReadOnly()) { return null; } @@ -304,11 +303,6 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { return null; } const srvCat = new osparc.workbench.ServiceCatalog(); - const maxLeft = this.getBounds().width - osparc.workbench.ServiceCatalog.Width; - const maxHeight = this.getBounds().height - osparc.workbench.ServiceCatalog.Height; - const posX = Math.min(winPos.x, maxLeft); - const posY = Math.min(winPos.y, maxHeight); - srvCat.moveTo(posX + this.__getLeftOffset(), posY + this.__getTopOffset()); srvCat.addListener("addService", async e => { const { service, @@ -321,6 +315,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { this._createEdgeBetweenNodes(nodeLeftId ? nodeLeftId : newNodeId, nodeRightId ? nodeRightId : newNodeId, true); } }, this); + srvCat.center(); srvCat.open(); return srvCat; }, @@ -770,8 +765,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { let dragNodeId = data.nodeId; if (this.__tempEdgeNodeId === dragNodeId) { - const winPos = this.__unscaleCoordinates(this.__pointerPos.x, this.__pointerPos.y); - const srvCat = this.openServiceCatalog(winPos, this.__pointerPos); + const srvCat = this.openServiceCatalog(this.__pointerPos); if (srvCat) { this.__tempEdgeIsInput === true ? 
srvCat.setContext(null, dragNodeId) : srvCat.setContext(dragNodeId, null); srvCat.addListener("close", () => this.__removeTempEdge(), this); @@ -1331,10 +1325,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { "text": "\uf090", // in "action": () => { const freePos = this.getStudy().getWorkbench().getFreePosition(nodeUI.getNode(), true); - const srvCat = this.openServiceCatalog({ - x: 50, - y: 50 - }, freePos); + const srvCat = this.openServiceCatalog(freePos); if (srvCat) { srvCat.setContext(null, nodeUI.getNodeId()); } @@ -1344,10 +1335,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { "text": "\uf08b", // out "action": () => { const freePos = this.getStudy().getWorkbench().getFreePosition(nodeUI.getNode(), false); - const srvCat = this.openServiceCatalog({ - x: 50, - y: 50 - }, freePos); + const srvCat = this.openServiceCatalog(freePos); if (srvCat) { srvCat.setContext(nodeUI.getNodeId(), null); } diff --git a/services/storage/src/simcore_service_storage/db_file_meta_data.py b/services/storage/src/simcore_service_storage/db_file_meta_data.py index b742449ee00..593a48f72b2 100644 --- a/services/storage/src/simcore_service_storage/db_file_meta_data.py +++ b/services/storage/src/simcore_service_storage/db_file_meta_data.py @@ -72,7 +72,7 @@ def _list_filter_with_partial_file_id_stmt( file_id_prefix: str | None, partial_file_id: str | None, sha256_checksum: SHA256Str | None, - only_files: bool, + is_directory: bool | None, limit: int | None = None, offset: int | None = None, ): @@ -98,8 +98,8 @@ def _list_filter_with_partial_file_id_stmt( conditions.append(file_meta_data.c.file_id.startswith(file_id_prefix)) if partial_file_id: conditions.append(file_meta_data.c.file_id.ilike(f"%{partial_file_id}%")) - if only_files: - conditions.append(file_meta_data.c.is_directory.is_(False)) + if is_directory is not None: + conditions.append(file_meta_data.c.is_directory.is_(is_directory)) if sha256_checksum: conditions.append(file_meta_data.c.sha256_checksum == sha256_checksum) @@ -119,7 +119,7 @@ async def list_filter_with_partial_file_id( file_id_prefix: str | None, partial_file_id: str | None, sha256_checksum: SHA256Str | None, - only_files: bool, + is_directory: bool | None, limit: int | None = None, offset: int | None = None, ) -> list[FileMetaDataAtDB]: @@ -129,7 +129,7 @@ async def list_filter_with_partial_file_id( file_id_prefix=file_id_prefix, partial_file_id=partial_file_id, sha256_checksum=sha256_checksum, - only_files=only_files, + is_directory=is_directory, limit=limit, offset=offset, ) diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py index b6e7e57f1cc..d41630b5230 100644 --- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py +++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py @@ -21,6 +21,7 @@ UploadedBytesTransferredCallback, ) from models_library.api_schemas_storage import ( + UNDEFINED_SIZE, UNDEFINED_SIZE_TYPE, LinkType, S3BucketName, @@ -79,7 +80,11 @@ from .s3 import get_s3_client from .s3_utils import S3TransferDataCB, update_task_progress from .settings import Settings -from .simcore_s3_dsm_utils import expand_directory, get_directory_file_id +from .simcore_s3_dsm_utils import ( + compute_file_id_prefix, + expand_directory, + get_directory_file_id, +) from .utils import ( convert_db_to_model, download_to_file_or_raise, @@ -180,8 +185,8 @@ async def list_files( # noqa C901 user_id=uid, project_ids=accessible_projects_ids ), file_id_prefix=None, + 
is_directory=None, partial_file_id=uuid_filter, - only_files=False, sha256_checksum=None, ) @@ -523,22 +528,32 @@ async def delete_file( if not can.delete: raise FileAccessRightError(access_right="delete", file_id=file_id) - with suppress(FileMetaDataNotFoundError): - # NOTE: deleting might be slow, so better ensure we release the connection - async with self.engine.acquire() as conn: - file: FileMetaDataAtDB = await db_file_meta_data.get( - conn, TypeAdapter(SimcoreS3FileID).validate_python(file_id) - ) + try: await get_s3_client(self.app).delete_objects_recursively( - bucket=file.bucket_name, - prefix=( - ensure_ends_with(file.file_id, "/") - if file.is_directory - else file.file_id - ), + bucket=self.simcore_bucket_name, + prefix=file_id, ) - async with self.engine.acquire() as conn: - await db_file_meta_data.delete(conn, [file.file_id]) + except S3KeyNotFoundError: + _logger.warning("File %s not found in S3", file_id) + # we still need to clean up the database entry (it exists) + # and to invalidate the size of the parent directory + + async with self.engine.acquire() as conn: + await db_file_meta_data.delete(conn, [file_id]) + + if parent_dir_fmds := await db_file_meta_data.list_filter_with_partial_file_id( + conn, + user_or_project_filter=UserOrProjectFilter( + user_id=user_id, project_ids=[] + ), + file_id_prefix=compute_file_id_prefix(file_id, 2), + partial_file_id=None, + is_directory=True, + sha256_checksum=None, + ): + parent_dir_fmd = max(parent_dir_fmds, key=lambda fmd: len(fmd.file_id)) + parent_dir_fmd.file_size = UNDEFINED_SIZE + await db_file_meta_data.upsert(conn, parent_dir_fmd) async def delete_project_simcore_s3( self, user_id: UserID, project_id: ProjectID, node_id: NodeID | None = None @@ -738,7 +753,7 @@ async def search_owned_files( ), file_id_prefix=file_id_prefix, partial_file_id=None, - only_files=True, + is_directory=False, sha256_checksum=sha256_checksum, limit=limit, offset=offset, diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm_utils.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm_utils.py index e4a58549e31..eb5f2f1240c 100644 --- a/services/storage/src/simcore_service_storage/simcore_s3_dsm_utils.py +++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm_utils.py @@ -119,3 +119,8 @@ async def _get_fmd( directory_file_id_fmd = await _get_fmd(conn, directory_file_id) return directory_file_id if directory_file_id_fmd else None + + +def compute_file_id_prefix(file_id: str, levels: int): + components = file_id.strip("/").split("/") + return "/".join(components[:levels]) diff --git a/services/storage/tests/unit/test_db_file_meta_data.py b/services/storage/tests/unit/test_db_file_meta_data.py index da94c6a5eeb..c362fabe82f 100644 --- a/services/storage/tests/unit/test_db_file_meta_data.py +++ b/services/storage/tests/unit/test_db_file_meta_data.py @@ -31,7 +31,7 @@ def _check(func_smt, **kwargs): file_id_prefix=None, partial_file_id=None, sha256_checksum=None, - only_files=True, + is_directory=False, ) # WHERE file_meta_data.is_directory IS false ORDER BY file_meta_data.created_at ASC @@ -41,7 +41,7 @@ def _check(func_smt, **kwargs): file_id_prefix=None, partial_file_id=None, sha256_checksum=None, - only_files=True, + is_directory=False, ) # WHERE file_meta_data.user_id = '42' AND file_meta_data.is_directory IS false ORDER BY file_meta_data.created_at ASC @@ -53,7 +53,7 @@ def _check(func_smt, **kwargs): file_id_prefix=None, partial_file_id=None, sha256_checksum=None, - only_files=True, + is_directory=False, ) # 
WHERE (file_meta_data.user_id = '42' OR file_meta_data.project_id IN ('18d5'..., )) AND file_meta_data.is_directory IS false ORDER BY file_meta_data.created_at ASC @@ -65,7 +65,7 @@ def _check(func_smt, **kwargs): file_id_prefix=None, partial_file_id=None, sha256_checksum=None, - only_files=True, + is_directory=False, limit=10, offset=1, ) @@ -76,9 +76,9 @@ def _check(func_smt, **kwargs): _list_filter_with_partial_file_id_stmt, user_or_project_filter=UserOrProjectFilter(user_id=42, project_ids=[]), file_id_prefix=None, + is_directory=None, partial_file_id="{project_id}/", sha256_checksum=None, - only_files=False, ) # As used in SimcoreS3DataManager.search_owned_files @@ -88,7 +88,7 @@ def _check(func_smt, **kwargs): file_id_prefix="api/", partial_file_id=None, sha256_checksum=faker.sha256(), - only_files=True, + is_directory=False, limit=10, offset=0, ) diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index f9fc415d86a..40d4b72f15e 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -1345,6 +1345,7 @@ async def test_upload_file_is_directory_and_remove_content( client: TestClient, location_id: LocationID, user_id: UserID, + faker: Faker, ): FILE_SIZE_IN_DIR = TypeAdapter(ByteSize).validate_python("1Mib") DIR_NAME = "some-dir" @@ -1386,6 +1387,52 @@ async def test_upload_file_is_directory_and_remove_content( ) assert len(list_of_files) == SUBDIR_COUNT * FILE_COUNT + # DELETE NOT EXISTING + + assert client.app + + delete_url = ( + client.app.router["delete_file"] + .url_for( + location_id=f"{location_id}", + file_id=urllib.parse.quote( + "/".join(list_of_files[0].file_id.split("/")[:2]) + "/does_not_exist", + safe="", + ), + ) + .with_query(user_id=user_id) + ) + response = await client.delete(f"{delete_url}") + _, error = await assert_status(response, status.HTTP_204_NO_CONTENT) + assert error is None + + list_of_files: list[FileMetaDataGet] = await _list_files_legacy( + client, user_id, location_id, directory_file_upload + ) + + assert len(list_of_files) == SUBDIR_COUNT * FILE_COUNT + + # DELETE ONE FILE FROM THE DIRECTORY + + assert client.app + delete_url = ( + client.app.router["delete_file"] + .url_for( + location_id=f"{location_id}", + file_id=urllib.parse.quote(list_of_files[0].file_id, safe=""), + ) + .with_query(user_id=user_id) + ) + response = await client.delete(f"{delete_url}") + _, error = await assert_status(response, status.HTTP_204_NO_CONTENT) + assert error is None + + list_of_files: list[FileMetaDataGet] = await _list_files_legacy( + client, user_id, location_id, directory_file_upload + ) + + assert len(list_of_files) == SUBDIR_COUNT * FILE_COUNT - 1 + # DIRECTORY REMOVAL await delete_directory(directory_file_upload=directory_file_upload) diff --git a/services/storage/tests/unit/test_simcore_s3_dsm_utils.py b/services/storage/tests/unit/test_simcore_s3_dsm_utils.py new file mode 100644 index 00000000000..01869537c08 --- /dev/null +++ b/services/storage/tests/unit/test_simcore_s3_dsm_utils.py @@ -0,0 +1,21 @@ +import pytest +from simcore_service_storage.simcore_s3_dsm_utils import compute_file_id_prefix + + +@pytest.mark.parametrize( + "file_id, levels, expected", + [ + ( + "b21a3b80-d578-4b33-a224-e24ee2e4966a/42b9cc07-60f5-4d29-a063-176d1467901c/my/amazing/sub/folder/with/a/file.bin", + 3, + "b21a3b80-d578-4b33-a224-e24ee2e4966a/42b9cc07-60f5-4d29-a063-176d1467901c/my", + ), + ( + 
"api/42b9cc07-60f5-4d29-a063-176d1467901c/my/amazing/sub/folder/with/a/file.bin", + 3, + "api/42b9cc07-60f5-4d29-a063-176d1467901c/my", + ), + ], +) +def test_compute_file_id_prefix(file_id, levels, expected): + assert compute_file_id_prefix(file_id, levels) == expected diff --git a/services/web/server/requirements/_base.in b/services/web/server/requirements/_base.in index 308a1604cb3..caf883fc166 100644 --- a/services/web/server/requirements/_base.in +++ b/services/web/server/requirements/_base.in @@ -27,7 +27,6 @@ aiohttp aiohttp_jinja2 aiohttp_security aiohttp_session[secure] -aiohttp-swagger[performance] aiopg[sa] # db aiosmtplib # email asyncpg # db @@ -50,5 +49,6 @@ pydantic[email] # models python-magic # excel python-socketio # web-sockets redis +swagger-ui-py tenacity twilio diff --git a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt index ada75776f9e..810e6e29fad 100644 --- a/services/web/server/requirements/_base.txt +++ b/services/web/server/requirements/_base.txt @@ -61,15 +61,12 @@ aiohttp==3.8.5 # aiohttp-jinja2 # aiohttp-security # aiohttp-session - # aiohttp-swagger aiohttp-jinja2==1.5 # via -r requirements/_base.in aiohttp-security==0.4.0 # via -r requirements/_base.in aiohttp-session==2.11.0 # via -r requirements/_base.in -aiohttp-swagger==1.0.16 - # via -r requirements/_base.in aiopg==1.4.0 # via # -r requirements/../../../../packages/service-library/requirements/_aiohttp.in @@ -269,7 +266,7 @@ jinja2==3.1.2 # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt # aiohttp-jinja2 - # aiohttp-swagger + # swagger-ui-py json2html==1.3.0 # via -r requirements/_base.in jsondiff==2.0.0 @@ -473,6 +470,7 @@ packaging==24.1 # -r requirements/../../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in # gunicorn + # swagger-ui-py pamqp==3.2.1 # via aiormq passlib==1.7.4 @@ -651,7 +649,7 @@ pyyaml==6.0.1 # -c requirements/../../../../requirements/constraints.txt # -r requirements/../../../../packages/service-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in - # aiohttp-swagger + # swagger-ui-py redis==5.0.4 # via # -c requirements/../../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -748,6 +746,8 @@ sqlalchemy==1.4.47 # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # aiopg # alembic +swagger-ui-py==23.9.23 + # via -r requirements/_base.in tenacity==8.5.0 # via # -r requirements/../../../../packages/service-library/requirements/_base.in @@ -783,37 +783,6 @@ typing-extensions==4.12.2 # pydantic # pydantic-core # typer -ujson==5.5.0 - # via - # -c requirements/../../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c 
requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt - # -c 
requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../requirements/constraints.txt - # aiohttp-swagger urllib3==2.2.3 # via # -c requirements/../../../../packages/common-library/requirements/../../../requirements/constraints.txt diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index ce36e2e6e93..2012853da80 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -611,8 +611,9 @@ paths: get: tags: - groups + - users summary: Get All Group Users - description: Gets users in organization groups + description: Gets users in organization or primary groups operationId: get_all_group_users parameters: - name: gid @@ -633,6 +634,7 @@ paths: post: tags: - groups + - users summary: Add Group User description: Adds a user to an organization group using their username, user ID, or email (subject to privacy settings) @@ -659,6 +661,7 @@ paths: get: tags: - groups + - users summary: Get Group User description: Gets specific user in an organization group operationId: get_group_user @@ -689,6 +692,7 @@ paths: patch: tags: - groups + - users summary: Update Group User description: Updates user (access-rights) to an organization group operationId: update_group_user @@ -725,6 +729,7 @@ paths: delete: tags: - groups + - users summary: Delete Group User description: Removes a user from an organization group operationId: delete_group_user @@ -1133,7 +1138,7 @@ paths: /v0/me: get: tags: - - user + - users summary: Get My Profile operationId: get_my_profile responses: @@ -1145,7 +1150,7 @@ paths: $ref: '#/components/schemas/Envelope_MyProfileGet_' put: tags: - - user + - users summary: Replace My Profile description: Use PATCH instead operationId: replace_my_profile @@ -1161,7 +1166,7 @@ paths: deprecated: true patch: tags: - - user + - users summary: Update My Profile operationId: update_my_profile requestBody: @@ -1176,7 +1181,7 @@ paths: /v0/me/preferences/{preference_id}: patch: tags: - - user + - users summary: Set Frontend Preference operationId: set_frontend_preference parameters: @@ -1198,7 +1203,7 @@ paths: /v0/me/tokens: get: tags: - - user + - users summary: List Tokens operationId: list_tokens responses: @@ -1207,17 +1212,17 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Envelope_list_ThirdPartyToken__' + $ref: '#/components/schemas/Envelope_list_MyTokenGet__' post: tags: - - user + - users summary: Create Token operationId: create_token requestBody: content: application/json: schema: - $ref: '#/components/schemas/TokenCreate' + $ref: '#/components/schemas/MyTokenCreate' required: true responses: '201': @@ -1225,11 +1230,11 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Envelope_ThirdPartyToken_' + $ref: '#/components/schemas/Envelope_MyTokenGet_' /v0/me/tokens/{service}: get: tags: - - user + - users summary: Get Token operationId: get_token parameters: @@ -1245,10 +1250,10 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Envelope_ThirdPartyToken_' + $ref: '#/components/schemas/Envelope_MyTokenGet_' delete: tags: - - user + - users summary: Delete Token operationId: delete_token 
parameters: @@ -1264,7 +1269,7 @@ paths: /v0/me/notifications: get: tags: - - user + - users summary: List User Notifications operationId: list_user_notifications responses: @@ -1276,7 +1281,7 @@ paths: $ref: '#/components/schemas/Envelope_list_UserNotification__' post: tags: - - user + - users summary: Create User Notification operationId: create_user_notification requestBody: @@ -1291,7 +1296,7 @@ paths: /v0/me/notifications/{notification_id}: patch: tags: - - user + - users summary: Mark Notification As Read operationId: mark_notification_as_read parameters: @@ -1313,7 +1318,7 @@ paths: /v0/me/permissions: get: tags: - - user + - users summary: List User Permissions operationId: list_user_permissions responses: @@ -1322,14 +1327,35 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Envelope_list_PermissionGet__' + $ref: '#/components/schemas/Envelope_list_MyPermissionGet__' /v0/users:search: - get: + post: tags: - - user - - po + - users summary: Search Users + description: Search among users who are publicly visible to the caller (i.e., + me) based on their privacy settings. operationId: search_users + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/UsersSearch' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_list_UserGet__' + /v0/admin/users:search: + get: + tags: + - users + - admin + summary: Search Users For Admin + operationId: search_users_for_admin parameters: - name: email in: query @@ -1345,19 +1371,19 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Envelope_list_UserProfile__' - /v0/users:pre-register: + $ref: '#/components/schemas/Envelope_list_UserForAdminGet__' + /v0/admin/users:pre-register: post: tags: - - user - - po - summary: Pre Register User - operationId: pre_register_user + - users + - admin + summary: Pre Register User For Admin + operationId: pre_register_user_for_admin requestBody: content: application/json: schema: - $ref: '#/components/schemas/PreUserProfile' + $ref: '#/components/schemas/PreRegisteredUserGet' required: true responses: '200': @@ -1365,7 +1391,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Envelope_UserProfile_' + $ref: '#/components/schemas/Envelope_UserForAdminGet_' /v0/wallets: get: tags: @@ -3252,6 +3278,121 @@ paths: schema: $ref: '#/components/schemas/EnvelopedError' description: Bad Request + /v0/wallets/{wallet_id}/licensed-items-checkouts: + get: + tags: + - licenses + - wallets + summary: List Licensed Item Checkouts For Wallet + operationId: list_licensed_item_checkouts_for_wallet + parameters: + - name: wallet_id + in: path + required: true + schema: + type: integer + exclusiveMinimum: true + title: Wallet Id + minimum: 0 + - name: order_by + in: query + required: false + schema: + type: string + contentMediaType: application/json + contentSchema: {} + default: '{"field":"started_at","direction":"desc"}' + title: Order By + - name: limit + in: query + required: false + schema: + type: integer + default: 20 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Page_LicensedItemPurchaseGet_' + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '403': + 
content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + /v0/licensed-items-checkouts/{licensed_item_checkout_id}: + get: + tags: + - licenses + summary: Get Licensed Item Checkout + operationId: get_licensed_item_checkout + parameters: + - name: licensed_item_checkout_id + in: path + required: true + schema: + type: string + format: uuid + title: Licensed Item Checkout Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_LicensedItemPurchaseGet_' + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request /v0/projects/{project_uuid}/checkpoint/{ref_id}/iterations: get: tags: @@ -8185,6 +8326,19 @@ components: title: Error type: object title: Envelope[MyProfileGet] + Envelope_MyTokenGet_: + properties: + data: + anyOf: + - $ref: '#/components/schemas/MyTokenGet' + - type: 'null' + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + title: Envelope[MyTokenGet] Envelope_NodeCreated_: properties: data: @@ -8484,19 +8638,6 @@ components: title: Error type: object title: Envelope[TaskStatus] - Envelope_ThirdPartyToken_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/ThirdPartyToken' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[ThirdPartyToken] Envelope_Union_EmailTestFailed__EmailTestPassed__: properties: data: @@ -8556,11 +8697,11 @@ components: title: Error type: object title: Envelope[Union[WalletGet, NoneType]] - Envelope_UserProfile_: + Envelope_UserForAdminGet_: properties: data: anyOf: - - $ref: '#/components/schemas/UserProfile' + - $ref: '#/components/schemas/UserForAdminGet' - type: 'null' error: anyOf: @@ -8568,7 +8709,7 @@ components: - type: 'null' title: Error type: object - title: Envelope[UserProfile] + title: Envelope[UserForAdminGet] Envelope_WalletGetWithAvailableCredits_: properties: data: @@ -8930,12 +9071,12 @@ components: title: Error type: object title: Envelope[list[LicensedItemGet]] - Envelope_list_OsparcCreditsAggregatedByServiceGet__: + Envelope_list_MyPermissionGet__: properties: data: anyOf: - items: - $ref: '#/components/schemas/OsparcCreditsAggregatedByServiceGet' + $ref: '#/components/schemas/MyPermissionGet' type: array - type: 'null' title: Data @@ -8945,13 +9086,13 @@ components: - type: 'null' title: Error type: object - title: Envelope[list[OsparcCreditsAggregatedByServiceGet]] - Envelope_list_PaymentMethodGet__: + title: Envelope[list[MyPermissionGet]] + Envelope_list_MyTokenGet__: properties: data: anyOf: - items: - $ref: '#/components/schemas/PaymentMethodGet' + $ref: '#/components/schemas/MyTokenGet' type: array - type: 'null' title: Data @@ -8961,13 +9102,29 @@ components: - type: 'null' title: Error type: object - title: 
Envelope[list[PaymentMethodGet]] - Envelope_list_PermissionGet__: + title: Envelope[list[MyTokenGet]] + Envelope_list_OsparcCreditsAggregatedByServiceGet__: + properties: + data: + anyOf: + - items: + $ref: '#/components/schemas/OsparcCreditsAggregatedByServiceGet' + type: array + - type: 'null' + title: Data + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + title: Envelope[list[OsparcCreditsAggregatedByServiceGet]] + Envelope_list_PaymentMethodGet__: properties: data: anyOf: - items: - $ref: '#/components/schemas/PermissionGet' + $ref: '#/components/schemas/PaymentMethodGet' type: array - type: 'null' title: Data @@ -8977,7 +9134,7 @@ components: - type: 'null' title: Error type: object - title: Envelope[list[PermissionGet]] + title: Envelope[list[PaymentMethodGet]] Envelope_list_PricingPlanAdminGet__: properties: data: @@ -9186,12 +9343,12 @@ components: title: Error type: object title: Envelope[list[TaskGet]] - Envelope_list_ThirdPartyToken__: + Envelope_list_UserForAdminGet__: properties: data: anyOf: - items: - $ref: '#/components/schemas/ThirdPartyToken' + $ref: '#/components/schemas/UserForAdminGet' type: array - type: 'null' title: Data @@ -9201,13 +9358,13 @@ components: - type: 'null' title: Error type: object - title: Envelope[list[ThirdPartyToken]] - Envelope_list_UserNotification__: + title: Envelope[list[UserForAdminGet]] + Envelope_list_UserGet__: properties: data: anyOf: - items: - $ref: '#/components/schemas/UserNotification' + $ref: '#/components/schemas/UserGet' type: array - type: 'null' title: Data @@ -9217,13 +9374,13 @@ components: - type: 'null' title: Error type: object - title: Envelope[list[UserNotification]] - Envelope_list_UserProfile__: + title: Envelope[list[UserGet]] + Envelope_list_UserNotification__: properties: data: anyOf: - items: - $ref: '#/components/schemas/UserProfile' + $ref: '#/components/schemas/UserNotification' type: array - type: 'null' title: Data @@ -9233,7 +9390,7 @@ components: - type: 'null' title: Error type: object - title: Envelope[list[UserProfile]] + title: Envelope[list[UserNotification]] Envelope_list_Viewer__: properties: data: @@ -9952,6 +10109,8 @@ components: required: - is_inactive title: GetProjectInactivityResponse + example: + is_inactive: 'false' GetWalletAutoRecharge: properties: enabled: @@ -10164,11 +10323,14 @@ components: description: the user gravatar id hash deprecated: true accessRights: - $ref: '#/components/schemas/GroupAccessRights' + anyOf: + - $ref: '#/components/schemas/GroupAccessRights' + - type: 'null' + description: If group is standard, these are the access rights + of the user to it. None if primary group.
type: object required: - userName - - accessRights title: GroupUserGet example: accessRights: @@ -10342,6 +10504,11 @@ components: name: type: string title: Name + licenseKey: + anyOf: + - type: string + - type: 'null' + title: Licensekey licensedResourceType: $ref: '#/components/schemas/LicensedResourceType' pricingPlanId: @@ -10361,6 +10528,7 @@ components: required: - licensedItemId - name + - licenseKey - licensedResourceType - pricingPlanId - createdAt @@ -10662,6 +10830,19 @@ components: description: Some foundation gid: '16' label: Blue Fundation + MyPermissionGet: + properties: + name: + type: string + title: Name + allowed: + type: boolean + title: Allowed + type: object + required: + - name + - allowed + title: MyPermissionGet MyProfileGet: properties: id: @@ -10792,6 +10973,56 @@ components: title: Hideemail type: object title: MyProfilePrivacyPatch + MyTokenCreate: + properties: + service: + type: string + maxLength: 100 + minLength: 1 + title: Service + description: uniquely identifies the service where this token is used + token_key: + type: string + maxLength: 100 + minLength: 1 + title: Token Key + token_secret: + type: string + maxLength: 100 + minLength: 1 + title: Token Secret + type: object + required: + - service + - token_key + - token_secret + title: MyTokenCreate + MyTokenGet: + properties: + service: + type: string + maxLength: 100 + minLength: 1 + title: Service + token_key: + type: string + maxLength: 100 + minLength: 1 + title: Token Key + token_secret: + anyOf: + - type: string + maxLength: 100 + minLength: 1 + - type: 'null' + title: Token Secret + description: Will be removed + deprecated: true + type: object + required: + - service + - token_key + title: MyTokenGet Node-Input: properties: key: @@ -11761,20 +11992,7 @@ components: - completedAt - completedStatus title: PaymentTransaction - PermissionGet: - properties: - name: - type: string - title: Name - allowed: - type: boolean - title: Allowed - type: object - required: - - name - - allowed - title: PermissionGet - PhoneConfirmationBody: + PhoneConfirmationBody: properties: email: type: string @@ -11869,7 +12087,7 @@ components: - x - y title: Position - PreUserProfile: + PreRegisteredUserGet: properties: firstName: type: string @@ -11912,8 +12130,7 @@ components: extras: type: object title: Extras - description: Keeps extra information provided in the request form. At most - MAX_NUM_EXTRAS fields + description: Keeps extra information provided in the request form. type: object required: - firstName @@ -11924,7 +12141,7 @@ components: - city - postalCode - country - title: PreUserProfile + title: PreRegisteredUserGet Preference: properties: defaultValue: @@ -14167,58 +14384,6 @@ components: - url - thumbnail title: ThirdPartyInfoDict - ThirdPartyToken: - properties: - service: - type: string - title: Service - description: uniquely identifies the service where this token is used - token_key: - type: string - format: uuid - title: Token Key - description: basic token key - token_secret: - anyOf: - - type: string - format: uuid - - type: 'null' - title: Token Secret - type: object - required: - - service - - token_key - title: ThirdPartyToken - description: Tokens used to access third-party services connected to osparc - (e.g. 
pennsieve, scicrunch, etc) - example: - service: github-api-v1 - token_key: 5f21abf5-c596-47b7-bfd1-c0e436ef1107 - TokenCreate: - properties: - service: - type: string - title: Service - description: uniquely identifies the service where this token is used - token_key: - type: string - format: uuid - title: Token Key - description: basic token key - token_secret: - anyOf: - - type: string - format: uuid - - type: 'null' - title: Token Secret - type: object - required: - - service - - token_key - title: TokenCreate - example: - service: github-api-v1 - token_key: 5f21abf5-c596-47b7-bfd1-c0e436ef1107 UnitExtraInfo-Input: properties: CPU: @@ -14351,6 +14516,137 @@ components: - number - e_tag title: UploadedPart + UserForAdminGet: + properties: + firstName: + anyOf: + - type: string + - type: 'null' + title: Firstname + lastName: + anyOf: + - type: string + - type: 'null' + title: Lastname + email: + type: string + format: email + title: Email + institution: + anyOf: + - type: string + - type: 'null' + title: Institution + phone: + anyOf: + - type: string + - type: 'null' + title: Phone + address: + anyOf: + - type: string + - type: 'null' + title: Address + city: + anyOf: + - type: string + - type: 'null' + title: City + state: + anyOf: + - type: string + - type: 'null' + title: State + description: State, province, canton, ... + postalCode: + anyOf: + - type: string + - type: 'null' + title: Postalcode + country: + anyOf: + - type: string + - type: 'null' + title: Country + extras: + type: object + title: Extras + description: Keeps extra information provided in the request form + invitedBy: + anyOf: + - type: string + - type: 'null' + title: Invitedby + registered: + type: boolean + title: Registered + status: + anyOf: + - $ref: '#/components/schemas/UserStatus' + - type: 'null' + products: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Products + description: List of products this user is included in, or None if the field is + unset + type: object + required: + - firstName + - lastName + - email + - institution + - phone + - address + - city + - state + - postalCode + - country + - registered + - status + title: UserForAdminGet + UserGet: + properties: + userId: + type: integer + exclusiveMinimum: true + title: Userid + minimum: 0 + groupId: + type: integer + exclusiveMinimum: true + title: Groupid + minimum: 0 + userName: + type: string + maxLength: 100 + minLength: 1 + title: Username + firstName: + anyOf: + - type: string + - type: 'null' + title: Firstname + lastName: + anyOf: + - type: string + - type: 'null' + title: Lastname + email: + anyOf: + - type: string + format: email + - type: 'null' + title: Email + type: object + required: + - userId + - groupId + - userName + title: UserGet UserNotification: properties: user_id: @@ -14472,98 +14768,6 @@ components: required: - read title: UserNotificationPatch - UserProfile: - properties: - firstName: - anyOf: - - type: string - - type: 'null' - title: Firstname - lastName: - anyOf: - - type: string - - type: 'null' - title: Lastname - email: - type: string - format: email - title: Email - institution: - anyOf: - - type: string - - type: 'null' - title: Institution - phone: - anyOf: - - type: string - - type: 'null' - title: Phone - address: - anyOf: - - type: string - - type: 'null' - title: Address - city: - anyOf: - - type: string - - type: 'null' - title: City - state: - anyOf: - - type: string - - type: 'null' - title: State - description: State, province, canton, ...
- postalCode: - anyOf: - - type: string - - type: 'null' - title: Postalcode - country: - anyOf: - - type: string - - type: 'null' - title: Country - extras: - type: object - title: Extras - description: Keeps extra information provided in the request form - invitedBy: - anyOf: - - type: string - - type: 'null' - title: Invitedby - registered: - type: boolean - title: Registered - status: - anyOf: - - $ref: '#/components/schemas/UserStatus' - - type: 'null' - products: - anyOf: - - items: - type: string - type: array - - type: 'null' - title: Products - description: List of products this users is included or None if fields is - unset - type: object - required: - - firstName - - lastName - - email - - institution - - phone - - address - - city - - state - - postalCode - - country - - registered - - status - title: UserProfile UserStatus: type: string enum: @@ -14573,6 +14777,25 @@ components: - BANNED - DELETED title: UserStatus + UsersSearch: + properties: + match: + type: string + maxLength: 80 + minLength: 1 + title: Match + description: Search string to match with usernames and public profiles (e.g. + emails, first/last name) + limit: + type: integer + maximum: 50 + minimum: 1 + title: Limit + default: 10 + type: object + required: + - match + title: UsersSearch Viewer: properties: title: diff --git a/services/web/server/src/simcore_service_webserver/application_settings_utils.py b/services/web/server/src/simcore_service_webserver/application_settings_utils.py index 162a927e0ad..d5180c07192 100644 --- a/services/web/server/src/simcore_service_webserver/application_settings_utils.py +++ b/services/web/server/src/simcore_service_webserver/application_settings_utils.py @@ -7,7 +7,7 @@ import functools import logging -from typing import Any +from typing import Any, TypeAlias from aiohttp import web from common_library.pydantic_fields_extension import get_type, is_nullable @@ -19,8 +19,10 @@ _logger = logging.getLogger(__name__) +AppConfigDict: TypeAlias = dict[str, Any] -def convert_to_app_config(app_settings: ApplicationSettings) -> dict[str, Any]: + +def convert_to_app_config(app_settings: ApplicationSettings) -> AppConfigDict: """Maps current ApplicationSettings object into former trafaret-based config""" return { @@ -186,8 +188,8 @@ def convert_to_app_config(app_settings: ApplicationSettings) -> dict[str, Any]: def convert_to_environ_vars( # noqa: C901, PLR0915, PLR0912 - cfg: dict[str, Any] -) -> dict[str, Any]: + cfg: AppConfigDict, +) -> AppConfigDict: """Creates envs dict out of config dict NOTE: ONLY used to support legacy introduced by traferet vs settings_library. 
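Aside (not part of the changeset): the AppConfigDict alias above merely names the existing dict[str, Any] shape that both converters share, so their signatures now read as a pair. A minimal, hypothetical usage sketch; ApplicationSettings.create_from_envs() is assumed from the settings library and is not introduced by this diff:

    # hypothetical usage sketch -- not part of this changeset
    from simcore_service_webserver.application_settings import ApplicationSettings
    from simcore_service_webserver.application_settings_utils import (
        AppConfigDict,
        convert_to_app_config,
        convert_to_environ_vars,
    )

    settings = ApplicationSettings.create_from_envs()      # assumed constructor
    cfg: AppConfigDict = convert_to_app_config(settings)   # settings -> legacy trafaret-style config dict
    envs: AppConfigDict = convert_to_environ_vars(cfg)     # legacy config -> environ-style dict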
diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_core_dynamic_services.py b/services/web/server/src/simcore_service_webserver/director_v2/_core_dynamic_services.py deleted file mode 100644 index bd4c03f31fe..00000000000 --- a/services/web/server/src/simcore_service_webserver/director_v2/_core_dynamic_services.py +++ /dev/null @@ -1,113 +0,0 @@ -""" Operations on dynamic-services - -- This interface HIDES request/responses/exceptions to the director-v2 API service - -""" - -import logging - -from aiohttp import web -from models_library.projects import ProjectID -from models_library.services import ServicePortKey -from pydantic import NonNegativeInt -from servicelib.logging_utils import log_decorator -from yarl import URL - -from ._core_base import DataType, request_director_v2 -from .exceptions import DirectorServiceError -from .settings import DirectorV2Settings, get_plugin_settings - -_log = logging.getLogger(__name__) - - -# NOTE: ANE https://github.com/ITISFoundation/osparc-simcore/issues/3191 -@log_decorator(logger=_log) -async def retrieve( - app: web.Application, service_uuid: str, port_keys: list[ServicePortKey] -) -> DataType: - """Pulls data from connections to the dynamic service inputs""" - settings: DirectorV2Settings = get_plugin_settings(app) - result = await request_director_v2( - app, - "POST", - url=settings.base_url / f"dynamic_services/{service_uuid}:retrieve", - data={"port_keys": port_keys}, - timeout=settings.get_service_retrieve_timeout(), - ) - assert isinstance(result, dict) # nosec - return result - - -# NOTE: ANE https://github.com/ITISFoundation/osparc-simcore/issues/3191 -# notice that this function is identical to retrieve except that it does NOT raises -@log_decorator(logger=_log) -async def request_retrieve_dyn_service( - app: web.Application, service_uuid: str, port_keys: list[str] -) -> None: - settings: DirectorV2Settings = get_plugin_settings(app) - body = {"port_keys": port_keys} - - try: - await request_director_v2( - app, - "POST", - url=settings.base_url / f"dynamic_services/{service_uuid}:retrieve", - data=body, - timeout=settings.get_service_retrieve_timeout(), - ) - except DirectorServiceError as exc: - _log.warning( - "Unable to call :retrieve endpoint on service %s, keys: [%s]: error: [%s:%s]", - service_uuid, - port_keys, - exc.status, - exc.reason, - ) - - -@log_decorator(logger=_log) -async def restart_dynamic_service(app: web.Application, node_uuid: str) -> None: - """User restart the dynamic dynamic service started in the node_uuid - - NOTE that this operation will NOT restart all sidecar services - (``simcore-service-dynamic-sidecar`` or ``reverse-proxy caddy`` services) but - ONLY those containers in the compose-spec (i.e. 
the ones exposed to the user) - """ - settings: DirectorV2Settings = get_plugin_settings(app) - await request_director_v2( - app, - "POST", - url=settings.base_url / f"dynamic_services/{node_uuid}:restart", - expected_status=web.HTTPOk, - timeout=settings.DIRECTOR_V2_RESTART_DYNAMIC_SERVICE_TIMEOUT, - ) - - -@log_decorator(logger=_log) -async def update_dynamic_service_networks_in_project( - app: web.Application, project_id: ProjectID -) -> None: - settings: DirectorV2Settings = get_plugin_settings(app) - backend_url = ( - URL(settings.base_url) / f"dynamic_services/projects/{project_id}/-/networks" - ) - await request_director_v2( - app, "PATCH", backend_url, expected_status=web.HTTPNoContent - ) - - -@log_decorator(logger=_log) -async def get_project_inactivity( - app: web.Application, - project_id: ProjectID, - max_inactivity_seconds: NonNegativeInt, -) -> DataType: - settings: DirectorV2Settings = get_plugin_settings(app) - backend_url = ( - URL(settings.base_url) / f"dynamic_services/projects/{project_id}/inactivity" - ).update_query(max_inactivity_seconds=max_inactivity_seconds) - result = await request_director_v2( - app, "GET", backend_url, expected_status=web.HTTPOk - ) - assert isinstance(result, dict) # nosec - return result diff --git a/services/web/server/src/simcore_service_webserver/director_v2/api.py b/services/web/server/src/simcore_service_webserver/director_v2/api.py index f56de16b543..2ecbb1446fd 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/api.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/api.py @@ -16,13 +16,6 @@ is_pipeline_running, stop_pipeline, ) -from ._core_dynamic_services import ( - get_project_inactivity, - request_retrieve_dyn_service, - restart_dynamic_service, - retrieve, - update_dynamic_service_networks_in_project, -) from ._core_utils import is_healthy from .exceptions import DirectorServiceError @@ -34,15 +27,10 @@ "DirectorServiceError", "get_batch_tasks_outputs", "get_computation_task", - "get_project_inactivity", "get_project_run_policy", "is_healthy", "is_pipeline_running", - "request_retrieve_dyn_service", - "restart_dynamic_service", - "retrieve", "set_project_run_policy", "stop_pipeline", - "update_dynamic_service_networks_in_project", ) # nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/director_v2/settings.py b/services/web/server/src/simcore_service_webserver/director_v2/settings.py index 21cb368ff50..31fc096a5dd 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/settings.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/settings.py @@ -33,14 +33,6 @@ def base_url(self) -> URL: # - Mostly in floats (aiohttp.Client/) but sometimes in ints # - Typically in seconds but occasionally in ms - DIRECTOR_V2_RESTART_DYNAMIC_SERVICE_TIMEOUT: PositiveInt = Field( - 1 * _MINUTE, - description="timeout of containers restart", - validation_alias=AliasChoices( - "DIRECTOR_V2_RESTART_DYNAMIC_SERVICE_TIMEOUT", - ), - ) - DIRECTOR_V2_STORAGE_SERVICE_UPLOAD_DOWNLOAD_TIMEOUT: PositiveInt = Field( _HOUR, description=( diff --git a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py index ef8f2b1f703..5773052010b 100644 --- a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py +++ b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py @@ -3,7 +3,11 @@ from functools import partial from aiohttp 
import web -from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_directorv2.dynamic_services import ( + DynamicServiceGet, + GetProjectInactivityResponse, + RetrieveDataOutEnveloped, +) from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( DynamicServiceStart, DynamicServiceStop, @@ -18,7 +22,9 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.rabbitmq_messages import ProgressRabbitMessageProject, ProgressType +from models_library.services import ServicePortKey from models_library.users import UserID +from pydantic import NonNegativeInt from pydantic.types import PositiveInt from servicelib.progress_bar import ProgressBarData from servicelib.rabbitmq import RabbitMQClient, RPCServerError @@ -148,3 +154,55 @@ async def stop_dynamic_services_in_project( ] await logged_gather(*services_to_stop) + + +async def get_project_inactivity( + app: web.Application, + *, + project_id: ProjectID, + max_inactivity_seconds: NonNegativeInt, +) -> GetProjectInactivityResponse: + return await services.get_project_inactivity( + get_rabbitmq_rpc_client(app), + project_id=project_id, + max_inactivity_seconds=max_inactivity_seconds, + ) + + +async def restart_user_services(app: web.Application, *, node_id: NodeID) -> None: + """Restarts the user service(s) started by the node_uuid's sidecar + + NOTE: this operation will NOT restart + sidecar services (``dy-sidecar`` or ``dy-proxy`` services), + but ONLY user services (the ones defined by the compose spec). + """ + settings: DynamicSchedulerSettings = get_plugin_settings(app) + await services.restart_user_services( + get_rabbitmq_rpc_client(app), + node_id=node_id, + timeout_s=int( + settings.DYNAMIC_SCHEDULER_RESTART_USER_SERVICES_TIMEOUT.total_seconds() + ), + ) + + +async def retrieve_inputs( + app: web.Application, node_id: NodeID, port_keys: list[ServicePortKey] +) -> RetrieveDataOutEnveloped: + settings: DynamicSchedulerSettings = get_plugin_settings(app) + return await services.retrieve_inputs( + get_rabbitmq_rpc_client(app), + node_id=node_id, + port_keys=port_keys, + timeout_s=int( + settings.DYNAMIC_SCHEDULER_SERVICE_UPLOAD_DOWNLOAD_TIMEOUT.total_seconds() + ), + ) + + +async def update_projects_networks( + app: web.Application, *, project_id: ProjectID +) -> None: + await services.update_projects_networks( + get_rabbitmq_rpc_client(app), project_id=project_id + ) diff --git a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/settings.py b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/settings.py index 91dac1317b6..5f33995a89e 100644 --- a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/settings.py +++ b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/settings.py @@ -26,6 +26,20 @@ class DynamicSchedulerSettings(BaseCustomSettings, MixinServiceSettings): ), ) + DYNAMIC_SCHEDULER_RESTART_USER_SERVICES_TIMEOUT: datetime.timedelta = Field( + datetime.timedelta(minutes=1), description="timeout for user services restart" + ) + + DYNAMIC_SCHEDULER_SERVICE_UPLOAD_DOWNLOAD_TIMEOUT: datetime.timedelta = Field( + datetime.timedelta(hours=1), + description=( + "When dynamic services upload and download data from storage, " + "sometimes very big payloads are involved. In order to handle " + "such payloads, long timeouts are required to " + "allow the service to finish the operation."
+ ), + ) + def get_plugin_settings(app: web.Application) -> DynamicSchedulerSettings: settings = app[APP_SETTINGS_KEY].WEBSERVER_DYNAMIC_SCHEDULER diff --git a/services/web/server/src/simcore_service_webserver/exporter/_handlers.py b/services/web/server/src/simcore_service_webserver/exporter/_handlers.py index cdb075638bd..97749637f54 100644 --- a/services/web/server/src/simcore_service_webserver/exporter/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/exporter/_handlers.py @@ -49,7 +49,7 @@ async def export_project(request: web.Request): project_uuid, ProjectStatus.EXPORTING, user_id, - await get_user_fullname(request.app, user_id), + await get_user_fullname(request.app, user_id=user_id), ): await retrieve_and_notify_project_locked_state( user_id, project_uuid, request.app diff --git a/services/web/server/src/simcore_service_webserver/folders/_exceptions_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_exceptions_handlers.py index 5d98db3647d..8b571562c8d 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_exceptions_handlers.py +++ b/services/web/server/src/simcore_service_webserver/folders/_exceptions_handlers.py @@ -8,7 +8,11 @@ exception_handling_decorator, to_exceptions_handlers_map, ) -from ..projects.exceptions import ProjectRunningConflictError, ProjectStoppingError +from ..projects.exceptions import ( + ProjectInvalidRightsError, + ProjectRunningConflictError, + ProjectStoppingError, +) from ..workspaces.errors import ( WorkspaceAccessForbiddenError, WorkspaceFolderInconsistencyError, @@ -53,6 +57,10 @@ status.HTTP_409_CONFLICT, "Invalid folder value set: {reason}", ), + ProjectInvalidRightsError: HttpErrorInfo( + status.HTTP_403_FORBIDDEN, + "Access Denied: You do not have permission to move the project with UUID: {project_uuid}. 
Tip: Copy and paste the UUID into the search bar to locate the project.", + ), # Trashing ProjectRunningConflictError: HttpErrorInfo( status.HTTP_409_CONFLICT, diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py index 38ad97f44ed..32ff9e4d3a5 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py @@ -10,7 +10,7 @@ import sqlalchemy as sa from aiohttp import web -from common_library.unset import UnSet, as_dict_exclude_unset +from common_library.exclude import UnSet, as_dict_exclude_unset from models_library.folders import ( FolderDB, FolderID, diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_guests.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_guests.py index cf92d38292c..8649d2e2451 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_guests.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_guests.py @@ -5,6 +5,7 @@ import asyncpg.exceptions from aiohttp import web from models_library.projects import ProjectID +from models_library.users import UserID, UserNameID from redis.asyncio import Redis from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE from simcore_postgres_database.errors import DatabaseError @@ -19,7 +20,7 @@ from ..users.api import ( delete_user_without_projects, get_guest_user_ids_and_names, - get_user, + get_user_primary_group_id, get_user_role, ) from ..users.exceptions import UserNotFoundError @@ -44,7 +45,9 @@ async def _delete_all_projects_for_user(app: web.Application, user_id: int) -> N """ # recover user's primary_gid try: - project_owner: dict = await get_user(app=app, user_id=user_id) + project_owner_primary_gid = await get_user_primary_group_id( + app=app, user_id=user_id + ) except exceptions.UserNotFoundError: _logger.warning( "Could not recover user data for user '%s', stopping removal of projects!", @@ -52,8 +55,6 @@ async def _delete_all_projects_for_user(app: web.Application, user_id: int) -> N ) return - user_primary_gid = int(project_owner["primary_gid"]) - # fetch all projects for the user user_project_uuids = await ProjectDBAPI.get_from_app_context( app @@ -62,7 +63,7 @@ async def _delete_all_projects_for_user(app: web.Application, user_id: int) -> N _logger.info( "Removing or transfering projects of user with %s, %s: %s", f"{user_id=}", - f"{project_owner=}", + f"{project_owner_primary_gid=}", f"{user_project_uuids=}", ) @@ -90,7 +91,7 @@ async def _delete_all_projects_for_user(app: web.Application, user_id: int) -> N app=app, project_uuid=project_uuid, user_id=user_id, - user_primary_gid=user_primary_gid, + user_primary_gid=project_owner_primary_gid, project=project, ) @@ -129,7 +130,7 @@ async def _delete_all_projects_for_user(app: web.Application, user_id: int) -> N await replace_current_owner( app=app, project_uuid=project_uuid, - user_primary_gid=user_primary_gid, + user_primary_gid=project_owner_primary_gid, new_project_owner_gid=new_project_owner_gid, project=project, ) @@ -145,7 +146,7 @@ async def remove_guest_user_with_all_its_resources( """Removes a GUEST user with all its associated projects and S3/MinIO files""" try: - user_role: UserRole = await get_user_role(app, user_id) + user_role: UserRole = await get_user_role(app, user_id=user_id) if user_role > 
UserRole.GUEST: # NOTE: This acts as a protection barrier to avoid removing resources to more # priviledge users @@ -201,7 +202,9 @@ async def remove_users_manually_marked_as_guests( } # Prevent creating this list if a guest user - guest_users: list[tuple[int, str]] = await get_guest_user_ids_and_names(app) + guest_users: list[tuple[UserID, UserNameID]] = await get_guest_user_ids_and_names( + app + ) for guest_user_id, guest_user_name in guest_users: # Prevents removing GUEST users that were automatically (NOT manually) created diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py index d369de3ed2f..0920aecd168 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py @@ -37,7 +37,7 @@ async def _remove_service( save_service_state = False else: try: - if await get_user_role(app, service.user_id) <= UserRole.GUEST: + if await get_user_role(app, user_id=service.user_id) <= UserRole.GUEST: save_service_state = False else: save_service_state = await has_user_project_access_rights( diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_utils.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_utils.py index a2108766786..6a85dc83539 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_utils.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_utils.py @@ -31,7 +31,7 @@ async def _fetch_new_project_owner_from_groups( # go through user_to_groups table and fetch all uid for matching gid for group_gid in standard_groups: # remove the current owner from the bunch - target_group_users = await get_users_in_group(app=app, gid=group_gid) - { + target_group_users = await get_users_in_group(app=app, gid=int(group_gid)) - { user_id } _logger.info("Found group users '%s'", target_group_users) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_users.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_users.py index 48d781aee8d..e99f9c4a225 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_users.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_users.py @@ -8,14 +8,12 @@ from collections.abc import AsyncIterator, Callable from aiohttp import web -from aiopg.sa.engine import Engine from models_library.users import UserID from servicelib.logging_utils import get_log_record_extra, log_context from tenacity import retry from tenacity.before_sleep import before_sleep_log from tenacity.wait import wait_exponential -from ..db.plugin import get_database_engine from ..login.utils import notify_user_logout from ..security.api import clean_auth_policy_cache from ..users.api import update_expired_users @@ -60,10 +58,8 @@ async def _update_expired_users(app: web.Application): """ It is resilient, i.e. if update goes wrong, it waits a bit and retries """ - engine: Engine = get_database_engine(app) - assert engine # nosec - if updated := await update_expired_users(engine): + if updated := await update_expired_users(app): # expired users might be cached in the auth. 
If so, any request # with this user-id will get thru producing unexpected side-effects await clean_auth_policy_cache(app) diff --git a/services/web/server/src/simcore_service_webserver/groups/_classifiers_handlers.py b/services/web/server/src/simcore_service_webserver/groups/_classifiers_rest.py similarity index 97% rename from services/web/server/src/simcore_service_webserver/groups/_classifiers_handlers.py rename to services/web/server/src/simcore_service_webserver/groups/_classifiers_rest.py index 40ce8c41a34..e9113e5b666 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_classifiers_handlers.py +++ b/services/web/server/src/simcore_service_webserver/groups/_classifiers_rest.py @@ -14,7 +14,7 @@ from ..scicrunch.service_client import SciCrunch from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response -from ._classifiers_api import GroupClassifierRepository, build_rrids_tree_view +from ._classifiers_service import GroupClassifierRepository, build_rrids_tree_view from ._common.exceptions_handlers import handle_plugin_requests_exceptions from ._common.schemas import GroupsClassifiersQuery, GroupsPathParams diff --git a/services/web/server/src/simcore_service_webserver/groups/_classifiers_api.py b/services/web/server/src/simcore_service_webserver/groups/_classifiers_service.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/groups/_classifiers_api.py rename to services/web/server/src/simcore_service_webserver/groups/_classifiers_service.py diff --git a/services/web/server/src/simcore_service_webserver/groups/_groups_db.py b/services/web/server/src/simcore_service_webserver/groups/_groups_repository.py similarity index 78% rename from services/web/server/src/simcore_service_webserver/groups/_groups_db.py rename to services/web/server/src/simcore_service_webserver/groups/_groups_repository.py index aedc78676d3..0d8b24b83fe 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_groups_db.py +++ b/services/web/server/src/simcore_service_webserver/groups/_groups_repository.py @@ -1,9 +1,11 @@ import re from copy import deepcopy +from typing import Literal import sqlalchemy as sa from aiohttp import web from common_library.groups_enums import GroupType +from common_library.users_enums import UserRole from models_library.basic_types import IDStr from models_library.groups import ( AccessRightsDict, @@ -17,17 +19,19 @@ ) from models_library.users import UserID from simcore_postgres_database.errors import UniqueViolation +from simcore_postgres_database.models.users import users from simcore_postgres_database.utils_products import execute_get_or_create_product_group from simcore_postgres_database.utils_repos import ( pass_or_acquire_connection, transaction_context, ) +from simcore_postgres_database.utils_users import is_public, visible_user_profile_cols from sqlalchemy import and_ from sqlalchemy.dialects.postgresql import insert from sqlalchemy.engine.row import Row from sqlalchemy.ext.asyncio import AsyncConnection -from ..db.models import GroupType, groups, user_to_groups, users +from ..db.models import groups, user_to_groups, users from ..db.plugin import get_asyncpg_engine from ..users.exceptions import UserNotFoundError from .exceptions import ( @@ -88,31 +92,39 @@ def _to_group_info_tuple(group: Row) -> GroupInfoTuple: def _check_group_permissions( - group: Row, user_id: int, gid: int, permission: str + group: Row, + caller_id: UserID, + group_id: GroupID, + permission: 
Literal["read", "write", "delete"], ) -> None: if not group.access_rights[permission]: raise UserInsufficientRightsError( - user_id=user_id, gid=gid, permission=permission + user_id=caller_id, gid=group_id, permission=permission ) async def _get_group_and_access_rights_or_raise( conn: AsyncConnection, *, - user_id: UserID, - gid: GroupID, + caller_id: UserID, + group_id: GroupID, + permission: Literal["read", "write", "delete"] | None, ) -> Row: - result = await conn.stream( + result = await conn.execute( sa.select( *_GROUP_COLUMNS, user_to_groups.c.access_rights, ) - .select_from(user_to_groups.join(groups, user_to_groups.c.gid == groups.c.gid)) - .where((user_to_groups.c.uid == user_id) & (user_to_groups.c.gid == gid)) + .select_from(groups.join(user_to_groups, user_to_groups.c.gid == groups.c.gid)) + .where((user_to_groups.c.uid == caller_id) & (user_to_groups.c.gid == group_id)) ) - row = await result.fetchone() + row = result.first() if not row: - raise GroupNotFoundError(gid=gid) + raise GroupNotFoundError(gid=group_id) + + if permission: + _check_group_permissions(row, caller_id, group_id, permission) + return row @@ -128,8 +140,10 @@ async def get_group_from_gid( group_id: GroupID, ) -> Group | None: async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: - row = await conn.stream(groups.select().where(groups.c.gid == group_id)) - result = await row.first() + row = await conn.execute( + sa.select(*_GROUP_COLUMNS).where(groups.c.gid == group_id) + ) + result = row.first() if result: return Group.model_validate(result, from_attributes=True) return None @@ -261,10 +275,8 @@ async def get_user_group( """ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: row = await _get_group_and_access_rights_or_raise( - conn, user_id=user_id, gid=group_id + conn, caller_id=user_id, group_id=group_id, permission="read" ) - _check_group_permissions(row, user_id, group_id, "read") - group, access_rights = _to_group_info_tuple(row) return group, access_rights @@ -282,7 +294,10 @@ async def get_product_group_for_user( """ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: row = await _get_group_and_access_rights_or_raise( - conn, user_id=user_id, gid=product_gid + conn, + caller_id=user_id, + group_id=product_gid, + permission=None, ) group, access_rights = _to_group_info_tuple(row) return group, access_rights @@ -301,10 +316,12 @@ async def create_standard_group( async with transaction_context(get_asyncpg_engine(app), connection) as conn: user = await conn.scalar( - sa.select(users.c.primary_gid).where(users.c.id == user_id) + sa.select( + users.c.primary_gid, + ).where(users.c.id == user_id) ) if not user: - raise UserNotFoundError(uid=user_id) + raise UserNotFoundError(user_id=user_id) result = await conn.stream( # pylint: disable=no-value-for-parameter @@ -347,17 +364,17 @@ async def update_standard_group( async with transaction_context(get_asyncpg_engine(app), connection) as conn: row = await _get_group_and_access_rights_or_raise( - conn, user_id=user_id, gid=group_id + conn, caller_id=user_id, group_id=group_id, permission="write" ) assert row.gid == group_id # nosec - _check_group_permissions(row, user_id, group_id, "write") + # NOTE: update does not include access-rights access_rights = AccessRightsDict(**row.access_rights) # type: ignore[typeddict-item] result = await conn.stream( # pylint: disable=no-value-for-parameter groups.update() .values(**values) - .where((groups.c.gid == row.gid) & 
(groups.c.type == GroupType.STANDARD)) + .where((groups.c.gid == group_id) & (groups.c.type == GroupType.STANDARD)) .returning(*_GROUP_COLUMNS) ) row = await result.fetchone() @@ -375,15 +392,14 @@ async def delete_standard_group( group_id: GroupID, ) -> None: async with transaction_context(get_asyncpg_engine(app), connection) as conn: - group = await _get_group_and_access_rights_or_raise( - conn, user_id=user_id, gid=group_id + await _get_group_and_access_rights_or_raise( + conn, caller_id=user_id, group_id=group_id, permission="delete" ) - _check_group_permissions(group, user_id, group_id, "delete") await conn.execute( # pylint: disable=no-value-for-parameter groups.delete().where( - (groups.c.gid == group.gid) & (groups.c.type == GroupType.STANDARD) + (groups.c.gid == group_id) & (groups.c.type == GroupType.STANDARD) ) ) @@ -397,7 +413,7 @@ async def get_user_from_email( app: web.Application, connection: AsyncConnection | None = None, *, - caller_user_id: UserID, + caller_id: UserID, email: str, ) -> Row: """ @@ -409,10 +425,7 @@ result = await conn.stream( sa.select(users.c.id).where( (users.c.email == email) - & ( - users.c.privacy_hide_email.is_(False) - | (users.c.id == caller_user_id) - ) + & is_public(users.c.privacy_hide_email, caller_id=caller_id) ) ) user = await result.fetchone() @@ -426,48 +439,28 @@ # -def _group_user_cols(caller_user_id: int): +def _group_user_cols(caller_id: UserID): return ( users.c.id, users.c.name, - # privacy settings - sa.case( - ( - users.c.privacy_hide_email.is_(True) & (users.c.id != caller_user_id), - None, - ), - else_=users.c.email, - ).label("email"), - sa.case( - ( - users.c.privacy_hide_fullname.is_(True) - & (users.c.id != caller_user_id), - None, - ), - else_=users.c.first_name, - ).label("first_name"), - sa.case( - ( - users.c.privacy_hide_fullname.is_(True) - & (users.c.id != caller_user_id), - None, - ), - else_=users.c.last_name, - ).label("last_name"), + *visible_user_profile_cols(caller_id), users.c.primary_gid, ) -async def _get_user_in_group( - conn: AsyncConnection, *, caller_user_id, group_id: GroupID, user_id: int +async def _get_user_in_group_or_raise( + conn: AsyncConnection, *, caller_id: UserID, group_id: GroupID, user_id: UserID ) -> Row: - # now get the user + # NOTE: the caller_id might be different from the target user_id result = await conn.stream( - sa.select(*_group_user_cols(caller_user_id), user_to_groups.c.access_rights) + sa.select( + *_group_user_cols(caller_id), + user_to_groups.c.access_rights, + ) .select_from( users.join(user_to_groups, users.c.id == user_to_groups.c.uid), ) - .where(and_(user_to_groups.c.gid == group_id, users.c.id == user_id)) + .where((user_to_groups.c.gid == group_id) & (users.c.id == user_id)) ) row = await result.fetchone() if not row: @@ -479,49 +472,82 @@ async def list_users_in_group( app: web.Application, connection: AsyncConnection | None = None, *, - user_id: UserID, + caller_id: UserID, group_id: GroupID, ) -> list[GroupMember]: async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: - # first check if the group exists - group = await _get_group_and_access_rights_or_raise( - conn, user_id=user_id, gid=group_id - ) - _check_group_permissions(group, user_id, group_id, "read") - - # now get the list + # GET GROUP & caller access-rights (if non PRIMARY) query = ( sa.select( - *_group_user_cols(user_id), + *_GROUP_COLUMNS, user_to_groups.c.access_rights, ) -
.select_from(users.join(user_to_groups)) - .where(user_to_groups.c.gid == group_id) + .select_from( + groups.join( + user_to_groups, user_to_groups.c.gid == groups.c.gid, isouter=True + ).join(users, users.c.id == user_to_groups.c.uid) + ) + .where( + (user_to_groups.c.gid == group_id) + & ( + (user_to_groups.c.uid == caller_id) + | ( + (groups.c.type == GroupType.PRIMARY) + & users.c.role.in_([r for r in UserRole if r > UserRole.GUEST]) + ) + ) + ) ) - result = await conn.stream(query) - return [GroupMember.model_validate(row) async for row in result] + result = await conn.execute(query) + group_row = result.first() + if not group_row: + raise GroupNotFoundError(gid=group_id) + + # Drop access-rights if primary group + if group_row.type == GroupType.PRIMARY: + query = sa.select( + *_group_user_cols(caller_id), + ) + else: + _check_group_permissions( + group_row, caller_id=caller_id, group_id=group_id, permission="read" + ) + query = sa.select( + *_group_user_cols(caller_id), + user_to_groups.c.access_rights, + ) + + # GET users + query = query.select_from(users.join(user_to_groups, isouter=True)).where( + user_to_groups.c.gid == group_id + ) + + aresult = await conn.stream(query) + return [ + GroupMember.model_validate(row, from_attributes=True) + async for row in aresult + ] async def get_user_in_group( app: web.Application, connection: AsyncConnection | None = None, *, - user_id: UserID, + caller_id: UserID, group_id: GroupID, the_user_id_in_group: int, ) -> GroupMember: async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: # first check if the group exists - group = await _get_group_and_access_rights_or_raise( - conn, user_id=user_id, gid=group_id + await _get_group_and_access_rights_or_raise( + conn, caller_id=caller_id, group_id=group_id, permission="read" ) - _check_group_permissions(group, user_id, group_id, "read") # get the user with its permissions - the_user = await _get_user_in_group( + the_user = await _get_user_in_group_or_raise( conn, - caller_user_id=user_id, + caller_id=caller_id, group_id=group_id, user_id=the_user_id_in_group, ) @@ -532,7 +558,7 @@ async def update_user_in_group( app: web.Application, connection: AsyncConnection | None = None, *, - user_id: UserID, + caller_id: UserID, group_id: GroupID, the_user_id_in_group: UserID, access_rights: AccessRightsDict, @@ -544,15 +570,14 @@ async def update_user_in_group( async with transaction_context(get_asyncpg_engine(app), connection) as conn: # first check if the group exists - group = await _get_group_and_access_rights_or_raise( - conn, user_id=user_id, gid=group_id + await _get_group_and_access_rights_or_raise( + conn, caller_id=caller_id, group_id=group_id, permission="write" ) - _check_group_permissions(group, user_id, group_id, "write") # now check the user exists - the_user = await _get_user_in_group( + the_user = await _get_user_in_group_or_raise( conn, - caller_user_id=user_id, + caller_id=caller_id, group_id=group_id, user_id=the_user_id_in_group, ) @@ -579,21 +604,20 @@ async def delete_user_from_group( app: web.Application, connection: AsyncConnection | None = None, *, - user_id: UserID, + caller_id: UserID, group_id: GroupID, the_user_id_in_group: UserID, ) -> None: async with transaction_context(get_asyncpg_engine(app), connection) as conn: # first check if the group exists - group = await _get_group_and_access_rights_or_raise( - conn, user_id=user_id, gid=group_id + await _get_group_and_access_rights_or_raise( + conn, caller_id=caller_id, group_id=group_id, 
permission="write" ) - _check_group_permissions(group, user_id, group_id, "write") # check the user exists - await _get_user_in_group( + await _get_user_in_group_or_raise( conn, - caller_user_id=user_id, + caller_id=caller_id, group_id=group_id, user_id=the_user_id_in_group, ) @@ -637,7 +661,7 @@ async def add_new_user_in_group( app: web.Application, connection: AsyncConnection | None = None, *, - user_id: UserID, + caller_id: UserID, group_id: GroupID, # either user_id or user_name new_user_id: UserID | None = None, @@ -649,10 +673,9 @@ async def add_new_user_in_group( """ async with transaction_context(get_asyncpg_engine(app), connection) as conn: # first check if the group exists - group = await _get_group_and_access_rights_or_raise( - conn, user_id=user_id, gid=group_id + await _get_group_and_access_rights_or_raise( + conn, caller_id=caller_id, group_id=group_id, permission="write" ) - _check_group_permissions(group, user_id, group_id, "write") query = sa.select(users.c.id) if new_user_id is not None: @@ -677,20 +700,23 @@ async def add_new_user_in_group( await conn.execute( # pylint: disable=no-value-for-parameter user_to_groups.insert().values( - uid=new_user_id, gid=group.gid, access_rights=user_access_rights + uid=new_user_id, gid=group_id, access_rights=user_access_rights ) ) except UniqueViolation as exc: raise UserAlreadyInGroupError( uid=new_user_id, gid=group_id, - user_id=user_id, + user_id=caller_id, access_rights=access_rights, ) from exc async def auto_add_user_to_groups( - app: web.Application, connection: AsyncConnection | None = None, *, user: dict + app: web.Application, + connection: AsyncConnection | None = None, + *, + user: dict, ) -> None: user_id: UserID = user["id"] @@ -744,6 +770,6 @@ async def auto_add_user_to_product_group( gid=product_group_id, access_rights=_DEFAULT_PRODUCT_GROUP_ACCESS_RIGHTS, ) - .on_conflict_do_nothing() # in case the user was already added + .on_conflict_do_nothing() # in case the user was already added to this group ) return product_group_id diff --git a/services/web/server/src/simcore_service_webserver/groups/_groups_handlers.py b/services/web/server/src/simcore_service_webserver/groups/_groups_rest.py similarity index 90% rename from services/web/server/src/simcore_service_webserver/groups/_groups_handlers.py rename to services/web/server/src/simcore_service_webserver/groups/_groups_rest.py index 46131510489..32b5e507382 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_groups_handlers.py +++ b/services/web/server/src/simcore_service_webserver/groups/_groups_rest.py @@ -22,7 +22,7 @@ from ..products.api import Product, get_current_product from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response -from . import _groups_api +from . 
import _groups_service from ._common.exceptions_handlers import handle_plugin_requests_exceptions from ._common.schemas import ( GroupsPathParams, @@ -48,7 +48,7 @@ async def list_groups(request: web.Request): product: Product = get_current_product(request) req_ctx = GroupsRequestContext.model_validate(request) - groups_by_type = await _groups_api.list_user_groups_with_read_access( + groups_by_type = await _groups_service.list_user_groups_with_read_access( request.app, user_id=req_ctx.user_id ) @@ -60,7 +60,7 @@ async def list_groups(request: web.Request): if product.group_id: with suppress(GroupNotFoundError): # Product is optional - my_product_group = await _groups_api.get_product_group_for_user( + my_product_group = await _groups_service.get_product_group_for_user( app=request.app, user_id=req_ctx.user_id, product_gid=product.group_id, @@ -90,7 +90,7 @@ async def get_group(request: web.Request): req_ctx = GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(GroupsPathParams, request) - group, access_rights = await _groups_api.get_associated_group( + group, access_rights = await _groups_service.get_associated_group( request.app, user_id=req_ctx.user_id, group_id=path_params.gid ) @@ -107,7 +107,7 @@ async def create_group(request: web.Request): create = await parse_request_body_as(GroupCreate, request) - group, access_rights = await _groups_api.create_standard_group( + group, access_rights = await _groups_service.create_standard_group( request.app, user_id=req_ctx.user_id, create=create.to_model(), @@ -127,7 +127,7 @@ async def update_group(request: web.Request): path_params = parse_request_path_parameters_as(GroupsPathParams, request) update: GroupUpdate = await parse_request_body_as(GroupUpdate, request) - group, access_rights = await _groups_api.update_standard_group( + group, access_rights = await _groups_service.update_standard_group( request.app, user_id=req_ctx.user_id, group_id=path_params.gid, @@ -147,7 +147,7 @@ async def delete_group(request: web.Request): req_ctx = GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(GroupsPathParams, request) - await _groups_api.delete_standard_group( + await _groups_service.delete_standard_group( request.app, user_id=req_ctx.user_id, group_id=path_params.gid ) @@ -164,11 +164,11 @@ async def delete_group(request: web.Request): @permission_required("groups.*") @handle_plugin_requests_exceptions async def get_all_group_users(request: web.Request): - """Gets users in organization groups""" + """Gets users in organization or primary groups""" req_ctx = GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(GroupsPathParams, request) - users_in_group = await _groups_api.list_group_members( + users_in_group = await _groups_service.list_group_members( request.app, req_ctx.user_id, path_params.gid ) @@ -189,7 +189,7 @@ async def add_group_user(request: web.Request): path_params = parse_request_path_parameters_as(GroupsPathParams, request) added: GroupUserAdd = await parse_request_body_as(GroupUserAdd, request) - await _groups_api.add_user_in_group( + await _groups_service.add_user_in_group( request.app, req_ctx.user_id, path_params.gid, @@ -212,7 +212,7 @@ async def get_group_user(request: web.Request): req_ctx = GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(GroupsUsersPathParams, request) - user = await _groups_api.get_group_member( + user = await _groups_service.get_group_member( 
request.app, req_ctx.user_id, path_params.gid, path_params.uid ) @@ -228,7 +228,7 @@ async def update_group_user(request: web.Request): path_params = parse_request_path_parameters_as(GroupsUsersPathParams, request) update: GroupUserUpdate = await parse_request_body_as(GroupUserUpdate, request) - user = await _groups_api.update_group_member( + user = await _groups_service.update_group_member( request.app, user_id=req_ctx.user_id, group_id=path_params.gid, @@ -246,7 +246,7 @@ async def update_group_user(request: web.Request): async def delete_group_user(request: web.Request): req_ctx = GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(GroupsUsersPathParams, request) - await _groups_api.delete_group_member( + await _groups_service.delete_group_member( request.app, req_ctx.user_id, path_params.gid, path_params.uid ) diff --git a/services/web/server/src/simcore_service_webserver/groups/_groups_api.py b/services/web/server/src/simcore_service_webserver/groups/_groups_service.py similarity index 79% rename from services/web/server/src/simcore_service_webserver/groups/_groups_api.py rename to services/web/server/src/simcore_service_webserver/groups/_groups_service.py index 465b57c8f80..f53a7be17c6 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_groups_api.py +++ b/services/web/server/src/simcore_service_webserver/groups/_groups_service.py @@ -15,7 +15,7 @@ from pydantic import EmailStr from ..users.api import get_user -from . import _groups_db +from . import _groups_repository from .exceptions import GroupsError # @@ -24,7 +24,7 @@ async def get_group_from_gid(app: web.Application, group_id: GroupID) -> Group | None: - group_db = await _groups_db.get_group_from_gid(app, group_id=group_id) + group_db = await _groups_repository.get_group_from_gid(app, group_id=group_id) if group_db: return Group.model_construct(**group_db.model_dump()) @@ -45,13 +45,15 @@ async def list_user_groups_with_read_access( # NOTE: Careful! It seems we are filtering out groups, such as Product Groups, # because they do not have read access. I believe this was done because the # frontend did not want to display them. 
- return await _groups_db.get_all_user_groups_with_read_access(app, user_id=user_id) + return await _groups_repository.get_all_user_groups_with_read_access( + app, user_id=user_id + ) async def list_user_groups_ids_with_read_access( app: web.Application, *, user_id: UserID ) -> list[GroupID]: - return await _groups_db.get_ids_of_all_user_groups_with_read_access( + return await _groups_repository.get_ids_of_all_user_groups_with_read_access( app, user_id=user_id ) @@ -59,7 +61,7 @@ async def list_user_groups_ids_with_read_access( async def list_all_user_groups_ids( app: web.Application, *, user_id: UserID ) -> list[GroupID]: - return await _groups_db.get_ids_of_all_user_groups(app, user_id=user_id) + return await _groups_repository.get_ids_of_all_user_groups(app, user_id=user_id) async def get_product_group_for_user( @@ -69,7 +71,7 @@ async def get_product_group_for_user( Returns product's group if user belongs to it, otherwise it raises GroupNotFoundError """ - return await _groups_db.get_product_group_for_user( + return await _groups_repository.get_product_group_for_user( app, user_id=user_id, product_gid=product_gid ) @@ -90,7 +92,7 @@ async def create_standard_group( raises GroupNotFoundError raises UserInsufficientRightsError: needs WRITE access """ - return await _groups_db.create_standard_group( + return await _groups_repository.create_standard_group( app, user_id=user_id, create=create, @@ -108,7 +110,9 @@ async def get_associated_group( raises GroupNotFoundError raises UserInsufficientRightsError: needs READ access """ - return await _groups_db.get_user_group(app, user_id=user_id, group_id=group_id) + return await _groups_repository.get_user_group( + app, user_id=user_id, group_id=group_id + ) async def update_standard_group( @@ -124,7 +128,7 @@ async def update_standard_group( raises UserInsufficientRightsError: needs WRITE access """ - return await _groups_db.update_standard_group( + return await _groups_repository.update_standard_group( app, user_id=user_id, group_id=group_id, @@ -140,7 +144,7 @@ async def delete_standard_group( raises GroupNotFoundError raises UserInsufficientRightsError: needs DELETE access """ - return await _groups_db.delete_standard_group( + return await _groups_repository.delete_standard_group( app, user_id=user_id, group_id=group_id ) @@ -153,7 +157,9 @@ async def delete_standard_group( async def list_group_members( app: web.Application, user_id: UserID, group_id: GroupID ) -> list[GroupMember]: - return await _groups_db.list_users_in_group(app, user_id=user_id, group_id=group_id) + return await _groups_repository.list_users_in_group( + app, caller_id=user_id, group_id=group_id + ) async def get_group_member( @@ -163,9 +169,9 @@ async def get_group_member( the_user_id_in_group: UserID, ) -> GroupMember: - return await _groups_db.get_user_in_group( + return await _groups_repository.get_user_in_group( app, - user_id=user_id, + caller_id=user_id, group_id=group_id, the_user_id_in_group=the_user_id_in_group, ) @@ -178,9 +184,9 @@ async def update_group_member( the_user_id_in_group: UserID, access_rights: AccessRightsDict, ) -> GroupMember: - return await _groups_db.update_user_in_group( + return await _groups_repository.update_user_in_group( app, - user_id=user_id, + caller_id=user_id, group_id=group_id, the_user_id_in_group=the_user_id_in_group, access_rights=access_rights, @@ -193,9 +199,9 @@ async def delete_group_member( group_id: GroupID, the_user_id_in_group: UserID, ) -> None: - return await _groups_db.delete_user_from_group( + return await 
_groups_repository.delete_user_from_group( app, - user_id=user_id, + caller_id=user_id, group_id=group_id, the_user_id_in_group=the_user_id_in_group, ) @@ -205,7 +211,7 @@ async def is_user_by_email_in_group( app: web.Application, user_email: LowerCaseEmailStr, group_id: GroupID ) -> bool: - return await _groups_db.is_user_by_email_in_group( + return await _groups_repository.is_user_by_email_in_group( app, email=user_email, group_id=group_id, @@ -214,7 +220,7 @@ async def is_user_by_email_in_group( async def auto_add_user_to_groups(app: web.Application, user_id: UserID) -> None: user: dict = await get_user(app, user_id) - return await _groups_db.auto_add_user_to_groups(app, user=user) + return await _groups_repository.auto_add_user_to_groups(app, user=user) async def auto_add_user_to_product_group( @@ -222,7 +228,7 @@ async def auto_add_user_to_product_group( user_id: UserID, product_name: ProductName, ) -> GroupID: - return await _groups_db.auto_add_user_to_product_group( + return await _groups_repository.auto_add_user_to_product_group( app, user_id=user_id, product_name=product_name ) @@ -254,14 +260,14 @@ async def add_user_in_group( raise GroupsError(msg=msg) if new_by_user_email: - user = await _groups_db.get_user_from_email( - app, email=new_by_user_email, caller_user_id=user_id + user = await _groups_repository.get_user_from_email( + app, email=new_by_user_email, caller_id=user_id ) new_by_user_id = user.id - return await _groups_db.add_new_user_in_group( + return await _groups_repository.add_new_user_in_group( app, - user_id=user_id, + caller_id=user_id, group_id=group_id, new_user_id=new_by_user_id, new_user_name=new_by_user_name, diff --git a/services/web/server/src/simcore_service_webserver/groups/api.py b/services/web/server/src/simcore_service_webserver/groups/api.py index 207e1ffb303..a01fe9ef63f 100644 --- a/services/web/server/src/simcore_service_webserver/groups/api.py +++ b/services/web/server/src/simcore_service_webserver/groups/api.py @@ -1,14 +1,16 @@ # # Domain-Specific Interfaces # -from ._groups_api import ( +from ._groups_service import ( add_user_in_group, auto_add_user_to_groups, auto_add_user_to_product_group, get_group_from_gid, + get_product_group_for_user, is_user_by_email_in_group, list_all_user_groups_ids, list_user_groups_ids_with_read_access, + list_user_groups_with_read_access, ) __all__: tuple[str, ...] = ( @@ -16,8 +18,10 @@ "auto_add_user_to_groups", "auto_add_user_to_product_group", "get_group_from_gid", + "get_product_group_for_user", "is_user_by_email_in_group", "list_all_user_groups_ids", "list_user_groups_ids_with_read_access", + "list_user_groups_with_read_access", # nopycln: file ) diff --git a/services/web/server/src/simcore_service_webserver/groups/plugin.py b/services/web/server/src/simcore_service_webserver/groups/plugin.py index 7000926383c..4b240bee190 100644 --- a/services/web/server/src/simcore_service_webserver/groups/plugin.py +++ b/services/web/server/src/simcore_service_webserver/groups/plugin.py @@ -5,7 +5,7 @@ from .._constants import APP_SETTINGS_KEY from ..products.plugin import setup_products -from . import _classifiers_handlers, _groups_handlers +from . 
import _classifiers_rest, _groups_rest _logger = logging.getLogger(__name__) @@ -23,5 +23,5 @@ def setup_groups(app: web.Application): # plugin dependencies setup_products(app) - app.router.add_routes(_groups_handlers.routes) - app.router.add_routes(_classifiers_handlers.routes) + app.router.add_routes(_groups_rest.routes) + app.router.add_routes(_classifiers_rest.routes) diff --git a/services/web/server/src/simcore_service_webserver/licenses/_exceptions_handlers.py b/services/web/server/src/simcore_service_webserver/licenses/_exceptions_handlers.py index d12b95fafa0..26cf9478b5f 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_exceptions_handlers.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_exceptions_handlers.py @@ -22,11 +22,11 @@ ), WalletAccessForbiddenError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Wallet {wallet_id} forbidden.", + "Credit account {wallet_id} forbidden.", ), WalletNotEnoughCreditsError: HttpErrorInfo( status.HTTP_402_PAYMENT_REQUIRED, - "Not enough credits in the wallet.", + "Not enough credits in the credit account.", ), LicensedItemPricingPlanMatchError: HttpErrorInfo( status.HTTP_400_BAD_REQUEST, diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_models.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_models.py new file mode 100644 index 00000000000..43d6a290d6f --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_models.py @@ -0,0 +1,56 @@ +from datetime import datetime +from typing import NamedTuple + +from models_library.basic_types import IDStr +from models_library.licensed_items import LicensedItemID +from models_library.products import ProductName +from models_library.resource_tracker_licensed_items_checkouts import ( + LicensedItemCheckoutID, +) +from models_library.rest_base import RequestParameters, StrictRequestParameters +from models_library.rest_ordering import ( + OrderBy, + OrderDirection, + create_ordering_query_model_class, +) +from models_library.rest_pagination import PageQueryParameters +from models_library.users import UserID +from models_library.wallets import WalletID +from pydantic import BaseModel, PositiveInt + + +class LicensedItemCheckoutGet(BaseModel): + licensed_item_checkout_id: LicensedItemCheckoutID + licensed_item_id: LicensedItemID + wallet_id: WalletID + user_id: UserID + product_name: ProductName + started_at: datetime + stopped_at: datetime | None + num_of_seats: int + + +class LicensedItemCheckoutGetPage(NamedTuple): + items: list[LicensedItemCheckoutGet] + total: PositiveInt + + +class LicensedItemCheckoutPathParams(StrictRequestParameters): + licensed_item_checkout_id: LicensedItemCheckoutID + + +_LicensedItemsCheckoutsListOrderQueryParams: type[ + RequestParameters +] = create_ordering_query_model_class( + ordering_fields={ + "started_at", + }, + default=OrderBy(field=IDStr("started_at"), direction=OrderDirection.DESC), +) + + +class LicensedItemsCheckoutsListQueryParams( + PageQueryParameters, + _LicensedItemsCheckoutsListOrderQueryParams, # type: ignore[misc, valid-type] +): + ... 
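Note on the models file above: _licensed_items_checkouts_models.py composes offset/limit pagination with a sortable order_by query parameter via multiple inheritance, reusing the same ordering helper as the purchases endpoints. A minimal, self-contained sketch of that composition in plain pydantic v2 (the class and field names here are illustrative stand-ins, not the actual simcore helpers; the real create_ordering_query_model_class additionally restricts order_by.field to the given ordering_fields, which this sketch omits):

from enum import Enum

from pydantic import BaseModel, Field, NonNegativeInt, PositiveInt


class OrderDirection(str, Enum):
    ASC = "asc"
    DESC = "desc"


class OrderBy(BaseModel):
    # column to sort by, and in which direction
    field: str
    direction: OrderDirection = OrderDirection.DESC


class PageQueryParameters(BaseModel):
    # offset/limit pagination, as used by the list endpoints
    offset: NonNegativeInt = 0
    limit: PositiveInt = Field(default=20, le=50)


class OrderQueryParameters(BaseModel):
    # stand-in for the model generated by create_ordering_query_model_class
    order_by: OrderBy = OrderBy(field="started_at")


class ListQueryParams(PageQueryParameters, OrderQueryParameters):
    # combined model: one validation pass covers offset, limit and order_by
    ...


# usage: validate the decoded query parameters of a list request
params = ListQueryParams.model_validate(
    {"offset": 0, "limit": 20, "order_by": {"field": "started_at", "direction": "desc"}}
)
assert params.order_by.direction is OrderDirection.DESC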
diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_rest.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_rest.py new file mode 100644 index 00000000000..1a9c7285d0a --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_rest.py @@ -0,0 +1,129 @@ +import logging + +from aiohttp import web +from models_library.api_schemas_webserver.licensed_items_checkouts import ( + LicensedItemCheckoutRestGet, + LicensedItemCheckoutRestGetPage, +) +from models_library.rest_ordering import OrderBy +from models_library.rest_pagination import Page +from models_library.rest_pagination_utils import paginate_data +from servicelib.aiohttp.requests_validation import ( + parse_request_path_parameters_as, + parse_request_query_parameters_as, +) +from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON +from servicelib.rest_constants import RESPONSE_MODEL_POLICY + +from .._meta import API_VTAG as VTAG +from ..login.decorators import login_required +from ..security.decorators import permission_required +from ..utils_aiohttp import envelope_json_response +from ..wallets._handlers import WalletsPathParams +from . import _licensed_items_checkouts_service +from ._exceptions_handlers import handle_plugin_requests_exceptions +from ._licensed_items_checkouts_models import ( + LicensedItemCheckoutGet, + LicensedItemCheckoutGetPage, + LicensedItemCheckoutPathParams, +) +from ._models import LicensedItemsPurchasesListQueryParams, LicensedItemsRequestContext + +_logger = logging.getLogger(__name__) + + +routes = web.RouteTableDef() + + +@routes.get( + f"/{VTAG}/licensed-items-checkouts/{{licensed_item_checkout_id}}", + name="get_licensed_item_checkout", +) +@login_required +@permission_required("catalog/licensed-items.*") +@handle_plugin_requests_exceptions +async def get_licensed_item_checkout(request: web.Request): + req_ctx = LicensedItemsRequestContext.model_validate(request) + path_params = parse_request_path_parameters_as( + LicensedItemCheckoutPathParams, request + ) + + checkout_item: LicensedItemCheckoutGet = ( + await _licensed_items_checkouts_service.get_licensed_item_checkout( + app=request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + licensed_item_checkout_id=path_params.licensed_item_checkout_id, + ) + ) + + output = LicensedItemCheckoutRestGet.model_construct( + licensed_item_checkout_id=checkout_item.licensed_item_checkout_id, + licensed_item_id=checkout_item.licensed_item_id, + wallet_id=checkout_item.wallet_id, + user_id=checkout_item.user_id, + product_name=checkout_item.product_name, + started_at=checkout_item.started_at, + stopped_at=checkout_item.stopped_at, + num_of_seats=checkout_item.num_of_seats, + ) + + return envelope_json_response(output) + + +@routes.get( + f"/{VTAG}/wallets/{{wallet_id}}/licensed-items-checkouts", + name="list_licensed_item_checkouts_for_wallet", +) +@login_required +@permission_required("catalog/licensed-items.*") +@handle_plugin_requests_exceptions +async def list_licensed_item_checkouts_for_wallet(request: web.Request): + req_ctx = LicensedItemsRequestContext.model_validate(request) + path_params = parse_request_path_parameters_as(WalletsPathParams, request) + query_params: LicensedItemsPurchasesListQueryParams = ( + parse_request_query_parameters_as( + LicensedItemsPurchasesListQueryParams, request + ) + ) + + result: LicensedItemCheckoutGetPage = await 
_licensed_items_checkouts_service.list_licensed_items_checkouts_for_wallet( + app=request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + wallet_id=path_params.wallet_id, + offset=query_params.offset, + limit=query_params.limit, + order_by=OrderBy.model_construct(**query_params.order_by.model_dump()), + ) + + get_page = LicensedItemCheckoutRestGetPage( + total=result.total, + items=[ + LicensedItemCheckoutRestGet.model_construct( + licensed_item_checkout_id=checkout_item.licensed_item_checkout_id, + licensed_item_id=checkout_item.licensed_item_id, + wallet_id=checkout_item.wallet_id, + user_id=checkout_item.user_id, + product_name=checkout_item.product_name, + started_at=checkout_item.started_at, + stopped_at=checkout_item.stopped_at, + num_of_seats=checkout_item.num_of_seats, + ) + for checkout_item in result.items + ], + ) + + page = Page[LicensedItemCheckoutRestGet].model_validate( + paginate_data( + chunk=get_page.items, + request_url=request.url, + total=get_page.total, + limit=query_params.limit, + offset=query_params.offset, + ) + ) + return web.Response( + text=page.model_dump_json(**RESPONSE_MODEL_POLICY), + content_type=MIMETYPE_APPLICATION_JSON, + ) diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_service.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_service.py new file mode 100644 index 00000000000..87a8aaf14c5 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_service.py @@ -0,0 +1,201 @@ +from aiohttp import web +from models_library.api_schemas_resource_usage_tracker import ( + licensed_items_checkouts as rut_licensed_items_checkouts, +) +from models_library.licensed_items import LicensedItemID +from models_library.products import ProductName +from models_library.resource_tracker_licensed_items_checkouts import ( + LicensedItemCheckoutID, +) +from models_library.rest_ordering import OrderBy +from models_library.services_types import ServiceRunID +from models_library.users import UserID +from models_library.wallets import WalletID +from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker import ( + licensed_items_checkouts, +) + +from ..rabbitmq import get_rabbitmq_rpc_client +from ..users.api import get_user +from ..wallets.api import get_wallet_by_user +from ._licensed_items_checkouts_models import ( + LicensedItemCheckoutGet, + LicensedItemCheckoutGetPage, +) + + +async def list_licensed_items_checkouts_for_wallet( + app: web.Application, + *, + # access context + product_name: ProductName, + user_id: UserID, + wallet_id: WalletID, + offset: int, + limit: int, + order_by: OrderBy, +) -> LicensedItemCheckoutGetPage: + # Check whether user has access to the wallet + await get_wallet_by_user( + app, + user_id=user_id, + wallet_id=wallet_id, + product_name=product_name, + ) + + rpc_client = get_rabbitmq_rpc_client(app) + + result = await licensed_items_checkouts.get_licensed_items_checkouts_page( + rpc_client, + product_name=product_name, + filter_wallet_id=wallet_id, + offset=offset, + limit=limit, + order_by=order_by, + ) + + return LicensedItemCheckoutGetPage( + total=result.total, + items=[ + LicensedItemCheckoutGet.model_construct( + licensed_item_checkout_id=checkout_item.licensed_item_checkout_id, + licensed_item_id=checkout_item.licensed_item_id, + wallet_id=checkout_item.wallet_id, + user_id=checkout_item.user_id, + product_name=checkout_item.product_name, + started_at=checkout_item.started_at, + 
stopped_at=checkout_item.stopped_at, + num_of_seats=checkout_item.num_of_seats, + ) + for checkout_item in result.items + ], + ) + + +async def get_licensed_item_checkout( + app: web.Application, + *, + # access context + product_name: ProductName, + user_id: UserID, + licensed_item_checkout_id: LicensedItemCheckoutID, +) -> LicensedItemCheckoutGet: + rpc_client = get_rabbitmq_rpc_client(app) + + checkout_item = await licensed_items_checkouts.get_licensed_item_checkout( + rpc_client, + product_name=product_name, + licensed_item_checkout_id=licensed_item_checkout_id, + ) + + # Check whether user has access to the wallet + await get_wallet_by_user( + app, + user_id=user_id, + wallet_id=checkout_item.wallet_id, + product_name=product_name, + ) + + return LicensedItemCheckoutGet.model_construct( + licensed_item_checkout_id=checkout_item.licensed_item_checkout_id, + licensed_item_id=checkout_item.licensed_item_id, + wallet_id=checkout_item.wallet_id, + user_id=checkout_item.user_id, + product_name=checkout_item.product_name, + started_at=checkout_item.started_at, + stopped_at=checkout_item.stopped_at, + num_of_seats=checkout_item.num_of_seats, + ) + + +async def checkout_licensed_item_for_wallet( + app: web.Application, + *, + # access context + product_name: ProductName, + wallet_id: WalletID, + user_id: UserID, + # checkout args + licensed_item_id: LicensedItemID, + num_of_seats: int, + service_run_id: ServiceRunID, +) -> LicensedItemCheckoutGet: + # Check whether user has access to the wallet + await get_wallet_by_user( + app, + user_id=user_id, + wallet_id=wallet_id, + product_name=product_name, + ) + + user = await get_user(app, user_id=user_id) + + rpc_client = get_rabbitmq_rpc_client(app) + licensed_item_get: rut_licensed_items_checkouts.LicensedItemCheckoutGet = ( + await licensed_items_checkouts.checkout_licensed_item( + rpc_client, + licensed_item_id=licensed_item_id, + wallet_id=wallet_id, + product_name=product_name, + num_of_seats=num_of_seats, + service_run_id=service_run_id, + user_id=user_id, + user_email=user["email"], + ) + ) + + return LicensedItemCheckoutGet.model_construct( + licensed_item_checkout_id=licensed_item_get.licensed_item_checkout_id, + licensed_item_id=licensed_item_get.licensed_item_id, + wallet_id=licensed_item_get.wallet_id, + user_id=licensed_item_get.user_id, + product_name=licensed_item_get.product_name, + started_at=licensed_item_get.started_at, + stopped_at=licensed_item_get.stopped_at, + num_of_seats=licensed_item_get.num_of_seats, + ) + + +async def release_licensed_item_for_wallet( + app: web.Application, + *, + # access context + product_name: ProductName, + user_id: UserID, + # release args + licensed_item_checkout_id: LicensedItemCheckoutID, +) -> LicensedItemCheckoutGet: + rpc_client = get_rabbitmq_rpc_client(app) + + checkout_item = await licensed_items_checkouts.get_licensed_item_checkout( + rpc_client, + product_name=product_name, + licensed_item_checkout_id=licensed_item_checkout_id, + ) + + # Check whether user has access to the wallet + await get_wallet_by_user( + app, + user_id=user_id, + wallet_id=checkout_item.wallet_id, + product_name=product_name, + ) + + licensed_item_get: rut_licensed_items_checkouts.LicensedItemCheckoutGet = ( + await licensed_items_checkouts.release_licensed_item( + rpc_client, + product_name=product_name, + licensed_item_checkout_id=licensed_item_checkout_id, + ) + ) + + return LicensedItemCheckoutGet.model_construct( + licensed_item_checkout_id=licensed_item_get.licensed_item_checkout_id, + 
licensed_item_id=licensed_item_get.licensed_item_id, + wallet_id=licensed_item_get.wallet_id, + user_id=licensed_item_get.user_id, + product_name=licensed_item_get.product_name, + started_at=licensed_item_get.started_at, + stopped_at=licensed_item_get.stopped_at, + num_of_seats=licensed_item_get.num_of_seats, + ) diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_handlers.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_rest.py similarity index 94% rename from services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_handlers.py rename to services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_rest.py index 95f48ebbd0e..5ae0738ebe1 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_handlers.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_rest.py @@ -20,7 +20,7 @@ from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response from ..wallets._handlers import WalletsPathParams -from . import _licensed_items_purchases_api +from . import _licensed_items_purchases_service from ._exceptions_handlers import handle_plugin_requests_exceptions from ._models import ( LicensedItemsPurchasesListQueryParams, @@ -47,7 +47,7 @@ async def get_licensed_item_purchase(request: web.Request): ) licensed_item_purchase_get: LicensedItemPurchaseGet = ( - await _licensed_items_purchases_api.get_licensed_item_purchase( + await _licensed_items_purchases_service.get_licensed_item_purchase( app=request.app, product_name=req_ctx.product_name, user_id=req_ctx.user_id, @@ -75,7 +75,7 @@ async def list_wallet_licensed_items_purchases(request: web.Request): ) licensed_item_purchase_get_page: LicensedItemPurchaseGetPage = ( - await _licensed_items_purchases_api.list_licensed_items_purchases( + await _licensed_items_purchases_service.list_licensed_items_purchases( app=request.app, product_name=req_ctx.product_name, user_id=req_ctx.user_id, diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_api.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_service.py similarity index 99% rename from services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_api.py rename to services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_service.py index 4aae82ae768..2cfa6355f83 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_api.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_service.py @@ -26,6 +26,7 @@ async def list_licensed_items_purchases( app: web.Application, + *, product_name: ProductName, user_id: UserID, wallet_id: WalletID, @@ -74,6 +75,7 @@ async def list_licensed_items_purchases( async def get_licensed_item_purchase( app: web.Application, + *, product_name: ProductName, user_id: UserID, licensed_item_purchase_id: LicensedItemPurchaseID, diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_db.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_repository.py similarity index 99% rename from services/web/server/src/simcore_service_webserver/licenses/_licensed_items_db.py rename to services/web/server/src/simcore_service_webserver/licenses/_licensed_items_repository.py 
index 415dec7149d..57861698161 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_db.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_repository.py @@ -36,6 +36,7 @@ _SELECTION_ARGS = ( licensed_items.c.licensed_item_id, licensed_items.c.name, + licensed_items.c.license_key, licensed_items.c.licensed_resource_type, licensed_items.c.pricing_plan_id, licensed_items.c.product_name, diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_handlers.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_rest.py similarity index 89% rename from services/web/server/src/simcore_service_webserver/licenses/_licensed_items_handlers.py rename to services/web/server/src/simcore_service_webserver/licenses/_licensed_items_rest.py index 355d9658ebb..4f0a936c041 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_handlers.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_rest.py @@ -21,7 +21,7 @@ from ..login.decorators import login_required from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response -from . import _licensed_items_api +from . import _licensed_items_service from ._exceptions_handlers import handle_plugin_requests_exceptions from ._models import ( LicensedItemsBodyParams, @@ -47,7 +47,7 @@ async def list_licensed_items(request: web.Request): ) licensed_item_get_page: LicensedItemGetPage = ( - await _licensed_items_api.list_licensed_items( + await _licensed_items_service.list_licensed_items( app=request.app, product_name=req_ctx.product_name, offset=query_params.offset, @@ -81,10 +81,12 @@ async def get_licensed_item(request: web.Request): req_ctx = LicensedItemsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(LicensedItemsPathParams, request) - licensed_item_get: LicensedItemGet = await _licensed_items_api.get_licensed_item( - app=request.app, - licensed_item_id=path_params.licensed_item_id, - product_name=req_ctx.product_name, + licensed_item_get: LicensedItemGet = ( + await _licensed_items_service.get_licensed_item( + app=request.app, + licensed_item_id=path_params.licensed_item_id, + product_name=req_ctx.product_name, + ) ) return envelope_json_response(licensed_item_get) @@ -102,7 +104,7 @@ async def purchase_licensed_item(request: web.Request): path_params = parse_request_path_parameters_as(LicensedItemsPathParams, request) body_params = await parse_request_body_as(LicensedItemsBodyParams, request) - await _licensed_items_api.purchase_licensed_item( + await _licensed_items_service.purchase_licensed_item( app=request.app, user_id=req_ctx.user_id, licensed_item_id=path_params.licensed_item_id, diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_api.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_service.py similarity index 94% rename from services/web/server/src/simcore_service_webserver/licenses/_licensed_items_api.py rename to services/web/server/src/simcore_service_webserver/licenses/_licensed_items_service.py index 6feacf24b1d..374da33bbbe 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_api.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_service.py @@ -25,7 +25,7 @@ from ..users.api import get_user from ..wallets.api import 
get_wallet_with_available_credits_by_user_and_wallet from ..wallets.errors import WalletNotEnoughCreditsError -from . import _licensed_items_db +from . import _licensed_items_repository from ._models import LicensedItemsBodyParams from .errors import LicensedItemPricingPlanMatchError @@ -39,12 +39,13 @@ async def get_licensed_item( product_name: ProductName, ) -> LicensedItemGet: - licensed_item_db = await _licensed_items_db.get( + licensed_item_db = await _licensed_items_repository.get( app, licensed_item_id=licensed_item_id, product_name=product_name ) return LicensedItemGet( licensed_item_id=licensed_item_db.licensed_item_id, name=licensed_item_db.name, + license_key=licensed_item_db.license_key, licensed_resource_type=licensed_item_db.licensed_resource_type, pricing_plan_id=licensed_item_db.pricing_plan_id, created_at=licensed_item_db.created, @@ -60,7 +61,7 @@ async def list_licensed_items( limit: int, order_by: OrderBy, ) -> LicensedItemGetPage: - total_count, licensed_item_db_list = await _licensed_items_db.list_( + total_count, licensed_item_db_list = await _licensed_items_repository.list_( app, product_name=product_name, offset=offset, limit=limit, order_by=order_by ) return LicensedItemGetPage( @@ -68,6 +69,7 @@ async def list_licensed_items( LicensedItemGet( licensed_item_id=licensed_item_db.licensed_item_id, name=licensed_item_db.name, + license_key=licensed_item_db.license_key, licensed_resource_type=licensed_item_db.licensed_resource_type, pricing_plan_id=licensed_item_db.pricing_plan_id, created_at=licensed_item_db.created, diff --git a/services/web/server/src/simcore_service_webserver/licenses/_rpc.py b/services/web/server/src/simcore_service_webserver/licenses/_rpc.py index fede0759b0d..261eb51c3aa 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_rpc.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_rpc.py @@ -1,22 +1,31 @@ from aiohttp import web from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.api_schemas_webserver.licensed_items import LicensedItemGetPage +from models_library.api_schemas_webserver.licensed_items_checkouts import ( + LicensedItemCheckoutRpcGet, +) from models_library.basic_types import IDStr from models_library.licensed_items import LicensedItemID from models_library.products import ProductName -from models_library.resource_tracker import ServiceRunId +from models_library.resource_tracker_licensed_items_checkouts import ( + LicensedItemCheckoutID, +) from models_library.rest_ordering import OrderBy +from models_library.services_types import ServiceRunID from models_library.users import UserID from models_library.wallets import WalletID from servicelib.rabbitmq import RPCRouter +from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import ( + LICENSES_ERRORS, +) from ..rabbitmq import get_rabbitmq_rpc_server -from . import _licensed_items_api +from . 
import _licensed_items_checkouts_service, _licensed_items_service router = RPCRouter() -@router.expose() +@router.expose(reraise_if_error_type=LICENSES_ERRORS) async def get_licensed_items( app: web.Application, *, @@ -25,7 +34,7 @@ async def get_licensed_items( limit: int, ) -> LicensedItemGetPage: licensed_item_get_page: LicensedItemGetPage = ( - await _licensed_items_api.list_licensed_items( + await _licensed_items_service.list_licensed_items( app=app, product_name=product_name, offset=offset, @@ -37,42 +46,78 @@ async def get_licensed_items( @router.expose(reraise_if_error_type=(NotImplementedError,)) -async def get_licensed_items_for_wallet( +async def get_available_licensed_items_for_wallet( app: web.Application, *, - user_id: UserID, product_name: ProductName, + user_id: UserID, wallet_id: WalletID, -) -> None: + offset: int, + limit: int, +) -> LicensedItemGetPage: raise NotImplementedError -@router.expose(reraise_if_error_type=(NotImplementedError,)) +@router.expose(reraise_if_error_type=LICENSES_ERRORS) async def checkout_licensed_item_for_wallet( app: web.Application, *, - user_id: UserID, product_name: ProductName, + user_id: UserID, wallet_id: WalletID, licensed_item_id: LicensedItemID, num_of_seats: int, - service_run_id: ServiceRunId, -) -> None: - raise NotImplementedError + service_run_id: ServiceRunID, +) -> LicensedItemCheckoutRpcGet: + licensed_item_get = ( + await _licensed_items_checkouts_service.checkout_licensed_item_for_wallet( + app, + licensed_item_id=licensed_item_id, + wallet_id=wallet_id, + product_name=product_name, + num_of_seats=num_of_seats, + service_run_id=service_run_id, + user_id=user_id, + ) + ) + return LicensedItemCheckoutRpcGet.model_construct( + licensed_item_checkout_id=licensed_item_get.licensed_item_checkout_id, + licensed_item_id=licensed_item_get.licensed_item_id, + wallet_id=licensed_item_get.wallet_id, + user_id=licensed_item_get.user_id, + product_name=licensed_item_get.product_name, + started_at=licensed_item_get.started_at, + stopped_at=licensed_item_get.stopped_at, + num_of_seats=licensed_item_get.num_of_seats, + ) -@router.expose(reraise_if_error_type=(NotImplementedError,)) +@router.expose(reraise_if_error_type=LICENSES_ERRORS) async def release_licensed_item_for_wallet( app: web.Application, *, - user_id: str, - product_name: str, - wallet_id: WalletID, - licensed_item_id: LicensedItemID, - num_of_seats: int, - service_run_id: ServiceRunId, -) -> None: - raise NotImplementedError + product_name: ProductName, + user_id: UserID, + licensed_item_checkout_id: LicensedItemCheckoutID, +) -> LicensedItemCheckoutRpcGet: + licensed_item_get = ( + await _licensed_items_checkouts_service.release_licensed_item_for_wallet( + app, + product_name=product_name, + user_id=user_id, + licensed_item_checkout_id=licensed_item_checkout_id, + ) + ) + return LicensedItemCheckoutRpcGet.model_construct( + licensed_item_checkout_id=licensed_item_get.licensed_item_checkout_id, + licensed_item_id=licensed_item_get.licensed_item_id, + wallet_id=licensed_item_get.wallet_id, + user_id=licensed_item_get.user_id, + product_name=licensed_item_get.product_name, + started_at=licensed_item_get.started_at, + stopped_at=licensed_item_get.stopped_at, + num_of_seats=licensed_item_get.num_of_seats, + ) async def register_rpc_routes_on_startup(app: web.Application): diff --git a/services/web/server/src/simcore_service_webserver/licenses/plugin.py b/services/web/server/src/simcore_service_webserver/licenses/plugin.py index 137c7b2d1dc..72af99badeb 100644 --- 
a/services/web/server/src/simcore_service_webserver/licenses/plugin.py +++ b/services/web/server/src/simcore_service_webserver/licenses/plugin.py @@ -8,7 +8,12 @@ from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from ..rabbitmq import setup_rabbitmq -from . import _licensed_items_handlers, _licensed_items_purchases_handlers, _rpc +from . import ( + _licensed_items_checkouts_rest, + _licensed_items_purchases_rest, + _licensed_items_rest, + _rpc, +) _logger = logging.getLogger(__name__) @@ -24,8 +29,9 @@ def setup_licenses(app: web.Application): assert app[APP_SETTINGS_KEY].WEBSERVER_LICENSES # nosec # routes - app.router.add_routes(_licensed_items_handlers.routes) - app.router.add_routes(_licensed_items_purchases_handlers.routes) + app.router.add_routes(_licensed_items_rest.routes) + app.router.add_routes(_licensed_items_purchases_rest.routes) + app.router.add_routes(_licensed_items_checkouts_rest.routes) setup_rabbitmq(app) if app[APP_SETTINGS_KEY].WEBSERVER_RABBITMQ: diff --git a/services/web/server/src/simcore_service_webserver/licenses/api.py b/services/web/server/src/simcore_service_webserver/licenses/service.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/licenses/api.py rename to services/web/server/src/simcore_service_webserver/licenses/service.py diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py index d26a63a9cf8..9953914f5d0 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py @@ -26,7 +26,8 @@ from ..application_settings import get_application_settings from ..catalog import client as catalog_client -from ..director_v2 import api +from ..director_v2 import api as director_v2_api +from ..dynamic_scheduler import api as dynamic_scheduler_api from ..folders import _folders_db as folders_db from ..storage.api import ( copy_data_folders_from_project, @@ -171,7 +172,7 @@ async def _copy_files_from_source_project( source_project["uuid"], ProjectStatus.CLONING, user_id, - await get_user_fullname(app, user_id), + await get_user_fullname(app, user_id=user_id), ) ) starting_value = task_progress.percent @@ -376,13 +377,13 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche await db.set_hidden_flag(new_project["uuid"], hidden=False) # update the network information in director-v2 - await api.update_dynamic_service_networks_in_project( - request.app, ProjectID(new_project["uuid"]) + await dynamic_scheduler_api.update_projects_networks( + request.app, project_id=ProjectID(new_project["uuid"]) ) task_progress.update() # This is a new project and every new graph needs to be reflected in the pipeline tables - await api.create_or_update_pipeline( + await director_v2_api.create_or_update_pipeline( request.app, user_id, new_project["uuid"], product_name ) # get the latest state of the project (lastChangeDate for instance) diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py index 95e35582c9e..91f43f8a94c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py @@ -448,7 +448,8 @@ async def delete_project(request: web.Request): ) 
if project_users: other_user_names = { - await get_user_fullname(request.app, uid) for uid in project_users + await get_user_fullname(request.app, user_id=uid) + for uid in project_users } raise web.HTTPForbidden( reason=f"Project is open by {other_user_names}. " diff --git a/services/web/server/src/simcore_service_webserver/projects/_db_utils.py b/services/web/server/src/simcore_service_webserver/projects/_db_utils.py index e36e2d455b3..2b14c2d1566 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_db_utils.py +++ b/services/web/server/src/simcore_service_webserver/projects/_db_utils.py @@ -151,7 +151,7 @@ async def _get_user_primary_group_gid(conn: SAConnection, user_id: int) -> int: sa.select(users.c.primary_gid).where(users.c.id == str(user_id)) ) if not primary_gid: - raise UserNotFoundError(uid=user_id) + raise UserNotFoundError(user_id=user_id) assert isinstance(primary_gid, int) return primary_gid diff --git a/services/web/server/src/simcore_service_webserver/projects/_folders_db.py b/services/web/server/src/simcore_service_webserver/projects/_folders_db.py index d8c965be26b..d4fde1f5ce9 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_folders_db.py +++ b/services/web/server/src/simcore_service_webserver/projects/_folders_db.py @@ -8,7 +8,7 @@ from datetime import datetime from aiohttp import web -from common_library.unset import UnSet, as_dict_exclude_unset +from common_library.exclude import UnSet, as_dict_exclude_unset from models_library.folders import FolderID from models_library.projects import ProjectID from models_library.users import UserID diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py index 6670ed64442..9ddd88c0df1 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py @@ -62,7 +62,6 @@ from .._meta import API_VTAG as VTAG from ..catalog import client as catalog_client -from ..director_v2 import api as director_v2_api from ..dynamic_scheduler import api as dynamic_scheduler_api from ..groups.api import get_group_from_gid, list_all_user_groups_ids from ..groups.exceptions import GroupNotFoundError @@ -279,8 +278,8 @@ async def retrieve_node(request: web.Request) -> web.Response: retrieve = await parse_request_body_as(NodeRetrieve, request) return web.json_response( - await director_v2_api.retrieve( - request.app, f"{path_params.node_id}", retrieve.port_keys + await dynamic_scheduler_api.retrieve_inputs( + request.app, path_params.node_id, retrieve.port_keys ), dumps=json_dumps, ) @@ -376,7 +375,7 @@ async def stop_node(request: web.Request) -> web.Response: permission="write", ) - user_role = await get_user_role(request.app, req_ctx.user_id) + user_role = await get_user_role(request.app, user_id=req_ctx.user_id) if user_role is None or user_role <= UserRole.GUEST: save_state = False @@ -411,7 +410,9 @@ async def restart_node(request: web.Request) -> web.Response: path_params = parse_request_path_parameters_as(NodePathParams, request) - await director_v2_api.restart_dynamic_service(request.app, f"{path_params.node_id}") + await dynamic_scheduler_api.restart_user_services( + request.app, node_id=path_params.node_id + ) return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py 
b/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py index b2f5e46381c..8ec0400238c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py @@ -109,7 +109,9 @@ async def open_project(request: web.Request) -> web.Response: project_type: ProjectType = await projects_api.get_project_type( request.app, path_params.project_id ) - user_role: UserRole = await api.get_user_role(request.app, req_ctx.user_id) + user_role: UserRole = await api.get_user_role( + request.app, user_id=req_ctx.user_id + ) if project_type is ProjectType.TEMPLATE and user_role < UserRole.USER: # only USERS/TESTERS can do that raise web.HTTPForbidden(reason="Wrong user role to open/edit a template") diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index 472855677b4..3edd4c50e39 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -593,7 +593,7 @@ async def _start_dynamic_service( raise save_state = False - user_role: UserRole = await get_user_role(request.app, user_id) + user_role: UserRole = await get_user_role(request.app, user_id=user_id) if user_role > UserRole.GUEST: save_state = await has_user_project_access_rights( request.app, project_id=project_uuid, user_id=user_id, permission="write" @@ -813,8 +813,8 @@ async def add_project_node( await director_v2_api.create_or_update_pipeline( request.app, user_id, project["uuid"], product_name ) - await director_v2_api.update_dynamic_service_networks_in_project( - request.app, project["uuid"] + await dynamic_scheduler_api.update_projects_networks( + request.app, project_id=ProjectID(project["uuid"]) ) if _is_node_dynamic(service_key): @@ -936,8 +936,8 @@ async def delete_project_node( await director_v2_api.create_or_update_pipeline( request.app, user_id, project_uuid, product_name ) - await director_v2_api.update_dynamic_service_networks_in_project( - request.app, project_uuid + await dynamic_scheduler_api.update_projects_networks( + request.app, project_id=project_uuid ) @@ -1045,11 +1045,19 @@ async def patch_project_node( app, user_id, project_id, product_name=product_name ) if _node_patch_exclude_unset.get("label"): - await director_v2_api.update_dynamic_service_networks_in_project( - app, project_id + await dynamic_scheduler_api.update_projects_networks(app, project_id=project_id) + + # 5. Updates project states for user, if inputs/outputs have been changed + if {"inputs", "outputs"} & _node_patch_exclude_unset.keys(): + updated_project = await add_project_states_for_user( + user_id=user_id, project=updated_project, is_template=False, app=app ) + for node_uuid in updated_project["workbench"]: + await notify_project_node_update( + app, updated_project, node_uuid, errors=None + ) + return - # 5. 
Notify project node update await notify_project_node_update(app, updated_project, node_id, errors=None) @@ -1128,6 +1136,20 @@ async def is_node_id_present_in_any_project_workbench( return await db.node_id_exists(node_id) +async def _safe_retrieve( + app: web.Application, node_id: NodeID, port_keys: list[str] +) -> None: + try: + await dynamic_scheduler_api.retrieve_inputs(app, node_id, port_keys) + except RPCServerError as exc: + log.warning( + "Unable to call :retrieve endpoint on service %s, keys: [%s]: error: [%s]", + node_id, + port_keys, + exc, + ) + + async def _trigger_connected_service_retrieve( app: web.Application, project: dict, updated_node_uuid: str, changed_keys: list[str] ) -> None: @@ -1168,7 +1190,7 @@ async def _trigger_connected_service_retrieve( # call /retrieve on the nodes update_tasks = [ - director_v2_api.request_retrieve_dyn_service(app, node, keys) + _safe_retrieve(app, NodeID(node), keys) for node, keys in nodes_keys_to_update.items() ] await logged_gather(*update_tasks) @@ -1243,7 +1265,7 @@ async def try_open_project_for_user( project_uuid, ProjectStatus.OPENING, user_id, - await get_user_fullname(app, user_id), + await get_user_fullname(app, user_id=user_id), notify_users=False, ): with managed_resource(user_id, client_session_id, app) as user_session: @@ -1413,22 +1435,23 @@ async def _get_project_lock_state( f"{set_user_ids=}", ) usernames: list[FullNameDict] = [ - await get_user_fullname(app, uid) for uid in set_user_ids + await get_user_fullname(app, user_id=uid) for uid in set_user_ids ] # let's check if the project is opened by the same user, maybe already opened or closed in an orphaned session - if set_user_ids.issubset({user_id}): - if not await _user_has_another_client_open(user_session_id_list, app): - # in this case the project is re-openable by the same user until it gets closed - log.debug( - "project [%s] is in use by the same user [%s] that is currently disconnected, so it is unlocked for this specific user and opened", - f"{project_uuid=}", - f"{set_user_ids=}", - ) - return ProjectLocked( - value=False, - owner=Owner(user_id=next(iter(set_user_ids)), **usernames[0]), - status=ProjectStatus.OPENED, - ) + if set_user_ids.issubset({user_id}) and not await _user_has_another_client_open( + user_session_id_list, app + ): + # in this case the project is re-openable by the same user until it gets closed + log.debug( + "project [%s] is in use by the same user [%s] that is currently disconnected, so it is unlocked for this specific user and opened", + f"{project_uuid=}", + f"{set_user_ids=}", + ) + return ProjectLocked( + value=False, + owner=Owner(user_id=next(iter(set_user_ids)), **usernames[0]), + status=ProjectStatus.OPENED, + ) # the project is opened in another tab or browser, or by another user, both cases resolve to the project being locked, and opened log.debug( "project [%s] is in use by another user [%s], so it is locked", @@ -1712,11 +1735,13 @@ async def remove_project_dynamic_services( user_id, ) - user_name_data: FullNameDict = user_name or await get_user_fullname(app, user_id) + user_name_data: FullNameDict = user_name or await get_user_fullname( + app, user_id=user_id + ) user_role: UserRole | None = None try: - user_role = await get_user_role(app, user_id) + user_role = await get_user_role(app, user_id=user_id) except UserNotFoundError: user_role = None @@ -1870,13 +1895,12 @@ async def get_project_inactivity( app: web.Application, project_id: ProjectID ) -> GetProjectInactivityResponse: project_settings: ProjectsSettings = 
get_plugin_settings(app) - project_inactivity = await director_v2_api.get_project_inactivity( + return await dynamic_scheduler_api.get_project_inactivity( app, - project_id, + project_id=project_id, # NOTE: project is considered inactive if all services exposing an /inactivity # endpoint were inactive since at least PROJECTS_INACTIVITY_INTERVAL max_inactivity_seconds=int( project_settings.PROJECTS_INACTIVITY_INTERVAL.total_seconds() ), ) - return GetProjectInactivityResponse.model_validate(project_inactivity) diff --git a/services/web/server/src/simcore_service_webserver/rest/plugin.py b/services/web/server/src/simcore_service_webserver/rest/plugin.py index 833023d6df9..9d23181a814 100644 --- a/services/web/server/src/simcore_service_webserver/rest/plugin.py +++ b/services/web/server/src/simcore_service_webserver/rest/plugin.py @@ -9,12 +9,12 @@ import logging from aiohttp import web -from aiohttp_swagger import setup_swagger # type: ignore[import-untyped] from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from servicelib.aiohttp.rest_middlewares import ( envelope_middleware_factory, error_middleware_factory, ) +from swagger_ui import api_doc # type: ignore from .._meta import API_VTAG from ..security.plugin import setup_security @@ -60,11 +60,11 @@ def setup_rest(app: web.Application): # _logger.debug("OAS loaded from %s ", spec_path) if settings.REST_SWAGGER_API_DOC_ENABLED: - setup_swagger( - app, - swagger_url="/dev/doc", - swagger_from_file=str(spec_path), - ui_version=3, + api_doc( + app=app, + url_prefix="/dev/doc", + config_path=str(spec_path), + title="Web-API doc", ) diff --git a/services/web/server/src/simcore_service_webserver/security/_authz_access_roles.py b/services/web/server/src/simcore_service_webserver/security/_authz_access_roles.py index da342e1b996..0bd7e6a75eb 100644 --- a/services/web/server/src/simcore_service_webserver/security/_authz_access_roles.py +++ b/services/web/server/src/simcore_service_webserver/security/_authz_access_roles.py @@ -83,6 +83,7 @@ class PermissionDict(TypedDict, total=False): "user.notifications.write", "user.profile.delete", "user.profile.update", + "user.read", "user.tokens.*", "wallets.*", "workspaces.*", @@ -103,7 +104,7 @@ class PermissionDict(TypedDict, total=False): can=[ "product.details.*", "product.invitations.create", - "user.users.*", + "admin.users.read", ], inherits=[UserRole.TESTER], ), diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py index 3ee106028fd..3fc24965f3f 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py @@ -18,7 +18,7 @@ from servicelib.aiohttp.typing_extension import Handler from servicelib.logging_errors import create_troubleshotting_log_kwargs -from ..director_v2.api import update_dynamic_service_networks_in_project +from ..dynamic_scheduler import api as dynamic_scheduler_api from ..products.api import get_product_name from ..utils import compose_support_error_msg from ..utils_aiohttp import create_redirect_to_page_response @@ -252,7 +252,9 @@ async def get_redirection_to_viewer(request: web.Request): file_params.download_link, product_name=get_product_name(request), ) - await update_dynamic_service_networks_in_project(request.app, project_id) + await 
dynamic_scheduler_api.update_projects_networks( + request.app, project_id=project_id + ) response = _create_redirect_response_to_view_page( request.app, @@ -281,7 +283,9 @@ async def get_redirection_to_viewer(request: web.Request): service_info=_create_service_info_from(valid_service), product_name=get_product_name(request), ) - await update_dynamic_service_networks_in_project(request.app, project_id) + await dynamic_scheduler_api.update_projects_networks( + request.app, project_id=project_id + ) response = _create_redirect_response_to_view_page( request.app, @@ -317,7 +321,9 @@ async def get_redirection_to_viewer(request: web.Request): ).STUDIES_DEFAULT_FILE_THUMBNAIL, product_name=get_product_name(request), ) - await update_dynamic_service_networks_in_project(request.app, project_id) + await dynamic_scheduler_api.update_projects_networks( + request.app, project_id=project_id + ) response = _create_redirect_response_to_view_page( request.app, diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py index 85d47f0dba8..10f1d8b6674 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py @@ -28,9 +28,7 @@ from .._constants import INDEX_RESOURCE_NAME from ..director_v2._core_computations import create_or_update_pipeline -from ..director_v2._core_dynamic_services import ( - update_dynamic_service_networks_in_project, -) +from ..dynamic_scheduler import api as dynamic_scheduler_api from ..products.api import get_current_product, get_product_name from ..projects._groups_db import get_project_group from ..projects.api import check_user_project_permission @@ -214,7 +212,9 @@ async def copy_study_to_account( await create_or_update_pipeline( request.app, user["id"], project["uuid"], product_name ) - await update_dynamic_service_networks_in_project(request.app, project["uuid"]) + await dynamic_scheduler_api.update_projects_networks( + request.app, project_id=ProjectID(project["uuid"]) + ) return project_uuid diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py index b76d8a4b3f9..531759b062f 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py @@ -13,6 +13,7 @@ import string from contextlib import suppress from datetime import datetime +from typing import Final import redis.asyncio as aioredis from aiohttp import web @@ -65,6 +66,33 @@ async def get_authorized_user(request: web.Request) -> dict: return {} +# GUEST_USER_RC_LOCK: +# +# These locks prevent the GC from deleting a GUEST user in two stages of its lifetime: +# +# 1. During construction: +# - Prevents GC from deleting this GUEST user while it is being created +# - Since the user still does not have an ID assigned, the lock is named with its random_user_name +# - The timeout here is the TTL of the lock in Redis: in case the webserver is overwhelmed and cannot create +# a user during that time, or crashes, Redis will ensure the lock disappears and let the garbage collector do its work +# +MAX_DELAY_TO_CREATE_USER: Final[int] = 8 # secs +# +# 2. During initialization +# - Prevents the GC from deleting this GUEST user, with ID assigned, while it gets initialized and acquires its first resource +# - Uses the ID assigned to name the lock +# +MAX_DELAY_TO_GUEST_FIRST_CONNECTION: Final[int] = 15 # secs +# +# +# NOTES: +# - In case of failure or excessive delay the lock has a timeout that automatically unlocks it +# and the GC can clean up what remains +# - Notice that the ids used to name the locks are unique, therefore the locks can be acquired w/o errors +# - These locks are very specific to resources and have a timeout, so the risk of blocking the GC is small +# + +
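The two-stage scheme above relies entirely on Redis TTLs for crash-safety: as long as a lock is held the GC must skip the guest, and if the webserver dies the lock expires on its own. A minimal, self-contained sketch of the first-stage lock, assuming a plain redis.asyncio client; the key format and the _create_user_in_db helper are hypothetical stand-ins for the webserver's actual naming scheme and registration code:

import asyncio

import redis.asyncio as aioredis

MAX_DELAY_TO_CREATE_USER = 8  # secs: TTL after which Redis releases the lock on its own


async def _create_user_in_db(name: str) -> dict:
    # hypothetical stand-in for the real registration step
    await asyncio.sleep(0.1)
    return {"id": 42, "name": name}


async def create_guest_safely(
    redis_locks_client: aioredis.Redis, random_user_name: str
) -> dict:
    # Stage 1: hold a TTL-bound lock named after the (still id-less) guest while
    # the user row is created. The GC skips any guest whose lock is still held;
    # if the webserver stalls or crashes beyond the TTL, Redis drops the lock by
    # itself and the GC can clean up whatever was left behind.
    async with redis_locks_client.lock(
        f"GUEST_USER_RC_LOCK:{random_user_name}",  # hypothetical key format
        timeout=MAX_DELAY_TO_CREATE_USER,
    ):
        return await _create_user_in_db(random_user_name)

Once the user has an ID, the same pattern repeats for stage 2, with the ID-based lock name and MAX_DELAY_TO_GUEST_FIRST_CONNECTION as the TTL.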
async def create_temporary_guest_user(request: web.Request): """Creates a guest user with a random name and @@ -86,33 +114,6 @@ async def create_temporary_guest_user(request: web.Request): password = generate_password(length=12) expires_at = datetime.utcnow() + settings.STUDIES_GUEST_ACCOUNT_LIFETIME - # GUEST_USER_RC_LOCK: - # - # These locks prevents the GC from deleting a GUEST user in to stages of its lifefime: - # - # 1. During construction: - # - Prevents GC from deleting this GUEST user while it is being created - # - Since the user still does not have an ID assigned, the lock is named with his random_user_name - # - the timeout here is the TTL of the lock in Redis. in case the webserver is overwhelmed and cannot create - # a user during that time or crashes, then redis will ensure the lock disappears and let the garbage collector do its work - # - MAX_DELAY_TO_CREATE_USER = 5 # secs - # - # 2. During initialization - # - Prevents the GC from deleting this GUEST user, with ID assigned, while it gets initialized and acquires it's first resource - # - Uses the ID assigned to name the lock - # - MAX_DELAY_TO_GUEST_FIRST_CONNECTION = 15 # secs - # - # - # NOTES: - # - In case of failure or excessive delay the lock has a timeout that automatically unlocks it - # and the GC can clean up what remains - # - Notice that the ids to name the locks are unique, therefore the lock can be acquired w/o errors - # - These locks are very specific to resources and have timeout so the risk of blocking from GC is small - # - - # (1) read details above usr = None try: async with redis_locks_client.lock( diff --git a/services/web/server/src/simcore_service_webserver/users/_api.py b/services/web/server/src/simcore_service_webserver/users/_api.py deleted file mode 100644 index 458366367f5..00000000000 --- a/services/web/server/src/simcore_service_webserver/users/_api.py +++ /dev/null @@ -1,166 +0,0 @@ -import logging -from typing import NamedTuple - -import pycountry -from aiohttp import web -from models_library.emails import LowerCaseEmailStr -from models_library.payments import UserInvoiceAddress -from models_library.users import UserBillingDetails, UserID -from pydantic import TypeAdapter -from simcore_postgres_database.models.users import UserStatus - -from ..db.plugin import get_database_engine -from .
import _db, _schemas -from ._db import get_user_or_raise -from ._db import list_user_permissions as db_list_of_permissions -from ._db import update_user_status -from .exceptions import AlreadyPreRegisteredError -from .schemas import Permission - -_logger = logging.getLogger(__name__) - - -async def list_user_permissions( - app: web.Application, user_id: UserID, product_name: str -) -> list[Permission]: - permissions: list[Permission] = await db_list_of_permissions( - app, user_id=user_id, product_name=product_name - ) - return permissions - - -class UserCredentialsTuple(NamedTuple): - email: LowerCaseEmailStr - password_hash: str - display_name: str - - -async def get_user_credentials( - app: web.Application, *, user_id: UserID -) -> UserCredentialsTuple: - row = await get_user_or_raise( - get_database_engine(app), - user_id=user_id, - return_column_names=[ - "name", - "first_name", - "email", - "password_hash", - ], - ) - - return UserCredentialsTuple( - email=TypeAdapter(LowerCaseEmailStr).validate_python(row.email), - password_hash=row.password_hash, - display_name=row.first_name or row.name.capitalize(), - ) - - -async def set_user_as_deleted(app: web.Application, user_id: UserID) -> None: - await update_user_status( - get_database_engine(app), user_id=user_id, new_status=UserStatus.DELETED - ) - - -def _glob_to_sql_like(glob_pattern: str) -> str: - # Escape SQL LIKE special characters in the glob pattern - sql_like_pattern = glob_pattern.replace("%", r"\%").replace("_", r"\_") - # Convert glob wildcards to SQL LIKE wildcards - return sql_like_pattern.replace("*", "%").replace("?", "_") - - -async def search_users( - app: web.Application, email_glob: str, *, include_products: bool = False -) -> list[_schemas.UserProfile]: - # NOTE: this search is deploy-wide i.e. independent of the product! 
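An aside on the `_glob_to_sql_like` helper in the module being deleted above: the order of the two replacement passes matters, since SQL's own wildcards must be escaped before the glob wildcards are translated, otherwise a literal '%' or '_' typed by the caller would silently match anything. A self-contained sketch of the same two-step translation with its expected mappings:

def glob_to_sql_like(glob_pattern: str) -> str:
    # 1) escape characters that are wildcards in SQL LIKE
    sql_like_pattern = glob_pattern.replace("%", r"\%").replace("_", r"\_")
    # 2) translate glob wildcards into their LIKE equivalents
    return sql_like_pattern.replace("*", "%").replace("?", "_")


assert glob_to_sql_like("*@example.com") == "%@example.com"
assert glob_to_sql_like("jo?n.doe*") == "jo_n.doe%"
assert glob_to_sql_like("100%_pure") == r"100\%\_pure"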
- rows = await _db.search_users_and_get_profile( - get_database_engine(app), email_like=_glob_to_sql_like(email_glob) - ) - - async def _list_products_or_none(user_id): - if user_id is not None and include_products: - products = await _db.get_user_products( - get_database_engine(app), user_id=user_id - ) - return [_.product_name for _ in products] - return None - - return [ - _schemas.UserProfile( - first_name=r.first_name or r.pre_first_name, - last_name=r.last_name or r.pre_last_name, - email=r.email or r.pre_email, - institution=r.institution, - phone=r.phone or r.pre_phone, - address=r.address, - city=r.city, - state=r.state, - postal_code=r.postal_code, - country=r.country, - extras=r.extras or {}, - invited_by=r.invited_by, - products=await _list_products_or_none(r.user_id), - # NOTE: old users will not have extra details - registered=r.user_id is not None if r.pre_email else r.status is not None, - status=r.status, - ) - for r in rows - ] - - -async def pre_register_user( - app: web.Application, profile: _schemas.PreUserProfile, creator_user_id: UserID -) -> _schemas.UserProfile: - - found = await search_users(app, email_glob=profile.email, include_products=False) - if found: - raise AlreadyPreRegisteredError(num_found=len(found), email=profile.email) - - details = profile.model_dump( - include={ - "first_name", - "last_name", - "phone", - "institution", - "address", - "city", - "state", - "country", - "postal_code", - "extras", - }, - exclude_none=True, - ) - - for key in ("first_name", "last_name", "phone"): - if key in details: - details[f"pre_{key}"] = details.pop(key) - - await _db.new_user_details( - get_database_engine(app), - email=profile.email, - created_by=creator_user_id, - **details, - ) - - found = await search_users(app, email_glob=profile.email, include_products=False) - - assert len(found) == 1 # nosec - return found[0] - - -async def get_user_invoice_address( - app: web.Application, user_id: UserID -) -> UserInvoiceAddress: - user_billing_details: UserBillingDetails = await _db.get_user_billing_details( - get_database_engine(app), user_id=user_id - ) - _user_billing_country = pycountry.countries.lookup(user_billing_details.country) - _user_billing_country_alpha_2_format = _user_billing_country.alpha_2 - return UserInvoiceAddress( - line1=user_billing_details.address, - state=user_billing_details.state, - postal_code=user_billing_details.postal_code, - city=user_billing_details.city, - country=_user_billing_country_alpha_2_format, - ) diff --git a/services/web/server/src/simcore_service_webserver/users/_common/__init__.py b/services/web/server/src/simcore_service_webserver/users/_common/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/web/server/src/simcore_service_webserver/users/_models.py b/services/web/server/src/simcore_service_webserver/users/_common/models.py similarity index 63% rename from services/web/server/src/simcore_service_webserver/users/_models.py rename to services/web/server/src/simcore_service_webserver/users/_common/models.py index cd9de6a873c..513d8bed102 100644 --- a/services/web/server/src/simcore_service_webserver/users/_models.py +++ b/services/web/server/src/simcore_service_webserver/users/_common/models.py @@ -1,6 +1,30 @@ -from typing import Annotated, Any, Self +from typing import Annotated, Any, NamedTuple, Self, TypedDict + +from models_library.basic_types import IDStr +from models_library.emails import LowerCaseEmailStr +from pydantic import BaseModel, ConfigDict, EmailStr, Field + + +class 
FullNameDict(TypedDict): + first_name: str | None + last_name: str | None + + +class UserDisplayAndIdNamesTuple(NamedTuple): + name: str + email: EmailStr + first_name: IDStr + last_name: IDStr + + @property + def full_name(self) -> IDStr: + return IDStr.concatenate(self.first_name, self.last_name) + + +class UserIdNamesTuple(NamedTuple): + name: str + email: str -from pydantic import BaseModel, ConfigDict, Field # # DB models @@ -45,3 +69,9 @@ def from_api(cls, profile_update) -> Self: def to_db(self) -> dict[str, Any]: return self.model_dump(exclude_unset=True, by_alias=False) + + +class UserCredentialsTuple(NamedTuple): + email: LowerCaseEmailStr + password_hash: str + display_name: str diff --git a/services/web/server/src/simcore_service_webserver/users/_schemas.py b/services/web/server/src/simcore_service_webserver/users/_common/schemas.py similarity index 62% rename from services/web/server/src/simcore_service_webserver/users/_schemas.py rename to services/web/server/src/simcore_service_webserver/users/_common/schemas.py index 4b9aa7acf63..04946e21fcc 100644 --- a/services/web/server/src/simcore_service_webserver/users/_schemas.py +++ b/services/web/server/src/simcore_service_webserver/users/_common/schemas.py @@ -1,62 +1,37 @@ -""" models for rest api schemas, i.e. those defined in openapi.json +""" input/output datasets used in the rest-API +NOTE: Most of the model schemas are in `models_library.api_schemas_webserver.users`, +the rest (hidden or needs a dependency) is here """ + import re import sys from contextlib import suppress from typing import Annotated, Any, Final import pycountry -from models_library.api_schemas_webserver._base import InputSchema, OutputSchema +from models_library.api_schemas_webserver._base import InputSchema +from models_library.api_schemas_webserver.users import UserForAdminGet from models_library.emails import LowerCaseEmailStr -from models_library.products import ProductName -from pydantic import ConfigDict, Field, ValidationInfo, field_validator, model_validator -from simcore_postgres_database.models.users import UserStatus - - -class UserProfile(OutputSchema): - first_name: str | None - last_name: str | None - email: LowerCaseEmailStr - institution: str | None - phone: str | None - address: str | None - city: str | None - state: str | None = Field(description="State, province, canton, ...") - postal_code: str | None - country: str | None - extras: dict[str, Any] = Field( - default_factory=dict, - description="Keeps extra information provided in the request form", - ) +from models_library.users import UserID +from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator +from servicelib.request_keys import RQT_USERID_KEY - # authorization - invited_by: str | None = Field(default=None) +from ..._constants import RQ_PRODUCT_KEY - # user status - registered: bool - status: UserStatus | None - products: list[ProductName] | None = Field( - default=None, - description="List of products this users is included or None if fields is unset", - ) - @field_validator("status") - @classmethod - def _consistency_check(cls, v, info: ValidationInfo): - registered = info.data["registered"] - status = v - if not registered and status is not None: - msg = f"{registered=} and {status=} is not allowed" - raise ValueError(msg) - return v +class UsersRequestContext(BaseModel): + user_id: UserID = Field(..., alias=RQT_USERID_KEY) # type: ignore[literal-required] + product_name: str = Field(..., alias=RQ_PRODUCT_KEY) # type: ignore[literal-required] 
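The new `UsersRequestContext` model replaces the copy that used to live in `_handlers.py` (deleted further down). It works because aiohttp's web.Request behaves as a mapping on which the auth and product middlewares stash values under well-known keys, so `model_validate(request)` can pick them up by alias. A minimal sketch of the mechanism, with the key names hard-coded as assumptions (the real ones come from servicelib.request_keys and the webserver's own constants):

from pydantic import BaseModel, Field

RQT_USERID_KEY = "RQT_USERID_KEY"  # assumed key name, for the sketch only
RQ_PRODUCT_KEY = "RQ_PRODUCT_KEY"  # assumed key name, for the sketch only


class UsersRequestContext(BaseModel):
    user_id: int = Field(..., alias=RQT_USERID_KEY)
    product_name: str = Field(..., alias=RQ_PRODUCT_KEY)


# middlewares would have stored these on the request; any mapping works here
fake_request = {RQT_USERID_KEY: 42, RQ_PRODUCT_KEY: "osparc"}
ctx = UsersRequestContext.model_validate(fake_request)
assert ctx.user_id == 42
assert ctx.product_name == "osparc"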
MAX_BYTES_SIZE_EXTRAS: Final[int] = 512 -class PreUserProfile(InputSchema): +class PreRegisteredUserGet(InputSchema): + # NOTE: validators need pycountry! + first_name: str last_name: str email: LowerCaseEmailStr @@ -74,7 +49,7 @@ class PreUserProfile(InputSchema): dict[str, Any], Field( default_factory=dict, - description="Keeps extra information provided in the request form. At most MAX_NUM_EXTRAS fields", + description="Keeps extra information provided in the request form.", ), ] @@ -133,4 +108,7 @@ def _pre_check_and_normalize_country(cls, v): return v -assert set(PreUserProfile.model_fields).issubset(UserProfile.model_fields) # nosec +# asserts field names are in sync +assert set(PreRegisteredUserGet.model_fields).issubset( + UserForAdminGet.model_fields +) # nosec diff --git a/services/web/server/src/simcore_service_webserver/users/_constants.py b/services/web/server/src/simcore_service_webserver/users/_constants.py deleted file mode 100644 index 5347d3e7527..00000000000 --- a/services/web/server/src/simcore_service_webserver/users/_constants.py +++ /dev/null @@ -1,7 +0,0 @@ -from typing import Final - -FMSG_MISSING_CONFIG_WITH_OEC: Final[str] = ( - "The product is not ready for use until the configuration is fully completed. " - "Please wait and try again. " - "If the issue continues, contact support with error code: {error_code}." -) diff --git a/services/web/server/src/simcore_service_webserver/users/_db.py b/services/web/server/src/simcore_service_webserver/users/_db.py deleted file mode 100644 index f80c4596423..00000000000 --- a/services/web/server/src/simcore_service_webserver/users/_db.py +++ /dev/null @@ -1,216 +0,0 @@ -import contextlib - -import sqlalchemy as sa -from aiohttp import web -from aiopg.sa.connection import SAConnection -from aiopg.sa.engine import Engine -from aiopg.sa.result import ResultProxy, RowProxy -from models_library.groups import GroupID -from models_library.users import UserBillingDetails, UserID -from simcore_postgres_database.models.groups import groups, user_to_groups -from simcore_postgres_database.models.products import products -from simcore_postgres_database.models.users import UserStatus, users -from simcore_postgres_database.models.users_details import ( - users_pre_registration_details, -) -from simcore_postgres_database.utils_groups_extra_properties import ( - GroupExtraPropertiesNotFoundError, - GroupExtraPropertiesRepo, -) -from simcore_postgres_database.utils_users import UsersRepo -from simcore_service_webserver.users.exceptions import UserNotFoundError - -from ..db.models import user_to_groups -from ..db.plugin import get_database_engine -from .exceptions import BillingDetailsNotFoundError -from .schemas import Permission - -_ALL = None - - -async def get_user_or_raise( - engine: Engine, *, user_id: UserID, return_column_names: list[str] | None = _ALL -) -> RowProxy: - if return_column_names == _ALL: - return_column_names = list(users.columns.keys()) - - assert return_column_names is not None # nosec - assert set(return_column_names).issubset(users.columns.keys()) # nosec - - async with engine.acquire() as conn: - row: RowProxy | None = await ( - await conn.execute( - sa.select(*(users.columns[name] for name in return_column_names)).where( - users.c.id == user_id - ) - ) - ).first() - if row is None: - raise UserNotFoundError(uid=user_id) - return row - - -async def get_users_ids_in_group(conn: SAConnection, gid: GroupID) -> set[UserID]: - result: set[UserID] = set() - query_result = await conn.execute( - 
sa.select(user_to_groups.c.uid).where(user_to_groups.c.gid == gid) - ) - async for entry in query_result: - result.add(entry[0]) - return result - - -async def list_user_permissions( - app: web.Application, *, user_id: UserID, product_name: str -) -> list[Permission]: - override_services_specifications = Permission( - name="override_services_specifications", - allowed=False, - ) - with contextlib.suppress(GroupExtraPropertiesNotFoundError): - async with get_database_engine(app).acquire() as conn: - user_group_extra_properties = ( - await GroupExtraPropertiesRepo.get_aggregated_properties_for_user( - conn, user_id=user_id, product_name=product_name - ) - ) - override_services_specifications.allowed = ( - user_group_extra_properties.override_services_specifications - ) - - return [override_services_specifications] - - -async def do_update_expired_users(conn: SAConnection) -> list[UserID]: - result: ResultProxy = await conn.execute( - users.update() - .values(status=UserStatus.EXPIRED) - .where( - (users.c.expires_at.is_not(None)) - & (users.c.status == UserStatus.ACTIVE) - & (users.c.expires_at < sa.sql.func.now()) - ) - .returning(users.c.id) - ) - if rows := await result.fetchall(): - return [r.id for r in rows] - return [] - - -async def update_user_status( - engine: Engine, *, user_id: UserID, new_status: UserStatus -): - async with engine.acquire() as conn: - await conn.execute( - users.update().values(status=new_status).where(users.c.id == user_id) - ) - - -async def search_users_and_get_profile( - engine: Engine, *, email_like: str -) -> list[RowProxy]: - - users_alias = sa.alias(users, name="users_alias") - - invited_by = ( - sa.select(users_alias.c.name) - .where(users_pre_registration_details.c.created_by == users_alias.c.id) - .label("invited_by") - ) - - async with engine.acquire() as conn: - columns = ( - users.c.first_name, - users.c.last_name, - users.c.email, - users.c.phone, - users_pre_registration_details.c.pre_email, - users_pre_registration_details.c.pre_first_name, - users_pre_registration_details.c.pre_last_name, - users_pre_registration_details.c.institution, - users_pre_registration_details.c.pre_phone, - users_pre_registration_details.c.address, - users_pre_registration_details.c.city, - users_pre_registration_details.c.state, - users_pre_registration_details.c.postal_code, - users_pre_registration_details.c.country, - users_pre_registration_details.c.user_id, - users_pre_registration_details.c.extras, - users.c.status, - invited_by, - ) - - left_outer_join = ( - sa.select(*columns) - .select_from( - users_pre_registration_details.outerjoin( - users, users.c.id == users_pre_registration_details.c.user_id - ) - ) - .where(users_pre_registration_details.c.pre_email.like(email_like)) - ) - right_outer_join = ( - sa.select(*columns) - .select_from( - users.outerjoin( - users_pre_registration_details, - users.c.id == users_pre_registration_details.c.user_id, - ) - ) - .where(users.c.email.like(email_like)) - ) - - result = await conn.execute(sa.union(left_outer_join, right_outer_join)) - return await result.fetchall() or [] - - -async def get_user_products(engine: Engine, user_id: UserID) -> list[RowProxy]: - async with engine.acquire() as conn: - product_name_subq = ( - sa.select(products.c.name) - .where(products.c.group_id == groups.c.gid) - .label("product_name") - ) - products_gis_subq = sa.select(products.c.group_id).distinct().subquery() - query = ( - sa.select( - groups.c.gid, - product_name_subq, - ) - .select_from( - users.join(user_to_groups, 
user_to_groups.c.uid == users.c.id).join( - groups, - (groups.c.gid == user_to_groups.c.gid) - & groups.c.gid.in_(products_gis_subq), - ) - ) - .where(users.c.id == user_id) - .order_by(groups.c.gid) - ) - result = await conn.execute(query) - return await result.fetchall() or [] - - -async def new_user_details( - engine: Engine, email: str, created_by: UserID, **other_values -) -> None: - async with engine.acquire() as conn: - await conn.execute( - sa.insert(users_pre_registration_details).values( - created_by=created_by, pre_email=email, **other_values - ) - ) - - -async def get_user_billing_details( - engine: Engine, user_id: UserID -) -> UserBillingDetails: - """ - Raises: - BillingDetailsNotFoundError - """ - async with engine.acquire() as conn: - user_billing_details = await UsersRepo.get_billing_details(conn, user_id) - if not user_billing_details: - raise BillingDetailsNotFoundError(user_id=user_id) - return UserBillingDetails.model_validate(user_billing_details) diff --git a/services/web/server/src/simcore_service_webserver/users/_handlers.py b/services/web/server/src/simcore_service_webserver/users/_handlers.py deleted file mode 100644 index 25785673a03..00000000000 --- a/services/web/server/src/simcore_service_webserver/users/_handlers.py +++ /dev/null @@ -1,149 +0,0 @@ -import functools -import logging - -from aiohttp import web -from models_library.api_schemas_webserver.users import MyProfileGet, MyProfilePatch -from models_library.users import UserID -from pydantic import BaseModel, Field -from servicelib.aiohttp import status -from servicelib.aiohttp.requests_validation import ( - parse_request_body_as, - parse_request_query_parameters_as, -) -from servicelib.aiohttp.typing_extension import Handler -from servicelib.logging_errors import create_troubleshotting_log_kwargs -from servicelib.request_keys import RQT_USERID_KEY -from servicelib.rest_constants import RESPONSE_MODEL_POLICY - -from .._constants import RQ_PRODUCT_KEY -from .._meta import API_VTAG -from ..login.decorators import login_required -from ..security.decorators import permission_required -from ..utils_aiohttp import envelope_json_response -from . 
import _api, api -from ._constants import FMSG_MISSING_CONFIG_WITH_OEC -from ._schemas import PreUserProfile -from .exceptions import ( - AlreadyPreRegisteredError, - MissingGroupExtraPropertiesForProductError, - UserNameDuplicateError, - UserNotFoundError, -) - -_logger = logging.getLogger(__name__) - - -routes = web.RouteTableDef() - - -class UsersRequestContext(BaseModel): - user_id: UserID = Field(..., alias=RQT_USERID_KEY) # type: ignore[literal-required] - product_name: str = Field(..., alias=RQ_PRODUCT_KEY) # type: ignore[literal-required] - - -def _handle_users_exceptions(handler: Handler): - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except UserNotFoundError as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - - except UserNameDuplicateError as exc: - raise web.HTTPConflict(reason=f"{exc}") from exc - - except MissingGroupExtraPropertiesForProductError as exc: - error_code = exc.error_code() - user_error_msg = FMSG_MISSING_CONFIG_WITH_OEC.format(error_code=error_code) - _logger.exception( - **create_troubleshotting_log_kwargs( - user_error_msg, - error=exc, - error_code=error_code, - tip="Row in `groups_extra_properties` for this product is missing.", - ) - ) - raise web.HTTPServiceUnavailable(reason=user_error_msg) from exc - - return wrapper - - -@routes.get(f"/{API_VTAG}/me", name="get_my_profile") -@login_required -@_handle_users_exceptions -async def get_my_profile(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.model_validate(request) - profile: MyProfileGet = await api.get_user_profile( - request.app, req_ctx.user_id, req_ctx.product_name - ) - return envelope_json_response(profile) - - -@routes.patch(f"/{API_VTAG}/me", name="update_my_profile") -@routes.put( - f"/{API_VTAG}/me", name="replace_my_profile" # deprecated. 
Use patch instead -) -@login_required -@permission_required("user.profile.update") -@_handle_users_exceptions -async def update_my_profile(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.model_validate(request) - profile_update = await parse_request_body_as(MyProfilePatch, request) - - await api.update_user_profile( - request.app, user_id=req_ctx.user_id, update=profile_update - ) - return web.json_response(status=status.HTTP_204_NO_CONTENT) - - -class _SearchQueryParams(BaseModel): - email: str = Field( - min_length=3, - max_length=200, - description="complete or glob pattern for an email", - ) - - -_RESPONSE_MODEL_MINIMAL_POLICY = RESPONSE_MODEL_POLICY.copy() -_RESPONSE_MODEL_MINIMAL_POLICY["exclude_none"] = True - - -@routes.get(f"/{API_VTAG}/users:search", name="search_users") -@login_required -@permission_required("user.users.*") -@_handle_users_exceptions -async def search_users(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.model_validate(request) - assert req_ctx.product_name # nosec - - query_params: _SearchQueryParams = parse_request_query_parameters_as( - _SearchQueryParams, request - ) - - found = await _api.search_users( - request.app, email_glob=query_params.email, include_products=True - ) - - return envelope_json_response( - [_.model_dump(**_RESPONSE_MODEL_MINIMAL_POLICY) for _ in found] - ) - - -@routes.post(f"/{API_VTAG}/users:pre-register", name="pre_register_user") -@login_required -@permission_required("user.users.*") -@_handle_users_exceptions -async def pre_register_user(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.model_validate(request) - pre_user_profile = await parse_request_body_as(PreUserProfile, request) - - try: - user_profile = await _api.pre_register_user( - request.app, profile=pre_user_profile, creator_user_id=req_ctx.user_id - ) - return envelope_json_response( - user_profile.model_dump(**_RESPONSE_MODEL_MINIMAL_POLICY) - ) - except AlreadyPreRegisteredError as err: - raise web.HTTPConflict(reason=f"{err}") from err diff --git a/services/web/server/src/simcore_service_webserver/users/_notifications_handlers.py b/services/web/server/src/simcore_service_webserver/users/_notifications_rest.py similarity index 91% rename from services/web/server/src/simcore_service_webserver/users/_notifications_handlers.py rename to services/web/server/src/simcore_service_webserver/users/_notifications_rest.py index 58fb1a483e5..e9f3b1788e9 100644 --- a/services/web/server/src/simcore_service_webserver/users/_notifications_handlers.py +++ b/services/web/server/src/simcore_service_webserver/users/_notifications_rest.py @@ -3,6 +3,8 @@ import redis.asyncio as aioredis from aiohttp import web +from models_library.api_schemas_webserver.users import MyPermissionGet +from models_library.users import UserPermission from pydantic import BaseModel from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( @@ -17,8 +19,8 @@ from ..redis import get_redis_user_notifications_client from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response -from . import _api -from ._handlers import UsersRequestContext +from . 
import _users_service +from ._common.schemas import UsersRequestContext from ._notifications import ( MAX_NOTIFICATIONS_FOR_USER_TO_KEEP, MAX_NOTIFICATIONS_FOR_USER_TO_SHOW, @@ -27,7 +29,6 @@ UserNotificationPatch, get_notification_key, ) -from .schemas import Permission, PermissionGet _logger = logging.getLogger(__name__) @@ -125,14 +126,9 @@ async def mark_notification_as_read(request: web.Request) -> web.Response: @permission_required("user.permissions.read") async def list_user_permissions(request: web.Request) -> web.Response: req_ctx = UsersRequestContext.model_validate(request) - list_permissions: list[Permission] = await _api.list_user_permissions( - request.app, req_ctx.user_id, req_ctx.product_name + list_permissions: list[UserPermission] = await _users_service.list_user_permissions( + request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name ) return envelope_json_response( - [ - PermissionGet.model_construct( - _fields_set=p.model_fields_set, **p.model_dump() - ) - for p in list_permissions - ] + [MyPermissionGet.from_model(p) for p in list_permissions] ) diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_db.py b/services/web/server/src/simcore_service_webserver/users/_preferences_repository.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/users/_preferences_db.py rename to services/web/server/src/simcore_service_webserver/users/_preferences_repository.py diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_handlers.py b/services/web/server/src/simcore_service_webserver/users/_preferences_rest.py similarity index 94% rename from services/web/server/src/simcore_service_webserver/users/_preferences_handlers.py rename to services/web/server/src/simcore_service_webserver/users/_preferences_rest.py index 0c886472171..1793cd65ccd 100644 --- a/services/web/server/src/simcore_service_webserver/users/_preferences_handlers.py +++ b/services/web/server/src/simcore_service_webserver/users/_preferences_rest.py @@ -18,7 +18,7 @@ from .._meta import API_VTAG from ..login.decorators import login_required from ..models import RequestContext -from . import _preferences_api +from . import _preferences_service from .exceptions import FrontendUserPreferenceIsNotDefinedError routes = web.RouteTableDef() @@ -50,7 +50,7 @@ async def set_frontend_preference(request: web.Request) -> web.Response: req_body = await parse_request_body_as(PatchRequestBody, request) req_path_params = parse_request_path_parameters_as(PatchPathParams, request) - await _preferences_api.set_frontend_user_preference( + await _preferences_service.set_frontend_user_preference( request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name, diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_api.py b/services/web/server/src/simcore_service_webserver/users/_preferences_service.py similarity index 95% rename from services/web/server/src/simcore_service_webserver/users/_preferences_api.py rename to services/web/server/src/simcore_service_webserver/users/_preferences_service.py index fb55ac58d2f..0a5893141e1 100644 --- a/services/web/server/src/simcore_service_webserver/users/_preferences_api.py +++ b/services/web/server/src/simcore_service_webserver/users/_preferences_service.py @@ -20,7 +20,7 @@ ) from ..db.plugin import get_database_engine -from . import _preferences_db +from . 
import _preferences_repository from ._preferences_models import ( ALL_FRONTEND_PREFERENCES, TelemetryLowDiskSpaceWarningThresholdFrontendUserPreference, @@ -39,7 +39,7 @@ async def _get_frontend_user_preferences( ) -> list[FrontendUserPreference]: saved_user_preferences: list[FrontendUserPreference | None] = await logged_gather( *( - _preferences_db.get_user_preference( + _preferences_repository.get_user_preference( app, user_id=user_id, product_name=product_name, @@ -64,7 +64,7 @@ async def get_frontend_user_preference( product_name: ProductName, preference_class: type[FrontendUserPreference], ) -> AnyUserPreference | None: - return await _preferences_db.get_user_preference( + return await _preferences_repository.get_user_preference( app, user_id=user_id, product_name=product_name, @@ -127,7 +127,7 @@ async def set_frontend_user_preference( FrontendUserPreference.get_preference_class_from_name(preference_name), ) - await _preferences_db.set_user_preference( + await _preferences_repository.set_user_preference( app, user_id=user_id, preference=TypeAdapter(preference_class).validate_python({"value": value}), # type: ignore[arg-type] # GitHK this is suspicious diff --git a/services/web/server/src/simcore_service_webserver/users/_tokens_handlers.py b/services/web/server/src/simcore_service_webserver/users/_tokens_rest.py similarity index 74% rename from services/web/server/src/simcore_service_webserver/users/_tokens_handlers.py rename to services/web/server/src/simcore_service_webserver/users/_tokens_rest.py index 9f5dfc941b8..64c971761a7 100644 --- a/services/web/server/src/simcore_service_webserver/users/_tokens_handlers.py +++ b/services/web/server/src/simcore_service_webserver/users/_tokens_rest.py @@ -2,6 +2,7 @@ import logging from aiohttp import web +from models_library.api_schemas_webserver.users import MyTokenCreate, MyTokenGet from pydantic import BaseModel from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( @@ -14,10 +15,9 @@ from ..login.decorators import login_required from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response -from . import _tokens -from ._handlers import UsersRequestContext +from . 
import _tokens_service +from ._common.schemas import UsersRequestContext from .exceptions import TokenNotFoundError -from .schemas import TokenCreate _logger = logging.getLogger(__name__) @@ -45,8 +45,8 @@ async def _wrapper(request: web.Request) -> web.StreamResponse: @permission_required("user.tokens.*") async def list_tokens(request: web.Request) -> web.Response: req_ctx = UsersRequestContext.model_validate(request) - all_tokens = await _tokens.list_tokens(request.app, req_ctx.user_id) - return envelope_json_response(all_tokens) + all_tokens = await _tokens_service.list_tokens(request.app, req_ctx.user_id) + return envelope_json_response([MyTokenGet.from_model(t) for t in all_tokens]) @routes.post(f"/{API_VTAG}/me/tokens", name="create_token") @@ -55,9 +55,13 @@ async def list_tokens(request: web.Request) -> web.Response: @permission_required("user.tokens.*") async def create_token(request: web.Request) -> web.Response: req_ctx = UsersRequestContext.model_validate(request) - token_create = await parse_request_body_as(TokenCreate, request) - await _tokens.create_token(request.app, req_ctx.user_id, token_create) - return envelope_json_response(token_create, web.HTTPCreated) + token_create = await parse_request_body_as(MyTokenCreate, request) + + token = await _tokens_service.create_token( + request.app, req_ctx.user_id, token_create.to_model() + ) + + return envelope_json_response(MyTokenGet.from_model(token), web.HTTPCreated) class _TokenPathParams(BaseModel): @@ -71,10 +75,12 @@ class _TokenPathParams(BaseModel): async def get_token(request: web.Request) -> web.Response: req_ctx = UsersRequestContext.model_validate(request) req_path_params = parse_request_path_parameters_as(_TokenPathParams, request) - token = await _tokens.get_token( + + token = await _tokens_service.get_token( request.app, req_ctx.user_id, req_path_params.service ) - return envelope_json_response(token) + + return envelope_json_response(MyTokenGet.from_model(token)) @routes.delete(f"/{API_VTAG}/me/tokens/{{service}}", name="delete_token") @@ -84,5 +90,9 @@ async def get_token(request: web.Request) -> web.Response: async def delete_token(request: web.Request) -> web.Response: req_ctx = UsersRequestContext.model_validate(request) req_path_params = parse_request_path_parameters_as(_TokenPathParams, request) - await _tokens.delete_token(request.app, req_ctx.user_id, req_path_params.service) + + await _tokens_service.delete_token( + request.app, req_ctx.user_id, req_path_params.service + ) + return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/users/_tokens.py b/services/web/server/src/simcore_service_webserver/users/_tokens_service.py similarity index 78% rename from services/web/server/src/simcore_service_webserver/users/_tokens.py rename to services/web/server/src/simcore_service_webserver/users/_tokens_service.py index 6b4e58c8443..18e2f6323fd 100644 --- a/services/web/server/src/simcore_service_webserver/users/_tokens.py +++ b/services/web/server/src/simcore_service_webserver/users/_tokens_service.py @@ -4,43 +4,43 @@ """ import sqlalchemy as sa from aiohttp import web -from models_library.users import UserID -from models_library.utils.fastapi_encoders import jsonable_encoder +from models_library.users import UserID, UserThirdPartyToken from sqlalchemy import and_, literal_column from ..db.models import tokens from ..db.plugin import get_database_engine from .exceptions import TokenNotFoundError -from .schemas import ThirdPartyToken, TokenCreate 
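The serialization change in this module is worth spelling out: the write path now uses Pydantic's model_dump(mode="json") instead of jsonable_encoder, and the read path keeps model_construct, which skips re-validation of data that was already validated when written. A small sketch of that round-trip with a hypothetical minimal token shape (the real UserThirdPartyToken lives in models_library):

from pydantic import BaseModel


class ThirdPartyTokenSketch(BaseModel):
    # hypothetical minimal shape, for illustration only
    service: str
    token_key: str


token = ThirdPartyTokenSketch(service="github-api", token_key="abc123")

# write side: mode="json" yields JSON-safe primitives, ready for a JSON column
payload = token.model_dump(mode="json")
assert payload == {"service": "github-api", "token_key": "abc123"}

# read side: model_construct trusts the stored data and skips validation
restored = ThirdPartyTokenSketch.model_construct(**payload)
assert restored == token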
async def create_token( - app: web.Application, user_id: UserID, token: TokenCreate -) -> ThirdPartyToken: + app: web.Application, user_id: UserID, token: UserThirdPartyToken +) -> UserThirdPartyToken: async with get_database_engine(app).acquire() as conn: await conn.execute( tokens.insert().values( user_id=user_id, token_service=token.service, - token_data=jsonable_encoder(token), + token_data=token.model_dump(mode="json"), ) ) return token -async def list_tokens(app: web.Application, user_id: UserID) -> list[ThirdPartyToken]: - user_tokens: list[ThirdPartyToken] = [] +async def list_tokens( + app: web.Application, user_id: UserID +) -> list[UserThirdPartyToken]: + user_tokens: list[UserThirdPartyToken] = [] async with get_database_engine(app).acquire() as conn: async for row in conn.execute( sa.select(tokens.c.token_data).where(tokens.c.user_id == user_id) ): - user_tokens.append(ThirdPartyToken.model_construct(**row["token_data"])) + user_tokens.append(UserThirdPartyToken.model_construct(**row["token_data"])) return user_tokens async def get_token( app: web.Application, user_id: UserID, service_id: str -) -> ThirdPartyToken: +) -> UserThirdPartyToken: async with get_database_engine(app).acquire() as conn: result = await conn.execute( sa.select(tokens.c.token_data).where( @@ -48,13 +48,13 @@ async def get_token( ) ) if row := await result.first(): - return ThirdPartyToken.model_construct(**row["token_data"]) + return UserThirdPartyToken.model_construct(**row["token_data"]) raise TokenNotFoundError(service_id=service_id) async def update_token( app: web.Application, user_id: UserID, service_id: str, token_data: dict[str, str] -) -> ThirdPartyToken: +) -> UserThirdPartyToken: async with get_database_engine(app).acquire() as conn: result = await conn.execute( sa.select(tokens.c.token_data, tokens.c.token_id).where( @@ -78,7 +78,7 @@ async def update_token( assert resp.rowcount == 1 # nosec updated_token = await resp.fetchone() assert updated_token # nosec - return ThirdPartyToken.model_construct(**updated_token["token_data"]) + return UserThirdPartyToken.model_construct(**updated_token["token_data"]) async def delete_token(app: web.Application, user_id: UserID, service_id: str) -> None: diff --git a/services/web/server/src/simcore_service_webserver/users/_users_repository.py b/services/web/server/src/simcore_service_webserver/users/_users_repository.py new file mode 100644 index 00000000000..5fcc88af4a1 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/users/_users_repository.py @@ -0,0 +1,540 @@ +import contextlib +from typing import Any + +import sqlalchemy as sa +from aiohttp import web +from common_library.users_enums import UserRole +from models_library.groups import GroupID +from models_library.users import ( + MyProfile, + UserBillingDetails, + UserID, + UserNameID, + UserPermission, +) +from pydantic import TypeAdapter, ValidationError +from simcore_postgres_database.models.groups import groups, user_to_groups +from simcore_postgres_database.models.products import products +from simcore_postgres_database.models.users import UserStatus, users +from simcore_postgres_database.models.users_details import ( + users_pre_registration_details, +) +from simcore_postgres_database.utils_groups_extra_properties import ( + GroupExtraPropertiesNotFoundError, + GroupExtraPropertiesRepo, +) +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) +from simcore_postgres_database.utils_users import ( + UsersRepo, + 
generate_alternative_username, + is_public, + visible_user_profile_cols, +) +from sqlalchemy import delete +from sqlalchemy.engine.row import Row +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine + +from ..db.plugin import get_asyncpg_engine +from ._common.models import FullNameDict, ToUserUpdateDB +from .exceptions import ( + BillingDetailsNotFoundError, + UserNameDuplicateError, + UserNotFoundError, +) + + +def _parse_as_user(user_id: Any) -> UserID: + try: + return TypeAdapter(UserID).validate_python(user_id) + except ValidationError as err: + raise UserNotFoundError(user_id=user_id) from err + + +def _public_user_cols(caller_id: int): + return ( + # Fits PublicUser model + users.c.id.label("user_id"), + users.c.name.label("user_name"), + *visible_user_profile_cols(caller_id), + users.c.primary_gid.label("group_id"), + ) + + +# +# PUBLIC User +# + + +async def get_public_user( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + caller_id: UserID, + user_id: UserID, +): + query = sa.select(*_public_user_cols(caller_id=caller_id)).where( + users.c.id == user_id + ) + + async with pass_or_acquire_connection(engine, connection) as conn: + result = await conn.execute(query) + user = result.first() + if not user: + raise UserNotFoundError(user_id=user_id) + return user + + +async def search_public_user( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + caller_id: UserID, + search_pattern: str, + limit: int, +) -> list: + + _pattern = f"%{search_pattern}%" + + query = ( + sa.select(*_public_user_cols(caller_id=caller_id)) + .where( + users.c.name.ilike(_pattern) + | ( + is_public(users.c.privacy_hide_email, caller_id) + & users.c.email.ilike(_pattern) + ) + | ( + is_public(users.c.privacy_hide_fullname, caller_id) + & ( + users.c.first_name.ilike(_pattern) + | users.c.last_name.ilike(_pattern) + ) + ) + ) + .limit(limit) + ) + + async with pass_or_acquire_connection(engine, connection) as conn: + result = await conn.stream(query) + return [got async for got in result] + + +async def get_user_or_raise( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + return_column_names: list[str] | None = None, +) -> dict[str, Any]: + if not return_column_names: # None or empty list, returns all + return_column_names = list(users.columns.keys()) + + assert return_column_names is not None # nosec + assert set(return_column_names).issubset(users.columns.keys()) # nosec + + query = sa.select(*(users.columns[name] for name in return_column_names)).where( + users.c.id == user_id + ) + + async with pass_or_acquire_connection(engine, connection) as conn: + result = await conn.execute(query) + row = result.first() + if row is None: + raise UserNotFoundError(user_id=user_id) + + user: dict[str, Any] = row._asdict() + return user + + +async def get_user_primary_group_id( + engine: AsyncEngine, connection: AsyncConnection | None = None, *, user_id: UserID +) -> GroupID: + async with pass_or_acquire_connection(engine, connection) as conn: + primary_gid: GroupID | None = await conn.scalar( + sa.select( + users.c.primary_gid, + ).where(users.c.id == user_id) + ) + if primary_gid is None: + raise UserNotFoundError(user_id=user_id) + return primary_gid + + +async def get_users_ids_in_group( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + group_id: GroupID, +) -> set[UserID]: + async with pass_or_acquire_connection(engine, connection) as conn: + result 
= await conn.stream( + sa.select( + user_to_groups.c.uid, + ).where(user_to_groups.c.gid == group_id) + ) + return {row.uid async for row in result} + + +async def get_user_id_from_pgid(app: web.Application, primary_gid: int) -> UserID: + async with pass_or_acquire_connection(engine=get_asyncpg_engine(app)) as conn: + user_id: UserID = await conn.scalar( + sa.select( + users.c.id, + ).where(users.c.primary_gid == primary_gid) + ) + return user_id + + +async def get_user_fullname(app: web.Application, *, user_id: UserID) -> FullNameDict: + """ + :raises UserNotFoundError: + """ + user_id = _parse_as_user(user_id) + + async with pass_or_acquire_connection(engine=get_asyncpg_engine(app)) as conn: + result = await conn.stream( + sa.select( + users.c.first_name, + users.c.last_name, + ).where(users.c.id == user_id) + ) + user = await result.first() + if not user: + raise UserNotFoundError(user_id=user_id) + + return FullNameDict( + first_name=user.first_name, + last_name=user.last_name, + ) + + +async def get_guest_user_ids_and_names( + app: web.Application, +) -> list[tuple[UserID, UserNameID]]: + async with pass_or_acquire_connection(engine=get_asyncpg_engine(app)) as conn: + result = await conn.stream( + sa.select( + users.c.id, + users.c.name, + ).where(users.c.role == UserRole.GUEST) + ) + + return TypeAdapter(list[tuple[UserID, UserNameID]]).validate_python( + [(row.id, row.name) async for row in result] + ) + + +async def get_user_role(app: web.Application, *, user_id: UserID) -> UserRole: + """ + :raises UserNotFoundError: + """ + user_id = _parse_as_user(user_id) + + async with pass_or_acquire_connection(engine=get_asyncpg_engine(app)) as conn: + user_role = await conn.scalar( + sa.select( + users.c.role, + ).where(users.c.id == user_id) + ) + if user_role is None: + raise UserNotFoundError(user_id=user_id) + assert isinstance(user_role, UserRole) # nosec + return user_role + + +async def list_user_permissions( + app: web.Application, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + product_name: str, +) -> list[UserPermission]: + override_services_specifications = UserPermission( + name="override_services_specifications", + allowed=False, + ) + engine = get_asyncpg_engine(app) + with contextlib.suppress(GroupExtraPropertiesNotFoundError): + async with pass_or_acquire_connection(engine, connection) as conn: + user_group_extra_properties = ( + await GroupExtraPropertiesRepo.get_aggregated_properties_for_user_v2( + engine, conn, user_id=user_id, product_name=product_name + ) + ) + override_services_specifications.allowed = ( + user_group_extra_properties.override_services_specifications + ) + + return [override_services_specifications] + + +async def do_update_expired_users( + engine: AsyncEngine, + connection: AsyncConnection | None = None, +) -> list[UserID]: + async with transaction_context(engine, connection) as conn: + result = await conn.stream( + users.update() + .values( + status=UserStatus.EXPIRED, + ) + .where( + (users.c.expires_at.is_not(None)) + & (users.c.status == UserStatus.ACTIVE) + & (users.c.expires_at < sa.sql.func.now()) + ) + .returning(users.c.id) + ) + return [row.id async for row in result] + + +async def update_user_status( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + new_status: UserStatus, +): + async with transaction_context(engine, connection) as conn: + await conn.execute( + users.update() + .values( + status=new_status, + ) + .where(users.c.id == user_id) + ) + + +async def 
search_users_and_get_profile( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + email_like: str, +) -> list[Row]: + + users_alias = sa.alias(users, name="users_alias") + + invited_by = ( + sa.select( + users_alias.c.name, + ) + .where(users_pre_registration_details.c.created_by == users_alias.c.id) + .label("invited_by") + ) + + async with pass_or_acquire_connection(engine, connection) as conn: + columns = ( + users.c.first_name, + users.c.last_name, + users.c.email, + users.c.phone, + users_pre_registration_details.c.pre_email, + users_pre_registration_details.c.pre_first_name, + users_pre_registration_details.c.pre_last_name, + users_pre_registration_details.c.institution, + users_pre_registration_details.c.pre_phone, + users_pre_registration_details.c.address, + users_pre_registration_details.c.city, + users_pre_registration_details.c.state, + users_pre_registration_details.c.postal_code, + users_pre_registration_details.c.country, + users_pre_registration_details.c.user_id, + users_pre_registration_details.c.extras, + users.c.status, + invited_by, + ) + + left_outer_join = ( + sa.select(*columns) + .select_from( + users_pre_registration_details.outerjoin( + users, users.c.id == users_pre_registration_details.c.user_id + ) + ) + .where(users_pre_registration_details.c.pre_email.like(email_like)) + ) + right_outer_join = ( + sa.select(*columns) + .select_from( + users.outerjoin( + users_pre_registration_details, + users.c.id == users_pre_registration_details.c.user_id, + ) + ) + .where(users.c.email.like(email_like)) + ) + + result = await conn.stream(sa.union(left_outer_join, right_outer_join)) + return [row async for row in result] + + +async def get_user_products( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + user_id: UserID, +) -> list[Row]: + async with pass_or_acquire_connection(engine, connection) as conn: + product_name_subq = ( + sa.select( + products.c.name, + ) + .where(products.c.group_id == groups.c.gid) + .label("product_name") + ) + products_gis_subq = ( + sa.select( + products.c.group_id, + ) + .distinct() + .subquery() + ) + query = ( + sa.select( + groups.c.gid, + product_name_subq, + ) + .select_from( + users.join(user_to_groups, user_to_groups.c.uid == users.c.id).join( + groups, + (groups.c.gid == user_to_groups.c.gid) + & groups.c.gid.in_(products_gis_subq), + ) + ) + .where(users.c.id == user_id) + .order_by(groups.c.gid) + ) + result = await conn.stream(query) + return [row async for row in result] + + +async def create_user_details( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + email: str, + created_by: UserID, + **other_values, +) -> None: + async with transaction_context(engine, connection) as conn: + await conn.execute( + sa.insert(users_pre_registration_details).values( + created_by=created_by, + pre_email=email, + **other_values, + ) + ) + + +async def get_user_billing_details( + engine: AsyncEngine, connection: AsyncConnection | None = None, *, user_id: UserID +) -> UserBillingDetails: + """ + Raises: + BillingDetailsNotFoundError + """ + async with pass_or_acquire_connection(engine, connection) as conn: + query = UsersRepo.get_billing_details_query(user_id=user_id) + result = await conn.execute(query) + row = result.first() + if not row: + raise BillingDetailsNotFoundError(user_id=user_id) + return UserBillingDetails.model_validate(row) + + +async def delete_user_by_id( + engine: AsyncEngine, connection: AsyncConnection | None = None, *, user_id: UserID +) -> bool: + async 
with transaction_context(engine, connection) as conn: + result = await conn.execute( + delete(users) + .where(users.c.id == user_id) + .returning(users.c.id) # Return the ID of the deleted row otherwise None + ) + deleted_user = result.first() + + # If no row was deleted, the user did not exist + return bool(deleted_user) + + +# +# USER PROFILE +# + + +async def get_my_profile(app: web.Application, *, user_id: UserID) -> MyProfile: + user_id = _parse_as_user(user_id) + + async with pass_or_acquire_connection(engine=get_asyncpg_engine(app)) as conn: + result = await conn.stream( + sa.select( + # users -> MyProfile map + users.c.id, + users.c.name.label("user_name"), + users.c.first_name, + users.c.last_name, + users.c.email, + users.c.role, + sa.func.json_build_object( + "hide_fullname", + users.c.privacy_hide_fullname, + "hide_email", + users.c.privacy_hide_email, + ).label("privacy"), + sa.case( + ( + users.c.expires_at.isnot(None), + sa.func.date(users.c.expires_at), + ), + else_=None, + ).label("expiration_date"), + ).where(users.c.id == user_id) + ) + row = await result.first() + if not row: + raise UserNotFoundError(user_id=user_id) + + my_profile = MyProfile.model_validate(row, from_attributes=True) + assert my_profile.id == user_id # nosec + + return my_profile + + +async def update_user_profile( + app: web.Application, + *, + user_id: UserID, + update: ToUserUpdateDB, +) -> None: + """ + Raises: + UserNotFoundError + UserNameAlreadyExistsError + """ + user_id = _parse_as_user(user_id) + + if updated_values := update.to_db(): + try: + + async with transaction_context(engine=get_asyncpg_engine(app)) as conn: + await conn.execute( + users.update() + .where( + users.c.id == user_id, + ) + .values(**updated_values) + ) + + except IntegrityError as err: + user_name = updated_values.get("name") + + raise UserNameDuplicateError( + user_name=user_name, + alternative_user_name=generate_alternative_username(user_name), + user_id=user_id, + updated_values=updated_values, + ) from err diff --git a/services/web/server/src/simcore_service_webserver/users/_users_rest.py b/services/web/server/src/simcore_service_webserver/users/_users_rest.py new file mode 100644 index 00000000000..688b024b40a --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/users/_users_rest.py @@ -0,0 +1,203 @@ +import logging +from contextlib import suppress + +from aiohttp import web +from models_library.api_schemas_webserver.users import ( + MyProfileGet, + MyProfilePatch, + UserGet, + UsersForAdminSearchQueryParams, + UsersSearch, +) +from servicelib.aiohttp import status +from servicelib.aiohttp.requests_validation import ( + parse_request_body_as, + parse_request_query_parameters_as, +) +from servicelib.rest_constants import RESPONSE_MODEL_POLICY +from simcore_service_webserver.products._api import get_current_product +from simcore_service_webserver.products._model import Product + +from .._meta import API_VTAG +from ..exception_handling import ( + ExceptionToHttpErrorMap, + HttpErrorInfo, + exception_handling_decorator, + to_exceptions_handlers_map, +) +from ..groups import api as groups_api +from ..groups.exceptions import GroupNotFoundError +from ..login.decorators import login_required +from ..security.decorators import permission_required +from ..utils_aiohttp import envelope_json_response +from . 
import _users_service +from ._common.schemas import PreRegisteredUserGet, UsersRequestContext +from .exceptions import ( + AlreadyPreRegisteredError, + MissingGroupExtraPropertiesForProductError, + UserNameDuplicateError, + UserNotFoundError, +) + +_logger = logging.getLogger(__name__) + + +_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { + UserNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "This user cannot be found. Either it is not registered or has enabled privacy settings.", + ), + UserNameDuplicateError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + "Username '{user_name}' is already taken. " + "Consider '{alternative_user_name}' instead.", + ), + AlreadyPreRegisteredError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + "Found {num_found} matches for '{email}'. Cannot pre-register an existing user.", + ), + MissingGroupExtraPropertiesForProductError: HttpErrorInfo( + status.HTTP_503_SERVICE_UNAVAILABLE, + "The product is not ready for use until the configuration is fully completed. " + "Please wait and try again. " + "If this issue persists, contact support quoting this support code: {error_code}.", + ), +} + +_handle_users_exceptions = exception_handling_decorator( + # Transforms raised service exceptions into controller-errors (i.e. http 4XX,5XX responses) + to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) +) + +
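The map-plus-decorator above replaces the hand-written try/except wrapper from the deleted `_handlers.py`: each domain exception is declared once with its HTTP status and a message template filled from the exception's attributes. A self-contained sketch of the pattern (a stand-in for servicelib's exception_handling helpers, not their actual implementation):

import functools

from aiohttp import web


class UserNotFoundSketchError(Exception):
    # hypothetical domain error carrying the attributes its template needs
    def __init__(self, user_id: int):
        super().__init__(f"user {user_id} not found")
        self.user_id = user_id


_ERROR_MAP = {
    UserNotFoundSketchError: (web.HTTPNotFound, "User {user_id} cannot be found."),
}


def handle_domain_errors(handler):
    @functools.wraps(handler)
    async def wrapper(request: web.Request) -> web.StreamResponse:
        try:
            return await handler(request)
        except tuple(_ERROR_MAP) as exc:
            http_error_cls, msg_template = _ERROR_MAP[type(exc)]
            # vars(exc) exposes the attributes referenced by the template
            raise http_error_cls(reason=msg_template.format(**vars(exc))) from exc

    return wrapper


@handle_domain_errors
async def get_user(request: web.Request) -> web.Response:
    raise UserNotFoundSketchError(user_id=42)  # the client sees a clean 404

One caveat of this sketch: `_ERROR_MAP[type(exc)]` requires an exact type match; a production implementation would also walk the MRO so that exception subclasses are honored.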
Use patch instead +) +@login_required +@permission_required("user.profile.update") +@_handle_users_exceptions +async def update_my_profile(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + profile_update = await parse_request_body_as(MyProfilePatch, request) + + await _users_service.update_my_profile( + request.app, user_id=req_ctx.user_id, update=profile_update + ) + return web.json_response(status=status.HTTP_204_NO_CONTENT) + + +# +# USERS (public) +# + + +@routes.post(f"/{API_VTAG}/users:search", name="search_users") +@login_required +@permission_required("user.read") +@_handle_users_exceptions +async def search_users(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + assert req_ctx.product_name # nosec + + # NOTE: Decided for body instead of query parameters because it is easier for the front-end + search_params = await parse_request_body_as(UsersSearch, request) + + found = await _users_service.search_public_users( + request.app, + caller_id=req_ctx.user_id, + match_=search_params.match_, + limit=search_params.limit, + ) + + return envelope_json_response([UserGet.from_model(user) for user in found]) + + +# +# USERS (only POs) +# + +_RESPONSE_MODEL_MINIMAL_POLICY = RESPONSE_MODEL_POLICY.copy() +_RESPONSE_MODEL_MINIMAL_POLICY["exclude_none"] = True + + +@routes.get(f"/{API_VTAG}/admin/users:search", name="search_users_for_admin") +@login_required +@permission_required("admin.users.read") +@_handle_users_exceptions +async def search_users_for_admin(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + assert req_ctx.product_name # nosec + + query_params: UsersForAdminSearchQueryParams = parse_request_query_parameters_as( + UsersForAdminSearchQueryParams, request + ) + + found = await _users_service.search_users( + request.app, email_glob=query_params.email, include_products=True + ) + + return envelope_json_response( + [_.model_dump(**_RESPONSE_MODEL_MINIMAL_POLICY) for _ in found] + ) + + +@routes.post( + f"/{API_VTAG}/admin/users:pre-register", name="pre_register_user_for_admin" +) +@login_required +@permission_required("admin.users.read") +@_handle_users_exceptions +async def pre_register_user_for_admin(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + pre_user_profile = await parse_request_body_as(PreRegisteredUserGet, request) + + user_profile = await _users_service.pre_register_user( + request.app, profile=pre_user_profile, creator_user_id=req_ctx.user_id + ) + return envelope_json_response( + user_profile.model_dump(**_RESPONSE_MODEL_MINIMAL_POLICY) + ) diff --git a/services/web/server/src/simcore_service_webserver/users/_users_service.py b/services/web/server/src/simcore_service_webserver/users/_users_service.py new file mode 100644 index 00000000000..2bb52b85d57 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/users/_users_service.py @@ -0,0 +1,363 @@ +import logging +from typing import Any + +import pycountry +from aiohttp import web +from models_library.api_schemas_webserver.users import MyProfilePatch, UserForAdminGet +from models_library.basic_types import IDStr +from models_library.emails import LowerCaseEmailStr +from models_library.groups import GroupID +from models_library.payments import UserInvoiceAddress +from models_library.products import ProductName +from models_library.users import UserBillingDetails, UserID, UserPermission +from pydantic import TypeAdapter +from 
simcore_postgres_database.models.users import UserStatus +from simcore_postgres_database.utils_groups_extra_properties import ( + GroupExtraPropertiesNotFoundError, +) + +from ..db.plugin import get_asyncpg_engine +from ..security.api import clean_auth_policy_cache +from . import _preferences_service, _users_repository +from ._common.models import ( + FullNameDict, + ToUserUpdateDB, + UserCredentialsTuple, + UserDisplayAndIdNamesTuple, + UserIdNamesTuple, +) +from ._common.schemas import PreRegisteredUserGet +from .exceptions import ( + AlreadyPreRegisteredError, + MissingGroupExtraPropertiesForProductError, +) + +_logger = logging.getLogger(__name__) + +# +# PRE-REGISTRATION +# + + +async def pre_register_user( + app: web.Application, + profile: PreRegisteredUserGet, + creator_user_id: UserID, +) -> UserForAdminGet: + + found = await search_users(app, email_glob=profile.email, include_products=False) + if found: + raise AlreadyPreRegisteredError(num_found=len(found), email=profile.email) + + details = profile.model_dump( + include={ + "first_name", + "last_name", + "phone", + "institution", + "address", + "city", + "state", + "country", + "postal_code", + "extras", + }, + exclude_none=True, + ) + + for key in ("first_name", "last_name", "phone"): + if key in details: + details[f"pre_{key}"] = details.pop(key) + + await _users_repository.create_user_details( + get_asyncpg_engine(app), + email=profile.email, + created_by=creator_user_id, + **details, + ) + + found = await search_users(app, email_glob=profile.email, include_products=False) + + assert len(found) == 1 # nosec + return found[0] + + +# +# GET USERS +# + + +async def get_public_user(app: web.Application, *, caller_id: UserID, user_id: UserID): + return await _users_repository.get_public_user( + get_asyncpg_engine(app), + caller_id=caller_id, + user_id=user_id, + ) + + +async def search_public_users( + app: web.Application, *, caller_id: UserID, match_: str, limit: int +) -> list: + return await _users_repository.search_public_user( + get_asyncpg_engine(app), + caller_id=caller_id, + search_pattern=match_, + limit=limit, + ) + + +async def get_user(app: web.Application, user_id: UserID) -> dict[str, Any]: + """ + :raises UserNotFoundError: if missing but NOT if marked for deletion! + """ + return await _users_repository.get_user_or_raise( + engine=get_asyncpg_engine(app), user_id=user_id + ) + + +async def get_user_primary_group_id(app: web.Application, user_id: UserID) -> GroupID: + return await _users_repository.get_user_primary_group_id( + engine=get_asyncpg_engine(app), user_id=user_id + ) + + +async def get_user_id_from_gid(app: web.Application, primary_gid: GroupID) -> UserID: + return await _users_repository.get_user_id_from_pgid(app, primary_gid) + + +async def search_users( + app: web.Application, email_glob: str, *, include_products: bool = False +) -> list[UserForAdminGet]: + # NOTE: this search is deploy-wide i.e. independent of the product! 
+ + def _glob_to_sql_like(glob_pattern: str) -> str: + # Escape SQL LIKE special characters in the glob pattern + sql_like_pattern = glob_pattern.replace("%", r"\%").replace("_", r"\_") + # Convert glob wildcards to SQL LIKE wildcards + return sql_like_pattern.replace("*", "%").replace("?", "_") + + rows = await _users_repository.search_users_and_get_profile( + get_asyncpg_engine(app), email_like=_glob_to_sql_like(email_glob) + ) + + async def _list_products_or_none(user_id): + if user_id is not None and include_products: + products = await _users_repository.get_user_products( + get_asyncpg_engine(app), user_id=user_id + ) + return [_.product_name for _ in products] + return None + + return [ + UserForAdminGet( + first_name=r.first_name or r.pre_first_name, + last_name=r.last_name or r.pre_last_name, + email=r.email or r.pre_email, + institution=r.institution, + phone=r.phone or r.pre_phone, + address=r.address, + city=r.city, + state=r.state, + postal_code=r.postal_code, + country=r.country, + extras=r.extras or {}, + invited_by=r.invited_by, + products=await _list_products_or_none(r.user_id), + # NOTE: old users will not have extra details + registered=r.user_id is not None if r.pre_email else r.status is not None, + status=r.status, + ) + for r in rows + ] + + +async def get_users_in_group(app: web.Application, *, gid: GroupID) -> set[UserID]: + return await _users_repository.get_users_ids_in_group( + get_asyncpg_engine(app), group_id=gid + ) + + +get_guest_user_ids_and_names = _users_repository.get_guest_user_ids_and_names + + +# +# GET USER PROPERTIES +# + + +async def get_user_fullname(app: web.Application, *, user_id: UserID) -> FullNameDict: + """ + :raises UserNotFoundError: + """ + return await _users_repository.get_user_fullname(app, user_id=user_id) + + +async def get_user_name_and_email( + app: web.Application, *, user_id: UserID +) -> UserIdNamesTuple: + """ + Raises: + UserNotFoundError + + Returns: + (user, email) + """ + row = await _users_repository.get_user_or_raise( + get_asyncpg_engine(app), + user_id=user_id, + return_column_names=["name", "email"], + ) + return UserIdNamesTuple(name=row["name"], email=row["email"]) + + +async def get_user_display_and_id_names( + app: web.Application, *, user_id: UserID +) -> UserDisplayAndIdNamesTuple: + """ + Raises: + UserNotFoundError + """ + row = await _users_repository.get_user_or_raise( + get_asyncpg_engine(app), + user_id=user_id, + return_column_names=["name", "email", "first_name", "last_name"], + ) + return UserDisplayAndIdNamesTuple( + name=row["name"], + email=row["email"], + first_name=row["first_name"] or row["name"].capitalize(), + last_name=IDStr(row["last_name"] or ""), + ) + + +get_user_role = _users_repository.get_user_role + + +async def get_user_credentials( + app: web.Application, *, user_id: UserID +) -> UserCredentialsTuple: + row = await _users_repository.get_user_or_raise( + get_asyncpg_engine(app), + user_id=user_id, + return_column_names=[ + "name", + "first_name", + "email", + "password_hash", + ], + ) + + return UserCredentialsTuple( + email=TypeAdapter(LowerCaseEmailStr).validate_python(row["email"]), + password_hash=row["password_hash"], + display_name=row["first_name"] or row["name"].capitalize(), + ) + + +async def list_user_permissions( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, +) -> list[UserPermission]: + permissions: list[UserPermission] = await _users_repository.list_user_permissions( + app, user_id=user_id, product_name=product_name + ) + return permissions + 
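For reference, the `_glob_to_sql_like` helper added above is pure string manipulation and can be exercised in isolation. A minimal sketch, copying the helper under a hypothetical standalone name and asserting the conversions it should produce:

def glob_to_sql_like(glob_pattern: str) -> str:
    # Escape SQL LIKE special characters first, so a literal '%' or '_' in the
    # search text does not behave as a wildcard after the conversion
    sql_like_pattern = glob_pattern.replace("%", r"\%").replace("_", r"\_")
    # Then translate glob wildcards onto their SQL LIKE counterparts
    return sql_like_pattern.replace("*", "%").replace("?", "_")

assert glob_to_sql_like("*@itis.swiss") == "%@itis.swiss"  # any address at that domain
assert glob_to_sql_like("jo?n.doe*") == "jo_n.doe%"        # '?' matches a single character
assert glob_to_sql_like("100%_done") == r"100\%\_done"     # literal '%'/'_' stay literal

The escaping must happen before the wildcard translation; reversing the two steps would escape the '%' and '_' that were just produced from '*' and '?'.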
+ +async def get_user_invoice_address( + app: web.Application, *, user_id: UserID +) -> UserInvoiceAddress: + user_billing_details: UserBillingDetails = ( + await _users_repository.get_user_billing_details( + get_asyncpg_engine(app), user_id=user_id + ) + ) + _user_billing_country = pycountry.countries.lookup(user_billing_details.country) + _user_billing_country_alpha_2_format = _user_billing_country.alpha_2 + return UserInvoiceAddress( + line1=user_billing_details.address, + state=user_billing_details.state, + postal_code=user_billing_details.postal_code, + city=user_billing_details.city, + country=_user_billing_country_alpha_2_format, + ) + + +# +# DELETE USER +# + + +async def delete_user_without_projects(app: web.Application, user_id: UserID) -> None: + """Deletes a user from the database if the user exists""" + # WARNING: a user cannot be deleted without first deleting all its projects, + # otherwise this function will raise asyncpg.exceptions.ForeignKeyViolationError + # Consider "marking" users as deleted and having a background job that + # cleans them up + is_deleted = await _users_repository.delete_user_by_id( + engine=get_asyncpg_engine(app), user_id=user_id + ) + if not is_deleted: + _logger.warning( + "User with id '%s' could not be deleted because it does not exist", user_id + ) + return + + # This user might be cached in the auth. If so, any request + # with this user-id will get through, producing unexpected side-effects + await clean_auth_policy_cache(app) + + +async def set_user_as_deleted(app: web.Application, *, user_id: UserID) -> None: + await _users_repository.update_user_status( + get_asyncpg_engine(app), user_id=user_id, new_status=UserStatus.DELETED + ) + + +async def update_expired_users(app: web.Application) -> list[UserID]: + return await _users_repository.do_update_expired_users(get_asyncpg_engine(app)) + + +# +# MY USER PROFILE +# + + +async def get_my_profile( + app: web.Application, *, user_id: UserID, product_name: ProductName +): + """Caller and target user are the same.
Privacy settings do not apply here + + :raises UserNotFoundError: + :raises MissingGroupExtraPropertiesForProductError: when product is not properly configured + """ + my_profile = await _users_repository.get_my_profile(app, user_id=user_id) + + try: + preferences = ( + await _preferences_service.get_frontend_user_preferences_aggregation( + app, user_id=user_id, product_name=product_name + ) + ) + except GroupExtraPropertiesNotFoundError as err: + raise MissingGroupExtraPropertiesForProductError( + user_id=user_id, product_name=product_name + ) from err + + return my_profile, preferences + + +async def update_my_profile( + app: web.Application, + *, + user_id: UserID, + update: MyProfilePatch, +) -> None: + + await _users_repository.update_user_profile( + app, + user_id=user_id, + update=ToUserUpdateDB.from_api(update), + ) diff --git a/services/web/server/src/simcore_service_webserver/users/api.py b/services/web/server/src/simcore_service_webserver/users/api.py index 1c1d217a28e..09ca7b757e6 100644 --- a/services/web/server/src/simcore_service_webserver/users/api.py +++ b/services/web/server/src/simcore_service_webserver/users/api.py @@ -1,382 +1,39 @@ # mypy: disable-error-code=truthy-function -""" - This should be the interface other modules should use to get - information from user module -""" - -import logging -from collections import deque -from typing import Any, NamedTuple, TypedDict - -import simcore_postgres_database.errors as db_errors -import sqlalchemy as sa -from aiohttp import web -from aiopg.sa.engine import Engine -from aiopg.sa.result import RowProxy -from models_library.api_schemas_webserver.users import ( - MyProfileGet, - MyProfilePatch, - MyProfilePrivacyGet, -) -from models_library.basic_types import IDStr -from models_library.groups import GroupID -from models_library.products import ProductName -from models_library.users import UserID -from pydantic import EmailStr, TypeAdapter, ValidationError -from simcore_postgres_database.models.groups import GroupType, groups, user_to_groups -from simcore_postgres_database.models.users import UserRole, users -from simcore_postgres_database.utils_groups_extra_properties import ( - GroupExtraPropertiesNotFoundError, -) -from simcore_postgres_database.utils_users import generate_alternative_username - -from ..db.plugin import get_database_engine -from ..login.storage import AsyncpgStorage, get_plugin_storage -from ..security.api import clean_auth_policy_cache -from . 
import _db -from ._api import get_user_credentials, get_user_invoice_address, set_user_as_deleted -from ._models import ToUserUpdateDB -from ._preferences_api import get_frontend_user_preferences_aggregation -from .exceptions import ( - MissingGroupExtraPropertiesForProductError, - UserNameDuplicateError, - UserNotFoundError, +from ._common.models import FullNameDict, UserDisplayAndIdNamesTuple +from ._users_service import ( + delete_user_without_projects, + get_guest_user_ids_and_names, + get_user, + get_user_credentials, + get_user_display_and_id_names, + get_user_fullname, + get_user_id_from_gid, + get_user_invoice_address, + get_user_name_and_email, + get_user_primary_group_id, + get_user_role, + get_users_in_group, + set_user_as_deleted, + update_expired_users, ) -_logger = logging.getLogger(__name__) - - -_GROUPS_SCHEMA_TO_DB = { - "gid": "gid", - "label": "name", - "description": "description", - "thumbnail": "thumbnail", - "accessRights": "access_rights", -} - - -def _convert_groups_db_to_schema( - db_row: RowProxy, *, prefix: str | None = "", **kwargs -) -> dict: - # NOTE: Deprecated. has to be replaced with - converted_dict = { - k: db_row[f"{prefix}{v}"] - for k, v in _GROUPS_SCHEMA_TO_DB.items() - if f"{prefix}{v}" in db_row - } - converted_dict.update(**kwargs) - converted_dict["inclusionRules"] = {} - return converted_dict - - -def _parse_as_user(user_id: Any) -> UserID: - try: - return TypeAdapter(UserID).validate_python(user_id) - except ValidationError as err: - raise UserNotFoundError(uid=user_id, user_id=user_id) from err - - -async def get_user_profile( - app: web.Application, user_id: UserID, product_name: ProductName -) -> MyProfileGet: - """ - :raises UserNotFoundError: - :raises MissingGroupExtraPropertiesForProductError: when product is not properly configured - """ - - engine = get_database_engine(app) - user_profile: dict[str, Any] = {} - user_primary_group = everyone_group = {} - user_standard_groups = [] - user_id = _parse_as_user(user_id) - - async with engine.acquire() as conn: - row: RowProxy - - async for row in conn.execute( - sa.select(users, groups, user_to_groups.c.access_rights) - .select_from( - users.join(user_to_groups, users.c.id == user_to_groups.c.uid).join( - groups, user_to_groups.c.gid == groups.c.gid - ) - ) - .where(users.c.id == user_id) - .order_by(sa.asc(groups.c.name)) - .set_label_style(sa.LABEL_STYLE_TABLENAME_PLUS_COL) - ): - if not user_profile: - user_profile = { - "id": row.users_id, - "user_name": row.users_name, - "first_name": row.users_first_name, - "last_name": row.users_last_name, - "login": row.users_email, - "role": row.users_role, - "privacy_hide_fullname": row.users_privacy_hide_fullname, - "privacy_hide_email": row.users_privacy_hide_email, - "expiration_date": ( - row.users_expires_at.date() if row.users_expires_at else None - ), - } - assert user_profile["id"] == user_id # nosec - - if row.groups_type == GroupType.EVERYONE: - everyone_group = _convert_groups_db_to_schema( - row, - prefix="groups_", - accessRights=row["user_to_groups_access_rights"], - ) - elif row.groups_type == GroupType.PRIMARY: - user_primary_group = _convert_groups_db_to_schema( - row, - prefix="groups_", - accessRights=row["user_to_groups_access_rights"], - ) - else: - user_standard_groups.append( - _convert_groups_db_to_schema( - row, - prefix="groups_", - accessRights=row["user_to_groups_access_rights"], - ) - ) - - if not user_profile: - raise UserNotFoundError(uid=user_id) - - try: - preferences = await 
get_frontend_user_preferences_aggregation( - app, user_id=user_id, product_name=product_name - ) - except GroupExtraPropertiesNotFoundError as err: - raise MissingGroupExtraPropertiesForProductError( - user_id=user_id, product_name=product_name - ) from err - - # NOTE: expirationDate null is not handled properly in front-end. - # https://github.com/ITISFoundation/osparc-simcore/issues/5244 - optional = {} - if user_profile.get("expiration_date"): - optional["expiration_date"] = user_profile["expiration_date"] - - return MyProfileGet( - id=user_profile["id"], - user_name=user_profile["user_name"], - first_name=user_profile["first_name"], - last_name=user_profile["last_name"], - login=user_profile["login"], - role=user_profile["role"], - groups={ # type: ignore[arg-type] - "me": user_primary_group, - "organizations": user_standard_groups, - "all": everyone_group, - }, - privacy=MyProfilePrivacyGet( - hide_fullname=user_profile["privacy_hide_fullname"], - hide_email=user_profile["privacy_hide_email"], - ), - preferences=preferences, - **optional, - ) - - -async def update_user_profile( - app: web.Application, - *, - user_id: UserID, - update: MyProfilePatch, -) -> None: - """ - Raises: - UserNotFoundError - UserNameAlreadyExistsError - """ - user_id = _parse_as_user(user_id) - - if updated_values := ToUserUpdateDB.from_api(update).to_db(): - async with get_database_engine(app).acquire() as conn: - query = users.update().where(users.c.id == user_id).values(**updated_values) - - try: - - resp = await conn.execute(query) - assert resp.rowcount == 1 # nosec - - except db_errors.UniqueViolation as err: - user_name = updated_values.get("name") - - raise UserNameDuplicateError( - user_name=user_name, - alternative_user_name=generate_alternative_username(user_name), - user_id=user_id, - updated_values=updated_values, - ) from err - - -async def get_user_role(app: web.Application, user_id: UserID) -> UserRole: - """ - :raises UserNotFoundError: - """ - user_id = _parse_as_user(user_id) - - engine = get_database_engine(app) - async with engine.acquire() as conn: - user_role: RowProxy | None = await conn.scalar( - sa.select(users.c.role).where(users.c.id == user_id) - ) - if user_role is None: - raise UserNotFoundError(uid=user_id) - return UserRole(user_role) - - -class UserIdNamesTuple(NamedTuple): - name: str - email: str - - -async def get_user_name_and_email( - app: web.Application, *, user_id: UserID -) -> UserIdNamesTuple: - """ - Raises: - UserNotFoundError - - Returns: - (user, email) - """ - row = await _db.get_user_or_raise( - get_database_engine(app), - user_id=_parse_as_user(user_id), - return_column_names=["name", "email"], - ) - return UserIdNamesTuple(name=row.name, email=row.email) - - -class UserDisplayAndIdNamesTuple(NamedTuple): - name: str - email: EmailStr - first_name: IDStr - last_name: IDStr - - @property - def full_name(self) -> IDStr: - return IDStr.concatenate(self.first_name, self.last_name) - - -async def get_user_display_and_id_names( - app: web.Application, *, user_id: UserID -) -> UserDisplayAndIdNamesTuple: - """ - Raises: - UserNotFoundError - """ - row = await _db.get_user_or_raise( - get_database_engine(app), - user_id=_parse_as_user(user_id), - return_column_names=["name", "email", "first_name", "last_name"], - ) - return UserDisplayAndIdNamesTuple( - name=row.name, - email=row.email, - first_name=row.first_name or row.name.capitalize(), - last_name=IDStr(row.last_name or ""), - ) - - -async def get_guest_user_ids_and_names(app: web.Application) -> list[tuple[int, 
str]]: - engine = get_database_engine(app) - result: deque = deque() - async with engine.acquire() as conn: - async for row in conn.execute( - sa.select(users.c.id, users.c.name).where(users.c.role == UserRole.GUEST) - ): - result.append(row.as_tuple()) - return list(result) - - -async def delete_user_without_projects(app: web.Application, user_id: UserID) -> None: - """Deletes a user from the database if the user exists""" - # WARNING: user cannot be deleted without deleting first all ist project - # otherwise this function will raise asyncpg.exceptions.ForeignKeyViolationError - # Consider "marking" users as deleted and havning a background job that - # cleans it up - db: AsyncpgStorage = get_plugin_storage(app) - user = await db.get_user({"id": user_id}) - if not user: - _logger.warning( - "User with id '%s' could not be deleted because it does not exist", user_id - ) - return - - await db.delete_user(dict(user)) - - # This user might be cached in the auth. If so, any request - # with this user-id will get thru producing unexpected side-effects - await clean_auth_policy_cache(app) - - -class FullNameDict(TypedDict): - first_name: str | None - last_name: str | None - - -async def get_user_fullname(app: web.Application, user_id: UserID) -> FullNameDict: - """ - :raises UserNotFoundError: - """ - user_id = _parse_as_user(user_id) - - async with get_database_engine(app).acquire() as conn: - result = await conn.execute( - sa.select(users.c.first_name, users.c.last_name).where( - users.c.id == user_id - ) - ) - user = await result.first() - if not user: - raise UserNotFoundError(uid=user_id) - - return FullNameDict( - first_name=user.first_name, - last_name=user.last_name, - ) - - -async def get_user(app: web.Application, user_id: UserID) -> dict[str, Any]: - """ - :raises UserNotFoundError: - """ - row = await _db.get_user_or_raise(engine=get_database_engine(app), user_id=user_id) - return dict(row) - - -async def get_user_id_from_gid(app: web.Application, primary_gid: int) -> UserID: - engine = get_database_engine(app) - async with engine.acquire() as conn: - user_id: UserID = await conn.scalar( - sa.select(users.c.id).where(users.c.primary_gid == primary_gid) - ) - return user_id - - -async def get_users_in_group(app: web.Application, gid: GroupID) -> set[UserID]: - engine = get_database_engine(app) - async with engine.acquire() as conn: - return await _db.get_users_ids_in_group(conn, gid) - - -async def update_expired_users(engine: Engine) -> list[UserID]: - async with engine.acquire() as conn: - return await _db.do_update_expired_users(conn) - - -assert set_user_as_deleted # nosec -assert get_user_credentials # nosec -assert get_user_invoice_address # nosec - __all__: tuple[str, ...] 
= ( + "delete_user_without_projects", + "get_guest_user_ids_and_names", "get_user_credentials", - "set_user_as_deleted", + "get_user_display_and_id_names", + "get_user_fullname", + "get_user_id_from_gid", "get_user_invoice_address", + "get_user_name_and_email", + "get_user_primary_group_id", + "get_user_role", + "get_user", + "get_users_in_group", + "set_user_as_deleted", + "update_expired_users", + "FullNameDict", + "UserDisplayAndIdNamesTuple", ) +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/users/exceptions.py b/services/web/server/src/simcore_service_webserver/users/exceptions.py index d1f838d2133..9f1bb48ef0a 100644 --- a/services/web/server/src/simcore_service_webserver/users/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/users/exceptions.py @@ -8,24 +8,23 @@ class UsersBaseError(WebServerBaseError): class UserNotFoundError(UsersBaseError): - def __init__(self, *, uid: int | None = None, email: str | None = None, **ctx: Any): + def __init__( + self, *, user_id: int | None = None, email: str | None = None, **ctx: Any + ): super().__init__( msg_template=( - "User id {uid} not found" - if uid + "User id {user_id} not found" + if user_id else f"User with email {email} not found" ), **ctx, ) - self.uid = uid + self.user_id = user_id self.email = email class UserNameDuplicateError(UsersBaseError): - msg_template = ( - "The username '{user_name}' is already taken. " - "Consider using '{alternative_user_name}' instead." - ) + msg_template = "username is a unique ID and cannot create a new as '{user_name}' since it already exists " class TokenNotFoundError(UsersBaseError): diff --git a/services/web/server/src/simcore_service_webserver/users/plugin.py b/services/web/server/src/simcore_service_webserver/users/plugin.py index 697ed277ca6..e9fb7d2ea53 100644 --- a/services/web/server/src/simcore_service_webserver/users/plugin.py +++ b/services/web/server/src/simcore_service_webserver/users/plugin.py @@ -9,12 +9,7 @@ from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from servicelib.aiohttp.observer import setup_observer_registry -from . import ( - _handlers, - _notifications_handlers, - _preferences_handlers, - _tokens_handlers, -) +from . 
import _notifications_rest, _preferences_rest, _tokens_rest, _users_rest from ._preferences_models import overwrite_user_preferences_defaults _logger = logging.getLogger(__name__) @@ -32,7 +27,7 @@ def setup_users(app: web.Application): setup_observer_registry(app) overwrite_user_preferences_defaults(app) - app.router.add_routes(_handlers.routes) - app.router.add_routes(_tokens_handlers.routes) - app.router.add_routes(_notifications_handlers.routes) - app.router.add_routes(_preferences_handlers.routes) + app.router.add_routes(_users_rest.routes) + app.router.add_routes(_tokens_rest.routes) + app.router.add_routes(_notifications_rest.routes) + app.router.add_routes(_preferences_rest.routes) diff --git a/services/web/server/src/simcore_service_webserver/users/preferences_api.py b/services/web/server/src/simcore_service_webserver/users/preferences_api.py index a0f3e11fdc9..9f51b52e8b3 100644 --- a/services/web/server/src/simcore_service_webserver/users/preferences_api.py +++ b/services/web/server/src/simcore_service_webserver/users/preferences_api.py @@ -1,8 +1,11 @@ -from ._preferences_api import get_frontend_user_preference, set_frontend_user_preference from ._preferences_models import ( PreferredWalletIdFrontendUserPreference, TwoFAFrontendUserPreference, ) +from ._preferences_service import ( + get_frontend_user_preference, + set_frontend_user_preference, +) from .exceptions import UserDefaultWalletNotFoundError __all__ = ( diff --git a/services/web/server/src/simcore_service_webserver/users/schemas.py b/services/web/server/src/simcore_service_webserver/users/schemas.py deleted file mode 100644 index 8ad46a5c317..00000000000 --- a/services/web/server/src/simcore_service_webserver/users/schemas.py +++ /dev/null @@ -1,44 +0,0 @@ -from uuid import UUID - -from models_library.api_schemas_webserver._base import OutputSchema -from pydantic import BaseModel, ConfigDict, Field - - -# -# TOKENS resource -# -class ThirdPartyToken(BaseModel): - """ - Tokens used to access third-party services connected to osparc (e.g. pennsieve, scicrunch, etc) - """ - - service: str = Field( - ..., description="uniquely identifies the service where this token is used" - ) - token_key: UUID = Field(..., description="basic token key") - token_secret: UUID | None = None - - model_config = ConfigDict( - json_schema_extra={ - "example": { - "service": "github-api-v1", - "token_key": "5f21abf5-c596-47b7-bfd1-c0e436ef1107", - } - } - ) - - -class TokenCreate(ThirdPartyToken): - ... - - -# -# Permissions -# -class Permission(BaseModel): - name: str - allowed: bool - - -class PermissionGet(Permission, OutputSchema): - ... 
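The users modules above share a msg_template convention: domain exceptions declare a message template and receive context kwargs that fill it in, while the REST layer (_TO_HTTP_ERROR_MAP in _users_rest.py) maps each exception type onto an HTTP status. A minimal, self-contained sketch of that convention, with hypothetical class names and without relying on the actual WebServerBaseError internals:

from http import HTTPStatus


class _DomainError(Exception):
    # Hypothetical stand-in for WebServerBaseError: subclasses declare a
    # msg_template; context kwargs fill it in and are kept on the instance
    msg_template: str = "unexpected error"

    def __init__(self, **ctx) -> None:
        super().__init__(self.msg_template.format(**ctx))
        for name, value in ctx.items():
            setattr(self, name, value)


class _UserNameDuplicateError(_DomainError):
    msg_template = "Username is a unique ID and cannot be set to '{user_name}' since that name already exists"


# Controller-side mapping, analogous in spirit to _TO_HTTP_ERROR_MAP:
# a given exception type always resolves to the same HTTP status
_ERROR_TO_STATUS: dict[type, HTTPStatus] = {
    _UserNameDuplicateError: HTTPStatus.CONFLICT,
}

try:
    raise _UserNameDuplicateError(user_name="jsmith", alternative_user_name="jsmith24")
except _DomainError as err:
    status_code = _ERROR_TO_STATUS.get(type(err), HTTPStatus.INTERNAL_SERVER_ERROR)
    assert status_code is HTTPStatus.CONFLICT
    assert err.alternative_user_name == "jsmith24"  # ctx stays available to handlers
    print(f"{status_code.value}: {err}")

The real map additionally carries per-error message templates (HttpErrorInfo), so the response body can be rephrased for end users while still interpolating the exception's context.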
diff --git a/services/web/server/tests/conftest.py b/services/web/server/tests/conftest.py index 0e1de456b78..26bd1d6f5dd 100644 --- a/services/web/server/tests/conftest.py +++ b/services/web/server/tests/conftest.py @@ -21,8 +21,8 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.projects_state import ProjectState +from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.dict_tools import ConfigDict from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from pytest_simcore.helpers.webserver_login import LoggedUser, NewUser, UserInfoDict from pytest_simcore.simcore_webserver_projects_rest_api import NEW_PROJECT @@ -32,7 +32,10 @@ X_SIMCORE_PARENT_NODE_ID, X_SIMCORE_PARENT_PROJECT_UUID, ) -from simcore_service_webserver.application_settings_utils import convert_to_environ_vars +from simcore_service_webserver.application_settings_utils import ( + AppConfigDict, + convert_to_environ_vars, +) from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects._crud_api_create import ( OVERRIDABLE_DOCUMENT_KEYS, @@ -132,8 +135,8 @@ async def user(client: TestClient) -> AsyncIterator[UserInfoDict]: "name": "test-user", }, app=client.app, - ) as user: - yield user + ) as user_info: + yield user_info @pytest.fixture @@ -163,7 +166,7 @@ async def logged_user( @pytest.fixture def monkeypatch_setenv_from_app_config( monkeypatch: pytest.MonkeyPatch, -) -> Callable[[ConfigDict], EnvVarsDict]: +) -> Callable[[AppConfigDict], EnvVarsDict]: # TODO: Change signature to be analogous to # packages/pytest-simcore/src/pytest_simcore/helpers/utils_envs.py # That solution is more flexible e.g. 
for context manager with monkeypatch @@ -444,3 +447,15 @@ async def _creator( for client, project_uuid in zip(used_clients, created_project_uuids, strict=True): url = client.app.router["delete_project"].url_for(project_id=project_uuid) await client.delete(url.path) + + +@pytest.fixture +def mock_dynamic_scheduler(mocker: MockerFixture) -> None: + mocker.patch( + "simcore_service_webserver.dynamic_scheduler.api.stop_dynamic_services_in_project", + autospec=True, + ) + mocker.patch( + "simcore_service_webserver.dynamic_scheduler.api.update_projects_networks", + autospec=True, + ) diff --git a/services/web/server/tests/integration/01/test_garbage_collection.py b/services/web/server/tests/integration/01/test_garbage_collection.py index f373c302df4..62075ff6ba0 100644 --- a/services/web/server/tests/integration/01/test_garbage_collection.py +++ b/services/web/server/tests/integration/01/test_garbage_collection.py @@ -35,8 +35,9 @@ from simcore_service_webserver.db.plugin import setup_db from simcore_service_webserver.director_v2.plugin import setup_director_v2 from simcore_service_webserver.garbage_collector import _core as gc_core +from simcore_service_webserver.garbage_collector._tasks_core import _GC_TASK_NAME from simcore_service_webserver.garbage_collector.plugin import setup_garbage_collector -from simcore_service_webserver.groups._groups_api import create_standard_group +from simcore_service_webserver.groups._groups_service import create_standard_group from simcore_service_webserver.groups.api import add_user_in_group from simcore_service_webserver.login.plugin import setup_login from simcore_service_webserver.projects._crud_api_delete import get_scheduled_tasks @@ -113,9 +114,6 @@ async def director_v2_service_mock( r"^http://[a-z\-_]*director-v2:[0-9]+/v2/computations/.*$" ) delete_computation_pattern = get_computation_pattern - projects_networks_pattern = re.compile( - r"^http://[a-z\-_]*director-v2:[0-9]+/v2/dynamic_services/projects/.*/-/networks$" - ) mocker.patch( "simcore_service_webserver.dynamic_scheduler.api.list_dynamic_services", @@ -134,7 +132,6 @@ async def director_v2_service_mock( repeat=True, ) mock.delete(delete_computation_pattern, status=204, repeat=True) - mock.patch(projects_networks_pattern, status=204, repeat=True) yield mock @@ -278,7 +275,7 @@ async def get_template_project( ) -async def get_group(client: TestClient, user: dict): +async def get_group(client: TestClient, user: UserInfoDict): """Creates a group for a given user""" assert client.app @@ -635,7 +632,7 @@ async def test_t4_project_shared_with_group_transferred_to_user_in_group_on_owne await assert_projects_count(aiopg_engine, 1) await assert_user_is_owner_of_project(aiopg_engine, u1, project) - await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) + await asyncio.sleep(2 * WAIT_FOR_COMPLETE_GC_CYCLE) # expected outcome: u1 was deleted, one of the users in g1 is the new owner await assert_user_not_in_db(aiopg_engine, u1) @@ -1019,6 +1016,12 @@ async def test_t10_owner_and_all_shared_users_marked_as_guests( USER "u1", "u2" and "u3" are manually marked as "GUEST"; EXPECTED: the project and all the users are removed """ + + gc_task: asyncio.Task = next( + task for task in asyncio.all_tasks() if task.get_name() == _GC_TASK_NAME + ) + assert not gc_task.done() + u1 = await login_user(client) u2 = await login_user(client) u3 = await login_user(client) diff --git a/services/web/server/tests/integration/conftest.py b/services/web/server/tests/integration/conftest.py index 2f8cda8aa5e..c6575d80e21 100644 --- 
a/services/web/server/tests/integration/conftest.py +++ b/services/web/server/tests/integration/conftest.py @@ -24,8 +24,8 @@ import yaml from pytest_mock import MockerFixture from pytest_simcore.helpers import FIXTURE_CONFIG_CORE_SERVICES_SELECTION -from pytest_simcore.helpers.dict_tools import ConfigDict from pytest_simcore.helpers.docker import get_service_published_port +from simcore_service_webserver.application_settings_utils import AppConfigDict CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent @@ -100,7 +100,7 @@ def _default_app_config_for_integration_tests( default_app_config_integration_file: Path, webserver_environ: dict, osparc_simcore_root_dir: Path, -) -> ConfigDict: +) -> AppConfigDict: """ Swarm with integration stack already started @@ -135,7 +135,7 @@ def _default_app_config_for_integration_tests( # recreate config-file config_template = Template(default_app_config_integration_file.read_text()) config_text = config_template.substitute(**test_environ) - cfg: ConfigDict = yaml.safe_load(config_text) + cfg: AppConfigDict = yaml.safe_load(config_text) # NOTE: test webserver works in host cfg["main"]["host"] = "127.0.0.1" @@ -149,8 +149,8 @@ def _default_app_config_for_integration_tests( @pytest.fixture() def app_config( - _default_app_config_for_integration_tests: ConfigDict, unused_tcp_port_factory -) -> ConfigDict: + _default_app_config_for_integration_tests: AppConfigDict, unused_tcp_port_factory +) -> AppConfigDict: """ Swarm with integration stack already started This fixture can be safely modified during test since it is renovated on every call diff --git a/services/web/server/tests/unit/conftest.py b/services/web/server/tests/unit/conftest.py index 695a7aa1ed4..b322655c20c 100644 --- a/services/web/server/tests/unit/conftest.py +++ b/services/web/server/tests/unit/conftest.py @@ -14,8 +14,8 @@ import pytest import yaml -from pytest_simcore.helpers.dict_tools import ConfigDict from pytest_simcore.helpers.webserver_projects import empty_project_data +from simcore_service_webserver.application_settings_utils import AppConfigDict CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent @@ -40,7 +40,7 @@ def default_app_config_unit_file(tests_data_dir: Path) -> Path: @pytest.fixture(scope="session") -def default_app_cfg(default_app_config_unit_file: Path) -> ConfigDict: +def default_app_cfg(default_app_config_unit_file: Path) -> AppConfigDict: # NOTE: ONLY used at the session scopes # TODO: create instead a loader function and return a Callable config: dict = yaml.safe_load(default_app_config_unit_file.read_text()) diff --git a/services/web/server/tests/unit/isolated/conftest.py b/services/web/server/tests/unit/isolated/conftest.py index 9cc0948ff88..77a4b7ca567 100644 --- a/services/web/server/tests/unit/isolated/conftest.py +++ b/services/web/server/tests/unit/isolated/conftest.py @@ -6,12 +6,12 @@ import pytest from faker import Faker from pytest_mock import MockerFixture -from pytest_simcore.helpers.dict_tools import ConfigDict from pytest_simcore.helpers.monkeypatch_envs import ( setenvs_from_dict, setenvs_from_envfile, ) from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_webserver.application_settings_utils import AppConfigDict @pytest.fixture @@ -68,7 +68,7 @@ def make_subdirectories_with_content( @pytest.fixture -def app_config_for_production_legacy(test_data_dir: Path) -> ConfigDict: +def app_config_for_production_legacy(test_data_dir: Path) -> AppConfigDict: 
app_config = json.loads( (test_data_dir / "server_docker_prod_app_config-unit.json").read_text() ) diff --git a/services/web/server/tests/unit/isolated/test_application_settings_utils.py b/services/web/server/tests/unit/isolated/test_application_settings_utils.py index a8e97785754..77195f3d02a 100644 --- a/services/web/server/tests/unit/isolated/test_application_settings_utils.py +++ b/services/web/server/tests/unit/isolated/test_application_settings_utils.py @@ -1,9 +1,9 @@ -from typing import Callable +from collections.abc import Callable import pytest -from pytest_simcore.helpers.dict_tools import ConfigDict from simcore_service_webserver.application_settings import ApplicationSettings from simcore_service_webserver.application_settings_utils import ( + AppConfigDict, convert_to_app_config, convert_to_environ_vars, ) @@ -11,7 +11,7 @@ @pytest.mark.skip(reason="UNDER DEV") def test_settings_infered_from_default_tests_config( - default_app_cfg: ConfigDict, monkeypatch_setenv_from_app_config: Callable + default_app_cfg: AppConfigDict, monkeypatch_setenv_from_app_config: Callable ): # TODO: use app_config_for_production_legacy envs = monkeypatch_setenv_from_app_config(default_app_cfg) diff --git a/services/web/server/tests/unit/isolated/test_diagnostics.py b/services/web/server/tests/unit/isolated/test_diagnostics.py index 3f18e81fd09..fdd08db062d 100644 --- a/services/web/server/tests/unit/isolated/test_diagnostics.py +++ b/services/web/server/tests/unit/isolated/test_diagnostics.py @@ -6,6 +6,7 @@ from unittest.mock import Mock import pytest +from pytest_mock import MockerFixture from servicelib.aiohttp.application_setup import APP_SETUP_COMPLETED_KEY from simcore_service_webserver.application_settings import setup_settings from simcore_service_webserver.diagnostics.plugin import setup_diagnostics @@ -35,12 +36,14 @@ def add_routes(self, *args, **kwargs): @pytest.fixture -def app_mock(): +def app_mock(mocker: MockerFixture): app = MockApp() # emulates security is initialized app[APP_SETUP_COMPLETED_KEY] = ["simcore_service_webserver.security"] + mocker.patch("simcore_service_webserver.rest.plugin.api_doc") + return app diff --git a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py index 3226abb2284..5205f7fa4da 100644 --- a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py +++ b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py @@ -240,7 +240,9 @@ async def test_remove_orphaned_services_inexisting_user_does_not_save_state( mock.ANY, fake_running_service.node_uuid ) mock_list_node_ids_in_project.assert_called_once_with(mock.ANY, project_id) - mock_get_user_role.assert_called_once_with(mock_app, fake_running_service.user_id) + mock_get_user_role.assert_called_once_with( + mock_app, user_id=fake_running_service.user_id + ) mock_has_write_permission.assert_not_called() mock_stop_dynamic_service.assert_called_once_with( mock_app, diff --git a/services/web/server/tests/unit/isolated/test_tracing.py b/services/web/server/tests/unit/isolated/test_tracing.py index c236e446ab9..c356a31053c 100644 --- a/services/web/server/tests/unit/isolated/test_tracing.py +++ b/services/web/server/tests/unit/isolated/test_tracing.py @@ -18,14 +18,13 @@ def mock_webserver_service_environment( monkeypatch: pytest.MonkeyPatch, mock_webserver_service_environment: EnvVarsDict ) -> EnvVarsDict: monkeypatch.delenv("WEBSERVER_TRACING") - envs = mock_webserver_service_environment | 
setenvs_from_dict( + return mock_webserver_service_environment | setenvs_from_dict( monkeypatch, { "TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT": "http://opentelemetry-collector", "TRACING_OPENTELEMETRY_COLLECTOR_PORT": "4318", }, ) - return envs def test_middleware_restrictions_opentelemetry_is_second_middleware( diff --git a/services/web/server/tests/unit/isolated/test_users_models.py b/services/web/server/tests/unit/isolated/test_users_models.py index db129b68550..c7cfeba336e 100644 --- a/services/web/server/tests/unit/isolated/test_users_models.py +++ b/services/web/server/tests/unit/isolated/test_users_models.py @@ -16,17 +16,17 @@ MyProfilePrivacyGet, ) from models_library.generics import Envelope +from models_library.users import UserThirdPartyToken from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import BaseModel from servicelib.rest_constants import RESPONSE_MODEL_POLICY from simcore_postgres_database.models.users import UserRole -from simcore_service_webserver.users._models import ToUserUpdateDB -from simcore_service_webserver.users.schemas import ThirdPartyToken +from simcore_service_webserver.users._common.models import ToUserUpdateDB @pytest.mark.parametrize( "model_cls", - [MyProfileGet, ThirdPartyToken], + [MyProfileGet, UserThirdPartyToken], ) def test_user_models_examples( model_cls: type[BaseModel], model_cls_examples: dict[str, Any] diff --git a/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_crud.py b/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_crud.py index 684f8726089..74aa021ddb6 100644 --- a/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_crud.py +++ b/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_crud.py @@ -71,8 +71,8 @@ async def test_list_user_groups_and_try_modify_organizations( my_groups = MyGroupsGet.model_validate(data) assert not error - assert my_groups.me.model_dump(by_alias=True) == primary_group - assert my_groups.all.model_dump(by_alias=True) == all_group + assert my_groups.me.model_dump(by_alias=True, exclude_unset=True) == primary_group + assert my_groups.all.model_dump(by_alias=True, exclude_unset=True) == all_group assert my_groups.organizations assert len(my_groups.organizations) == len(standard_groups) @@ -80,7 +80,7 @@ async def test_list_user_groups_and_try_modify_organizations( by_gid = operator.itemgetter("gid") assert sorted( TypeAdapter(list[GroupGet]).dump_python( - my_groups.organizations, mode="json", by_alias=True + my_groups.organizations, mode="json", by_alias=True, exclude_unset=True ), key=by_gid, ) == sorted(standard_groups, key=by_gid) diff --git a/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_users.py b/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_users.py index f018e6fab00..0575ae5a4ff 100644 --- a/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_users.py +++ b/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_users.py @@ -24,14 +24,14 @@ from servicelib.status_codes_utils import is_2xx_success from simcore_postgres_database.models.users import UserRole from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.groups._groups_api import ( - create_standard_group, - delete_standard_group, -) -from simcore_service_webserver.groups._groups_db import ( +from simcore_service_webserver.groups._groups_repository import ( _DEFAULT_GROUP_OWNER_ACCESS_RIGHTS, _DEFAULT_GROUP_READ_ACCESS_RIGHTS, ) 
+from simcore_service_webserver.groups._groups_service import ( + create_standard_group, + delete_standard_group, +) from simcore_service_webserver.groups.api import auto_add_user_to_groups from simcore_service_webserver.security.api import clean_auth_policy_cache diff --git a/services/web/server/tests/unit/with_dbs/01/test_groups_classifiers.py b/services/web/server/tests/unit/with_dbs/01/test_groups_classifiers.py index 354a30ef1d9..98fa573cd08 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_groups_classifiers.py +++ b/services/web/server/tests/unit/with_dbs/01/test_groups_classifiers.py @@ -8,7 +8,9 @@ import sqlalchemy as sa from servicelib.common_aiopg_utils import DataSourceName, create_pg_engine from simcore_service_webserver._constants import APP_AIOPG_ENGINE_KEY -from simcore_service_webserver.groups._classifiers_api import GroupClassifierRepository +from simcore_service_webserver.groups._classifiers_service import ( + GroupClassifierRepository, +) from sqlalchemy.sql import text diff --git a/services/web/server/tests/unit/with_dbs/01/test_groups_handlers_classifers.py b/services/web/server/tests/unit/with_dbs/01/test_groups_handlers_classifers.py index 6ccdcf1f44f..c7367b03b94 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_groups_handlers_classifers.py +++ b/services/web/server/tests/unit/with_dbs/01/test_groups_handlers_classifers.py @@ -8,11 +8,11 @@ import pytest from aiohttp import web_exceptions from aioresponses.core import aioresponses -from pytest_simcore.helpers.dict_tools import ConfigDict +from simcore_service_webserver.application_settings_utils import AppConfigDict @pytest.fixture -def app_cfg(default_app_cfg: ConfigDict, unused_tcp_port_factory): +def app_cfg(default_app_cfg: AppConfigDict, unused_tcp_port_factory): """App's configuration used for every test in this module NOTE: Overrides services/web/server/tests/unit/with_dbs/conftest.py::app_cfg to influence app setup diff --git a/services/web/server/tests/unit/with_dbs/01/test_statics.py b/services/web/server/tests/unit/with_dbs/01/test_statics.py index 1edb437b20a..1eb8212d986 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_statics.py +++ b/services/web/server/tests/unit/with_dbs/01/test_statics.py @@ -11,7 +11,6 @@ import sqlalchemy as sa from aiohttp.test_utils import TestClient from aioresponses import aioresponses -from pytest_simcore.helpers.dict_tools import ConfigDict from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status @@ -19,6 +18,7 @@ from simcore_postgres_database.models.products import products from simcore_service_webserver._meta import API_VTAG from simcore_service_webserver.application_settings import setup_settings +from simcore_service_webserver.application_settings_utils import AppConfigDict from simcore_service_webserver.db.plugin import setup_db from simcore_service_webserver.products.plugin import setup_products from simcore_service_webserver.rest.plugin import setup_rest @@ -53,7 +53,7 @@ def client( app_environment: EnvVarsDict, event_loop: asyncio.AbstractEventLoop, aiohttp_client: Callable, - app_cfg: ConfigDict, + app_cfg: AppConfigDict, postgres_db: sa.engine.Engine, monkeypatch_setenv_from_app_config: Callable, ) -> TestClient: diff --git a/services/web/server/tests/unit/with_dbs/02/conftest.py b/services/web/server/tests/unit/with_dbs/02/conftest.py index 7e765694d3d..148d6315d8d 100644 --- 
a/services/web/server/tests/unit/with_dbs/02/conftest.py +++ b/services/web/server/tests/unit/with_dbs/02/conftest.py @@ -239,10 +239,8 @@ async def project_db_cleaner(client: TestClient): @pytest.fixture(autouse=True) -async def mocked_director_v2( - director_v2_service_mock: aioresponses, -) -> AsyncIterator[aioresponses]: - return director_v2_service_mock +async def mocked_director_v2(director_v2_service_mock: aioresponses) -> None: + pass @pytest.fixture() diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py index 92184c0d145..fe43672ebfc 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py @@ -180,6 +180,7 @@ async def test_copying_large_project_and_retrieving_copy_task( @pytest.mark.parametrize(*_standard_user_role_response()) async def test_creating_new_project_from_template_without_copying_data_creates_skeleton( + mock_dynamic_scheduler: None, client: TestClient, logged_user: dict[str, Any], primary_group: dict[str, str], @@ -230,6 +231,7 @@ async def test_creating_new_project_from_template_without_copying_data_creates_s @pytest.mark.parametrize(*_standard_user_role_response()) async def test_creating_new_project_as_template_without_copying_data_creates_skeleton( + mock_dynamic_scheduler: None, client: TestClient, logged_user: dict[str, Any], primary_group: dict[str, str], diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py index 95a2671739b..dcf954d2b54 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py @@ -4,7 +4,6 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -import re import uuid as uuidlib from collections.abc import Awaitable, Callable, Iterator from http import HTTPStatus @@ -16,9 +15,13 @@ from aiohttp.test_utils import TestClient from aioresponses import aioresponses from faker import Faker +from models_library.api_schemas_directorv2.dynamic_services import ( + GetProjectInactivityResponse, +) from models_library.products import ProductName from models_library.projects_state import ProjectState from pydantic import TypeAdapter +from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_parametrizations import ( @@ -33,7 +36,7 @@ from simcore_postgres_database.models.projects_to_products import projects_to_products from simcore_service_webserver._meta import api_version_prefix from simcore_service_webserver.db.models import UserRole -from simcore_service_webserver.groups._groups_api import get_product_group_for_user +from simcore_service_webserver.groups._groups_service import get_product_group_for_user from simcore_service_webserver.groups.api import auto_add_user_to_product_group from simcore_service_webserver.groups.exceptions import GroupNotFoundError from simcore_service_webserver.products.api import get_product @@ -412,6 +415,7 @@ async def test_get_project( @pytest.mark.parametrize(*standard_role_response()) async def test_new_project( + mock_dynamic_scheduler: None, client: TestClient, logged_user: UserInfoDict, primary_group, @@ -427,6 +431,7 @@ async def 
test_new_project( @pytest.mark.parametrize(*standard_user_role_response()) async def test_new_project_from_template( + mock_dynamic_scheduler: None, client: TestClient, logged_user: UserInfoDict, primary_group: dict[str, str], @@ -453,6 +458,7 @@ async def test_new_project_from_template( @pytest.mark.parametrize(*standard_user_role_response()) async def test_new_project_from_other_study( + mock_dynamic_scheduler: None, client: TestClient, logged_user: UserInfoDict, primary_group: dict[str, str], @@ -482,6 +488,7 @@ async def test_new_project_from_other_study( @pytest.mark.parametrize(*standard_user_role_response()) async def test_new_project_from_template_with_body( + mock_dynamic_scheduler: None, client: TestClient, logged_user: UserInfoDict, primary_group: dict[str, str], @@ -536,6 +543,7 @@ async def test_new_project_from_template_with_body( @pytest.mark.parametrize(*standard_user_role_response()) async def test_new_template_from_project( + mock_dynamic_scheduler: None, client: TestClient, logged_user: dict[str, Any], primary_group: dict[str, str], @@ -651,18 +659,11 @@ async def test_new_template_from_project( @pytest.fixture -def mock_director_v2_inactivity( - aioresponses_mocker: aioresponses, is_inactive: bool -) -> None: - aioresponses_mocker.clear() - get_services_pattern = re.compile( - r"^http://[a-z\-_]*director-v2:[0-9]+/v2/dynamic_services/projects/.*/inactivity.*$" - ) - aioresponses_mocker.get( - get_services_pattern, - status=status.HTTP_200_OK, - repeat=True, - payload={"is_inactive": is_inactive}, +def mock_dynamic_scheduler_inactivity(mocker: MockerFixture, is_inactive: bool) -> None: + mocker.patch( + "simcore_service_webserver.dynamic_scheduler.api.get_project_inactivity", + autospec=True, + return_value=GetProjectInactivityResponse(is_inactive=is_inactive), ) @@ -675,7 +676,7 @@ def mock_director_v2_inactivity( ) @pytest.mark.parametrize("is_inactive", [True, False]) async def test_get_project_inactivity( - mock_director_v2_inactivity: None, + mock_dynamic_scheduler_inactivity: None, logged_user: UserInfoDict, client: TestClient, faker: Faker, diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py index 657b19e20d6..5945d290744 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py @@ -54,6 +54,7 @@ async def _request_clone_project(client: TestClient, url: URL) -> ProjectGet: @pytest.mark.parametrize(*standard_role_response(), ids=str) async def test_clone_project_user_permissions( + mock_dynamic_scheduler: None, client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, @@ -85,6 +86,7 @@ async def test_clone_project_user_permissions( [UserRole.USER], ) async def test_clone_project( + mock_dynamic_scheduler: None, client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py index 18ba745eaee..8ba05ea870c 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py @@ -3,8 +3,9 @@ # pylint: 
disable=unused-argument # pylint: disable=unused-variable +from collections.abc import Iterator from copy import deepcopy -from typing import Any, Iterator +from typing import Any import pytest import sqlalchemy as sa @@ -88,6 +89,7 @@ async def _request_clone_project(client: TestClient, url: URL) -> ProjectGet: [UserRole.USER], ) async def test_clone_project( + mock_dynamic_scheduler: None, client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py index c4d5d1f26b0..80f7f4a7bd3 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py @@ -5,9 +5,8 @@ # pylint: disable=unused-variable -from collections.abc import Callable, Iterator +from collections.abc import Awaitable, Callable, Iterator from http import HTTPStatus -from typing import Awaitable from unittest import mock from unittest.mock import MagicMock, call diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list.py index a9f111e9c4a..f0147c7eb02 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list.py @@ -152,6 +152,7 @@ async def test_list_projects_with_invalid_pagination_parameters( @pytest.mark.parametrize("limit", [7, 20, 43]) @pytest.mark.parametrize(*standard_user_role()) async def test_list_projects_with_pagination( + mock_dynamic_scheduler: None, client: TestClient, logged_user: dict[str, Any], primary_group: dict[str, str], @@ -181,7 +182,7 @@ async def test_list_projects_with_pagination( next_link = None default_query_parameter = {"limit": limit} projects = [] - for i in range(NUMBER_OF_CALLS): + for _ in range(NUMBER_OF_CALLS): print( "calling in with query", next_link.query if next_link else default_query_parameter, @@ -189,9 +190,9 @@ async def test_list_projects_with_pagination( data, meta, links = await _list_projects( client, expected.ok, - query_parameters=next_link.query - if next_link - else default_query_parameter, + query_parameters=( + next_link.query if next_link else default_query_parameter + ), ) print("...received [", meta, "]") assert len(data) == meta["count"] diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_groups_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_groups_handlers.py index 396f18ede5e..fe3ecfa8c3a 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_groups_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_groups_handlers.py @@ -57,6 +57,7 @@ async def test_projects_groups_full_workflow( mock_project_uses_available_services, mock_catalog_api_get_services_for_user_in_product_2, ): + assert client.app # check the default project permissions url = client.app.router["list_project_groups"].url_for( project_id=f"{user_project['uuid']}" @@ -65,9 +66,9 @@ async def test_projects_groups_full_workflow( data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 1 assert data[0]["gid"] == logged_user["primary_gid"] - assert data[0]["read"] == True - assert data[0]["write"] == True - assert data[0]["delete"] == True + assert data[0]["read"] is True + assert 
data[0]["write"] is True + assert data[0]["delete"] is True # Get project endpoint and check permissions url = client.app.router["get_project"].url_for( diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py index 80c941eca23..ca193544302 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py @@ -114,6 +114,7 @@ async def _wait_until_deleted(): @pytest.mark.parametrize(*standard_user_role_response()) async def test_new_project_with_parent_project_node( + mock_dynamic_scheduler: None, # for deletion mocked_dynamic_services_interface: dict[str, MagicMock], storage_subsystem_mock: MockedStorageSubsystem, @@ -191,6 +192,7 @@ async def test_new_project_with_parent_project_node( @pytest.mark.parametrize(*standard_user_role_response()) async def test_new_project_with_invalid_parent_project_node( + mock_dynamic_scheduler: None, # for deletion mocked_dynamic_services_interface: dict[str, MagicMock], storage_subsystem_mock: MockedStorageSubsystem, @@ -274,6 +276,7 @@ async def test_new_project_with_invalid_parent_project_node( @pytest.mark.parametrize(*standard_user_role_response()) async def test_set_project_parent_backward_compatibility( + mock_dynamic_scheduler: None, # for deletion mocked_dynamic_services_interface: dict[str, MagicMock], storage_subsystem_mock: MockedStorageSubsystem, @@ -393,6 +396,7 @@ async def test_update_project_metadata_backward_compatibility_with_same_project_ @pytest.mark.parametrize(*standard_user_role_response()) async def test_update_project_metadata_s4lacad_backward_compatibility_passing_nil_parent_node_id( + mock_dynamic_scheduler: None, # for deletion mocked_dynamic_services_interface: dict[str, MagicMock], storage_subsystem_mock: MockedStorageSubsystem, diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py index 6fc5c13b194..c5fa6330978 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py @@ -318,6 +318,7 @@ async def test_create_node_returns_422_if_body_is_missing( ) @pytest.mark.parametrize(*standard_role_response(), ids=str) async def test_create_node( + mock_dynamic_scheduler: None, node_class: str, expect_run_service_call: bool, client: TestClient, @@ -376,6 +377,7 @@ def standard_user_role() -> tuple[str, tuple]: @pytest.mark.parametrize(*standard_user_role()) async def test_create_and_delete_many_nodes_in_parallel( + mock_dynamic_scheduler: None, disable_max_number_of_running_dynamic_nodes: dict[str, str], client: TestClient, user_project: ProjectDict, @@ -395,8 +397,8 @@ class _RunningServices: running_services_uuids: list[str] = field(default_factory=list) def num_services( - self, *args, **kwargs - ) -> list[DynamicServiceGet]: # noqa: ARG002 + self, *args, **kwargs # noqa: ARG002 + ) -> list[DynamicServiceGet]: return [ DynamicServiceGet.model_validate( DynamicServiceGet.model_config["json_schema_extra"]["examples"][1] @@ -470,6 +472,7 @@ def inc_running_services(self, *args, **kwargs): # noqa: ARG002 @pytest.mark.parametrize(*standard_user_role()) async def test_create_node_does_not_start_dynamic_node_if_there_are_already_too_many_running( + mock_dynamic_scheduler: None, client: TestClient, 
user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], expected: ExpectedResponse, @@ -503,6 +506,7 @@ async def test_create_node_does_not_start_dynamic_node_if_there_are_already_too_ @pytest.mark.parametrize(*standard_user_role()) async def test_create_many_nodes_in_parallel_still_is_limited_to_the_defined_maximum( + mock_dynamic_scheduler: None, client: TestClient, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], expected: ExpectedResponse, @@ -524,8 +528,8 @@ class _RunninServices: running_services_uuids: list[str] = field(default_factory=list) async def num_services( - self, *args, **kwargs - ) -> list[dict[str, Any]]: # noqa: ARG002 + self, *args, **kwargs # noqa: ARG002 + ) -> list[dict[str, Any]]: return [ {"service_uuid": service_uuid} for service_uuid in self.running_services_uuids @@ -588,6 +592,7 @@ async def inc_running_services(self, *args, **kwargs): # noqa: ARG002 @pytest.mark.parametrize(*standard_user_role()) async def test_create_node_does_start_dynamic_node_if_max_num_set_to_0( + mock_dynamic_scheduler: None, disable_max_number_of_running_dynamic_nodes: dict[str, str], client: TestClient, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], @@ -625,6 +630,7 @@ async def test_create_node_does_start_dynamic_node_if_max_num_set_to_0( ) @pytest.mark.parametrize(*standard_role_response(), ids=str) async def test_creating_deprecated_node_returns_406_not_acceptable( + mock_dynamic_scheduler: None, client: TestClient, user_project: ProjectDict, expected: ExpectedResponse, @@ -664,6 +670,7 @@ async def test_creating_deprecated_node_returns_406_not_acceptable( ) @pytest.mark.parametrize(*standard_role_response(), ids=str) async def test_delete_node( + mock_dynamic_scheduler: None, client: TestClient, logged_user: dict, user_project: ProjectDict, diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py index 71101c5dc88..f9af27e9398 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py @@ -53,6 +53,13 @@ def mock_catalog_rpc_check_for_service(mocker: MockerFixture): ) +@pytest.fixture +def mocked_notify_project_node_update(mocker: MockerFixture): + return mocker.patch( + "simcore_service_webserver.projects.projects_api.notify_project_node_update", + ) + + @pytest.mark.parametrize( "user_role,expected", [ @@ -65,6 +72,7 @@ def mock_catalog_rpc_check_for_service(mocker: MockerFixture): ], ) async def test_patch_project_node_entrypoint_access( + mock_dynamic_scheduler: None, client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, @@ -86,6 +94,7 @@ async def test_patch_project_node_entrypoint_access( "user_role,expected", [(UserRole.USER, status.HTTP_204_NO_CONTENT)] ) async def test_patch_project_node( + mock_dynamic_scheduler: None, client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, @@ -193,7 +202,7 @@ async def test_patch_project_node( _tested_node = data["workbench"][node_id] assert _tested_node["label"] == "testing-string" - assert _tested_node["progress"] == None + assert _tested_node["progress"] is None assert _tested_node["key"] == _patch_key["key"] assert _tested_node["version"] == _patch_version["version"] assert _tested_node["inputs"] == _patch_inputs["inputs"] @@ -203,6 +212,83 @@ async def 
test_patch_project_node( assert _tested_node["outputs"] == _patch_outputs["outputs"] +@pytest.mark.parametrize( + "user_role,expected", [(UserRole.USER, status.HTTP_204_NO_CONTENT)] +) +async def test_patch_project_node_notifies( + mocker: MockerFixture, + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + expected: HTTPStatus, + mock_catalog_api_get_services_for_user_in_product, + mock_project_uses_available_services, + mock_catalog_rpc_check_for_service, + mocked_notify_project_node_update, +): + + node_id = next(iter(user_project["workbench"])) + assert client.app + base_url = client.app.router["patch_project_node"].url_for( + project_id=user_project["uuid"], node_id=node_id + ) + + # inputs + _patch_inputs = { + "key": "simcore/services/dynamic/patch-service-key", + } + resp = await client.patch( + f"{base_url}", + data=json.dumps(_patch_inputs), + ) + await assert_status(resp, expected) + assert mocked_notify_project_node_update.call_count == 1 + args = mocked_notify_project_node_update.await_args_list + assert args[0][0][1]["workbench"][node_id]["key"] == _patch_inputs["key"] + assert f"{args[0][0][2]}" == node_id + + +@pytest.mark.parametrize( + "user_role,expected", [(UserRole.USER, status.HTTP_204_NO_CONTENT)] +) +async def test_patch_project_node_inputs_notifies( + mocker: MockerFixture, + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + expected: HTTPStatus, + mock_catalog_api_get_services_for_user_in_product, + mock_project_uses_available_services, + mocked_notify_project_node_update, +): + node_id = next(iter(user_project["workbench"])) + assert client.app + base_url = client.app.router["patch_project_node"].url_for( + project_id=user_project["uuid"], node_id=node_id + ) + + # inputs + _patch_inputs = { + "inputs": { + "input_1": { + "nodeUuid": "c374e5ba-fc42-5c40-ae74-df7ef337f597", + "output": "out_1", + }, + } + } + resp = await client.patch( + f"{base_url}", + data=json.dumps(_patch_inputs), + ) + await assert_status(resp, expected) + assert mocked_notify_project_node_update.call_count > 1 + # 1 message per node updated + assert [ + call_args[0][2] + for call_args in mocked_notify_project_node_update.await_args_list + ] == list(user_project["workbench"].keys()) + + @pytest.mark.parametrize( "user_role,expected", [(UserRole.USER, status.HTTP_204_NO_CONTENT)] ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py index 8a82df500b6..db4596f06ec 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py @@ -247,6 +247,7 @@ async def test_io_workflow( [UserRole.USER], ) async def test_clone_project_and_set_inputs( + mock_dynamic_scheduler: None, client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py index 15af4778e85..f3b91131b1a 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py @@ -258,6 +258,7 @@ async def _delete_project(client: TestClient, project: dict) -> ClientResponse: ids=str, ) async def test_share_project( + mock_dynamic_scheduler: None, client: TestClient, logged_user: dict, 
primary_group: dict[str, str], @@ -984,6 +985,7 @@ async def test_get_active_project( ], ) async def test_project_node_lifetime( # noqa: PLR0915 + mock_dynamic_scheduler: None, client: TestClient, logged_user: UserInfoDict, user_project, diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py b/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py index 71da6536363..64aec0a93d9 100644 --- a/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py +++ b/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py @@ -153,7 +153,7 @@ async def test_pre_registration_and_invitation_workflow( ).model_dump() # Search user -> nothing - response = await client.get("/v0/users:search", params={"email": guest_email}) + response = await client.get("/v0/admin/users:search", params={"email": guest_email}) data, _ = await assert_status(response, expected_status) # i.e. no info of requester is found, i.e. needs pre-registration assert data == [] @@ -164,17 +164,21 @@ async def test_pre_registration_and_invitation_workflow( # assert response.status == status.HTTP_409_CONFLICT # Accept user for registration and create invitation for her - response = await client.post("/v0/users:pre-register", json=requester_info) + response = await client.post("/v0/admin/users:pre-register", json=requester_info) data, _ = await assert_status(response, expected_status) # Can only pre-register once for _ in range(MANY_TIMES): - response = await client.post("/v0/users:pre-register", json=requester_info) + response = await client.post( + "/v0/admin/users:pre-register", json=requester_info + ) await assert_status(response, status.HTTP_409_CONFLICT) # Search user again for _ in range(MANY_TIMES): - response = await client.get("/v0/users:search", params={"email": guest_email}) + response = await client.get( + "/v0/admin/users:search", params={"email": guest_email} + ) data, _ = await assert_status(response, expected_status) assert len(data) == 1 user_found = data[0] @@ -203,7 +207,7 @@ async def test_pre_registration_and_invitation_workflow( await assert_status(response, status.HTTP_200_OK) # find registered user - response = await client.get("/v0/users:search", params={"email": guest_email}) + response = await client.get("/v0/admin/users:search", params={"email": guest_email}) data, _ = await assert_status(response, expected_status) assert len(data) == 1 user_found = data[0] diff --git a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py b/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py index 862a0db06e8..e00b67c0673 100644 --- a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py +++ b/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py @@ -3,6 +3,7 @@ # pylint: disable=unused-variable from collections.abc import Awaitable, Callable +from typing import Any import pytest from aiohttp import ClientResponse @@ -69,14 +70,18 @@ async def context_with_logged_user(client: TestClient, logged_user: UserInfoDict await conn.execute(projects.delete()) +@pytest.mark.skip( + reason="Blocking testing. 
Will follow up in https://github.com/ITISFoundation/osparc-simcore/issues/6976 " +) @pytest.mark.acceptance_test() async def test_iterators_workflow( client: TestClient, logged_user: UserInfoDict, - primary_group, + primary_group: dict[str, Any], context_with_logged_user: None, mocker: MockerFixture, faker: Faker, + mock_dynamic_scheduler: None, director_v2_service_mock: None, request_create_project: Callable[..., Awaitable[ProjectDict]], ): diff --git a/services/web/server/tests/unit/with_dbs/03/test_project_db.py b/services/web/server/tests/unit/with_dbs/03/test_project_db.py index 1d73a0e88c4..1ab6ca802f3 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_project_db.py +++ b/services/web/server/tests/unit/with_dbs/03/test_project_db.py @@ -16,11 +16,11 @@ import pytest import sqlalchemy as sa from aiohttp.test_utils import TestClient +from common_library.dict_tools import copy_from_dict_ex from faker import Faker from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, NodeIDStr from psycopg2.errors import UniqueViolation -from pytest_simcore.helpers.dict_tools import copy_from_dict_ex from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from pytest_simcore.helpers.webserver_login import UserInfoDict, log_client_in diff --git a/services/web/server/tests/unit/with_dbs/03/test_session.py b/services/web/server/tests/unit/with_dbs/03/test_session.py index f9f709c8e3f..c3684acb326 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_session.py +++ b/services/web/server/tests/unit/with_dbs/03/test_session.py @@ -12,10 +12,10 @@ from aiohttp import web from aiohttp.test_utils import TestClient from cryptography.fernet import Fernet -from pytest_simcore.helpers.dict_tools import ConfigDict from pytest_simcore.helpers.typing_env import EnvVarsDict from pytest_simcore.helpers.webserver_login import NewUser from simcore_service_webserver.application import create_application +from simcore_service_webserver.application_settings_utils import AppConfigDict from simcore_service_webserver.session._cookie_storage import ( SharedCookieEncryptedCookieStorage, ) @@ -34,7 +34,7 @@ def client( event_loop: asyncio.AbstractEventLoop, aiohttp_client: Callable, disable_static_webserver: Callable, - app_cfg: ConfigDict, + app_cfg: AppConfigDict, app_environment: EnvVarsDict, postgres_db, mock_orphaned_services, # disables gc diff --git a/services/web/server/tests/unit/with_dbs/03/test_users.py b/services/web/server/tests/unit/with_dbs/03/test_users.py index a872b98858c..6b0ba408cc0 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users.py @@ -1,12 +1,14 @@ # pylint: disable=protected-access # pylint: disable=redefined-outer-name # pylint: disable=too-many-arguments +# pylint: disable=too-many-statements # pylint: disable=unused-argument # pylint: disable=unused-variable import functools import sys +from collections.abc import AsyncIterable from copy import deepcopy from http import HTTPStatus from typing import Any @@ -16,28 +18,32 @@ import simcore_service_webserver.login._auth_api from aiohttp.test_utils import TestClient from aiopg.sa.connection import SAConnection +from common_library.users_enums import UserRole, UserStatus from faker import Faker from models_library.api_schemas_webserver.auth import AccountRequestInfo -from models_library.api_schemas_webserver.users import MyProfileGet -from 
models_library.generics import Envelope +from models_library.api_schemas_webserver.groups import GroupUserGet +from models_library.api_schemas_webserver.users import ( + MyProfileGet, + UserForAdminGet, + UserGet, +) from psycopg2 import OperationalError +from pydantic import TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.faker_factories import ( DEFAULT_TEST_PASSWORD, random_pre_registration_details, ) from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict from servicelib.aiohttp import status from servicelib.rest_constants import RESPONSE_MODEL_POLICY -from simcore_postgres_database.models.users import UserRole, UserStatus -from simcore_service_webserver.users._preferences_api import ( - get_frontend_user_preferences_aggregation, -) -from simcore_service_webserver.users._schemas import ( +from simcore_service_webserver.users._common.schemas import ( MAX_BYTES_SIZE_EXTRAS, - PreUserProfile, - UserProfile, + PreRegisteredUserGet, +) +from simcore_service_webserver.users._preferences_service import ( + get_frontend_user_preferences_aggregation, ) @@ -55,6 +61,188 @@ def app_environment( ) +@pytest.fixture +async def private_user(client: TestClient) -> AsyncIterable[UserInfoDict]: + assert client.app + async with NewUser( + app=client.app, + user_data={ + "name": "jamie01", + "first_name": "James", + "last_name": "Bond", + "email": "james@find.me", + "privacy_hide_email": True, + "privacy_hide_fullname": True, + }, + ) as usr: + yield usr + + +@pytest.fixture +async def semi_private_user(client: TestClient) -> AsyncIterable[UserInfoDict]: + assert client.app + async with NewUser( + app=client.app, + user_data={ + "name": "maxwell", + "first_name": "James", + "last_name": "Maxwell", + "email": "j@maxwell.me", + "privacy_hide_email": True, + "privacy_hide_fullname": False, # <-- + }, + ) as usr: + yield usr + + +@pytest.fixture +async def public_user(client: TestClient) -> AsyncIterable[UserInfoDict]: + assert client.app + async with NewUser( + app=client.app, + user_data={ + "name": "taylie01", + "first_name": "Taylor", + "last_name": "Swift", + "email": "taylor@find.me", + "privacy_hide_email": False, + "privacy_hide_fullname": False, + }, + ) as usr: + yield usr + + +@pytest.mark.acceptance_test( + "https://github.com/ITISFoundation/osparc-issues/issues/1779" +) +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_search_users( + logged_user: UserInfoDict, + client: TestClient, + user_role: UserRole, + public_user: UserInfoDict, + semi_private_user: UserInfoDict, + private_user: UserInfoDict, +): + assert client.app + assert user_role.value == logged_user["role"] + + assert private_user["id"] != logged_user["id"] + assert public_user["id"] != logged_user["id"] + + # SEARCH by partial first_name + partial_name = "james" + assert partial_name in private_user.get("first_name", "").lower() + assert partial_name in semi_private_user.get("first_name", "").lower() + + url = client.app.router["search_users"].url_for() + resp = await client.post(f"{url}", json={"match": partial_name}) + data, _ = await assert_status(resp, status.HTTP_200_OK) + + found = TypeAdapter(list[UserGet]).validate_python(data) + assert found + assert len(found) == 1 + assert semi_private_user["name"] == found[0].user_name + assert found[0].first_name == 
semi_private_user.get("first_name") + assert found[0].last_name == semi_private_user.get("last_name") + assert found[0].email is None + + # SEARCH by partial email + partial_email = "@find.m" + assert partial_email in private_user["email"] + assert partial_email in public_user["email"] + + url = client.app.router["search_users"].url_for() + resp = await client.post(f"{url}", json={"match": partial_email}) + data, _ = await assert_status(resp, status.HTTP_200_OK) + + found = TypeAdapter(list[UserGet]).validate_python(data) + assert found + assert len(found) == 1 + assert found[0].user_id == public_user["id"] + assert found[0].user_name == public_user["name"] + assert found[0].email == public_user["email"] + assert found[0].first_name == public_user.get("first_name") + assert found[0].last_name == public_user.get("last_name") + + # SEARCH by partial username + partial_username = "ie01" + assert partial_username in private_user["name"] + assert partial_username in public_user["name"] + + url = client.app.router["search_users"].url_for() + resp = await client.post(f"{url}", json={"match": partial_username}) + data, _ = await assert_status(resp, status.HTTP_200_OK) + + found = TypeAdapter(list[UserGet]).validate_python(data) + assert found + assert len(found) == 2 + + index = [u.user_id for u in found].index(public_user["id"]) + assert found[index].user_name == public_user["name"] + + # check privacy + index = (index + 1) % 2 + assert found[index].user_name == private_user["name"] + assert found[index].email is None + assert found[index].first_name is None + assert found[index].last_name is None + + # SEARCH user for admin (from a USER) + url = ( + client.app.router["search_users_for_admin"] + .url_for() + .with_query(email=partial_email) + ) + resp = await client.get(f"{url}") + await assert_status(resp, status.HTTP_403_FORBIDDEN) + + +@pytest.mark.acceptance_test( + "https://github.com/ITISFoundation/osparc-issues/issues/1779" +) +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_get_user_by_group_id( + logged_user: UserInfoDict, + client: TestClient, + user_role: UserRole, + public_user: UserInfoDict, + private_user: UserInfoDict, +): + assert client.app + assert user_role.value == logged_user["role"] + + assert private_user["id"] != logged_user["id"] + assert public_user["id"] != logged_user["id"] + + # GET user by primary GID + url = client.app.router["get_all_group_users"].url_for( + gid=f"{public_user['primary_gid']}" + ) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + + users = TypeAdapter(list[GroupUserGet]).validate_python(data) + assert len(users) == 1 + assert users[0].id == public_user["id"] + assert users[0].user_name == public_user["name"] + assert users[0].first_name == public_user.get("first_name") + assert users[0].last_name == public_user.get("last_name") + + url = client.app.router["get_all_group_users"].url_for( + gid=f"{private_user['primary_gid']}" + ) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + + users = TypeAdapter(list[GroupUserGet]).validate_python(data) + assert len(users) == 1 + assert users[0].id == private_user["id"] + assert users[0].user_name == private_user["name"] + assert users[0].first_name is None + assert users[0].last_name is None + + @pytest.mark.parametrize( "user_role,expected", [ @@ -117,36 +305,31 @@ async def test_get_profile( resp = await client.get(f"{url}") data, error = await assert_status(resp, status.HTTP_200_OK) - 
resp_model = Envelope[MyProfileGet].model_validate(await resp.json()) - - assert resp_model.data.model_dump(**RESPONSE_MODEL_POLICY, mode="json") == data - assert resp_model.error is None - - profile = resp_model.data - - product_group = { - "accessRights": {"delete": False, "read": False, "write": False}, - "description": "osparc product group", - "gid": 2, - "inclusionRules": {}, - "label": "osparc", - "thumbnail": None, - } + assert not error + profile = MyProfileGet.model_validate(data) assert profile.login == logged_user["email"] assert profile.first_name == logged_user.get("first_name", None) assert profile.last_name == logged_user.get("last_name", None) assert profile.role == user_role.name assert profile.groups + assert profile.expiration_date is None got_profile_groups = profile.groups.model_dump(**RESPONSE_MODEL_POLICY, mode="json") assert got_profile_groups["me"] == primary_group assert got_profile_groups["all"] == all_group + assert got_profile_groups["product"] == { + "accessRights": {"delete": False, "read": False, "write": False}, + "description": "osparc product group", + "gid": 2, + "label": "osparc", + "thumbnail": None, + } sorted_by_group_id = functools.partial(sorted, key=lambda d: d["gid"]) assert sorted_by_group_id( got_profile_groups["organizations"] - ) == sorted_by_group_id([*standard_groups, product_group]) + ) == sorted_by_group_id(standard_groups) assert profile.preferences == await get_frontend_user_preferences_aggregation( client.app, user_id=logged_user["id"], product_name="osparc" @@ -161,14 +344,16 @@ async def test_update_profile( ): assert client.app - resp = await client.get("/v0/me") - data, _ = await assert_status(resp, status.HTTP_200_OK) + # GET + url = client.app.router["get_my_profile"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) assert data["role"] == user_role.name before = deepcopy(data) + # UPDATE url = client.app.router["update_my_profile"].url_for() - assert url.path == "/v0/me" resp = await client.patch( f"{url}", json={ @@ -176,10 +361,11 @@ async def test_update_profile( }, ) _, error = await assert_status(resp, status.HTTP_204_NO_CONTENT) - assert not error - resp = await client.get("/v0/me") + # GET + url = client.app.router["get_my_profile"].url_for() + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert data["last_name"] == "Foo" @@ -349,8 +535,8 @@ async def test_access_rights_on_search_users_only_product_owners_can_access( ): assert client.app - url = client.app.router["search_users"].url_for() - assert url.path == "/v0/users:search" + url = client.app.router["search_users_for_admin"].url_for() + assert url.path == "/v0/admin/users:search" resp = await client.get(url.path, params={"email": "do-not-exists@foo.com"}) await assert_status(resp, expected) @@ -379,9 +565,7 @@ def account_request_form(faker: Faker) -> dict[str, Any]: } # keeps in sync fields from example and this fixture - assert set(form) == set( - AccountRequestInfo.model_config["json_schema_extra"]["example"]["form"] - ) + assert set(form) == set(AccountRequestInfo.model_json_schema()["example"]["form"]) return form @@ -402,12 +586,14 @@ async def test_search_and_pre_registration( assert client.app # ONLY in `users` and NOT `users_pre_registration_details` - resp = await client.get("/v0/users:search", params={"email": logged_user["email"]}) + resp = await client.get( + "/v0/admin/users:search", params={"email": logged_user["email"]} + ) assert resp.status == 
status.HTTP_200_OK found, _ = await assert_status(resp, status.HTTP_200_OK) assert len(found) == 1 - got = UserProfile( + got = UserForAdminGet( **found[0], institution=None, address=None, @@ -436,15 +622,15 @@ async def test_search_and_pre_registration( # NOT in `users` and ONLY `users_pre_registration_details` # create pre-registration - resp = await client.post("/v0/users:pre-register", json=account_request_form) + resp = await client.post("/v0/admin/users:pre-register", json=account_request_form) assert resp.status == status.HTTP_200_OK resp = await client.get( - "/v0/users:search", params={"email": account_request_form["email"]} + "/v0/admin/users:search", params={"email": account_request_form["email"]} ) found, _ = await assert_status(resp, status.HTTP_200_OK) assert len(found) == 1 - got = UserProfile(**found[0], state=None, status=None) + got = UserForAdminGet(**found[0], state=None, status=None) assert got.model_dump(include={"registered", "status"}) == { "registered": False, @@ -463,11 +649,11 @@ async def test_search_and_pre_registration( ) resp = await client.get( - "/v0/users:search", params={"email": account_request_form["email"]} + "/v0/admin/users:search", params={"email": account_request_form["email"]} ) found, _ = await assert_status(resp, status.HTTP_200_OK) assert len(found) == 1 - got = UserProfile(**found[0], state=None) + got = UserForAdminGet(**found[0], state=None) assert got.model_dump(include={"registered", "status"}) == { "registered": True, "status": new_user["status"].name, @@ -493,7 +679,7 @@ def test_preuserprofile_parse_model_from_request_form_data( data["comment"] = "extra comment" # pre-processors - pre_user_profile = PreUserProfile(**data) + pre_user_profile = PreRegisteredUserGet(**data) print(pre_user_profile.model_dump_json(indent=1)) @@ -517,11 +703,11 @@ def test_preuserprofile_parse_model_without_extras( ): required = { f.alias or f_name - for f_name, f in PreUserProfile.model_fields.items() + for f_name, f in PreRegisteredUserGet.model_fields.items() if f.is_required() } data = {k: account_request_form[k] for k in required} - assert not PreUserProfile(**data).extras + assert not PreRegisteredUserGet(**data).extras def test_preuserprofile_max_bytes_size_extras_limits(faker: Faker): @@ -541,7 +727,7 @@ def test_preuserprofile_pre_given_names( account_request_form["firstName"] = given_name account_request_form["lastName"] = given_name - pre_user_profile = PreUserProfile(**account_request_form) + pre_user_profile = PreRegisteredUserGet(**account_request_form) print(pre_user_profile.model_dump_json(indent=1)) assert pre_user_profile.first_name in ["Pedro-Luis", "Pedro Luis"] assert pre_user_profile.first_name == pre_user_profile.last_name diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py b/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py index 06484b82683..ccf246540bd 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py @@ -17,6 +17,7 @@ import pytest import redis.asyncio as aioredis from aiohttp.test_utils import TestClient +from models_library.api_schemas_webserver.users import MyPermissionGet from models_library.products import ProductName from pydantic import TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status @@ -33,10 +34,7 @@ UserNotificationCreate, get_notification_key, ) -from simcore_service_webserver.users._notifications_handlers import ( - 
_get_user_notifications, -) -from simcore_service_webserver.users.schemas import PermissionGet +from simcore_service_webserver.users._notifications_rest import _get_user_notifications @pytest.fixture @@ -450,7 +448,7 @@ async def test_list_permissions( data, error = await assert_status(resp, expected_response) if data: assert not error - list_of_permissions = TypeAdapter(list[PermissionGet]).validate_python(data) + list_of_permissions = TypeAdapter(list[MyPermissionGet]).validate_python(data) assert ( len(list_of_permissions) == 1 ), "for now there is only 1 permission, but when we sync frontend/backend permissions there will be more" @@ -481,7 +479,7 @@ async def test_list_permissions_with_overriden_extra_properties( data, error = await assert_status(resp, expected_response) assert data assert not error - list_of_permissions = TypeAdapter(list[PermissionGet]).validate_python(data) + list_of_permissions = TypeAdapter(list[MyPermissionGet]).validate_python(data) filtered_permissions = list( filter( lambda x: x.name == "override_services_specifications", list_of_permissions diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_api.py b/services/web/server/tests/unit/with_dbs/03/test_users__preferences_api.py index 8db8935616d..96f6ba52241 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_api.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users__preferences_api.py @@ -10,8 +10,8 @@ import pytest from aiohttp import web from aiohttp.test_utils import TestClient -from faker import Faker from common_library.pydantic_fields_extension import get_type +from faker import Faker from models_library.api_schemas_webserver.users_preferences import Preference from models_library.products import ProductName from models_library.user_preferences import FrontendUserPreference @@ -24,15 +24,15 @@ groups_extra_properties, ) from simcore_postgres_database.models.users import UserStatus -from simcore_service_webserver.users._preferences_api import ( - _get_frontend_user_preferences, - get_frontend_user_preferences_aggregation, - set_frontend_user_preference, -) from simcore_service_webserver.users._preferences_models import ( ALL_FRONTEND_PREFERENCES, BillingCenterUsageColumnOrderFrontendUserPreference, ) +from simcore_service_webserver.users._preferences_service import ( + _get_frontend_user_preferences, + get_frontend_user_preferences_aggregation, + set_frontend_user_preference, +) @pytest.fixture diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py b/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py index 315f4884bc0..fd040e1d88a 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py @@ -7,8 +7,10 @@ import random from collections.abc import AsyncIterator +from copy import deepcopy from http import HTTPStatus from itertools import repeat +from typing import Any import pytest from aiohttp.test_utils import TestClient @@ -59,7 +61,7 @@ async def fake_tokens( logged_user: UserInfoDict, tokens_db_cleanup: None, faker: Faker, -) -> list: +) -> list[dict[str, Any]]: all_tokens = [] assert client.app @@ -133,7 +135,7 @@ async def test_read_token( client: TestClient, logged_user: UserInfoDict, tokens_db_cleanup: None, - fake_tokens, + fake_tokens: list[dict[str, Any]], expected: HTTPStatus, ): assert client.app @@ -145,16 +147,18 @@ async def test_read_token( data, error = await assert_status(resp, expected) if not error: 
- expected_token = random.choice(fake_tokens) + expected_token = deepcopy(random.choice(fake_tokens)) sid = expected_token["service"] # get one url = client.app.router["get_token"].url_for(service=sid) - assert "/v0/me/tokens/%s" % sid == str(url) + assert f"/v0/me/tokens/{sid}" == str(url) resp = await client.get(url.path) data, error = await assert_status(resp, expected) + expected_token["token_key"] = expected_token["token_key"] + expected_token["token_secret"] = None assert data == expected_token, "list and read item are both read operations" @@ -171,7 +175,7 @@ async def test_delete_token( client: TestClient, logged_user: UserInfoDict, tokens_db_cleanup: None, - fake_tokens: list, + fake_tokens: list[dict[str, Any]], expected: HTTPStatus, ): assert client.app diff --git a/services/web/server/tests/unit/with_dbs/03/test_users_api.py b/services/web/server/tests/unit/with_dbs/03/test_users_api.py index 89b5ddea474..48fe21c24c3 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users_api.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users_api.py @@ -3,24 +3,35 @@ # pylint: disable=unused-variable from datetime import datetime, timedelta +from enum import Enum import pytest from aiohttp.test_utils import TestClient +from common_library.users_enums import UserRole from faker import Faker +from models_library.groups import EVERYONE_GROUP_ID +from models_library.users import UserID, UserNameID +from pydantic import TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.webserver_login import NewUser +from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict from servicelib.aiohttp import status -from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY from simcore_postgres_database.models.users import UserStatus from simcore_service_webserver.users.api import ( + delete_user_without_projects, + get_guest_user_ids_and_names, + get_user, + get_user_credentials, + get_user_display_and_id_names, + get_user_fullname, + get_user_id_from_gid, get_user_name_and_email, + get_user_role, + get_users_in_group, + set_user_as_deleted, update_expired_users, ) - -_NOW = datetime.utcnow() -YESTERDAY = _NOW - timedelta(days=1) -TOMORROW = _NOW + timedelta(days=1) +from simcore_service_webserver.users.exceptions import UserNotFoundError @pytest.fixture @@ -37,6 +48,101 @@ def app_environment( ) +async def test_reading_a_user(client: TestClient, faker: Faker, user: UserInfoDict): + assert client.app + user_id = user["id"] + + got = await get_user(client.app, user_id=user_id) + + keys = set(got.keys()).intersection(user.keys()) + + def _normalize_val(v): + return v.value if isinstance(v, Enum) else v + + assert {k: _normalize_val(got[k]) for k in keys} == {k: user[k] for k in keys} + + user_primary_group_id = got["primary_gid"] + + email, phash, display = await get_user_credentials(client.app, user_id=user_id) + assert email == user["email"] + assert phash + assert display + + # NOTE: designed to always provide some display name + got = await get_user_display_and_id_names(client.app, user_id=user_id) + assert ( + got.first_name.lower() == (user.get("first_name") or user.get("name")).lower() + ) + assert got.last_name.lower() == (user.get("last_name") or "").lower() + assert got.name == user["name"] + + got = await get_user_fullname(client.app, user_id=user_id) + assert got == {k: v for k, v in user.items() if k in got} + + got = await 
get_user_name_and_email(client.app, user_id=user_id) + assert got.email == user["email"] + assert got.name == user["name"] + + got = await get_user_role(client.app, user_id=user_id) + assert _normalize_val(got) == user["role"] + + got = await get_user_id_from_gid(client.app, primary_gid=user_primary_group_id) + assert got == user_id + + everyone = await get_users_in_group(client.app, gid=EVERYONE_GROUP_ID) + assert user_id in everyone + assert len(everyone) == 1 + + +async def test_listing_users(client: TestClient, faker: Faker, user: UserInfoDict): + assert client.app + + guests = await get_guest_user_ids_and_names(client.app) + assert not guests + + async with NewUser( + user_data={"role": UserRole.GUEST.value}, app=client.app + ) as guest: + got = await get_guest_user_ids_and_names(client.app) + assert (guest["id"], guest["name"]) in TypeAdapter( + list[tuple[UserID, UserNameID]] + ).validate_python(got) + + guests = await get_guest_user_ids_and_names(client.app) + assert not guests + + +async def test_deleting_a_user( + client: TestClient, + faker: Faker, + user: UserInfoDict, +): + assert client.app + user_id = user["id"] + + # exists + got = await get_user(client.app, user_id=user_id) + assert got["id"] == user_id + + # MARK as deleted + await set_user_as_deleted(client.app, user_id=user_id) + + got = await get_user(client.app, user_id=user_id) + assert got["id"] == user_id + + # DO DELETE + await delete_user_without_projects(client.app, user_id=user_id) + + # does not exist + with pytest.raises(UserNotFoundError): + await get_user(client.app, user_id=user_id) + + +_NOW = datetime.now() # WARNING: naive local time on purpose, since expires_at is not stored as UTC +YESTERDAY = _NOW - timedelta(days=1) +TOMORROW = _NOW + timedelta(days=1) + + @pytest.mark.parametrize("expires_at", [YESTERDAY, TOMORROW, None]) async def test_update_expired_users( expires_at: datetime | None, client: TestClient, faker: Faker @@ -67,7 +173,7 @@ async def _rq_login(): await assert_status(r1, status.HTTP_200_OK) # apply update - expired = await update_expired_users(client.app[APP_AIOPG_ENGINE_KEY]) + expired = await update_expired_users(client.app) if has_expired: assert expired == [user["id"]] else: diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_core.py b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_core.py index 705b0458188..fa05653b87c 100644 --- a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_core.py +++ b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_core.py @@ -37,6 +37,7 @@ async def test_workflow( user_project: ProjectDict, aiohttp_mocked_request: web.Request, request_update_project: Callable[[TestClient, UUID], Awaitable], + mock_dynamic_scheduler: None, director_v2_service_mock: None, ): vc_repo = VersionControlRepository.create_from_request(aiohttp_mocked_request) diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py index ab84b68a3e8..325eb63e353 100644 --- a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py +++ b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py @@ -50,6 +50,7 @@ async def test_workflow( client: TestClient, user_project: ProjectDict, request_update_project: Callable[[TestClient, UUID], Awaitable], + 
mock_dynamic_scheduler: None, director_v2_service_mock: None, ): # pylint: disable=too-many-statements diff --git a/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager.py b/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager.py index 962e8539d04..18cba797177 100644 --- a/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager.py +++ b/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager.py @@ -232,11 +232,6 @@ async def empty_user_project2( print("<----- removed project", project["name"]) -@pytest.fixture(autouse=True) -async def director_v2_mock(director_v2_service_mock) -> aioresponses: - return director_v2_service_mock - - async def test_anonymous_websocket_connection( client_session_id_factory: Callable[[], str], socketio_url_factory: Callable, @@ -541,6 +536,7 @@ async def test_interactive_services_removed_after_logout( ], ) async def test_interactive_services_remain_after_websocket_reconnection_from_2_tabs( + director_v2_service_mock: aioresponses, client: TestClient, logged_user: UserInfoDict, empty_user_project, @@ -678,6 +674,7 @@ async def mocked_notification_system(mocker): ], ) async def test_interactive_services_removed_per_project( + director_v2_service_mock: aioresponses, client, logged_user, empty_user_project, @@ -799,6 +796,7 @@ async def test_interactive_services_removed_per_project( ], ) async def test_services_remain_after_closing_one_out_of_two_tabs( + director_v2_service_mock: aioresponses, client, logged_user, empty_user_project, @@ -854,6 +852,7 @@ async def test_services_remain_after_closing_one_out_of_two_tabs( ], ) async def test_websocket_disconnected_remove_or_maintain_files_based_on_role( + director_v2_service_mock: aioresponses, client, logged_user, empty_user_project, @@ -923,6 +922,7 @@ async def test_websocket_disconnected_remove_or_maintain_files_based_on_role( @pytest.mark.parametrize("user_role", [UserRole.USER, UserRole.TESTER, UserRole.GUEST]) async def test_regression_removing_unexisting_user( + director_v2_service_mock: aioresponses, client: TestClient, logged_user: dict[str, Any], empty_user_project: dict[str, Any], diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_checkouts_rest.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_checkouts_rest.py new file mode 100644 index 00000000000..1a6a81e76f4 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_checkouts_rest.py @@ -0,0 +1,85 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments +# pylint: disable=too-many-statements +from http import HTTPStatus + +import pytest +from aiohttp.test_utils import TestClient +from models_library.api_schemas_resource_usage_tracker.licensed_items_checkouts import ( + LicensedItemCheckoutGet, + LicensedItemsCheckoutsPage, +) +from models_library.api_schemas_webserver.licensed_items_checkouts import ( + LicensedItemCheckoutRestGet, +) +from pytest_mock.plugin import MockerFixture +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import UserInfoDict +from servicelib.aiohttp import status +from simcore_service_webserver.db.models import UserRole + +_LICENSED_ITEM_CHECKOUT_GET = LicensedItemCheckoutGet.model_validate( + LicensedItemCheckoutGet.model_config["json_schema_extra"]["examples"][0] +) + 
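+# NOTE (descriptive comment, added for clarity): canned example payloads for the +# mock fixtures below; the `licensed_items_checkouts` service layer is patched to +# return them, so these tests exercise only the REST handlers in isolation.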
+_LICENSED_ITEM_CHECKOUT_PAGE = LicensedItemsCheckoutsPage( + items=[_LICENSED_ITEM_CHECKOUT_GET], + total=1, +) + + +@pytest.fixture +def mock_get_licensed_items_checkouts_page(mocker: MockerFixture) -> tuple: + return mocker.patch( + "simcore_service_webserver.licenses._licensed_items_checkouts_service.licensed_items_checkouts.get_licensed_items_checkouts_page", + spec=True, + return_value=_LICENSED_ITEM_CHECKOUT_PAGE, + ) + + +@pytest.fixture +def mock_get_licensed_item_checkout(mocker: MockerFixture) -> tuple: + return mocker.patch( + "simcore_service_webserver.licenses._licensed_items_checkouts_service.licensed_items_checkouts.get_licensed_item_checkout", + spec=True, + return_value=_LICENSED_ITEM_CHECKOUT_GET, + ) + + +@pytest.fixture +def mock_get_wallet_by_user(mocker: MockerFixture) -> tuple: + return mocker.patch( + "simcore_service_webserver.licenses._licensed_items_checkouts_service.get_wallet_by_user", + spec=True, + ) + + +@pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) +async def test_licensed_items_checkouts_handlers( + client: TestClient, + logged_user: UserInfoDict, + expected: HTTPStatus, + mock_get_licensed_items_checkouts_page: MockerFixture, + mock_get_licensed_item_checkout: MockerFixture, + mock_get_wallet_by_user: MockerFixture, +): + assert client.app + + # list + url = client.app.router["list_licensed_item_checkouts_for_wallet"].url_for( + wallet_id="1" + ) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert len(data) == 1 + assert LicensedItemCheckoutRestGet(**data[0]) + + # get + url = client.app.router["get_licensed_item_checkout"].url_for( + licensed_item_checkout_id=f"{_LICENSED_ITEM_CHECKOUT_PAGE.items[0].licensed_item_checkout_id}" + ) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert LicensedItemCheckoutRestGet(**data) diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_handlers.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_rest.py similarity index 93% rename from services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_handlers.py rename to services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_rest.py index ce0fddeca19..ee3656d2c1c 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_handlers.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_rest.py @@ -49,7 +49,7 @@ @pytest.fixture def mock_get_licensed_items_purchases_page(mocker: MockerFixture) -> tuple: return mocker.patch( - "simcore_service_webserver.licenses._licensed_items_purchases_api.licensed_items_purchases.get_licensed_items_purchases_page", + "simcore_service_webserver.licenses._licensed_items_purchases_service.licensed_items_purchases.get_licensed_items_purchases_page", spec=True, return_value=_LICENSED_ITEM_PURCHASE_PAGE, ) @@ -58,7 +58,7 @@ def mock_get_licensed_items_purchases_page(mocker: MockerFixture) -> tuple: @pytest.fixture def mock_get_licensed_item_purchase(mocker: MockerFixture) -> tuple: return mocker.patch( - "simcore_service_webserver.licenses._licensed_items_purchases_api.licensed_items_purchases.get_licensed_item_purchase", + "simcore_service_webserver.licenses._licensed_items_purchases_service.licensed_items_purchases.get_licensed_item_purchase", spec=True, return_value=_LICENSED_ITEM_PURCHASE_GET, ) @@ -67,13 +67,13 
@@ def mock_get_licensed_item_purchase(mocker: MockerFixture) -> tuple: @pytest.fixture def mock_get_wallet_by_user(mocker: MockerFixture) -> tuple: return mocker.patch( - "simcore_service_webserver.licenses._licensed_items_purchases_api.get_wallet_by_user", + "simcore_service_webserver.licenses._licensed_items_purchases_service.get_wallet_by_user", spec=True, ) @pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) -async def test_licensed_items_db_crud( +async def test_licensed_items_purchases_handlers( client: TestClient, logged_user: UserInfoDict, expected: HTTPStatus, diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_db.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_repository.py similarity index 84% rename from services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_db.py rename to services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_repository.py index 910e1bdf3f4..dfe04e2e0d3 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_db.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_repository.py @@ -16,7 +16,7 @@ from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole -from simcore_service_webserver.licenses import _licensed_items_db +from simcore_service_webserver.licenses import _licensed_items_repository from simcore_service_webserver.licenses.errors import LicensedItemNotFoundError from simcore_service_webserver.projects.models import ProjectDict @@ -32,7 +32,7 @@ async def test_licensed_items_db_crud( ): assert client.app - output: tuple[int, list[LicensedItemDB]] = await _licensed_items_db.list_( + output: tuple[int, list[LicensedItemDB]] = await _licensed_items_repository.list_( client.app, product_name=osparc_product_name, offset=0, @@ -41,7 +41,7 @@ async def test_licensed_items_db_crud( ) assert output[0] == 0 - licensed_item_db = await _licensed_items_db.create( + licensed_item_db = await _licensed_items_repository.create( client.app, product_name=osparc_product_name, name="Model A", @@ -50,7 +50,7 @@ async def test_licensed_items_db_crud( ) _licensed_item_id = licensed_item_db.licensed_item_id - output: tuple[int, list[LicensedItemDB]] = await _licensed_items_db.list_( + output: tuple[int, list[LicensedItemDB]] = await _licensed_items_repository.list_( client.app, product_name=osparc_product_name, offset=0, @@ -59,35 +59,35 @@ async def test_licensed_items_db_crud( ) assert output[0] == 1 - licensed_item_db = await _licensed_items_db.get( + licensed_item_db = await _licensed_items_repository.get( client.app, licensed_item_id=_licensed_item_id, product_name=osparc_product_name, ) assert licensed_item_db.name == "Model A" - await _licensed_items_db.update( + await _licensed_items_repository.update( client.app, licensed_item_id=_licensed_item_id, product_name=osparc_product_name, updates=LicensedItemUpdateDB(name="Model B"), ) - licensed_item_db = await _licensed_items_db.get( + licensed_item_db = await _licensed_items_repository.get( client.app, licensed_item_id=_licensed_item_id, product_name=osparc_product_name, ) assert licensed_item_db.name == "Model B" - licensed_item_db = await _licensed_items_db.delete( + licensed_item_db = await _licensed_items_repository.delete( client.app, licensed_item_id=_licensed_item_id, product_name=osparc_product_name, ) with 
pytest.raises(LicensedItemNotFoundError): - await _licensed_items_db.get( + await _licensed_items_repository.get( client.app, licensed_item_id=_licensed_item_id, product_name=osparc_product_name, diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_handlers.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_rest.py similarity index 88% rename from services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_handlers.py rename to services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_rest.py index b1fee67dafa..67c36f2581b 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_handlers.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_rest.py @@ -18,7 +18,7 @@ from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole -from simcore_service_webserver.licenses import _licensed_items_db +from simcore_service_webserver.licenses import _licensed_items_repository from simcore_service_webserver.projects.models import ProjectDict @@ -39,7 +39,7 @@ async def test_licensed_items_listing( data, _ = await assert_status(resp, status.HTTP_200_OK) assert data == [] - licensed_item_db = await _licensed_items_db.create( + licensed_item_db = await _licensed_items_repository.create( client.app, product_name=osparc_product_name, name="Model A", @@ -67,7 +67,7 @@ async def test_licensed_items_listing( @pytest.fixture def mock_licensed_items_purchase_functions(mocker: MockerFixture) -> tuple: mock_wallet_credits = mocker.patch( - "simcore_service_webserver.licenses._licensed_items_api.get_wallet_with_available_credits_by_user_and_wallet", + "simcore_service_webserver.licenses._licensed_items_service.get_wallet_with_available_credits_by_user_and_wallet", spec=True, return_value=WalletGetWithAvailableCredits.model_validate( WalletGetWithAvailableCredits.model_config["json_schema_extra"]["examples"][ @@ -76,14 +76,14 @@ def mock_licensed_items_purchase_functions(mocker: MockerFixture) -> tuple: ), ) mock_get_pricing_unit = mocker.patch( - "simcore_service_webserver.licenses._licensed_items_api.get_pricing_plan_unit", + "simcore_service_webserver.licenses._licensed_items_service.get_pricing_plan_unit", spec=True, return_value=PricingUnitGet.model_validate( PricingUnitGet.model_config["json_schema_extra"]["examples"][0] ), ) mock_create_licensed_item_purchase = mocker.patch( - "simcore_service_webserver.licenses._licensed_items_api.licensed_items_purchases.create_licensed_item_purchase", + "simcore_service_webserver.licenses._licensed_items_service.licensed_items_purchases.create_licensed_item_purchase", spec=True, ) @@ -106,7 +106,7 @@ async def test_licensed_items_purchase( ): assert client.app - licensed_item_db = await _licensed_items_db.create( + licensed_item_db = await _licensed_items_repository.create( client.app, product_name=osparc_product_name, name="Model A", diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licenses_rpc.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licenses_rpc.py index e3ab4f4cb3d..836a7fe05e6 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/test_licenses_rpc.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licenses_rpc.py @@ -7,6 +7,9 @@ import pytest from aiohttp.test_utils import TestClient +from models_library.api_schemas_resource_usage_tracker.licensed_items_checkouts 
import ( + LicensedItemCheckoutGet, +) from models_library.licensed_items import LicensedResourceType from models_library.products import ProductName from pytest_mock import MockerFixture @@ -16,14 +19,14 @@ from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq.rpc_interfaces.webserver.licenses.licensed_items import ( checkout_licensed_item_for_wallet, + get_available_licensed_items_for_wallet, get_licensed_items, - get_licensed_items_for_wallet, release_licensed_item_for_wallet, ) from settings_library.rabbit import RabbitSettings from simcore_postgres_database.models.users import UserRole from simcore_service_webserver.application_settings import ApplicationSettings -from simcore_service_webserver.licenses import _licensed_items_db +from simcore_service_webserver.licenses import _licensed_items_repository pytest_simcore_core_services_selection = [ "rabbit", @@ -67,12 +70,59 @@ async def rpc_client( return await rabbitmq_rpc_client("client") -async def test_api_keys_workflow( +@pytest.fixture +def mock_get_wallet_by_user(mocker: MockerFixture) -> tuple: + return mocker.patch( + "simcore_service_webserver.licenses._licensed_items_checkouts_service.get_wallet_by_user", + spec=True, + ) + + +_LICENSED_ITEM_CHECKOUT_GET = LicensedItemCheckoutGet.model_validate( + LicensedItemCheckoutGet.model_config["json_schema_extra"]["examples"][0] +) + + +@pytest.fixture +def mock_checkout_licensed_item(mocker: MockerFixture) -> tuple: + return mocker.patch( + "simcore_service_webserver.licenses._licensed_items_checkouts_service.licensed_items_checkouts.checkout_licensed_item", + spec=True, + return_value=_LICENSED_ITEM_CHECKOUT_GET, + ) + + +@pytest.fixture +def mock_get_licensed_item_checkout(mocker: MockerFixture) -> tuple: + return mocker.patch( + "simcore_service_webserver.licenses._licensed_items_checkouts_service.licensed_items_checkouts.get_licensed_item_checkout", + spec=True, + return_value=_LICENSED_ITEM_CHECKOUT_GET, + ) + + +@pytest.fixture +def mock_release_licensed_item(mocker: MockerFixture) -> tuple: + return mocker.patch( + "simcore_service_webserver.licenses._licensed_items_checkouts_service.licensed_items_checkouts.release_licensed_item", + spec=True, + return_value=_LICENSED_ITEM_CHECKOUT_GET, + ) + + +@pytest.mark.acceptance_test( + "Implements https://github.com/ITISFoundation/osparc-issues/issues/1800" +) +async def test_license_checkout_workflow( client: TestClient, rpc_client: RabbitMQRPCClient, osparc_product_name: ProductName, logged_user: UserInfoDict, pricing_plan_id: int, + mock_get_wallet_by_user: MockerFixture, + mock_checkout_licensed_item: MockerFixture, + mock_release_licensed_item: MockerFixture, + mock_get_licensed_item_checkout: MockerFixture, ): assert client.app @@ -82,7 +132,7 @@ async def test_api_keys_workflow( assert len(result.items) == 0 assert result.total == 0 - await _licensed_items_db.create( + license_item_db = await _licensed_items_repository.create( client.app, product_name=osparc_product_name, name="Model A", @@ -97,31 +147,26 @@ async def test_api_keys_workflow( assert result.total == 1 with pytest.raises(NotImplementedError): - await get_licensed_items_for_wallet( + await get_available_licensed_items_for_wallet( rpc_client, user_id=logged_user["id"], product_name=osparc_product_name, wallet_id=1, ) - with pytest.raises(NotImplementedError): - await checkout_licensed_item_for_wallet( - rpc_client, - user_id=logged_user["id"], - product_name=osparc_product_name, - wallet_id=1, - 
licensed_item_id="c5139a2e-4e1f-4ebe-9bfd-d17f195111ee", - num_of_seats=1, - service_run_id="run_1", - ) + checkout = await checkout_licensed_item_for_wallet( + rpc_client, + product_name=osparc_product_name, + user_id=logged_user["id"], + wallet_id=1, + licensed_item_id=license_item_db.licensed_item_id, + num_of_seats=1, + service_run_id="run_1", + ) - with pytest.raises(NotImplementedError): - await release_licensed_item_for_wallet( - rpc_client, - user_id=logged_user["id"], - product_name=osparc_product_name, - wallet_id=1, - licensed_item_id="c5139a2e-4e1f-4ebe-9bfd-d17f195111ee", - num_of_seats=1, - service_run_id="run_1", - ) + await release_licensed_item_for_wallet( + rpc_client, + product_name=osparc_product_name, + user_id=logged_user["id"], + licensed_item_checkout_id=checkout.licensed_item_checkout_id, + ) diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py index 864dbbe7be0..bfa6fe9fece 100644 --- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py @@ -7,7 +7,6 @@ import logging import pytest -from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_webserver.log import setup_logging @@ -64,11 +63,3 @@ def app_environment(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatc print(plugin_settings.model_dump_json(indent=1)) return {**app_environment, **envs_plugins, **envs_studies_dispatcher} - - -@pytest.fixture -def mock_dynamic_scheduler(mocker: MockerFixture) -> None: - mocker.patch( - "simcore_service_webserver.dynamic_scheduler.api.stop_dynamic_services_in_project", - autospec=True, - ) diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_handlers.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_handlers.py index 6f8853337ee..86ed849075f 100644 --- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_handlers.py +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_handlers.py @@ -402,8 +402,8 @@ async def test_dispatch_study_anonymously( "simcore_service_webserver.director_v2.api.create_or_update_pipeline", return_value=None, ) - mock_client_director_v2_project_networks = mocker.patch( - "simcore_service_webserver.studies_dispatcher._redirects_handlers.update_dynamic_service_networks_in_project", + mock_dynamic_scheduler_update_project_networks = mocker.patch( + "simcore_service_webserver.studies_dispatcher._redirects_handlers.dynamic_scheduler_api.update_projects_networks", return_value=None, ) @@ -443,7 +443,7 @@ async def test_dispatch_study_anonymously( assert guest_project["prjOwner"] == data["login"] assert mock_client_director_v2_func.called - assert mock_client_director_v2_project_networks.called + assert mock_dynamic_scheduler_update_project_networks.called @pytest.mark.parametrize( @@ -468,8 +468,8 @@ async def test_dispatch_logged_in_user( "simcore_service_webserver.director_v2.api.create_or_update_pipeline", return_value=None, ) - mock_client_director_v2_project_networks = mocker.patch( - "simcore_service_webserver.studies_dispatcher._redirects_handlers.update_dynamic_service_networks_in_project", + mock_dynamic_scheduler_update_project_networks = 
mocker.patch( + "simcore_service_webserver.studies_dispatcher._redirects_handlers.dynamic_scheduler_api.update_projects_networks", return_value=None, ) @@ -501,7 +501,7 @@ async def test_dispatch_logged_in_user( assert created_project["prjOwner"] == data["login"] assert mock_client_director_v2_pipline_update.called - assert mock_client_director_v2_project_networks.called + assert mock_dynamic_scheduler_update_project_networks.called # delete before exiting url = client.app.router["delete_project"].url_for(project_id=expected_project_id) diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py index cff892d7f00..366f10dba16 100644 --- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py @@ -259,6 +259,7 @@ async def test_access_study_anonymously( published_project: ProjectDict, storage_subsystem_mock_override: None, catalog_subsystem_mock: Callable[[list[ProjectDict]], None], + mock_dynamic_scheduler: None, director_v2_service_mock: AioResponsesMock, mocks_on_projects_api: None, # needed to cleanup the locks between parametrizations @@ -309,6 +310,7 @@ async def test_access_study_by_logged_user( published_project: ProjectDict, storage_subsystem_mock_override: None, catalog_subsystem_mock: Callable[[list[ProjectDict]], None], + mock_dynamic_scheduler: None, director_v2_service_mock: AioResponsesMock, mocks_on_projects_api: None, auto_delete_projects: None, @@ -365,7 +367,7 @@ async def test_access_cookie_of_expired_user( resp = await client.get(f"{me_url}") data, _ = await assert_status(resp, status.HTTP_200_OK) - assert await get_user_role(app, data["id"]) == UserRole.GUEST + assert await get_user_role(app, user_id=data["id"]) == UserRole.GUEST async def enforce_garbage_collect_guest(uid): # TODO: can be replaced now by actual GC @@ -373,7 +375,7 @@ async def enforce_garbage_collect_guest(uid): # - GUEST user expired, cleaning it up # - client still holds cookie with its identifier nonetheless # - assert await get_user_role(app, uid) == UserRole.GUEST + assert await get_user_role(app, user_id=uid) == UserRole.GUEST projects = await _get_user_projects(client) assert len(projects) == 1 @@ -401,14 +403,14 @@ async def enforce_garbage_collect_guest(uid): # as a guest user resp = await client.get(f"{me_url}") data, _ = await assert_status(resp, status.HTTP_200_OK) - assert await get_user_role(app, data["id"]) == UserRole.GUEST + assert await get_user_role(app, user_id=data["id"]) == UserRole.GUEST # But I am another user assert data["id"] != user_id assert data["login"] != user_email -@pytest.mark.parametrize("number_of_simultaneous_requests", [1, 2, 64]) +@pytest.mark.parametrize("number_of_simultaneous_requests", [1, 2, 32]) async def test_guest_user_is_not_garbage_collected( number_of_simultaneous_requests: int, web_server: TestServer, @@ -416,6 +418,7 @@ async def test_guest_user_is_not_garbage_collected( published_project: ProjectDict, storage_subsystem_mock_override: None, catalog_subsystem_mock: Callable[[list[ProjectDict]], None], + mock_dynamic_scheduler: None, director_v2_service_mock: AioResponsesMock, mocks_on_projects_api: None, # needed to cleanup the locks between parametrizations diff --git a/services/web/server/tests/unit/with_dbs/conftest.py 
b/services/web/server/tests/unit/with_dbs/conftest.py index 991d7fd8d56..6661af40d5e 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -19,6 +19,7 @@ from copy import deepcopy from decimal import Decimal from pathlib import Path +from typing import Any from unittest import mock from unittest.mock import AsyncMock, MagicMock @@ -41,8 +42,8 @@ from models_library.products import ProductName from models_library.services_enums import ServiceState from pydantic import ByteSize, TypeAdapter +from pytest_docker.plugin import Services from pytest_mock import MockerFixture -from pytest_simcore.helpers.dict_tools import ConfigDict from pytest_simcore.helpers.faker_factories import random_product from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -67,6 +68,7 @@ ) from simcore_service_webserver._constants import INDEX_RESOURCE_NAME from simcore_service_webserver.application import create_application +from simcore_service_webserver.application_settings_utils import AppConfigDict from simcore_service_webserver.db.plugin import get_database_engine from simcore_service_webserver.projects.models import ProjectDict from simcore_service_webserver.statics._constants import ( @@ -92,7 +94,7 @@ def disable_swagger_doc_generation( @pytest.fixture(scope="session") -def docker_compose_env(default_app_cfg: ConfigDict) -> Iterator[pytest.MonkeyPatch]: +def docker_compose_env(default_app_cfg: AppConfigDict) -> Iterator[pytest.MonkeyPatch]: postgres_cfg = default_app_cfg["db"]["postgres"] redis_cfg = default_app_cfg["resource_manager"]["redis"] # docker-compose reads these environs @@ -117,7 +119,7 @@ def docker_compose_file(docker_compose_env: pytest.MonkeyPatch) -> str: @pytest.fixture -def app_cfg(default_app_cfg: ConfigDict, unused_tcp_port_factory) -> ConfigDict: +def app_cfg(default_app_cfg: AppConfigDict, unused_tcp_port_factory) -> AppConfigDict: """ NOTE: SHOULD be overriden in any test module to configure the app accordingly """ @@ -133,8 +135,8 @@ def app_cfg(default_app_cfg: ConfigDict, unused_tcp_port_factory) -> ConfigDict: @pytest.fixture def app_environment( monkeypatch: pytest.MonkeyPatch, - app_cfg: ConfigDict, - monkeypatch_setenv_from_app_config: Callable[[ConfigDict], dict[str, str]], + app_cfg: AppConfigDict, + monkeypatch_setenv_from_app_config: Callable[[AppConfigDict], dict[str, str]], ) -> EnvVarsDict: # WARNING: this fixture is commonly overriden. Check before renaming. 
"""overridable fixture that defines the ENV for the webserver application @@ -189,7 +191,7 @@ async def _print_mail_to_stdout( @pytest.fixture def web_server( event_loop: asyncio.AbstractEventLoop, - app_cfg: ConfigDict, + app_cfg: AppConfigDict, app_environment: EnvVarsDict, postgres_db: sa.engine.Engine, # tools @@ -452,7 +454,7 @@ async def _create(**service_override_kwargs) -> DynamicServiceGet: return _create -def _is_postgres_responsive(url): +def _is_postgres_responsive(url: str): """Check if something responds to url""" try: engine = sa.create_engine(url) @@ -464,7 +466,9 @@ def _is_postgres_responsive(url): @pytest.fixture(scope="session") -def postgres_dsn(docker_services, docker_ip, default_app_cfg: dict) -> dict: +def postgres_dsn( + docker_services: Services, docker_ip: str | Any, default_app_cfg: dict +) -> dict: cfg = deepcopy(default_app_cfg["db"]["postgres"]) cfg["host"] = docker_ip cfg["port"] = docker_services.port_for("postgres", 5432) @@ -472,7 +476,7 @@ def postgres_dsn(docker_services, docker_ip, default_app_cfg: dict) -> dict: @pytest.fixture(scope="session") -def postgres_service(docker_services, postgres_dsn): +def postgres_service(docker_services: Services, postgres_dsn: dict) -> str: url = DSN.format(**postgres_dsn) # Wait until service is responsive. @@ -647,6 +651,7 @@ async def with_permitted_override_services_specifications( .where(groups_extra_properties.c.group_id == 1) .values(override_services_specifications=True) ) + yield async with aiopg_engine.acquire() as conn: diff --git a/tests/e2e-playwright/tests/conftest.py b/tests/e2e-playwright/tests/conftest.py index e815ff6c522..085e74b15fe 100644 --- a/tests/e2e-playwright/tests/conftest.py +++ b/tests/e2e-playwright/tests/conftest.py @@ -188,7 +188,7 @@ def pytest_runtest_makereport(item: pytest.Item, call): @pytest.hookimpl(tryfirst=True) -def pytest_configure(config): +def pytest_configure(config: pytest.Config): config.pluginmanager.register(pytest_runtest_setup, "osparc_test_times_plugin") config.pluginmanager.register(pytest_runtest_makereport, "osparc_makereport_plugin") diff --git a/tests/e2e-playwright/tests/platform_CI_tests/conftest.py b/tests/e2e-playwright/tests/platform_CI_tests/conftest.py index e69de29bb2d..300e0d75972 100644 --- a/tests/e2e-playwright/tests/platform_CI_tests/conftest.py +++ b/tests/e2e-playwright/tests/platform_CI_tests/conftest.py @@ -0,0 +1,18 @@ +from pathlib import Path + +import pytest + + +@pytest.fixture +def results_path(request): + """ + Fixture to retrieve the path to the test's results directory. 
+ """ + # Check if `results_dir` is available in the current test's user properties + results_dir = dict(request.node.user_properties).get("results_dir") + if not results_dir: + results_dir = "test-results" # Default results directory + test_name = request.node.name + test_dir = Path(results_dir) / test_name + test_dir.mkdir(parents=True, exist_ok=True) # Ensure the test directory exists + return test_dir diff --git a/tests/e2e-playwright/tests/platform_CI_tests/test_platform.py b/tests/e2e-playwright/tests/platform_CI_tests/test_platform.py index edcac0fca64..15b3a86b730 100644 --- a/tests/e2e-playwright/tests/platform_CI_tests/test_platform.py +++ b/tests/e2e-playwright/tests/platform_CI_tests/test_platform.py @@ -1,13 +1,17 @@ +# pylint: disable=no-name-in-module # pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable # pylint: disable=too-many-arguments # pylint: disable=too-many-statements -# pylint: disable=no-name-in-module +# pylint: disable=unused-argument +# pylint: disable=unused-variable from pathlib import Path +from typing import Iterable import pytest +from playwright.sync_api import expect +from playwright.sync_api._generated import BrowserContext, Playwright +from pydantic import AnyUrl @pytest.fixture(scope="session") @@ -17,11 +21,12 @@ def store_browser_context() -> bool: @pytest.fixture def logged_in_context( - playwright, + playwright: Playwright, store_browser_context: bool, request: pytest.FixtureRequest, - pytestconfig, -): + pytestconfig: pytest.Config, + results_path: Path, +) -> Iterable[BrowserContext]: is_headed = "--headed" in pytestconfig.invocation_params.args file_path = Path("state.json") @@ -30,13 +35,20 @@ def logged_in_context( browser = playwright.chromium.launch(headless=not is_headed) context = browser.new_context(storage_state="state.json") + test_name = request.node.name + context.tracing.start( + title=f"Trace for Browser 2 in test {test_name}", + snapshots=True, + screenshots=True, + ) yield context + context.tracing.stop(path=f"{results_path}/second_browser_trace.zip") context.close() browser.close() @pytest.fixture(scope="module") -def test_module_teardown(): +def test_module_teardown() -> Iterable[None]: yield # Run the tests @@ -45,7 +57,9 @@ def test_module_teardown(): file_path.unlink() -def test_simple_folder_workflow(logged_in_context, product_url, test_module_teardown): +def test_simple_folder_workflow( + logged_in_context: BrowserContext, product_url: AnyUrl, test_module_teardown: None +): page = logged_in_context.new_page() page.goto(f"{product_url}") @@ -66,7 +80,7 @@ def test_simple_folder_workflow(logged_in_context, product_url, test_module_tear def test_simple_workspace_workflow( - logged_in_context, product_url, test_module_teardown + logged_in_context: BrowserContext, product_url: AnyUrl, test_module_teardown: None ): page = logged_in_context.new_page() @@ -80,7 +94,12 @@ def test_simple_workspace_workflow( and response.request.method == "POST" ) as response_info: page.get_by_test_id("newWorkspaceButton").click() + + workspace_title_field = page.get_by_test_id("workspaceEditorTitle") + # wait until the title is automatically filled up + expect(workspace_title_field).not_to_have_value("") page.get_by_test_id("workspaceEditorSave").click() + _workspace_id = response_info.value.json()["data"]["workspaceId"] page.get_by_test_id(f"workspaceItem_{_workspace_id}").click() page.get_by_test_id("workspacesAndFoldersTreeItem_null_null").click()