diff --git a/Makefile b/Makefile index b4d880e2719..b6824c92797 100644 --- a/Makefile +++ b/Makefile @@ -231,20 +231,24 @@ rows="%-24s | %90s | %12s | %12s\n";\ TableWidth=140;\ printf "%24s | %90s | %12s | %12s\n" Name Endpoint User Password;\ printf "%.$${TableWidth}s\n" "$$separator";\ -printf "$$rows" 'oSparc platform' 'http://$(get_my_ip).nip.io:9081';\ -printf "$$rows" 'oSparc web API doc' 'http://$(get_my_ip).nip.io:9081/dev/doc';\ -printf "$$rows" 'oSparc public API doc' 'http://$(get_my_ip).nip.io:8006/dev/doc';\ -printf "$$rows" 'Postgres DB' 'http://$(get_my_ip).nip.io:18080/?pgsql=postgres&username='$${POSTGRES_USER}'&db='$${POSTGRES_DB}'&ns=public' $${POSTGRES_USER} $${POSTGRES_PASSWORD};\ -printf "$$rows" Portainer 'http://$(get_my_ip).nip.io:9000' admin adminadmin;\ -printf "$$rows" Redis 'http://$(get_my_ip).nip.io:18081';\ +printf "$$rows" "oSparc platform" "http://$(get_my_ip).nip.io:9081";\ +printf "$$rows" "oSparc web API doc" "http://$(get_my_ip).nip.io:9081/dev/doc";\ +printf "$$rows" "oSparc public API doc" "http://$(get_my_ip).nip.io:8006/dev/doc";\ +printf "$$rows" "Postgres DB" "http://$(get_my_ip).nip.io:18080/?pgsql=postgres&username="$${POSTGRES_USER}"&db="$${POSTGRES_DB}"&ns=public" $${POSTGRES_USER} $${POSTGRES_PASSWORD};\ +printf "$$rows" "Portainer" "http://$(get_my_ip).nip.io:9000" admin adminadmin;\ +printf "$$rows" "Redis" "http://$(get_my_ip).nip.io:18081";\ printf "$$rows" "Dask Dashboard" "http://$(get_my_ip).nip.io:8787";\ printf "$$rows" "Docker Registry" "$${REGISTRY_URL}" $${REGISTRY_USER} $${REGISTRY_PW};\ printf "$$rows" "Rabbit Dashboard" "http://$(get_my_ip).nip.io:15762" admin adminadmin;\ printf "$$rows" "Traefik Dashboard" "http://$(get_my_ip).nip.io:8080/dashboard/";\ +printf "$$rows" "Storage S3 Filestash" "http://$(get_my_ip).nip.io:9002" 12345678 12345678;\ +printf "$$rows" "Storage S3 Minio" "http://$(get_my_ip).nip.io:9001" 12345678 12345678;\ + printf "\n%s\n" "⚠️ if a DNS is not used (as displayed above), the interactive services started via dynamic-sidecar";\ echo "⚠️ will not be shown. 
The frontend accesses them via the uuid.services.YOUR_IP.nip.io:9081"; endef + show-endpoints: @$(_show_endpoints) diff --git a/services/storage/src/simcore_service_storage/handlers.py b/services/storage/src/simcore_service_storage/handlers.py index 86a00db024f..4cd53585060 100644 --- a/services/storage/src/simcore_service_storage/handlers.py +++ b/services/storage/src/simcore_service_storage/handlers.py @@ -10,7 +10,7 @@ from servicelib.aiohttp.application_keys import APP_CONFIG_KEY from servicelib.aiohttp.rest_utils import extract_and_validate -from ._meta import __version__, api_vtag +from ._meta import api_vtag from .access_layer import InvalidFileIdentifier from .constants import APP_DSM_KEY, DATCORE_STR, SIMCORE_S3_ID, SIMCORE_S3_STR from .db_tokens import get_api_token_and_secret @@ -257,7 +257,7 @@ async def _go(): except asyncio.TimeoutError: log.error("Sync metadata table timed out (%s seconds)", timeout) - asyncio.create_task(_go(), name="f&f sync_task") + asyncio.create_task(_go(), name="fire&forget sync_task") else: sync_results = await sync_coro diff --git a/services/web/server/requirements/_test.in b/services/web/server/requirements/_test.in index c590a66871e..2e33f51ee38 100644 --- a/services/web/server/requirements/_test.in +++ b/services/web/server/requirements/_test.in @@ -7,13 +7,21 @@ # --constraint _base.txt -# testing -# fixtures -hypothesis +aioresponses +alembic +click +codecov coverage +coveralls +docker Faker flaky +hypothesis +jsonschema +openapi-spec-validator +ptvsd +pylint pytest pytest-aiohttp # incompatible with pytest-asyncio. See https://github.com/pytest-dev/pytest-asyncio/issues/76 pytest-cov @@ -23,21 +31,7 @@ pytest-instafail pytest-mock pytest-runner pytest-sugar - -# helpers -aioresponses -alembic -click -docker -jsonschema -openapi-spec-validator python-dotenv redis tenacity websockets - -# tools -codecov -coveralls -ptvsd -pylint diff --git a/services/web/server/setup.cfg b/services/web/server/setup.cfg index 4f79845b696..ad3fc3ec8e3 100644 --- a/services/web/server/setup.cfg +++ b/services/web/server/setup.cfg @@ -12,6 +12,8 @@ tag = False [tool:pytest] addopts = --strict-markers +# switching to 'auto' for the sake of pytest-aiohttp backward compatibility +asyncio_mode=auto markers = slow: marks tests as slow (deselect with '-m "not slow"') - acceptance_test: marks tests as 'acceptance tests' i.e. does the system do what the user expects? Typically those are workflows. + acceptance_test: "marks tests as 'acceptance tests' i.e. does the system do what the user expects? Typically those are workflows." 
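Note on the `asyncio_mode=auto` setting added above: pytest-asyncio's auto mode collects plain `async def` tests as asyncio tests without an explicit `@pytest.mark.asyncio` marker, which is what keeps the existing pytest-aiohttp style tests working after the switch. A minimal sketch of such a test, assuming some aiohttp test `client` fixture and an illustrative `/v0/health` route (both names are assumptions, not taken from this patch):

    async def test_healthcheck(client):
        # collected and run on the event loop because asyncio_mode=auto;
        # no @pytest.mark.asyncio marker is required
        resp = await client.get("/v0/health")
        assert resp.status == 200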
diff --git a/services/web/server/src/simcore_service_webserver/exporter/formatters/formatter_v1.py b/services/web/server/src/simcore_service_webserver/exporter/formatters/formatter_v1.py index 64405d84549..233700c4ec1 100644 --- a/services/web/server/src/simcore_service_webserver/exporter/formatters/formatter_v1.py +++ b/services/web/server/src/simcore_service_webserver/exporter/formatters/formatter_v1.py @@ -8,6 +8,7 @@ from pathlib import Path from pprint import pformat from typing import Any, Deque, Dict, List, Optional, Tuple +from uuid import UUID import aiofiles from aiohttp import ClientSession, ClientTimeout, web @@ -16,7 +17,7 @@ from models_library.utils.nodes import compute_node_hash, project_node_io_payload_cb from ...director_v2_api import create_or_update_pipeline -from ...projects.projects_api import delete_project, get_project_for_user +from ...projects.projects_api import get_project_for_user, submit_delete_project_task from ...projects.projects_db import APP_PROJECT_DBAPI from ...projects.projects_exceptions import ProjectsException from ...storage_handlers import ( @@ -419,7 +420,9 @@ async def import_files_and_validate_project( "Removing project %s, because there was an error while importing it." ) try: - await delete_project(app=app, project_uuid=project_uuid, user_id=user_id) + await submit_delete_project_task( + app=app, project_uuid=UUID(project_uuid), user_id=user_id + ) except ProjectsException: # no need to raise an error here log.exception( diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py index 84e218cddcc..66b90c9fd1b 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py @@ -2,6 +2,7 @@ """ +import asyncio import logging from itertools import chain from typing import Any, Dict, List, Optional, Set, Tuple @@ -16,23 +17,24 @@ from . 
import director_v2_api, users_exceptions from .director.director_exceptions import DirectorException, ServiceNotFoundError from .garbage_collector_settings import GUEST_USER_RC_LOCK_FORMAT -from .garbage_collector_utils import ( - get_new_project_owner_gid, - remove_user, - replace_current_owner, -) +from .garbage_collector_utils import get_new_project_owner_gid, replace_current_owner from .projects.projects_api import ( - delete_project, get_project_for_user, get_workbench_node_ids_from_project_uuid, is_node_id_present_in_any_project_workbench, - remove_project_interactive_services, + remove_project_dynamic_services, + submit_delete_project_task, ) from .projects.projects_db import APP_PROJECT_DBAPI -from .projects.projects_exceptions import ProjectNotFoundError +from .projects.projects_exceptions import ProjectDeleteError, ProjectNotFoundError from .redis import get_redis_lock_manager_client from .resource_manager.registry import RedisResourceRegistry, get_registry -from .users_api import get_guest_user_ids_and_names, get_user, get_user_role +from .users_api import ( + delete_user, + get_guest_user_ids_and_names, + get_user, + get_user_role, +) from .users_exceptions import UserNotFoundError logger = logging.getLogger(__name__) @@ -183,7 +185,7 @@ async def remove_disconnected_user_resources( # inform that the project can be closed on the backend side # try: - await remove_project_interactive_services( + await remove_project_dynamic_services( user_id=int(dead_key["user_id"]), project_uuid=resource_value, app=app, @@ -417,44 +419,7 @@ async def remove_orphaned_services( logger.debug("Finished orphaned services removal") -async def remove_guest_user_with_all_its_resources( - app: web.Application, user_id: int -) -> None: - """Removes a GUEST user with all its associated projects and S3/MinIO files""" - - try: - user_role: UserRole = await get_user_role(app, user_id) - if user_role > UserRole.GUEST: - # NOTE: This acts as a protection barrier to avoid removing resources to more - # priviledge users - return - - logger.debug( - "Deleting all projects of user with %s because it is a GUEST", - f"{user_id=}", - ) - await remove_all_projects_for_user(app=app, user_id=user_id) - - logger.debug( - "Deleting user %s because it is a GUEST", - f"{user_id=}", - ) - await remove_user(app=app, user_id=user_id) - - except ( - DatabaseError, - asyncpg.exceptions.PostgresError, - ProjectNotFoundError, - UserNotFoundError, - ) as error: - logger.warning( - "Failure in database while removing user (%s) and its resources with %s", - f"{user_id=}", - f"{error}", - ) - - -async def remove_all_projects_for_user(app: web.Application, user_id: int) -> None: +async def _delete_all_projects_for_user(app: web.Application, user_id: int) -> None: """ Goes through all the projects and will try to remove them but first it will check if the project is shared with others. @@ -491,6 +456,8 @@ async def remove_all_projects_for_user(app: web.Application, user_id: int) -> No f"{user_project_uuids=}", ) + delete_tasks: List[asyncio.Task] = [] + for project_uuid in user_project_uuids: try: project: Dict = await get_project_for_user( @@ -499,11 +466,12 @@ async def remove_all_projects_for_user(app: web.Application, user_id: int) -> No user_id=user_id, include_templates=True, ) - except web.HTTPNotFound: + except (web.HTTPNotFound, ProjectNotFoundError) as err: logger.warning( - "Could not find project %s for user with %s to be removed. Skipping.", + "Could not find project %s for user with %s to be removed: %s. 
Skipping.", f"{project_uuid=}", f"{user_id=}", + f"{err}", ) continue @@ -525,8 +493,9 @@ async def remove_all_projects_for_user(app: web.Application, user_id: int) -> No f"{project_uuid=}", f"{user_id=}", ) - - await delete_project(app, project_uuid, user_id) + task = await submit_delete_project_task(app, project_uuid, user_id) + assert task # nosec + delete_tasks.append(task) except ProjectNotFoundError: logging.warning( @@ -551,3 +520,45 @@ async def remove_all_projects_for_user(app: web.Application, user_id: int) -> No new_project_owner_gid=new_project_owner_gid, project=project, ) + + # NOTE: ensures all delete_task tasks complete or fails fast + # can raise ProjectDeleteError, CancellationError + await asyncio.gather(*delete_tasks) + + +async def remove_guest_user_with_all_its_resources( + app: web.Application, user_id: int +) -> None: + """Removes a GUEST user with all its associated projects and S3/MinIO files""" + + try: + user_role: UserRole = await get_user_role(app, user_id) + if user_role > UserRole.GUEST: + # NOTE: This acts as a protection barrier to avoid removing resources to more + # priviledge users + return + + logger.debug( + "Deleting all projects of user with %s because it is a GUEST", + f"{user_id=}", + ) + await _delete_all_projects_for_user(app=app, user_id=user_id) + + logger.debug( + "Deleting user %s because it is a GUEST", + f"{user_id=}", + ) + await delete_user(app, user_id) + + except ( + DatabaseError, + asyncpg.exceptions.PostgresError, + ProjectNotFoundError, + UserNotFoundError, + ProjectDeleteError, + ) as error: + logger.warning( + "Failed to delete user %s and its resources: %s", + f"{user_id=}", + f"{error}", + ) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_task.py b/services/web/server/src/simcore_service_webserver/garbage_collector_task.py index 6f3a243e8cf..4ce9f4876c1 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_task.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_task.py @@ -15,7 +15,7 @@ logger = logging.getLogger(__name__) -GC_TASK_NAME = f"{__name__}.collect_garbage_periodically" +GC_TASK_NAME = f"background-task.{__name__}.collect_garbage_periodically" GC_TASK_CONFIG = f"{GC_TASK_NAME}.config" diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_utils.py b/services/web/server/src/simcore_service_webserver/garbage_collector_utils.py index 98a688b4582..9e124e4754d 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_utils.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_utils.py @@ -12,7 +12,7 @@ from .groups_api import get_group_from_gid from .projects.projects_db import APP_PROJECT_DBAPI, ProjectAccessRights from .projects.projects_exceptions import ProjectNotFoundError -from .users_api import delete_user, get_user, get_user_id_from_gid +from .users_api import get_user, get_user_id_from_gid from .users_exceptions import UserNotFoundError from .users_to_groups_api import get_users_for_gid @@ -31,7 +31,7 @@ async def _fetch_new_project_owner_from_groups( for group_gid in standard_groups.keys(): # remove the current owner from the bunch target_group_users = await get_users_for_gid(app=app, gid=group_gid) - {user_id} - logger.error("Found group users '%s'", target_group_users) + logger.info("Found group users '%s'", target_group_users) for possible_user_id in target_group_users: # check if the possible_user is still present in the db @@ -63,7 +63,7 @@ async def 
get_new_project_owner_gid( # A Set[str] is prefered over Set[int] because access_writes # is a Dict with only key,valus in {str, None} other_users_access_rights: Set[str] = set(access_rights.keys()) - { - str(user_primary_gid) + f"{user_primary_gid}" } logger.debug( "Processing other user and groups access rights '%s'", @@ -151,11 +151,11 @@ async def replace_current_owner( # unseting the project owner and saving the project back project["prj_owner"] = int(new_project_owner_id) # removing access rights entry - del project["accessRights"][str(user_primary_gid)] + del project["accessRights"][f"{user_primary_gid}"] project["accessRights"][ - str(new_project_owner_gid) + f"{new_project_owner_gid}" ] = ProjectAccessRights.OWNER.value - logger.error("Syncing back project %s", project) + logger.info("Syncing back project %s", project) # syncing back project data try: @@ -173,18 +173,3 @@ async def replace_current_owner( "Could not remove old owner and replaced it with user %s", new_project_owner_id, ) - - -async def remove_user(app: web.Application, user_id: UserID) -> None: - """Tries to remove a user, if the users still exists a warning message will be displayed""" - try: - await delete_user(app, user_id) - except ( - DatabaseError, - asyncpg.exceptions.PostgresError, - ProjectNotFoundError, - UserNotFoundError, - ) as err: - logger.warning( - "User '%s' still has some projects, could not be deleted [%s]", user_id, err - ) diff --git a/services/web/server/src/simcore_service_webserver/log.py b/services/web/server/src/simcore_service_webserver/log.py index ffbb8093c70..9d7b7ea4ee2 100644 --- a/services/web/server/src/simcore_service_webserver/log.py +++ b/services/web/server/src/simcore_service_webserver/log.py @@ -9,6 +9,14 @@ from servicelib.logging_utils import config_all_loggers LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR +NOISY_LOGGERS = ( + "engineio", + "openapi_spec_validator", + "sqlalchemy", + "sqlalchemy.engine", + "inotify.adapters", + "servicelib.aiohttp.monitoring", +) def setup_logging(*, level: Union[str, int], slow_duration: Optional[float] = None): @@ -27,11 +35,8 @@ def setup_logging(*, level: Union[str, int], slow_duration: Optional[float] = No min(logging.root.level + LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING ) - logging.getLogger("engineio").setLevel(quiet_level) - logging.getLogger("openapi_spec_validator").setLevel(quiet_level) - logging.getLogger("sqlalchemy").setLevel(quiet_level) - logging.getLogger("sqlalchemy.engine").setLevel(quiet_level) - logging.getLogger("inotify.adapters").setLevel(quiet_level) + for name in NOISY_LOGGERS: + logging.getLogger(name).setLevel(quiet_level) if slow_duration: # NOTE: Every task blocking > AIODEBUG_SLOW_DURATION_SECS secs is considered slow and logged as warning diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling_projects.py b/services/web/server/src/simcore_service_webserver/meta_modeling_projects.py index 8b4fe94861a..9d46d255562 100644 --- a/services/web/server/src/simcore_service_webserver/meta_modeling_projects.py +++ b/services/web/server/src/simcore_service_webserver/meta_modeling_projects.py @@ -22,7 +22,7 @@ get_runnable_projects_ids, ) from .meta_modeling_version_control import CommitID, VersionControlForMetaModeling -from .projects.projects_handlers import RQ_REQUESTED_REPO_PROJECT_UUID_KEY +from .projects.projects_handlers_crud import RQ_REQUESTED_REPO_PROJECT_UUID_KEY log = logging.getLogger(__name__) diff --git 
a/services/web/server/src/simcore_service_webserver/projects/_delete.py b/services/web/server/src/simcore_service_webserver/projects/_delete.py new file mode 100644 index 00000000000..18127cbcd9d --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_delete.py @@ -0,0 +1,180 @@ +""" Implements logic to delete a project (and all associated services, data, etc) + + +NOTE: this entire module is protected within the `projects` package + and ONLY to be used in the implementation of the projects_api module's functions +""" + +import asyncio +import logging +from typing import List, Optional, Protocol + +from aiohttp import web +from models_library.projects import ProjectID +from models_library.users import UserID + +from .. import director_v2_api +from ..storage_api import delete_data_folders_of_project +from ..users_api import UserNameDict +from ..users_exceptions import UserNotFoundError +from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI +from .projects_exceptions import ( + ProjectDeleteError, + ProjectInvalidRightsError, + ProjectLockError, + ProjectNotFoundError, +) + +log = logging.getLogger(__name__) + +DELETE_PROJECT_TASK_NAME = "background-task.delete_project/project_uuid={0}.user_id={1}" + + +class RemoveProjectServicesCallable(Protocol): + # TODO: this function was tmp added here to avoid refactoring all projects_api in a single PR + async def __call__( + self, + user_id: int, + project_uuid: str, + app: web.Application, + notify_users: bool = True, + user_name: Optional[UserNameDict] = None, + ) -> None: + ... + + +async def mark_project_as_deleted( + app: web.Application, project_uuid: ProjectID, user_id: UserID +): + """ + ::raises ProjectInvalidRightsError + ::raises ProjectNotFoundError + """ + db: ProjectDBAPI = app[APP_PROJECT_DBAPI] + # TODO: tmp using invisible as a "deletion mark" + # Even if any of the steps below fail, the project will remain invisible + # TODO: see https://github.com/ITISFoundation/osparc-simcore/pull/2522 + await db.check_delete_project_permission(user_id, f"{project_uuid}") + + # TODO: note that if any of the steps below fail, it might result in a + # services/projects/data state that is inconsistent. The GC should + # be able to detect that and resolve it. + await db.set_hidden_flag(project_uuid, enabled=True) + + +async def delete_project( + app: web.Application, + project_uuid: ProjectID, + user_id: UserID, + # TODO: this function was tmp added here to avoid refactoring all projects_api in a single PR + remove_project_dynamic_services: RemoveProjectServicesCallable, +) -> None: + """Stops dynamic services, deletes data and finally deletes the project + + NOTE: this does NOT use fire&forget anymore. This is a decision of the caller to make.
+ + raises ProjectDeleteError + """ + + log.debug( + "Deleting project '%s' for user '%s' in database", + f"{project_uuid=}", + f"{user_id=}", + ) + db: ProjectDBAPI = app[APP_PROJECT_DBAPI] + + try: + await mark_project_as_deleted(app, project_uuid, user_id) + + # stops dynamic services + # - raises ProjectNotFoundError, UserNotFoundError, ProjectLockError + await remove_project_dynamic_services( + user_id, f"{project_uuid}", app, notify_users=False + ) + + # stops computational services + # - raises DirectorServiceError + await director_v2_api.delete_pipeline(app, user_id, project_uuid) + + # rm data from storage + await delete_data_folders_of_project(app, project_uuid, user_id) + + # rm project from database + await db.delete_user_project(user_id, f"{project_uuid}") + + except ProjectLockError as err: + raise ProjectDeleteError( + project_uuid, reason=f"Project currently in use {err}" + ) from err + + except (ProjectNotFoundError, UserNotFoundError) as err: + raise ProjectDeleteError( + project_uuid, reason=f"Invalid project state {err}" + ) from err + + +def schedule_task( + app: web.Application, + project_uuid: ProjectID, + user_id: UserID, + remove_project_dynamic_services: RemoveProjectServicesCallable, + logger: logging.Logger, +) -> asyncio.Task: + """Wrap `delete_project` for a `project_uuid` and `user_id` into a Task + and schedule its execution in the event loop. + + Return the scheduled Task + """ + + def _log_state_when_done(fut: asyncio.Future): + # the task created in the parent function is typically used + # to fire&forget therefore this internal function will be used as + # callback to provide a minimal log that informs about the + # state of the task when completed. + try: + fut.result() + logger.info(f"Deleted {project_uuid=} using {user_id=} permissions") + + except asyncio.exceptions.CancelledError: + logger.warning( + f"Canceled deletion of {project_uuid=} using {user_id=} permissions" + ) + raise + + except ProjectDeleteError as err: + logger.error( + f"Failed to delete {project_uuid=} using {user_id=} permissions: {err}" + ) + + except ProjectInvalidRightsError as err: + logger.error( + f"{user_id=} does not have permission to delete {project_uuid=}: {err}" + ) + + except Exception: # pylint: disable=broad-except + logger.exception( + f"Unexpected error while deleting {project_uuid=} with {user_id=} permissions" + ) + + # ------ + + task = asyncio.create_task( + delete_project(app, project_uuid, user_id, remove_project_dynamic_services), + name=DELETE_PROJECT_TASK_NAME.format(project_uuid, user_id), + ) + + assert task.get_name() == DELETE_PROJECT_TASK_NAME.format( # nosec + project_uuid, user_id + ) + + task.add_done_callback(_log_state_when_done) + return task + + +def get_scheduled_tasks(project_uuid: ProjectID, user_id: UserID) -> List[asyncio.Task]: + """Returns tasks matching delete-project task's name.""" + return [ + task + for task in asyncio.all_tasks() + if task.get_name() == DELETE_PROJECT_TASK_NAME.format(project_uuid, user_id) + ] diff --git a/services/web/server/src/simcore_service_webserver/projects/plugin.py b/services/web/server/src/simcore_service_webserver/projects/plugin.py index 3f8acdba2a8..332f77bdb0f 100644 --- a/services/web/server/src/simcore_service_webserver/projects/plugin.py +++ b/services/web/server/src/simcore_service_webserver/projects/plugin.py @@ -15,7 +15,12 @@ ) from .._constants import APP_OPENAPI_SPECS_KEY, APP_SETTINGS_KEY -from . import projects_handlers, projects_nodes_handlers, projects_tags_handlers +from . 
import ( + projects_handlers, + projects_handlers_crud, + projects_nodes_handlers, + projects_tags_handlers, +) from .project_models import setup_projects_model_schema from .projects_access import setup_projects_access from .projects_db import setup_projects_db @@ -75,6 +80,7 @@ def setup_projects(app: web.Application) -> bool: _create_routes( "project", specs, + projects_handlers_crud, projects_handlers, projects_nodes_handlers, projects_tags_handlers, diff --git a/services/web/server/src/simcore_service_webserver/projects/project_lock.py b/services/web/server/src/simcore_service_webserver/projects/project_lock.py index 9ec7d86eb21..5885588de4a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/project_lock.py +++ b/services/web/server/src/simcore_service_webserver/projects/project_lock.py @@ -10,11 +10,11 @@ from ..redis import get_redis_lock_manager_client from ..users_api import UserNameDict +from .projects_exceptions import ProjectLockError PROJECT_REDIS_LOCK_KEY: str = "project_lock:{}" ProjectLock = Lock -ProjectLockError = redis.exceptions.LockError @asynccontextmanager diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index 49180993cbe..3f395cc83e6 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -18,6 +18,7 @@ from uuid import UUID, uuid4 from aiohttp import web +from models_library.projects import ProjectID from models_library.projects_state import ( Owner, ProjectLocked, @@ -26,6 +27,7 @@ ProjectStatus, RunningState, ) +from models_library.users import UserID from pydantic.types import PositiveInt from servicelib.aiohttp.application_keys import APP_JSONSCHEMA_SPECS_KEY from servicelib.aiohttp.jsonschema_validation import validate_instance @@ -48,13 +50,10 @@ ) from ..users_api import UserRole, get_user_name, get_user_role from ..users_exceptions import UserNotFoundError -from .project_lock import ( - ProjectLockError, - UserNameDict, - get_project_locked_state, - lock_project, -) +from . import _delete +from .project_lock import UserNameDict, get_project_locked_state, lock_project from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI +from .projects_exceptions import ProjectLockError from .projects_utils import extract_dns_without_default_port log = logging.getLogger(__name__) @@ -185,16 +184,41 @@ async def start_project_interactive_services( log.error("Error while starting dynamic service %s", f"{entry=}") -async def delete_project(app: web.Application, project_uuid: str, user_id: int) -> None: - await _delete_project_from_db(app, project_uuid, user_id) +async def submit_delete_project_task( + app: web.Application, project_uuid: ProjectID, user_id: UserID +) -> asyncio.Task: + """ + Marks a project as deleted and schedules a task to perform the entire removal workflow + using user_id's permissions. + + If this task is already scheduled, it returns it; otherwise it creates a new one. + + The returned task can be ignored to implement a fire&forget or + followed up with add_done_callback.
+ + raises ProjectDeleteError + raises ProjectInvalidRightsError + raises ProjectNotFoundError + """ + await _delete.mark_project_as_deleted(app, project_uuid, user_id) - async def _remove_services_and_data(): - await remove_project_interactive_services( - user_id, project_uuid, app, notify_users=False + # Ensures ONE delete task per (project,user) pair + task = get_delete_project_task(project_uuid, user_id) + if not task: + task = _delete.schedule_task( + app, project_uuid, user_id, remove_project_dynamic_services, log ) - await storage_api.delete_data_folders_of_project(app, project_uuid, user_id) + return task - fire_and_forget_task(_remove_services_and_data()) + +def get_delete_project_task( + project_uuid: ProjectID, user_id: UserID +) -> Optional[asyncio.Task]: + if tasks := _delete.get_scheduled_tasks(project_uuid, user_id): + assert len(tasks) == 1, f"{tasks=}" # nosec + task = tasks[0] + return task + return None @observe(event="SIGNAL_USER_DISCONNECTED") @@ -273,7 +297,7 @@ async def lock_with_notification( await retrieve_and_notify_project_locked_state(user_id, project_uuid, app) -async def remove_project_interactive_services( +async def remove_project_dynamic_services( user_id: int, project_uuid: str, app: web.Application, @@ -329,15 +353,6 @@ async def remove_project_interactive_services( pass -async def _delete_project_from_db( - app: web.Application, project_uuid: str, user_id: int -) -> None: - log.debug("deleting project '%s' for user '%s' in database", project_uuid, user_id) - db = app[APP_PROJECT_DBAPI] - await director_v2_api.delete_pipeline(app, user_id, UUID(project_uuid)) - await db.delete_user_project(user_id, project_uuid) - - ## PROJECT NODES ----------------------------------------------------- @@ -737,7 +752,7 @@ async def try_close_project_for_user( if not user_to_session_ids: # NOTE: depending on the garbage collector speed, it might already be removing it fire_and_forget_task( - remove_project_interactive_services(user_id, project_uuid, app) + remove_project_dynamic_services(user_id, project_uuid, app) ) else: log.warning( diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_db.py b/services/web/server/src/simcore_service_webserver/projects/projects_db.py index 6c0a5053431..323b763bf3e 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_db.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_db.py @@ -12,7 +12,7 @@ from copy import deepcopy from datetime import datetime from enum import Enum -from typing import Any, Dict, List, Mapping, Optional, Set, Tuple +from typing import Any, Dict, List, Mapping, Optional, Set, Tuple, Union import psycopg2.errors import sqlalchemy as sa @@ -21,24 +21,19 @@ from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy from change_case import ChangeCase -from models_library.projects import ProjectAtDB, ProjectIDStr +from models_library.projects import ProjectAtDB, ProjectID, ProjectIDStr from pydantic import ValidationError from pydantic.types import PositiveInt from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY from servicelib.json_serialization import json_dumps from simcore_postgres_database.webserver_models import ProjectType, projects - -# TODO: test all function return schema-compatible data -# TODO: is user_id str or int? 
-# TODO: systemaic user_id, project -# TODO: rename add_projects by create_projects -# FIXME: not clear when data is schema-compliant and db-compliant from sqlalchemy import desc, func, literal_column from sqlalchemy.sql import and_, select from ..db_models import GroupType, groups, study_tags, user_to_groups, users from ..users_exceptions import UserNotFoundError from ..utils import format_datetime, now_str +from .project_models import ProjectDict, ProjectProxy from .projects_exceptions import ( NodeNotFoundError, ProjectInvalidRightsError, @@ -61,9 +56,9 @@ class ProjectAccessRights(Enum): def _check_project_permissions( - project: Dict[str, Any], + project: Union[ProjectProxy, ProjectDict], user_id: int, - user_groups: List[Dict[str, Any]], + user_groups: List[RowProxy], permission: str, ) -> None: if not permission: @@ -762,6 +757,21 @@ async def delete_user_project(self, user_id: int, project_uuid: str): projects.delete().where(projects.c.uuid == project_uuid) ) + async def check_delete_project_permission(self, user_id: int, project_uuid: str): + """ + raises ProjectInvalidRightsError + """ + async with self.engine.acquire() as conn: + async with conn.begin() as _transaction: + project = await self._get_project( + conn, user_id, project_uuid, include_templates=True, for_update=True + ) + # if we have delete access we delete the project + user_groups: List[RowProxy] = await self.__load_user_groups( + conn, user_id + ) + _check_project_permissions(project, user_id, user_groups, "delete") + async def make_unique_project_uuid(self) -> str: """Generates a project identifier still not used in database @@ -864,6 +874,15 @@ async def update_project_without_checking_permissions( ) return result.rowcount == 1 + async def set_hidden_flag(self, project_uuid: ProjectID, enabled: bool): + async with self.engine.acquire() as conn: + stmt = ( + projects.update() + .values(hidden=enabled) + .where(projects.c.uuid == f"{project_uuid}") + ) + await conn.execute(stmt) + def setup_projects_db(app: web.Application): # NOTE: inits once per app diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py b/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py index 77128f05c6c..c45550a6b6c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py @@ -1,4 +1,5 @@ """Defines the different exceptions that may arise in the projects subpackage""" +import redis.exceptions class ProjectsException(Exception): @@ -35,6 +36,12 @@ def __init__(self, project_uuid): self.project_uuid = project_uuid +class ProjectDeleteError(ProjectsException): + def __init__(self, project_uuid, reason): + super().__init__(f"Failed to complete deletion of {project_uuid=}: {reason}") + self.project_uuid = project_uuid + + class NodeNotFoundError(ProjectsException): """Node was not found in project""" @@ -42,3 +49,6 @@ def __init__(self, project_uuid: str, node_uuid: str): super().__init__(f"Node {node_uuid} not found in project {project_uuid}") self.node_uuid = node_uuid self.project_uuid = project_uuid + + +ProjectLockError = redis.exceptions.LockError diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py index cf262547e2c..988216a5f66 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py +++ 
b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py @@ -1,318 +1,32 @@ -""" Handlers for CRUD operations on /projects/ +""" Handlers for on /projects colletions +Imports in standard methods (SEE projects_handlers_crud) and extends with + - custom methods (https://google.aip.dev/121) + - singleton resources (https://google.aip.dev/156) + - ... """ - -import asyncio import json import logging -from typing import Any, Coroutine, Dict, List, Optional, Set -from uuid import UUID from aiohttp import web -from jsonschema import ValidationError -from models_library.projects import ProjectID -from models_library.projects_state import ProjectState, ProjectStatus -from models_library.rest_pagination import Page -from models_library.rest_pagination_utils import paginate_data +from models_library.projects_state import ProjectState from servicelib.aiohttp.web_exceptions_extension import HTTPLocked from servicelib.json_serialization import json_dumps -from servicelib.utils import logged_gather -from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB -from .. import catalog, director_v2_api -from .._constants import RQ_PRODUCT_KEY from .._meta import api_version_prefix as VTAG from ..director_v2_core import DirectorServiceError from ..login.decorators import RQT_USERID_KEY, login_required from ..resource_manager.websocket_manager import PROJECT_ID_KEY, managed_resource -from ..rest_constants import RESPONSE_MODEL_POLICY -from ..security_api import check_permission from ..security_decorators import permission_required -from ..storage_api import copy_data_folders_from_project -from ..users_api import get_user_name from . import projects_api -from .project_models import ProjectDict, ProjectTypeAPI -from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI -from .projects_exceptions import ProjectInvalidRightsError, ProjectNotFoundError -from .projects_utils import ( - any_node_inputs_changed, - clone_project_document, - get_project_unavailable_services, - project_uses_available_services, -) - -# When the user requests a project with a repo, the working copy might differ from -# the repo project. A middleware in the meta module (if active) will resolve -# the working copy and redirect to the appropriate project entrypoint. 
Nonetheless, the -# response needs to refer to the uuid of the request and this is passed through this request key -RQ_REQUESTED_REPO_PROJECT_UUID_KEY = f"{__name__}.RQT_REQUESTED_REPO_PROJECT_UUID_KEY" - -OVERRIDABLE_DOCUMENT_KEYS = [ - "name", - "description", - "thumbnail", - "prjOwner", - "accessRights", -] -# TODO: validate these against api/specs/webserver/v0/components/schemas/project-v0.0.1.json - +from .projects_exceptions import ProjectNotFoundError +from .projects_handlers_crud import routes log = logging.getLogger(__name__) -routes = web.RouteTableDef() - - -@routes.post(f"/{VTAG}/projects") -@login_required -@permission_required("project.create") -@permission_required("services.pipeline.*") # due to update_pipeline_db -async def create_projects( - request: web.Request, -): # pylint: disable=too-many-branches, too-many-statements - user_id: int = request[RQT_USERID_KEY] - db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] - template_uuid = request.query.get("from_template") - as_template = request.query.get("as_template") - copy_data: bool = bool( - request.query.get("copy_data", "true") in [1, "true", "True"] - ) - hidden: bool = bool(request.query.get("hidden", False)) - - new_project = {} - new_project_was_hidden_before_data_was_copied = hidden - try: - clone_data_coro: Optional[Coroutine] = None - source_project: Optional[Dict[str, Any]] = None - if as_template: # create template from - await check_permission(request, "project.template.create") - source_project = await projects_api.get_project_for_user( - request.app, - project_uuid=as_template, - user_id=user_id, - include_templates=False, - ) - elif template_uuid: # create from template - source_project = await db.get_template_project(template_uuid) - if not source_project: - raise web.HTTPNotFound( - reason="Invalid template uuid {}".format(template_uuid) - ) - if source_project: - # clone template as user project - new_project, nodes_map = clone_project_document( - source_project, - forced_copy_project_id=None, - clean_output_data=(copy_data == False), - ) - if template_uuid: - # remove template access rights - new_project["accessRights"] = {} - # the project is to be hidden until the data is copied - hidden = copy_data - clone_data_coro = ( - copy_data_folders_from_project( - request.app, source_project, new_project, nodes_map, user_id - ) - if copy_data - else None - ) - # FIXME: parameterized inputs should get defaults provided by service - - # overrides with body - if request.can_read_body: - predefined = await request.json() - if new_project: - for key in OVERRIDABLE_DOCUMENT_KEYS: - non_null_value = predefined.get(key) - if non_null_value: - new_project[key] = non_null_value - else: - # TODO: take skeleton and fill instead - new_project = predefined - - # re-validate data - await projects_api.validate_project(request.app, new_project) - - # update metadata (uuid, timestamps, ownership) and save - new_project = await db.add_project( - new_project, - user_id, - force_as_template=as_template is not None, - hidden=hidden, - ) - - # copies the project's DATA IF cloned - if clone_data_coro: - assert source_project # nosec - if as_template: - # we need to lock the original study while copying the data - async with projects_api.lock_with_notification( - request.app, - source_project["uuid"], - ProjectStatus.CLONING, - user_id, - await get_user_name(request.app, user_id), - ): - - await clone_data_coro - else: - await clone_data_coro - # unhide the project if needed since it is now complete - if not 
new_project_was_hidden_before_data_was_copied: - await db.update_project_without_checking_permissions( - new_project, new_project["uuid"], hidden=False - ) - - await director_v2_api.projects_networks_update( - request.app, UUID(new_project["uuid"]) - ) - - # This is a new project and every new graph needs to be reflected in the pipeline tables - await director_v2_api.create_or_update_pipeline( - request.app, user_id, new_project["uuid"] - ) - - # Appends state - new_project = await projects_api.add_project_states_for_user( - user_id=user_id, - project=new_project, - is_template=as_template is not None, - app=request.app, - ) - - except ValidationError as exc: - raise web.HTTPBadRequest(reason="Invalid project data") from exc - except ProjectNotFoundError as exc: - raise web.HTTPNotFound(reason="Project not found") from exc - except ProjectInvalidRightsError as exc: - raise web.HTTPUnauthorized from exc - except asyncio.CancelledError: - log.warning( - "cancelled creation of project for user '%s', cleaning up", f"{user_id=}" - ) - await projects_api.delete_project(request.app, new_project["uuid"], user_id) - raise - else: - log.debug("project created successfuly") - raise web.HTTPCreated( - text=json.dumps(new_project), content_type="application/json" - ) - - -@routes.get(f"/{VTAG}/projects") -@login_required -@permission_required("project.read") -async def list_projects(request: web.Request): - # TODO: implement all query parameters as - # in https://www.ibm.com/support/knowledgecenter/en/SSCRJU_3.2.0/com.ibm.swg.im.infosphere.streams.rest.api.doc/doc/restapis-queryparms-list.html - from servicelib.aiohttp.rest_utils import extract_and_validate - - user_id, product_name = request[RQT_USERID_KEY], request[RQ_PRODUCT_KEY] - _, query, _ = await extract_and_validate(request) - - project_type = ProjectTypeAPI(query["type"]) - offset = query["offset"] - limit = query["limit"] - show_hidden = query["show_hidden"] - - db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] - - async def set_all_project_states( - projects: List[Dict[str, Any]], project_types: List[bool] - ): - await logged_gather( - *[ - projects_api.add_project_states_for_user( - user_id=user_id, - project=prj, - is_template=prj_type == ProjectTypeDB.TEMPLATE, - app=request.app, - ) - for prj, prj_type in zip(projects, project_types) - ], - reraise=True, - max_concurrency=100, - ) - - user_available_services: List[ - Dict - ] = await catalog.get_services_for_user_in_product( - request.app, user_id, product_name, only_key_versions=True - ) - - projects, project_types, total_number_projects = await db.load_projects( - user_id=user_id, - filter_by_project_type=ProjectTypeAPI.to_project_type_db(project_type), - filter_by_services=user_available_services, - offset=offset, - limit=limit, - include_hidden=show_hidden, - ) - await set_all_project_states(projects, project_types) - page = Page[ProjectDict].parse_obj( - paginate_data( - chunk=projects, - request_url=request.url, - total=total_number_projects, - limit=limit, - offset=offset, - ) - ) - return page.dict(**RESPONSE_MODEL_POLICY) - - -@routes.get(f"/{VTAG}/projects/{{project_uuid}}") -@login_required -@permission_required("project.read") -async def get_project(request: web.Request): - """Returns all projects accessible to a user (not necesarly owned)""" - # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! 
- user_id, product_name = request[RQT_USERID_KEY], request[RQ_PRODUCT_KEY] - try: - project_uuid = request.match_info["project_id"] - except KeyError as err: - raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err - - user_available_services: List[ - Dict - ] = await catalog.get_services_for_user_in_product( - request.app, user_id, product_name, only_key_versions=True - ) - - try: - project = await projects_api.get_project_for_user( - request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True, - include_state=True, - ) - if not await project_uses_available_services(project, user_available_services): - unavilable_services = get_project_unavailable_services( - project, user_available_services - ) - formatted_services = ", ".join( - f"{service}:{version}" for service, version in unavilable_services - ) - # TODO: lack of permissions should be notified with https://httpstatuses.com/403 web.HTTPForbidden - raise web.HTTPNotFound( - reason=( - f"Project '{project_uuid}' uses unavailable services. Please ask " - f"for permission for the following services {formatted_services}" - ) - ) - - if new_uuid := request.get(RQ_REQUESTED_REPO_PROJECT_UUID_KEY): - project["uuid"] = new_uuid - - return {"data": project} - - except ProjectInvalidRightsError as exc: - raise web.HTTPForbidden( - reason=f"You do not have sufficient rights to read project {project_uuid}" - ) from exc - except ProjectNotFoundError as exc: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from exc +# +# Singleton per-user resources https://google.aip.dev/156 +# @routes.get(f"/{VTAG}/projects/active") @@ -348,175 +62,9 @@ async def get_active_project(request: web.Request) -> web.Response: raise web.HTTPNotFound(reason="Project not found") from exc -@routes.put(f"/{VTAG}/projects/{{project_uuid}}") -@login_required -@permission_required("project.update") -@permission_required("services.pipeline.*") # due to update_pipeline_db -async def replace_project(request: web.Request): - """Implements PUT /projects - - In a PUT request, the enclosed entity is considered to be a modified version of - the resource stored on the origin server, and the client is requesting that the - stored version be replaced. - - With PATCH, however, the enclosed entity contains a set of instructions describing how a - resource currently residing on the origin server should be modified to produce a new version. - - Also, another difference is that when you want to update a resource with PUT request, you have to send - the full payload as the request whereas with PATCH, you only send the parameters which you want to update. 
- - :raises web.HTTPNotFound: cannot find project id in repository - """ - user_id: int = request[RQT_USERID_KEY] - try: - project_uuid = ProjectID(request.match_info["project_id"]) - new_project = await request.json() - - # Prune state field (just in case) - new_project.pop("state", None) - - except AttributeError as err: - # NOTE: if new_project is not a dict, .pop will raise this error - raise web.HTTPBadRequest( - reason="Invalid request payload, expected a project model" - ) from err - except KeyError as err: - raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err - except json.JSONDecodeError as exc: - raise web.HTTPBadRequest(reason="Invalid request body") from exc - - db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] - await check_permission( - request, - "project.update | project.workbench.node.inputs.update", - context={ - "dbapi": db, - "project_id": f"{project_uuid}", - "user_id": user_id, - "new_data": new_project, - }, - ) - - try: - await projects_api.validate_project(request.app, new_project) - - current_project = await projects_api.get_project_for_user( - request.app, - project_uuid=f"{project_uuid}", - user_id=user_id, - include_templates=True, - include_state=True, - ) - - if current_project["accessRights"] != new_project["accessRights"]: - await check_permission(request, "project.access_rights.update") - - if await director_v2_api.is_pipeline_running( - request.app, user_id, project_uuid - ): - - if any_node_inputs_changed(new_project, current_project): - # NOTE: This is a conservative measure that we take - # until nodeports logic is re-designed to tackle with this - # particular state. - # - # This measure avoid having a state with different node *links* in the - # comp-tasks table and the project's workbench column. - # The limitation is that nodeports only "sees" those in the comptask - # and this table does not add the new ones since it remains "blocked" - # for modification from that project while the pipeline runs. Therefore - # any extra link created while the pipeline is running can not - # be managed by nodeports because it basically "cannot see it" - # - # Responds https://httpstatuses.com/409: - # The request could not be completed due to a conflict with the current - # state of the target resource (i.e. pipeline is running). This code is used in - # situations where the user might be able to resolve the conflict - # and resubmit the request (front-end will show a pop-up with message below) - # - raise web.HTTPConflict( - reason=f"Project {project_uuid} cannot be modified while pipeline is still running." 
- ) - - new_project = await db.replace_user_project( - new_project, user_id, f"{project_uuid}", include_templates=True - ) - await director_v2_api.projects_networks_update(request.app, project_uuid) - await director_v2_api.create_or_update_pipeline( - request.app, user_id, project_uuid - ) - # Appends state - new_project = await projects_api.add_project_states_for_user( - user_id=user_id, - project=new_project, - is_template=False, - app=request.app, - ) - - except ValidationError as exc: - raise web.HTTPBadRequest( - reason=f"Invalid project update: {exc.message}" - ) from exc - - except ProjectInvalidRightsError as exc: - raise web.HTTPForbidden( - reason="You do not have sufficient rights to save the project" - ) from exc - - except ProjectNotFoundError as exc: - raise web.HTTPNotFound from exc - - return {"data": new_project} - - -@routes.delete(f"/{VTAG}/projects/{{project_uuid}}") -@login_required -@permission_required("project.delete") -async def delete_project(request: web.Request): - # first check if the project exists - user_id: int = request[RQT_USERID_KEY] - try: - project_uuid = request.match_info["project_id"] - except KeyError as err: - raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err - - try: - await projects_api.get_project_for_user( - request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True, - ) - project_users: Set[int] = set() - with managed_resource(user_id, None, request.app) as rt: - project_users = { - user_session.user_id - for user_session in await rt.find_users_of_resource( - PROJECT_ID_KEY, project_uuid - ) - } - # that project is still in use - if user_id in project_users: - raise web.HTTPForbidden( - reason="Project is still open in another tab/browser. It cannot be deleted until it is closed." - ) - if project_users: - other_user_names = { - await get_user_name(request.app, x) for x in project_users - } - raise web.HTTPForbidden( - reason=f"Project is open by {other_user_names}. It cannot be deleted until the project is closed." 
- ) - - await projects_api.delete_project(request.app, project_uuid, user_id) - except ProjectInvalidRightsError as err: - raise web.HTTPForbidden( - reason="You do not have sufficient rights to delete this project" - ) from err - except ProjectNotFoundError as err: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from err - - raise web.HTTPNoContent(content_type="application/json") +# +# Custom methods https://google.aip.dev/136 +# @routes.post(f"/{VTAG}/projects/{{project_uuid}}:open") diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py new file mode 100644 index 00000000000..f630e0ee0fd --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py @@ -0,0 +1,498 @@ +""" Handlers for STANDARD methods on /projects colletions + + +Standard methods are +- Get https://google.aip.dev/131 +- List https://google.aip.dev/132 +- Create https://google.aip.dev/133 +- Update https://google.aip.dev/134 +- Delete https://google.aip.dev/135 + +and the acronym CRUD states for Create+Read(Get&List)+Update+Delete + +""" + +import asyncio +import json +import logging +from typing import Any, Coroutine, Dict, List, Optional, Set +from uuid import UUID + +from aiohttp import web +from jsonschema import ValidationError +from models_library.projects import ProjectID +from models_library.projects_state import ProjectStatus +from models_library.rest_pagination import Page +from models_library.rest_pagination_utils import paginate_data +from servicelib.utils import logged_gather +from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB + +from .. import catalog, director_v2_api +from .._constants import RQ_PRODUCT_KEY +from .._meta import api_version_prefix as VTAG +from ..login.decorators import RQT_USERID_KEY, login_required +from ..resource_manager.websocket_manager import PROJECT_ID_KEY, managed_resource +from ..rest_constants import RESPONSE_MODEL_POLICY +from ..security_api import check_permission +from ..security_decorators import permission_required +from ..storage_api import copy_data_folders_from_project +from ..users_api import get_user_name +from . import projects_api +from .project_models import ProjectDict, ProjectTypeAPI +from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI +from .projects_exceptions import ProjectInvalidRightsError, ProjectNotFoundError +from .projects_utils import ( + any_node_inputs_changed, + clone_project_document, + get_project_unavailable_services, + project_uses_available_services, +) + +# When the user requests a project with a repo, the working copy might differ from +# the repo project. A middleware in the meta module (if active) will resolve +# the working copy and redirect to the appropriate project entrypoint. 
Nonetheless, the +# response needs to refer to the uuid of the request and this is passed through this request key +RQ_REQUESTED_REPO_PROJECT_UUID_KEY = f"{__name__}.RQT_REQUESTED_REPO_PROJECT_UUID_KEY" + +OVERRIDABLE_DOCUMENT_KEYS = [ + "name", + "description", + "thumbnail", + "prjOwner", + "accessRights", +] +# TODO: validate these against api/specs/webserver/v0/components/schemas/project-v0.0.1.json + + +log = logging.getLogger(__name__) + +routes = web.RouteTableDef() + + +@routes.post(f"/{VTAG}/projects") +@login_required +@permission_required("project.create") +@permission_required("services.pipeline.*") # due to update_pipeline_db +async def create_projects( + request: web.Request, +): # pylint: disable=too-many-branches, too-many-statements + user_id: int = request[RQT_USERID_KEY] + db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] + template_uuid = request.query.get("from_template") + as_template = request.query.get("as_template") + copy_data: bool = bool( + request.query.get("copy_data", "true") in [1, "true", "True"] + ) + hidden: bool = bool(request.query.get("hidden", False)) + + new_project = {} + new_project_was_hidden_before_data_was_copied = hidden + try: + clone_data_coro: Optional[Coroutine] = None + source_project: Optional[Dict[str, Any]] = None + if as_template: # create template from + await check_permission(request, "project.template.create") + source_project = await projects_api.get_project_for_user( + request.app, + project_uuid=as_template, + user_id=user_id, + include_templates=False, + ) + elif template_uuid: # create from template + source_project = await db.get_template_project(template_uuid) + if not source_project: + raise web.HTTPNotFound( + reason="Invalid template uuid {}".format(template_uuid) + ) + if source_project: + # clone template as user project + new_project, nodes_map = clone_project_document( + source_project, + forced_copy_project_id=None, + clean_output_data=(copy_data == False), + ) + if template_uuid: + # remove template access rights + new_project["accessRights"] = {} + # the project is to be hidden until the data is copied + hidden = copy_data + clone_data_coro = ( + copy_data_folders_from_project( + request.app, source_project, new_project, nodes_map, user_id + ) + if copy_data + else None + ) + # FIXME: parameterized inputs should get defaults provided by service + + # overrides with body + if request.can_read_body: + predefined = await request.json() + if new_project: + for key in OVERRIDABLE_DOCUMENT_KEYS: + non_null_value = predefined.get(key) + if non_null_value: + new_project[key] = non_null_value + else: + # TODO: take skeleton and fill instead + new_project = predefined + + # re-validate data + await projects_api.validate_project(request.app, new_project) + + # update metadata (uuid, timestamps, ownership) and save + new_project = await db.add_project( + new_project, + user_id, + force_as_template=as_template is not None, + hidden=hidden, + ) + + # copies the project's DATA IF cloned + if clone_data_coro: + assert source_project # nosec + if as_template: + # we need to lock the original study while copying the data + async with projects_api.lock_with_notification( + request.app, + source_project["uuid"], + ProjectStatus.CLONING, + user_id, + await get_user_name(request.app, user_id), + ): + + await clone_data_coro + else: + await clone_data_coro + # unhide the project if needed since it is now complete + if not new_project_was_hidden_before_data_was_copied: + await db.update_project_without_checking_permissions( + 
new_project, new_project["uuid"], hidden=False + ) + + await director_v2_api.projects_networks_update( + request.app, UUID(new_project["uuid"]) + ) + + # This is a new project and every new graph needs to be reflected in the pipeline tables + await director_v2_api.create_or_update_pipeline( + request.app, user_id, new_project["uuid"] + ) + + # Appends state + new_project = await projects_api.add_project_states_for_user( + user_id=user_id, + project=new_project, + is_template=as_template is not None, + app=request.app, + ) + + except ValidationError as exc: + raise web.HTTPBadRequest(reason="Invalid project data") from exc + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason="Project not found") from exc + except ProjectInvalidRightsError as exc: + raise web.HTTPUnauthorized from exc + except asyncio.CancelledError: + log.warning( + "cancelled creation of project for user '%s', cleaning up", f"{user_id=}" + ) + await projects_api.submit_delete_project_task( + request.app, new_project["uuid"], user_id + ) + raise + else: + log.debug("project created successfuly") + raise web.HTTPCreated( + text=json.dumps(new_project), content_type="application/json" + ) + + +@routes.get(f"/{VTAG}/projects") +@login_required +@permission_required("project.read") +async def list_projects(request: web.Request): + # TODO: implement all query parameters as + # in https://www.ibm.com/support/knowledgecenter/en/SSCRJU_3.2.0/com.ibm.swg.im.infosphere.streams.rest.api.doc/doc/restapis-queryparms-list.html + from servicelib.aiohttp.rest_utils import extract_and_validate + + user_id, product_name = request[RQT_USERID_KEY], request[RQ_PRODUCT_KEY] + _, query, _ = await extract_and_validate(request) + + project_type = ProjectTypeAPI(query["type"]) + offset = query["offset"] + limit = query["limit"] + show_hidden = query["show_hidden"] + + db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] + + async def set_all_project_states( + projects: List[Dict[str, Any]], project_types: List[bool] + ): + await logged_gather( + *[ + projects_api.add_project_states_for_user( + user_id=user_id, + project=prj, + is_template=prj_type == ProjectTypeDB.TEMPLATE, + app=request.app, + ) + for prj, prj_type in zip(projects, project_types) + ], + reraise=True, + max_concurrency=100, + ) + + user_available_services: List[ + Dict + ] = await catalog.get_services_for_user_in_product( + request.app, user_id, product_name, only_key_versions=True + ) + + projects, project_types, total_number_projects = await db.load_projects( + user_id=user_id, + filter_by_project_type=ProjectTypeAPI.to_project_type_db(project_type), + filter_by_services=user_available_services, + offset=offset, + limit=limit, + include_hidden=show_hidden, + ) + await set_all_project_states(projects, project_types) + page = Page[ProjectDict].parse_obj( + paginate_data( + chunk=projects, + request_url=request.url, + total=total_number_projects, + limit=limit, + offset=offset, + ) + ) + return page.dict(**RESPONSE_MODEL_POLICY) + + +@routes.get(f"/{VTAG}/projects/{{project_uuid}}") +@login_required +@permission_required("project.read") +async def get_project(request: web.Request): + """Returns all projects accessible to a user (not necesarly owned)""" + # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! 
+ user_id, product_name = request[RQT_USERID_KEY], request[RQ_PRODUCT_KEY] + try: + project_uuid = request.match_info["project_id"] + except KeyError as err: + raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err + + user_available_services: List[ + Dict + ] = await catalog.get_services_for_user_in_product( + request.app, user_id, product_name, only_key_versions=True + ) + + try: + project = await projects_api.get_project_for_user( + request.app, + project_uuid=project_uuid, + user_id=user_id, + include_templates=True, + include_state=True, + ) + if not await project_uses_available_services(project, user_available_services): + unavailable_services = get_project_unavailable_services( + project, user_available_services + ) + formatted_services = ", ".join( + f"{service}:{version}" for service, version in unavailable_services + ) + # TODO: lack of permissions should be notified with https://httpstatuses.com/403 web.HTTPForbidden + raise web.HTTPNotFound( + reason=( + f"Project '{project_uuid}' uses unavailable services. Please ask " + f"for permission for the following services {formatted_services}" + ) + ) + + if new_uuid := request.get(RQ_REQUESTED_REPO_PROJECT_UUID_KEY): + project["uuid"] = new_uuid + + return {"data": project} + + except ProjectInvalidRightsError as exc: + raise web.HTTPForbidden( + reason=f"You do not have sufficient rights to read project {project_uuid}" + ) from exc + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from exc + + +@routes.put(f"/{VTAG}/projects/{{project_uuid}}") +@login_required +@permission_required("project.update") +@permission_required("services.pipeline.*") # due to update_pipeline_db +async def replace_project(request: web.Request): + """Implements PUT /projects + + In a PUT request, the enclosed entity is considered to be a modified version of + the resource stored on the origin server, and the client is requesting that the + stored version be replaced. + + With PATCH, however, the enclosed entity contains a set of instructions describing how a + resource currently residing on the origin server should be modified to produce a new version. + + Also, another difference is that when you want to update a resource with a PUT request, you have to send + the full payload as the request whereas with PATCH, you only send the parameters which you want to update.
+ + :raises web.HTTPNotFound: cannot find project id in repository + """ + user_id: int = request[RQT_USERID_KEY] + try: + project_uuid = ProjectID(request.match_info["project_id"]) + new_project = await request.json() + + # Prune state field (just in case) + new_project.pop("state", None) + + except AttributeError as err: + # NOTE: if new_project is not a dict, .pop will raise this error + raise web.HTTPBadRequest( + reason="Invalid request payload, expected a project model" + ) from err + except KeyError as err: + raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err + except json.JSONDecodeError as exc: + raise web.HTTPBadRequest(reason="Invalid request body") from exc + + db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] + await check_permission( + request, + "project.update | project.workbench.node.inputs.update", + context={ + "dbapi": db, + "project_id": f"{project_uuid}", + "user_id": user_id, + "new_data": new_project, + }, + ) + + try: + await projects_api.validate_project(request.app, new_project) + + current_project = await projects_api.get_project_for_user( + request.app, + project_uuid=f"{project_uuid}", + user_id=user_id, + include_templates=True, + include_state=True, + ) + + if current_project["accessRights"] != new_project["accessRights"]: + await check_permission(request, "project.access_rights.update") + + if await director_v2_api.is_pipeline_running( + request.app, user_id, project_uuid + ): + + if any_node_inputs_changed(new_project, current_project): + # NOTE: This is a conservative measure that we take + # until nodeports logic is re-designed to tackle this + # particular state. + # + # This measure avoids having a state with different node *links* in the + # comp-tasks table and the project's workbench column. + # The limitation is that nodeports only "sees" those in the comptask + # and this table does not add the new ones since it remains "blocked" + # for modification from that project while the pipeline runs. Therefore + # any extra link created while the pipeline is running cannot + # be managed by nodeports because it basically "cannot see it" + # + # Responds https://httpstatuses.com/409: + # The request could not be completed due to a conflict with the current + # state of the target resource (i.e. pipeline is running). This code is used in + # situations where the user might be able to resolve the conflict + # and resubmit the request (front-end will show a pop-up with message below) + # + raise web.HTTPConflict( + reason=f"Project {project_uuid} cannot be modified while pipeline is still running.
+ ) + + new_project = await db.replace_user_project( + new_project, user_id, f"{project_uuid}", include_templates=True + ) + await director_v2_api.projects_networks_update(request.app, project_uuid) + await director_v2_api.create_or_update_pipeline( + request.app, user_id, project_uuid + ) + # Appends state + new_project = await projects_api.add_project_states_for_user( + user_id=user_id, + project=new_project, + is_template=False, + app=request.app, + ) + + except ValidationError as exc: + raise web.HTTPBadRequest( + reason=f"Invalid project update: {exc.message}" + ) from exc + + except ProjectInvalidRightsError as exc: + raise web.HTTPForbidden( + reason="You do not have sufficient rights to save the project" + ) from exc + + except ProjectNotFoundError as exc: + raise web.HTTPNotFound from exc + + return {"data": new_project} + + +@routes.delete(f"/{VTAG}/projects/{{project_uuid}}") +@login_required +@permission_required("project.delete") +async def delete_project(request: web.Request): + # first check if the project exists + user_id: int = request[RQT_USERID_KEY] + try: + project_uuid = request.match_info["project_id"] + except KeyError as err: + raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err + + try: + await projects_api.get_project_for_user( + request.app, + project_uuid=project_uuid, + user_id=user_id, + include_templates=True, + ) + project_users: Set[int] = set() + with managed_resource(user_id, None, request.app) as rt: + project_users = { + user_session.user_id + for user_session in await rt.find_users_of_resource( + PROJECT_ID_KEY, project_uuid + ) + } + # that project is still in use + if user_id in project_users: + raise web.HTTPForbidden( + reason="Project is still open in another tab/browser. It cannot be deleted until it is closed." + ) + if project_users: + other_user_names = { + await get_user_name(request.app, uid) for uid in project_users + } + raise web.HTTPForbidden( + reason=f"Project is open by {other_user_names}. It cannot be deleted until the project is closed." 
+ ) + + await projects_api.submit_delete_project_task( + request.app, ProjectID(project_uuid), user_id + ) + + except ProjectInvalidRightsError as err: + raise web.HTTPForbidden( + reason="You do not have sufficient rights to delete this project" + ) from err + except ProjectNotFoundError as err: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from err + + raise web.HTTPNoContent(content_type="application/json") diff --git a/services/web/server/tests/integration/01/test_garbage_collection.py b/services/web/server/tests/integration/01/test_garbage_collection.py index e910aae61a5..81632acafee 100644 --- a/services/web/server/tests/integration/01/test_garbage_collection.py +++ b/services/web/server/tests/integration/01/test_garbage_collection.py @@ -8,7 +8,7 @@ from copy import deepcopy from pathlib import Path from typing import Any, AsyncIterable, Callable, Dict, List, Optional -from uuid import uuid4 +from uuid import UUID, uuid4 import aiopg import aiopg.sa @@ -34,6 +34,7 @@ list_user_groups, ) from simcore_service_webserver.login.plugin import setup_login +from simcore_service_webserver.projects._delete import get_scheduled_tasks from simcore_service_webserver.projects.plugin import setup_projects from simcore_service_webserver.projects.project_models import ProjectDict from simcore_service_webserver.resource_manager.plugin import setup_resource_manager @@ -65,7 +66,7 @@ @pytest.fixture(autouse=True) -def __drop_and_recreate_postgres__(database_from_template_before_each_function) -> None: +def __drop_and_recreate_postgres__(database_from_template_before_each_function): yield @@ -166,7 +167,16 @@ def client( ) -################ utils +@pytest.fixture +def disable_garbage_collector_task(mocker): + """patch the setup of the garbage collector so we can call it manually""" + mocker.patch( + "simcore_service_webserver.garbage_collector.setup_garbage_collector", + return_value="", + ) + + +# UTILS -------------- async def login_user(client: TestClient): @@ -190,6 +200,7 @@ async def new_project( if access_rights is not None: project_data["accessRights"] = access_rights + assert client.app return await create_project( client.app, project_data, @@ -205,6 +216,7 @@ async def get_template_project( access_rights=None, ): """returns a tempalte shared with all""" + assert client.app _, _, all_group = await list_user_groups(client.app, user["id"]) # the information comes from a file, randomize it @@ -280,33 +292,25 @@ async def disconnect_user_from_socketio(client, sio_connection_data): assert not await socket_registry.find_resources(resource_key, "socket_id") -async def assert_users_count( - aiopg_engine: aiopg.sa.Engine, expected_users: int -) -> bool: +async def assert_users_count(aiopg_engine: aiopg.sa.Engine, expected_users: int): async with aiopg_engine.acquire() as conn: users_count = await conn.scalar(select(func.count()).select_from(users)) assert users_count == expected_users - return True -async def assert_projects_count( - aiopg_engine: aiopg.sa.Engine, expected_projects: int -) -> bool: +async def assert_projects_count(aiopg_engine: aiopg.sa.Engine, expected_projects: int): async with aiopg_engine.acquire() as conn: projects_count = await conn.scalar(select(func.count()).select_from(projects)) assert projects_count == expected_projects - return True -def assert_dicts_match_by_common_keys(first_dict, second_dict) -> bool: +def assert_dicts_match_by_common_keys(first_dict, second_dict): common_keys = set(first_dict.keys()) & set(second_dict.keys()) for key in 
common_keys: assert first_dict[key] == second_dict[key], key - return True - -async def query_user_from_db(aiopg_engine: aiopg.sa.Engine, user: Dict): +async def fetch_user_from_db(aiopg_engine: aiopg.sa.Engine, user: UserInfoDict): """returns a user from the db""" async with aiopg_engine.acquire() as conn: user_result = await conn.execute( @@ -315,7 +319,7 @@ async def query_user_from_db(aiopg_engine: aiopg.sa.Engine, user: Dict): return await user_result.first() -async def query_project_from_db(aiopg_engine: aiopg.sa.Engine, user_project: Dict): +async def fetch_project_from_db(aiopg_engine: aiopg.sa.Engine, user_project: Dict): async with aiopg_engine.acquire() as conn: project_result = await conn.execute( projects.select().where(projects.c.uuid == user_project["uuid"]) @@ -323,79 +327,62 @@ async def query_project_from_db(aiopg_engine: aiopg.sa.Engine, user_project: Dic return await project_result.first() -async def assert_user_in_database( - aiopg_engine: aiopg.sa.Engine, logged_user: Dict -) -> bool: - user = await query_user_from_db(aiopg_engine, logged_user) +async def assert_user_in_db(aiopg_engine: aiopg.sa.Engine, logged_user: UserInfoDict): + user = await fetch_user_from_db(aiopg_engine, logged_user) + assert user user_as_dict = dict(user) # some values need to be transformed user_as_dict["role"] = user_as_dict["role"].value user_as_dict["status"] = user_as_dict["status"].value - assert assert_dicts_match_by_common_keys(user_as_dict, logged_user) is True - - return True + assert_dicts_match_by_common_keys(user_as_dict, logged_user) -async def assert_user_not_in_database( - aiopg_engine: aiopg.sa.Engine, user: Dict -) -> bool: - user = await query_user_from_db(aiopg_engine, user) - assert user is None +async def assert_user_not_in_db(aiopg_engine: aiopg.sa.Engine, user: UserInfoDict): + user_db = await fetch_user_from_db(aiopg_engine, user) + assert user_db is None - return True - -async def assert_project_in_database( - aiopg_engine: aiopg.sa.Engine, user_project: Dict -) -> bool: - project = await query_project_from_db(aiopg_engine, user_project) +async def assert_project_in_db(aiopg_engine: aiopg.sa.Engine, user_project: Dict): + project = await fetch_project_from_db(aiopg_engine, user_project) + assert project project_as_dict = dict(project) - assert assert_dicts_match_by_common_keys(project_as_dict, user_project) is True - - return True + assert_dicts_match_by_common_keys(project_as_dict, user_project) async def assert_user_is_owner_of_project( - aiopg_engine: aiopg.sa.Engine, owner_user: Dict, owner_project: Dict -) -> bool: - user = await query_user_from_db(aiopg_engine, owner_user) - project = await query_project_from_db(aiopg_engine, owner_project) + aiopg_engine: aiopg.sa.Engine, owner_user: UserInfoDict, owner_project: Dict +): + user = await fetch_user_from_db(aiopg_engine, owner_user) + assert user - assert user.id == project.prj_owner + project = await fetch_project_from_db(aiopg_engine, owner_project) + assert project - return True + assert user.id == project.prj_owner async def assert_one_owner_for_project( - aiopg_engine: aiopg.sa.Engine, project: Dict, possible_owners: List[Dict] -) -> bool: + aiopg_engine: aiopg.sa.Engine, project: Dict, possible_owners: List[UserInfoDict] +): q_owners = [ - await query_user_from_db(aiopg_engine, owner) for owner in possible_owners + await fetch_user_from_db(aiopg_engine, owner) for owner in possible_owners ] - q_project = await query_project_from_db(aiopg_engine, project) + assert all(q_owners) - assert q_project.prj_owner in 
set([x.id for x in q_owners]) + q_project = await fetch_project_from_db(aiopg_engine, project) + assert q_project - return True + assert q_project.prj_owner in set(user.id for user in q_owners if user) -################ end utils - - -@pytest.fixture -def mock_garbage_collector_task(mocker): - """patch the setup of the garbage collector so we can call it manually""" - mocker.patch( - "simcore_service_webserver.garbage_collector.setup_garbage_collector", - return_value="", - ) +# TESTS --------------- async def test_t1_while_guest_is_connected_no_resources_are_removed( - mock_garbage_collector_task, + disable_garbage_collector_task: None, client, socketio_client_factory: Callable, aiopg_engine, @@ -407,21 +394,19 @@ async def test_t1_while_guest_is_connected_no_resources_are_removed( empty_guest_user_project = await new_project( client, logged_guest_user, tests_data_dir ) - assert await assert_users_count(aiopg_engine, 1) is True - assert await assert_projects_count(aiopg_engine, 1) is True + await assert_users_count(aiopg_engine, 1) + await assert_projects_count(aiopg_engine, 1) await connect_to_socketio(client, logged_guest_user, socketio_client_factory) await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector_core.collect_garbage(app=client.app) - assert await assert_user_in_database(aiopg_engine, logged_guest_user) is True - assert ( - await assert_project_in_database(aiopg_engine, empty_guest_user_project) is True - ) + await assert_user_in_db(aiopg_engine, logged_guest_user) + await assert_project_in_db(aiopg_engine, empty_guest_user_project) async def test_t2_cleanup_resources_after_browser_is_closed( - mock_garbage_collector_task, + disable_garbage_collector_task: None, simcore_services_ready, client, socketio_client_factory: Callable, @@ -434,8 +419,8 @@ async def test_t2_cleanup_resources_after_browser_is_closed( empty_guest_user_project = await new_project( client, logged_guest_user, tests_data_dir ) - assert await assert_users_count(aiopg_engine, 1) is True - assert await assert_projects_count(aiopg_engine, 1) is True + await assert_users_count(aiopg_engine, 1) + await assert_projects_count(aiopg_engine, 1) sio_connection_data = await connect_to_socketio( client, logged_guest_user, socketio_client_factory @@ -444,15 +429,20 @@ async def test_t2_cleanup_resources_after_browser_is_closed( await garbage_collector_core.collect_garbage(app=client.app) # check user and project are still in the DB - assert await assert_user_in_database(aiopg_engine, logged_guest_user) is True - assert ( - await assert_project_in_database(aiopg_engine, empty_guest_user_project) is True - ) + await assert_user_in_db(aiopg_engine, logged_guest_user) + await assert_project_in_db(aiopg_engine, empty_guest_user_project) await disconnect_user_from_socketio(client, sio_connection_data) await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector_core.collect_garbage(app=client.app) + # ensures all project delete tasks are done + delete_tasks = get_scheduled_tasks( + project_uuid=UUID(empty_guest_user_project["uuid"]), + user_id=logged_guest_user["id"], + ) + assert not delete_tasks or all(t.done() for t in delete_tasks) + # check user and project are no longer in the DB async with aiopg_engine.acquire() as conn: user_result = await conn.execute(users.select()) @@ -460,8 +450,8 @@ async def test_t2_cleanup_resources_after_browser_is_closed( project_result = await conn.execute(projects.select()) project = await project_result.first() - assert user is None assert project is None + assert user
is None async def test_t3_gc_will_not_intervene_for_regular_users_and_their_resources( @@ -487,15 +477,15 @@ async def test_t3_gc_will_not_intervene_for_regular_users_and_their_resources( async def assert_projects_and_users_are_present(): # check user and projects and templates are still in the DB - assert await assert_user_in_database(aiopg_engine, logged_user) is True + await assert_user_in_db(aiopg_engine, logged_user) for project in user_projects: - assert await assert_project_in_database(aiopg_engine, project) is True + await assert_project_in_db(aiopg_engine, project) for template in user_template_projects: - assert await assert_project_in_database(aiopg_engine, template) is True + await assert_project_in_db(aiopg_engine, template) - assert await assert_users_count(aiopg_engine, 1) is True + await assert_users_count(aiopg_engine, 1) expected_count = number_of_projects + number_of_templates - assert await assert_projects_count(aiopg_engine, expected_count) is True + await assert_projects_count(aiopg_engine, expected_count) # connect the user and wait for gc sio_connection_data = await connect_to_socketio( @@ -543,15 +533,15 @@ async def test_t4_project_shared_with_group_transferred_to_user_in_group_on_owne # mark u1 as guest await change_user_role(aiopg_engine, u1, UserRole.GUEST) - assert await assert_users_count(aiopg_engine, 3) is True - assert await assert_projects_count(aiopg_engine, 1) is True - assert await assert_user_is_owner_of_project(aiopg_engine, u1, project) is True + await assert_users_count(aiopg_engine, 3) + await assert_projects_count(aiopg_engine, 1) + await assert_user_is_owner_of_project(aiopg_engine, u1, project) await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) # expected outcome: u1 was deleted, one of the users in g1 is the new owner - assert await assert_user_not_in_database(aiopg_engine, u1) is True - assert await assert_one_owner_for_project(aiopg_engine, project, [u2, u3]) is True + await assert_user_not_in_db(aiopg_engine, u1) + await assert_one_owner_for_project(aiopg_engine, project, [u2, u3]) async def test_t5_project_shared_with_other_users_transferred_to_one_of_them( @@ -566,8 +556,8 @@ async def test_t5_project_shared_with_other_users_transferred_to_one_of_them( u2 = await login_user(client) u3 = await login_user(client) - q_u2 = await query_user_from_db(aiopg_engine, u2) - q_u3 = await query_user_from_db(aiopg_engine, u3) + q_u2 = await fetch_user_from_db(aiopg_engine, u2) + q_u3 = await fetch_user_from_db(aiopg_engine, u3) # u1 creates project and shares it with g1 project = await new_project( @@ -583,15 +573,15 @@ async def test_t5_project_shared_with_other_users_transferred_to_one_of_them( # mark u1 as guest await change_user_role(aiopg_engine, u1, UserRole.GUEST) - assert await assert_users_count(aiopg_engine, 3) is True - assert await assert_projects_count(aiopg_engine, 1) is True - assert await assert_user_is_owner_of_project(aiopg_engine, u1, project) is True + await assert_users_count(aiopg_engine, 3) + await assert_projects_count(aiopg_engine, 1) + await assert_user_is_owner_of_project(aiopg_engine, u1, project) await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) # expected outcome: u1 was deleted, one of the users in g1 is the new owner - assert await assert_user_not_in_database(aiopg_engine, u1) is True - assert await assert_one_owner_for_project(aiopg_engine, project, [u2, u3]) is True + await assert_user_not_in_db(aiopg_engine, u1) + await assert_one_owner_for_project(aiopg_engine, project, [u2, u3]) async def 
test_t6_project_shared_with_group_transferred_to_last_user_in_group_on_owner_removal( @@ -625,20 +615,20 @@ async def test_t6_project_shared_with_group_transferred_to_last_user_in_group_on # mark u1 as guest await change_user_role(aiopg_engine, u1, UserRole.GUEST) - assert await assert_users_count(aiopg_engine, 3) is True - assert await assert_projects_count(aiopg_engine, 1) is True - assert await assert_user_is_owner_of_project(aiopg_engine, u1, project) is True + await assert_users_count(aiopg_engine, 3) + await assert_projects_count(aiopg_engine, 1) + await assert_user_is_owner_of_project(aiopg_engine, u1, project) await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) # expected outcome: u1 was deleted, one of the users in g1 is the new owner - assert await assert_user_not_in_database(aiopg_engine, u1) is True - assert await assert_one_owner_for_project(aiopg_engine, project, [u2, u3]) is True + await assert_user_not_in_db(aiopg_engine, u1) + await assert_one_owner_for_project(aiopg_engine, project, [u2, u3]) # find new owner and mark hims as GUEST - q_u2 = await query_user_from_db(aiopg_engine, u2) - q_u3 = await query_user_from_db(aiopg_engine, u3) - q_project = await query_project_from_db(aiopg_engine, project) + q_u2 = await fetch_user_from_db(aiopg_engine, u2) + q_u3 = await fetch_user_from_db(aiopg_engine, u3) + q_project = await fetch_project_from_db(aiopg_engine, project) new_owner = None remaining_others = [] @@ -655,15 +645,16 @@ async def test_t6_project_shared_with_group_transferred_to_last_user_in_group_on await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) # expected outcome: the new_owner will be deleted and one of the remainint_others wil be the new owner - assert await assert_user_not_in_database(aiopg_engine, new_owner) is True - assert ( - await assert_one_owner_for_project(aiopg_engine, project, remaining_others) - is True - ) + await assert_user_not_in_db(aiopg_engine, new_owner) + await assert_one_owner_for_project(aiopg_engine, project, remaining_others) async def test_t7_project_shared_with_group_transferred_from_one_member_to_the_last_and_all_is_removed( - simcore_services_ready, client, aiopg_engine, tests_data_dir: Path + disable_garbage_collector_task: None, + simcore_services_ready, + client, + aiopg_engine, + tests_data_dir: Path, ): """ USER "u1" creates a GROUP "g1" and invites USERS "u2" and "u3"; @@ -695,52 +686,54 @@ async def test_t7_project_shared_with_group_transferred_from_one_member_to_the_l # mark u1 as guest await change_user_role(aiopg_engine, u1, UserRole.GUEST) - assert await assert_users_count(aiopg_engine, 3) is True - assert await assert_projects_count(aiopg_engine, 1) is True - assert await assert_user_is_owner_of_project(aiopg_engine, u1, project) is True + await assert_projects_count(aiopg_engine, 1) + await assert_users_count(aiopg_engine, 3) + await assert_user_is_owner_of_project(aiopg_engine, u1, project) - await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) + # await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) + await garbage_collector_core.collect_garbage(app=client.app) # expected outcome: u1 was deleted, one of the users in g1 is the new owner - assert await assert_user_not_in_database(aiopg_engine, u1) is True - assert await assert_one_owner_for_project(aiopg_engine, project, [u2, u3]) is True + await assert_one_owner_for_project(aiopg_engine, project, [u2, u3]) + await assert_user_not_in_db(aiopg_engine, u1) # find new owner and mark hims as GUEST - q_u2 = await query_user_from_db(aiopg_engine, u2) - q_u3 = await 
query_user_from_db(aiopg_engine, u3) - q_project = await query_project_from_db(aiopg_engine, project) + q_u2 = await fetch_user_from_db(aiopg_engine, u2) + q_u3 = await fetch_user_from_db(aiopg_engine, u3) + q_project = await fetch_project_from_db(aiopg_engine, project) + assert q_project - new_owner = None - remaining_others = [] + new_owner: Optional[UserInfoDict] = None + remaining_users = [] for user in [q_u2, q_u3]: + assert user if user.id == q_project.prj_owner: new_owner = user else: - remaining_others.append(user) + remaining_users.append(user) assert new_owner is not None # expected to a new owner between the 2 other users # mark new owner as guest await change_user_role(aiopg_engine, new_owner, UserRole.GUEST) - await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) + # await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) + await garbage_collector_core.collect_garbage(app=client.app) # expected outcome: the new_owner will be deleted and one of the remainint_others wil be the new owner - assert await assert_user_not_in_database(aiopg_engine, new_owner) is True - assert ( - await assert_one_owner_for_project(aiopg_engine, project, remaining_others) - is True - ) + await assert_one_owner_for_project(aiopg_engine, project, remaining_users) + await assert_user_not_in_db(aiopg_engine, new_owner) # only 1 user is left as the owner mark him as GUEST - for user in remaining_others: + for user in remaining_users: # mark new owner as guest await change_user_role(aiopg_engine, user, UserRole.GUEST) - await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) + # await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) + await garbage_collector_core.collect_garbage(app=client.app) # expected outcome: the last user will be removed and the project will be removed - assert await assert_users_count(aiopg_engine, 0) is True - assert await assert_projects_count(aiopg_engine, 0) is True + await assert_projects_count(aiopg_engine, 0) + await assert_users_count(aiopg_engine, 0) async def test_t8_project_shared_with_other_users_transferred_to_one_of_them_until_one_user_remains( @@ -757,8 +750,8 @@ async def test_t8_project_shared_with_other_users_transferred_to_one_of_them_unt u2 = await login_user(client) u3 = await login_user(client) - q_u2 = await query_user_from_db(aiopg_engine, u2) - q_u3 = await query_user_from_db(aiopg_engine, u3) + q_u2 = await fetch_user_from_db(aiopg_engine, u2) + q_u3 = await fetch_user_from_db(aiopg_engine, u3) # u1 creates project and shares it with g1 project = await new_project( @@ -774,20 +767,20 @@ async def test_t8_project_shared_with_other_users_transferred_to_one_of_them_unt # mark u1 as guest await change_user_role(aiopg_engine, u1, UserRole.GUEST) - assert await assert_users_count(aiopg_engine, 3) is True - assert await assert_projects_count(aiopg_engine, 1) is True - assert await assert_user_is_owner_of_project(aiopg_engine, u1, project) is True + await assert_users_count(aiopg_engine, 3) + await assert_projects_count(aiopg_engine, 1) + await assert_user_is_owner_of_project(aiopg_engine, u1, project) await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) # expected outcome: u1 was deleted, one of the users in g1 is the new owner - assert await assert_user_not_in_database(aiopg_engine, u1) is True - assert await assert_one_owner_for_project(aiopg_engine, project, [u2, u3]) is True + await assert_user_not_in_db(aiopg_engine, u1) + await assert_one_owner_for_project(aiopg_engine, project, [u2, u3]) # find new owner and mark hims as GUEST - q_u2 = await query_user_from_db(aiopg_engine, u2) - q_u3 = await 
query_user_from_db(aiopg_engine, u3) - q_project = await query_project_from_db(aiopg_engine, project) + q_u2 = await fetch_user_from_db(aiopg_engine, u2) + q_u3 = await fetch_user_from_db(aiopg_engine, u3) + q_project = await fetch_project_from_db(aiopg_engine, project) new_owner = None remaining_others = [] @@ -804,13 +797,10 @@ async def test_t8_project_shared_with_other_users_transferred_to_one_of_them_unt await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) # expected outcome: the new_owner will be deleted and one of the remainint_others wil be the new owner - assert await assert_user_not_in_database(aiopg_engine, new_owner) is True - assert ( - await assert_one_owner_for_project(aiopg_engine, project, remaining_others) - is True - ) - assert await assert_users_count(aiopg_engine, 1) is True - assert await assert_projects_count(aiopg_engine, 1) is True + await assert_user_not_in_db(aiopg_engine, new_owner) + await assert_one_owner_for_project(aiopg_engine, project, remaining_others) + await assert_users_count(aiopg_engine, 1) + await assert_projects_count(aiopg_engine, 1) async def test_t9_project_shared_with_other_users_transferred_between_them_and_then_removed( @@ -829,8 +819,8 @@ async def test_t9_project_shared_with_other_users_transferred_between_them_and_t u2 = await login_user(client) u3 = await login_user(client) - q_u2 = await query_user_from_db(aiopg_engine, u2) - q_u3 = await query_user_from_db(aiopg_engine, u3) + q_u2 = await fetch_user_from_db(aiopg_engine, u2) + q_u3 = await fetch_user_from_db(aiopg_engine, u3) # u1 creates project and shares it with g1 project = await new_project( @@ -846,20 +836,20 @@ async def test_t9_project_shared_with_other_users_transferred_between_them_and_t # mark u1 as guest await change_user_role(aiopg_engine, u1, UserRole.GUEST) - assert await assert_users_count(aiopg_engine, 3) is True - assert await assert_projects_count(aiopg_engine, 1) is True - assert await assert_user_is_owner_of_project(aiopg_engine, u1, project) is True + await assert_users_count(aiopg_engine, 3) + await assert_projects_count(aiopg_engine, 1) + await assert_user_is_owner_of_project(aiopg_engine, u1, project) await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) # expected outcome: u1 was deleted, one of the users in g1 is the new owner - assert await assert_user_not_in_database(aiopg_engine, u1) is True - assert await assert_one_owner_for_project(aiopg_engine, project, [u2, u3]) is True + await assert_user_not_in_db(aiopg_engine, u1) + await assert_one_owner_for_project(aiopg_engine, project, [u2, u3]) # find new owner and mark hims as GUEST - q_u2 = await query_user_from_db(aiopg_engine, u2) - q_u3 = await query_user_from_db(aiopg_engine, u3) - q_project = await query_project_from_db(aiopg_engine, project) + q_u2 = await fetch_user_from_db(aiopg_engine, u2) + q_u3 = await fetch_user_from_db(aiopg_engine, u3) + q_project = await fetch_project_from_db(aiopg_engine, project) new_owner = None remaining_others = [] @@ -876,13 +866,10 @@ async def test_t9_project_shared_with_other_users_transferred_between_them_and_t await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) # expected outcome: the new_owner will be deleted and one of the remainint_others wil be the new owner - assert await assert_user_not_in_database(aiopg_engine, new_owner) is True - assert ( - await assert_one_owner_for_project(aiopg_engine, project, remaining_others) - is True - ) - assert await assert_users_count(aiopg_engine, 1) is True - assert await assert_projects_count(aiopg_engine, 1) is True + await 
assert_user_not_in_db(aiopg_engine, new_owner) + await assert_one_owner_for_project(aiopg_engine, project, remaining_others) + await assert_users_count(aiopg_engine, 1) + await assert_projects_count(aiopg_engine, 1) # only 1 user is left as the owner mark him as GUEST for user in remaining_others: @@ -892,8 +879,8 @@ async def test_t9_project_shared_with_other_users_transferred_between_them_and_t await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) # expected outcome: the last user will be removed and the project will be removed - assert await assert_users_count(aiopg_engine, 0) is True - assert await assert_projects_count(aiopg_engine, 0) is True + await assert_users_count(aiopg_engine, 0) + await assert_projects_count(aiopg_engine, 0) async def test_t10_owner_and_all_shared_users_marked_as_guests( @@ -908,8 +895,10 @@ async def test_t10_owner_and_all_shared_users_marked_as_guests( u2 = await login_user(client) u3 = await login_user(client) - q_u2 = await query_user_from_db(aiopg_engine, u2) - q_u3 = await query_user_from_db(aiopg_engine, u3) + q_u2 = await fetch_user_from_db(aiopg_engine, u2) + q_u3 = await fetch_user_from_db(aiopg_engine, u3) + assert q_u2 + assert q_u3 # u1 creates project and shares it with g1 project = await new_project( @@ -927,14 +916,14 @@ async def test_t10_owner_and_all_shared_users_marked_as_guests( await change_user_role(aiopg_engine, u2, UserRole.GUEST) await change_user_role(aiopg_engine, u3, UserRole.GUEST) - assert await assert_users_count(aiopg_engine, 3) is True - assert await assert_projects_count(aiopg_engine, 1) is True - assert await assert_user_is_owner_of_project(aiopg_engine, u1, project) is True + await assert_users_count(aiopg_engine, 3) + await assert_projects_count(aiopg_engine, 1) + await assert_user_is_owner_of_project(aiopg_engine, u1, project) await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) - assert await assert_users_count(aiopg_engine, 0) is True - assert await assert_projects_count(aiopg_engine, 0) is True + await assert_users_count(aiopg_engine, 0) + await assert_projects_count(aiopg_engine, 0) async def test_t11_owner_and_all_users_in_group_marked_as_guests( @@ -968,11 +957,11 @@ async def test_t11_owner_and_all_users_in_group_marked_as_guests( await change_user_role(aiopg_engine, u2, UserRole.GUEST) await change_user_role(aiopg_engine, u3, UserRole.GUEST) - assert await assert_users_count(aiopg_engine, 3) is True - assert await assert_projects_count(aiopg_engine, 1) is True - assert await assert_user_is_owner_of_project(aiopg_engine, u1, project) is True + await assert_users_count(aiopg_engine, 3) + await assert_projects_count(aiopg_engine, 1) + await assert_user_is_owner_of_project(aiopg_engine, u1, project) await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) - assert await assert_users_count(aiopg_engine, 0) is True - assert await assert_projects_count(aiopg_engine, 0) is True + await assert_users_count(aiopg_engine, 0) + await assert_projects_count(aiopg_engine, 0) diff --git a/services/web/server/tests/integration/01/test_project_workflow.py b/services/web/server/tests/integration/01/test_project_workflow.py index a1b3d6e776e..e5afff1223b 100644 --- a/services/web/server/tests/integration/01/test_project_workflow.py +++ b/services/web/server/tests/integration/01/test_project_workflow.py @@ -102,7 +102,7 @@ async def storage_subsystem_mock(mocker): # requests storage to copy data mock = mocker.patch( - "simcore_service_webserver.projects.projects_handlers.copy_data_folders_from_project" + 
"simcore_service_webserver.projects.projects_handlers_crud.copy_data_folders_from_project" ) async def _mock_copy_data_from_project(*args): @@ -112,7 +112,7 @@ async def _mock_copy_data_from_project(*args): # requests storage to delete data mock1 = mocker.patch( - "simcore_service_webserver.projects.projects_handlers.projects_api.storage_api.delete_data_folders_of_project", + "simcore_service_webserver.projects._delete.delete_data_folders_of_project", return_value="", ) return mock, mock1 diff --git a/services/web/server/tests/integration/conftest.py b/services/web/server/tests/integration/conftest.py index d3092907c06..66a5a86c370 100644 --- a/services/web/server/tests/integration/conftest.py +++ b/services/web/server/tests/integration/conftest.py @@ -155,7 +155,7 @@ def _default_app_config_for_integration_tests( @pytest.fixture(scope="function") def app_config( _default_app_config_for_integration_tests: ConfigDict, unused_tcp_port_factory -) -> Dict: +) -> ConfigDict: """ Swarm with integration stack already started This fixture can be safely modified during test since it is renovated on every call diff --git a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_studies_access.py b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_studies_access.py index 518f3e8bffe..0d0d66cfffd 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_studies_access.py +++ b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_studies_access.py @@ -27,7 +27,7 @@ from settings_library.redis import RedisSettings from simcore_service_webserver import catalog from simcore_service_webserver.log import setup_logging -from simcore_service_webserver.projects.projects_api import delete_project +from simcore_service_webserver.projects.projects_api import submit_delete_project_task from simcore_service_webserver.users_api import delete_user, get_user_role SHARED_STUDY_UUID = "e2e38eee-c569-4e55-b104-70d159e49c87" @@ -364,6 +364,7 @@ async def test_access_cookie_of_expired_user( assert await get_user_role(app, data["id"]) == UserRole.GUEST async def enforce_garbage_collect_guest(uid): + # TODO: can be replaced now by actual GC # Emulates garbage collector: # - GUEST user expired, cleaning it up # - client still holds cookie with its identifier nonetheless @@ -373,7 +374,10 @@ async def enforce_garbage_collect_guest(uid): assert len(projects) == 1 prj_id = projects[0]["uuid"] - await delete_project(app, prj_id, uid) + + delete_task = await submit_delete_project_task(app, prj_id, uid) + await delete_task + await delete_user(app, uid) return uid diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud.py index c3c375b2e5d..9cfa17105d7 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud.py @@ -10,11 +10,7 @@ from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union import pytest -from _helpers import ( # type: ignore - ExpectedResponse, - MockedStorageSubsystem, - standard_role_response, -) +from _helpers import ExpectedResponse, MockedStorageSubsystem, standard_role_response from aiohttp import web from aiohttp.test_utils import TestClient from aioresponses import aioresponses @@ -22,7 +18,7 @@ from pytest_simcore.helpers.utils_assert import assert_status from simcore_service_webserver._meta import api_version_prefix from simcore_service_webserver.db_models 
import UserRole -from simcore_service_webserver.projects.projects_handlers import ( +from simcore_service_webserver.projects.projects_handlers_crud import ( OVERRIDABLE_DOCUMENT_KEYS, ) from simcore_service_webserver.utils import now_str, to_datetime diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_delete.py b/services/web/server/tests/unit/with_dbs/02/test_projects_delete.py index 0e8bd1675fc..7bf4addc15d 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_delete.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_delete.py @@ -1,29 +1,25 @@ # pylint: disable=redefined-outer-name -# pylint: disable=too-many-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable -import asyncio + from typing import Any, Callable, Dict, Type from unittest.mock import MagicMock, call import pytest -from _helpers import ExpectedResponse # type: ignore -from _helpers import MockedStorageSubsystem # type: ignore -from _helpers import standard_role_response # type: ignore +from _helpers import ExpectedResponse, MockedStorageSubsystem, standard_role_response from aiohttp import web - -# TESTS ----------------------------------------------------------------------------------------- from aiohttp.test_utils import TestClient from pytest_simcore.helpers.utils_assert import assert_status from simcore_service_webserver._meta import api_version_prefix from simcore_service_webserver.db_models import UserRole +from simcore_service_webserver.projects import _delete from socketio.exceptions import ConnectionError as SocketConnectionError # HELPERS ----------------------------------------------------------------------------------------- -async def _delete_project( +async def _request_delete_project( client, project: Dict, expected: Type[web.HTTPException] ) -> None: url = client.app.router["delete_project"].url_for(project_id=project["uuid"]) @@ -33,6 +29,9 @@ async def _delete_project( await assert_status(resp, expected) +# TESTS ----------------------------------------------------------------------------------------- + + @pytest.mark.parametrize(*standard_role_response()) async def test_delete_project( client: TestClient, @@ -45,20 +44,31 @@ async def test_delete_project( fake_services: Callable, assert_get_same_project_caller: Callable, ): - # DELETE /v0/projects/{project_id} + assert client.app + # DELETE /v0/projects/{project_id} fakes = fake_services(5) mocked_director_v2_api["director_v2_core.get_services"].return_value = fakes - await _delete_project(client, user_project, expected.no_content) - await asyncio.sleep(2) # let some time fly for the background tasks to run + await _request_delete_project(client, user_project, expected.no_content) + + tasks = _delete.get_scheduled_tasks( + project_uuid=user_project["uuid"], user_id=logged_user["id"] + ) if expected.no_content == web.HTTPNoContent: + # Waits until deletion tasks are done + assert ( + len(tasks) == 1 + ), f"Only one delete fire&forget task expected, got {tasks=}" + # might have finished already, and therefore there is no need to wait + await tasks[0] + mocked_director_v2_api["director_v2_core.get_services"].assert_called_once() expected_calls = [ call( - app=client.server.app, + app=client.app, service_uuid=service["service_uuid"], save_state=True, ) @@ -68,11 +78,13 @@ async def test_delete_project( expected_calls ) - # wait for the fire&forget to run - await asyncio.sleep(2) - await assert_get_same_project_caller(client, user_project, web.HTTPNotFound) + else: + assert ( + len(tasks) == 0
+ ), f"NO delete fire&forget tasks expected when response is {expected.no_content}, got {tasks=}" + @pytest.mark.parametrize( "user_role, expected_ok, expected_forbidden", @@ -95,7 +107,7 @@ async def test_delete_multiple_opened_project_forbidden( expected_ok, expected_forbidden, ): - # service in project = await create_dynamic_service_mock(logged_user["id"], empty_user_project["uuid"]) + # service in project service = await create_dynamic_service_mock(logged_user["id"], user_project["uuid"]) # open project in tab1 @@ -105,6 +117,7 @@ async def test_delete_multiple_opened_project_forbidden( except SocketConnectionError: if user_role != UserRole.ANONYMOUS: pytest.fail("socket io connection should not fail") + url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) resp = await client.post(url, json=client_session_id1) await assert_status(resp, expected_ok) @@ -116,4 +129,5 @@ async def test_delete_multiple_opened_project_forbidden( except SocketConnectionError: if user_role != UserRole.ANONYMOUS: pytest.fail("socket io connection should not fail") - await _delete_project(client, user_project, expected_forbidden) + + await _request_delete_project(client, user_project, expected_forbidden) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_open_close.py b/services/web/server/tests/unit/with_dbs/02/test_projects_open_close.py index cd5d6502f76..88ac130924f 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_open_close.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_open_close.py @@ -13,7 +13,7 @@ import pytest import socketio from _helpers import ExpectedResponse, standard_role_response -from aiohttp import web +from aiohttp import ClientResponse, web from aiohttp.test_utils import TestClient, TestServer from models_library.projects_access import Owner from models_library.projects_state import ( @@ -28,7 +28,7 @@ from pytest_simcore.helpers.utils_projects import assert_get_same_project from servicelib.aiohttp.web_exceptions_extension import HTTPLocked from simcore_service_webserver.db_models import UserRole -from simcore_service_webserver.projects.projects_handlers import ( +from simcore_service_webserver.projects.projects_handlers_crud import ( OVERRIDABLE_DOCUMENT_KEYS, ) from simcore_service_webserver.socketio.events import SOCKET_IO_PROJECT_UPDATED_EVENT @@ -285,13 +285,11 @@ async def _assert_project_state_updated( handler.reset_mock() -async def _delete_project( - client, project: Dict, expected: Type[web.HTTPException] -) -> None: +async def _delete_project(client, project: Dict) -> ClientResponse: url = client.app.router["delete_project"].url_for(project_id=project["uuid"]) assert str(url) == f"{API_PREFIX}/projects/{project['uuid']}" resp = await client.delete(url) - await assert_status(resp, expected) + return resp # TESTS ---------------------------------------------------------------------------------------------------- @@ -361,10 +359,12 @@ async def test_share_project( expected.ok if share_rights["write"] else expected.forbidden, ) # user 2 can only delete projects if user 2 has delete access - await _delete_project( - client, - new_project, - expected.no_content if share_rights["delete"] else expected.forbidden, + resp = await _delete_project(client, new_project) + await assert_status( + resp, + expected_cls=expected.no_content + if share_rights["delete"] + else expected.forbidden, ) @@ -573,7 +573,7 @@ async def test_project_node_lifetime( ): mock_storage_api_delete_data_folders_of_project_node = 
mocker.patch( - "simcore_service_webserver.projects.projects_handlers.projects_api.storage_api.delete_data_folders_of_project_node", + "simcore_service_webserver.projects.projects_handlers_crud.projects_api.storage_api.delete_data_folders_of_project_node", return_value="", ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_pagination.py b/services/web/server/tests/unit/with_dbs/02/test_projects_pagination.py index 96ef63cf626..b334648f11f 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_pagination.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_pagination.py @@ -17,7 +17,7 @@ from pytest_simcore.helpers.utils_assert import assert_status from simcore_service_webserver._meta import api_version_prefix from simcore_service_webserver.db_models import UserRole -from simcore_service_webserver.projects.projects_handlers import ( +from simcore_service_webserver.projects.projects_handlers_crud import ( OVERRIDABLE_DOCUMENT_KEYS, ) from simcore_service_webserver.utils import now_str, to_datetime diff --git a/services/web/server/tests/unit/with_dbs/03/test_resource_manager.py b/services/web/server/tests/unit/with_dbs/03/test_resource_manager.py index bb5bc543f88..b3c9491f909 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_resource_manager.py +++ b/services/web/server/tests/unit/with_dbs/03/test_resource_manager.py @@ -36,8 +36,8 @@ from simcore_service_webserver.login.plugin import setup_login from simcore_service_webserver.projects.plugin import setup_projects from simcore_service_webserver.projects.projects_api import ( - delete_project, - remove_project_interactive_services, + remove_project_dynamic_services, + submit_delete_project_task, ) from simcore_service_webserver.projects.projects_exceptions import ProjectNotFoundError from simcore_service_webserver.resource_manager.plugin import setup_resource_manager @@ -140,7 +140,7 @@ def client( @pytest.fixture def mock_storage_delete_data_folders(mocker) -> mock.Mock: return mocker.patch( - "simcore_service_webserver.projects.projects_api.storage_api.delete_data_folders_of_project", + "simcore_service_webserver.projects._delete.delete_data_folders_of_project", return_value=None, ) @@ -789,22 +789,23 @@ async def test_regression_removing_unexisting_user( # regression test for https://github.com/ITISFoundation/osparc-simcore/issues/2504 assert client.app # remove project - await delete_project( + delete_task = await submit_delete_project_task( app=client.app, project_uuid=empty_user_project["uuid"], user_id=logged_user["id"], ) + await delete_task # remove user await delete_user(app=client.app, user_id=logged_user["id"]) with pytest.raises(UserNotFoundError): - await remove_project_interactive_services( + await remove_project_dynamic_services( user_id=logged_user["id"], project_uuid=empty_user_project["uuid"], app=client.app, ) with pytest.raises(ProjectNotFoundError): - await remove_project_interactive_services( + await remove_project_dynamic_services( user_id=logged_user["id"], project_uuid=empty_user_project["uuid"], app=client.app, diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py b/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py index 70aa846c4b2..dd602e52839 100644 --- a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py @@ -165,10 +165,10 @@ async def user_project( @pytest.fixture -def do_update_user_project( - logged_user: 
UserInfoDict, client: TestClient, faker: Faker -) -> Callable[[UUID], Awaitable]: - async def _doit(project_uuid: UUID) -> None: +def request_update_project( + logged_user: UserInfoDict, faker: Faker +) -> Callable[[TestClient, UUID], Awaitable]: + async def _go(client: TestClient, project_uuid: UUID) -> None: resp: aiohttp.ClientResponse = await client.get(f"{VX}/projects/{project_uuid}") assert resp.status == 200 @@ -188,31 +188,36 @@ async def _doit(project_uuid: UUID) -> None: body = await resp.json() assert resp.status == 200, str(body) - return _doit + return _go @pytest.fixture -async def do_delete_user_project( - logged_user: UserInfoDict, client: TestClient, mocker -) -> AsyncIterator[Callable[[UUID], Awaitable]]: - direct_call_to_director_v2: mock.Mock = mocker.patch( +async def request_delete_project( + logged_user: UserInfoDict, + mocker, +) -> AsyncIterator[Callable[[TestClient, UUID], Awaitable]]: + director_v2_api_delete_pipeline: mock.AsyncMock = mocker.patch( "simcore_service_webserver.projects.projects_api.director_v2_api.delete_pipeline", ) + director_v2_api_stop_services: mock.AsyncMock = mocker.patch( + "simcore_service_webserver.projects.projects_api.director_v2_api.stop_services", + ) fire_and_forget_call_to_storage: mock.Mock = mocker.patch( - "simcore_service_webserver.projects.projects_api.storage_api.delete_data_folders_of_project", + "simcore_service_webserver.projects._delete.delete_data_folders_of_project", ) - async def _doit(project_uuid: UUID) -> None: + async def _go(client: TestClient, project_uuid: UUID) -> None: resp: aiohttp.ClientResponse = await client.delete( f"{VX}/projects/{project_uuid}" ) assert resp.status == 204 - yield _doit + yield _go # ensure the call to delete data was completed async for attempt in AsyncRetrying(reraise=True, stop=stop_after_delay(20)): with attempt: - direct_call_to_director_v2.assert_called() + director_v2_api_delete_pipeline.assert_called() + director_v2_api_stop_services.assert_awaited() fire_and_forget_call_to_storage.assert_called() diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_core.py b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_core.py index 12be9936c59..46cfbf091ff 100644 --- a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_core.py +++ b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_core.py @@ -2,7 +2,7 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -from typing import Any, Callable, Dict +from typing import Any, Awaitable, Callable, Dict from uuid import UUID import pytest @@ -40,12 +40,13 @@ def aiohttp_mocked_request(client: TestClient, user_id: int) -> web.Request: @pytest.mark.acceptance_test async def test_workflow( + client: TestClient, project_uuid: UUID, faker: Faker, user_id: int, user_project: ProjectDict, aiohttp_mocked_request: web.Request, - do_update_user_project: Callable, + request_update_project: Callable[[TestClient, UUID], Awaitable], director_v2_service_mock: None, ): vc_repo = VersionControlRepository(aiohttp_mocked_request) @@ -60,7 +61,7 @@ async def test_workflow( assert checkpoint1.message == "first commit" # ------------------------------------- - await do_update_user_project(project_uuid) + await request_update_project(client, project_uuid) checkpoint2 = await create_checkpoint( vc_repo, project_uuid, tag="v1", message="second commit" @@ -96,7 +97,7 @@ async def test_workflow( # 
------------------------------------- # creating branches - await do_update_user_project(project_uuid) + await request_update_project(client, project_uuid) checkpoint3 = await create_checkpoint( vc_repo, diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py index ed41b2c0ea0..b8e00a0954d 100644 --- a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py +++ b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py @@ -9,8 +9,9 @@ import pytest from aiohttp import web from aiohttp.test_utils import TestClient -from models_library.projects import Project +from models_library.projects import Project, ProjectID from models_library.rest_pagination import Page +from models_library.users import UserID from pydantic.main import BaseModel from pytest_simcore.helpers.utils_assert import assert_status from simcore_service_webserver._meta import API_VTAG as VX @@ -57,7 +58,7 @@ async def assert_status_and_body( async def test_workflow( client: TestClient, user_project: ProjectDict, - do_update_user_project: Callable[[UUID], Awaitable], + request_update_project: Callable[[TestClient, UUID], Awaitable], director_v2_service_mock: None, ): @@ -152,7 +153,7 @@ async def test_workflow( ) # do some changes in project - await do_update_user_project(project.uuid) + await request_update_project(client, project.uuid) # CREATE new checkpoint resp = await client.post( @@ -218,8 +219,9 @@ async def test_create_checkpoint_without_changes( async def test_delete_project_and_repo( client: TestClient, - project_uuid: UUID, - do_delete_user_project: Callable[[UUID], Awaitable], + user_id: UserID, + project_uuid: ProjectID, + request_delete_project: Callable[[TestClient, UUID], Awaitable], ): # CREATE a checkpoint @@ -239,7 +241,16 @@ async def test_delete_project_and_repo( ) # DELETE project -> projects_vc_* deletion follow - await do_delete_user_project(project_uuid) + await request_delete_project(client, project_uuid) + + # TMP fix here waits ------------ + # FIXME: mark as deleted, still gets entrypoints!! 
+ from simcore_service_webserver.projects import projects_api + + delete_task = projects_api.get_delete_project_task(project_uuid, user_id) + assert delete_task + await delete_task + # -------------------------------- # LIST empty resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/checkpoints") diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py index 616fe7b3350..1458d635274 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -182,15 +182,6 @@ def add_index_route(app: web.Application) -> None: return add_index_route -@pytest.fixture -def computational_system_mock(mocker): - mock_fun = mocker.patch( - "simcore_service_webserver.projects.projects_handlers.update_pipeline_db", - return_value="", - ) - return mock_fun - - @pytest.fixture async def storage_subsystem_mock(mocker) -> MockedStorageSubsystem: """ @@ -203,14 +194,14 @@ async def _mock_copy_data_from_project(*args): return args[2] mock = mocker.patch( - "simcore_service_webserver.projects.projects_handlers.copy_data_folders_from_project", + "simcore_service_webserver.projects.projects_handlers_crud.copy_data_folders_from_project", autospec=True, side_effect=_mock_copy_data_from_project, ) async_mock = mocker.AsyncMock(return_value="") mock1 = mocker.patch( - "simcore_service_webserver.projects.projects_handlers.projects_api.storage_api.delete_data_folders_of_project", + "simcore_service_webserver.projects._delete.delete_data_folders_of_project", side_effect=async_mock, ) return MockedStorageSubsystem(mock, mock1)
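The recurring pattern behind these test changes is that project deletion is now scheduled as a fire&forget task that callers and tests can look up and await (submit_delete_project_task, get_scheduled_tasks) instead of sleeping and hoping the background work has finished. The sketch below illustrates one way such a task registry can be structured; it is not the code from simcore_service_webserver.projects._delete, and the names _TASKS, _run_delete and _demo are hypothetical stand-ins.

```python
# Illustrative sketch only: a minimal registry of fire&forget project-deletion tasks.
import asyncio
from typing import Dict, List, Optional, Tuple
from uuid import UUID

# registry of scheduled deletion tasks, keyed by (project_uuid, user_id)
_TASKS: Dict[Tuple[UUID, int], asyncio.Task] = {}


async def _run_delete(project_uuid: UUID, user_id: int) -> None:
    # placeholder for the real steps: stop dynamic services, delete DB rows,
    # remove the project's data folders in storage
    await asyncio.sleep(0)


def submit_delete_project_task(project_uuid: UUID, user_id: int) -> asyncio.Task:
    """Schedules the deletion as a background task and returns it so callers/tests can await it."""
    key = (project_uuid, user_id)
    task = asyncio.create_task(
        _run_delete(project_uuid, user_id), name=f"delete_project.{project_uuid}"
    )
    _TASKS[key] = task
    # drop the reference once the task completes so the registry does not grow unbounded
    task.add_done_callback(lambda _t: _TASKS.pop(key, None))
    return task


def get_scheduled_tasks(project_uuid: UUID, user_id: int) -> List[asyncio.Task]:
    """Returns the deletion tasks still registered for this (project, user) pair."""
    task: Optional[asyncio.Task] = _TASKS.get((project_uuid, user_id))
    return [task] if task else []


async def _demo() -> None:
    # a test can await the returned task directly instead of sleeping
    task = submit_delete_project_task(UUID(int=0), user_id=1)
    await task
    assert all(t.done() for t in get_scheduled_tasks(UUID(int=0), 1))


asyncio.run(_demo())
```

With this shape, tests such as test_delete_project above can assert how many deletion tasks were scheduled and await them deterministically, which is why the asyncio.sleep(2) calls could be removed from the test suite.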