diff --git a/packages/models-library/src/models_library/docker.py b/packages/models-library/src/models_library/docker.py
index 49aec884b74..1ad9c2faf78 100644
--- a/packages/models-library/src/models_library/docker.py
+++ b/packages/models-library/src/models_library/docker.py
@@ -22,6 +22,7 @@
 from .users import UserID
 
 DockerNetworkID: TypeAlias = str
+DockerServiceID: TypeAlias = str
 
 
 class DockerLabelKey(ConstrainedStr):
diff --git a/services/director-v2/src/simcore_service_director_v2/api/entrypoints.py b/services/director-v2/src/simcore_service_director_v2/api/entrypoints.py
index 6bb355dda3d..89467a13eb8 100644
--- a/services/director-v2/src/simcore_service_director_v2/api/entrypoints.py
+++ b/services/director-v2/src/simcore_service_director_v2/api/entrypoints.py
@@ -5,6 +5,7 @@
     computations,
     computations_tasks,
     docker_networks,
+    docker_services,
     dynamic_scheduler,
     dynamic_services,
     health,
@@ -30,6 +31,9 @@
 v2_router.include_router(
     docker_networks.router, tags=["docker networks"], prefix="/docker/networks"
 )
+v2_router.include_router(
+    docker_services.router, tags=["docker services"], prefix="/docker/services"
+)
 v2_router.include_router(
     dynamic_scheduler.router, tags=["dynamic scheduler"], prefix="/dynamic_scheduler"
 )
diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/docker_services.py b/services/director-v2/src/simcore_service_director_v2/api/routes/docker_services.py
new file mode 100644
index 00000000000..569b654965c
--- /dev/null
+++ b/services/director-v2/src/simcore_service_director_v2/api/routes/docker_services.py
@@ -0,0 +1,62 @@
+from typing import Annotated
+
+from aiodocker import Docker
+from fastapi import APIRouter, Depends, status
+from models_library.docker import DockerServiceID
+from models_library.generated_models.docker_rest_api import ServiceSpec
+
+from ..dependencies.docker import get_shared_docker_client
+
+router = APIRouter()
+
+
+def _envs_to_dict(data: list | dict) -> dict:
+    if isinstance(data, dict):
+        return data
+
+    result = {}
+    for item in data:
+        key, value = item.split("=", 1)
+        result[key] = value
+
+    return result
+
+
+@router.post("/", summary="create a docker service", status_code=status.HTTP_200_OK)
+async def create_docker_service(
+    service_spec: ServiceSpec,
+    shared_docker_client: Annotated[Docker, Depends(get_shared_docker_client)],
+) -> DockerServiceID:
+    params = service_spec.model_dump(mode="json", by_alias=True)
+
+    if (
+        "ContainerSpec" in params["TaskTemplate"]
+        and "Env" in params["TaskTemplate"]["ContainerSpec"]
+    ):
+        params["TaskTemplate"]["ContainerSpec"]["Env"] = _envs_to_dict(
+            params["TaskTemplate"]["ContainerSpec"]["Env"]
+        )
+
+    created_service = await shared_docker_client.services.create(
+        task_template=params["TaskTemplate"],
+        name=params["Name"],
+        labels=params["Labels"],
+        mode=params["Mode"],
+        update_config=params["UpdateConfig"],
+        rollback_config=params["RollbackConfig"],
+        networks=params["Networks"],
+        endpoint_spec=params["EndpointSpec"],
+    )
+    return DockerServiceID(created_service["ID"])
+
+
+@router.delete(
+    "/{service_id}",
+    summary="removes an existing docker service",
+    status_code=status.HTTP_204_NO_CONTENT,
+)
+async def remove_docker_service(
+    service_id: DockerServiceID,
+    shared_docker_client: Annotated[Docker, Depends(get_shared_docker_client)],
+):
+    await shared_docker_client.services.delete(service_id)
diff --git a/services/director-v2/tests/unit/test_api_routes_docker_services.py b/services/director-v2/tests/unit/test_api_routes_docker_services.py
new file mode 100644
index 00000000000..44924fdf6be
--- /dev/null
+++ b/services/director-v2/tests/unit/test_api_routes_docker_services.py
@@ -0,0 +1,101 @@
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+
+
+import pytest
+from aiodocker import Docker, DockerError
+from faker import Faker
+from fastapi import status
+from models_library.docker import DockerServiceID
+from models_library.generated_models.docker_rest_api import ServiceSpec
+from pydantic import TypeAdapter
+from pytest_simcore.helpers.typing_env import EnvVarsDict
+from simcore_service_director_v2.api.routes.docker_services import _envs_to_dict
+from starlette.testclient import TestClient
+from tenacity import AsyncRetrying, stop_after_delay, wait_fixed
+
+
+@pytest.mark.parametrize(
+    "provided,result",
+    [
+        (["some_value=k1=k_continued"], {"some_value": "k1=k_continued"}),
+        ({"some_value": "k1=k_continued"}, {"some_value": "k1=k_continued"}),
+    ],
+)
+def test__envs_to_dict(provided: dict | list, result: dict):
+    assert _envs_to_dict(provided) == result
+
+
+@pytest.fixture
+def mock_env(
+    mock_exclusive: None,
+    disable_rabbitmq: None,
+    disable_postgres: None,
+    mock_env: EnvVarsDict,
+    monkeypatch: pytest.MonkeyPatch,
+    faker: Faker,
+) -> None:
+    monkeypatch.setenv("DIRECTOR_V2_DOCKER_ENTRYPOINT_ACCESS_TOKEN", "adminadmin")
+
+    monkeypatch.setenv("SC_BOOT_MODE", "default")
+    monkeypatch.setenv("DIRECTOR_ENABLED", "false")
+    monkeypatch.setenv("COMPUTATIONAL_BACKEND_ENABLED", "false")
+    monkeypatch.setenv("COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED", "false")
+
+    monkeypatch.setenv("DIRECTOR_V2_DYNAMIC_SCHEDULER_ENABLED", "false")
+
+    monkeypatch.setenv("R_CLONE_PROVIDER", "MINIO")
+    monkeypatch.setenv("S3_ENDPOINT", faker.url())
+    monkeypatch.setenv("S3_ACCESS_KEY", faker.pystr())
+    monkeypatch.setenv("S3_REGION", faker.pystr())
+    monkeypatch.setenv("S3_SECRET_KEY", faker.pystr())
+    monkeypatch.setenv("S3_BUCKET_NAME", faker.pystr())
+
+
+async def test_service_workflow(
+    docker_swarm: None,
+    client: TestClient,
+    async_docker_client: Docker,
+):
+    service_name: str = "a_test_service"
+    service_spec: ServiceSpec = TypeAdapter(ServiceSpec).validate_python(
+        {
+            "Name": "a_test_service",
+            "TaskTemplate": {
+                "ContainerSpec": {"Image": "nginx:latest", "Env": ["ENV_VAR=a_value"]},
+                "RestartPolicy": {
+                    "Condition": "any",
+                    "Delay": 5000000000,
+                    "MaxAttempts": 3,
+                },
+            },
+            "Mode": {"Replicated": {"Replicas": 2}},
+            "EndpointSpec": {
+                "Ports": [{"Protocol": "tcp", "TargetPort": 80, "PublishedPort": 8080}]
+            },
+        }
+    )
+
+    response = client.post(
+        "/v2/docker/services/", json=service_spec.model_dump(mode="json")
+    )
+    assert response.status_code == status.HTTP_200_OK, response.text
+    service_id: DockerServiceID = response.json()
+
+    # check service is present
+    async for attempt in AsyncRetrying(
+        reraise=True, wait=wait_fixed(1), stop=stop_after_delay(10)
+    ):
+        with attempt:
+            service_inspect = await async_docker_client.services.inspect(service_name)
+            assert service_inspect["ID"] == service_id
+
+    response = client.delete(f"/v2/docker/services/{service_id}")
+    assert response.status_code == status.HTTP_204_NO_CONTENT, response.text
+
+    # check service was removed
+    for name_or_id in (service_name, service_id):
+        with pytest.raises(DockerError) as exc:
+            await async_docker_client.services.inspect(name_or_id)
+
+        assert exc.value.status == status.HTTP_404_NOT_FOUND
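
For reference, a caller could drive the two new endpoints as sketched below. This is a minimal illustration and not part of the change: the base URL `http://director-v2:8000` and the use of `httpx` are assumptions, and the service spec mirrors the one used in the test above.

```python
# Sketch only: assumes httpx is installed and director-v2 is reachable at the URL below.
import asyncio

import httpx

DIRECTOR_V2_URL = "http://director-v2:8000"  # assumed address, adjust to the deployment

SERVICE_SPEC = {
    "Name": "a_test_service",
    "TaskTemplate": {
        "ContainerSpec": {"Image": "nginx:latest", "Env": ["ENV_VAR=a_value"]},
    },
    "Mode": {"Replicated": {"Replicas": 2}},
}


async def main() -> None:
    async with httpx.AsyncClient(base_url=DIRECTOR_V2_URL) as client:
        # POST /v2/docker/services/ returns the docker service ID as a JSON string
        response = await client.post("/v2/docker/services/", json=SERVICE_SPEC)
        response.raise_for_status()
        service_id: str = response.json()
        print(f"created service {service_id}")

        # DELETE /v2/docker/services/{service_id} removes the service (204 on success)
        response = await client.delete(f"/v2/docker/services/{service_id}")
        response.raise_for_status()


if __name__ == "__main__":
    asyncio.run(main())
```

Note on the helper: `_envs_to_dict` normalizes `TaskTemplate.ContainerSpec.Env` because the Docker REST `ServiceSpec` carries environment variables as a list of `KEY=VALUE` strings, while aiodocker's `services.create` appears to expect a mapping; the conversion keeps both input shapes working.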